Skip to content

Commit ac48710

Browse files
Tiefen-boop and Eran Geva
authored and committed
[SW-195526] Rename LOG_LEVEL_HQT to LOG_LEVEL_INC
Rename 'HQT' occurrences in fp8_tests.py and logger.py Change-Id: Ibbf314410de627f98a54d2230bf8db72aca0c40a
1 parent c7aa37c commit ac48710

File tree

2 files changed

+7
-7
lines changed
  • neural_compressor/torch/algorithms/fp8_quant/utils
  • test/3x/torch/algorithms/fp8_quant

2 files changed

+7
-7
lines changed

neural_compressor/torch/algorithms/fp8_quant/utils/logger.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ def get_enable_console_val(self):
108108
return enableConsole == "TRUE" or enableConsole == "1"
109109

110110
def get_log_level(self):
111-
log_level_str = os.environ.get("LOG_LEVEL_HQT", os.environ.get("LOG_LEVEL_ALL"))
111+
log_level_str = os.environ.get("LOG_LEVEL_INC", os.environ.get("LOG_LEVEL_ALL"))
112112
if log_level_str is None:
113113
return logging.INFO
114114
if log_level_str not in log_levels:
@@ -159,7 +159,7 @@ def format(self, record):
159159
def _init_log(self):
160160
"""Setup the logger format and handler."""
161161
enableConsole = self.get_enable_console_val()
162-
self._logger = logging.getLogger("HQT")
162+
self._logger = logging.getLogger("INC")
163163
log_level = self.get_log_level()
164164
if log_level == logging.IGNORE:
165165
self._logger.disabled = True
@@ -181,18 +181,18 @@ def _init_log(self):
181181
os.makedirs(log_folder, exist_ok=True)
182182
except OSError as error:
183183
print(
184-
f"Warning: Directory '{log_folder}' can not be created for HQT logs: {error.strerror}. Logger is disabled."
184+
f"Warning: Directory '{log_folder}' can not be created for INC logs: {error.strerror}. Logger is disabled."
185185
)
186186
self._logger.disabled = True
187187
pass
188-
file_path = log_folder + "/hqt_log.txt"
189-
log_file_size = int(os.getenv("HQT_LOG_FILE_SIZE", DEFAULT_LOG_FILE_SIZE))
188+
file_path = log_folder + "/inc_log.txt"
189+
log_file_size = int(os.getenv("INC_LOG_FILE_SIZE", DEFAULT_LOG_FILE_SIZE))
190190
if log_file_size < 0:
191191
print(
192192
f"Warning: Log file size value is not valid [{log_file_size}]. Using default value [{DEFAULT_LOG_FILE_SIZE}]"
193193
)
194194
log_file_size = DEFAULT_LOG_FILE_SIZE
195-
log_file_amount = int(os.getenv("HQT_LOG_FILE_AMOUNT", DEFAULT_LOG_FILE_AMOUNT))
195+
log_file_amount = int(os.getenv("INC_LOG_FILE_AMOUNT", DEFAULT_LOG_FILE_AMOUNT))
196196
if log_file_amount < 0:
197197
print(
198198
f"Warning: Log file amount value is not valid [{log_file_amount}]. Using default value [{DEFAULT_LOG_FILE_AMOUNT}]"

test/3x/torch/algorithms/fp8_quant/fp8_tests.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def forward(self, x, b):
148148

149149
# Test3: (Disable (comment) all other tests, delete all files from the test_outputs folder)
150150
# (Change Line 73 above to: model = TinyModel3())
151-
# Run: (add LOG_LEVEL_HQT=0/1 for additional logs)
151+
# Run: (add LOG_LEVEL_INC=0/1 for additional logs)
152152
# (Uncomment lines 164+165)
153153
# 1) QUANT_CONFIG=test_jsons/test_measure.json python3 fp8_tests.py
154154
# 2) QUANT_CONFIG=test_jsons/test_hw_quant.json python3 fp8_tests.py

0 commit comments

Comments (0)