-
Notifications
You must be signed in to change notification settings - Fork 379
/
logger.py
207 lines (165 loc) · 6.04 KB
/
logger.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
"""
"""
import argparse
import json
import logging
from logging import config
from pathlib import Path
import os
# The logging levels can be accessed without
# importing the logging module in other modules.
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
NOTSET = logging.NOTSET

# Level names accepted by the '--verbose' command line option.
CMDLINE_LOG_LEVELS = ['info', 'debug_analyzer', 'debug']

# Custom log level between DEBUG (10) and INFO (20) used for analyzer
# output. It is monkey-patched onto the logging module so other modules
# can refer to logging.DEBUG_ANALYZER without importing this one.
DEBUG_ANALYZER = logging.DEBUG_ANALYZER = 15  # type: ignore
logging.addLevelName(DEBUG_ANALYZER, 'DEBUG_ANALYZER')
class CCLogger(logging.Logger):
def __init__(self, name, level=NOTSET):
super(CCLogger, self).__init__(name, level)
def debug_analyzer(self, msg, *args, **kwargs):
if self.isEnabledFor(logging.DEBUG_ANALYZER):
self._log(logging.DEBUG_ANALYZER, msg, args, **kwargs)
# Every logger created from now on will be a CCLogger instance, so the
# extra debug_analyzer() method is available everywhere.
logging.setLoggerClass(CCLogger)

# Location of the declarative logger configuration shipped in the
# CodeChecker data directory (CC_DATA_FILES_DIR environment variable;
# falls back to the relative path 'config/logger.conf' when unset).
data_files_dir_path = os.environ.get('CC_DATA_FILES_DIR', '')
DEFAULT_LOG_CFG_FILE = os.path.join(data_files_dir_path, 'config',
                                    'logger.conf')

# Default config which can be used if reading log config from a
# file fails.
DEFAULT_LOG_CONFIG = '''{
  "version": 1,
  "disable_existing_loggers": false,
  "formatters": {
    "brief": {
      "format": "[%(asctime)s][%(levelname)s] - %(message)s",
      "datefmt": "%Y-%m-%d %H:%M"
    },
    "precise": {
      "format": "[%(levelname)s] [%(asctime)s] {%(name)s} [%(process)d] \
<%(thread)d> - %(filename)s:%(lineno)d %(funcName)s() - %(message)s",
      "datefmt": "%Y-%m-%d %H:%M"
    }
  },
  "handlers": {
    "default": {
      "level": "INFO",
      "formatter": "brief",
      "class": "logging.StreamHandler"
    }
  },
  "loggers": {
    "": {
      "handlers": ["default"],
      "level": "INFO",
      "propagate": true
    }
  }
}'''

# Prefer the configuration file on disk; keep the built-in JSON above
# as a fallback if the file cannot be read.
try:
    with open(DEFAULT_LOG_CFG_FILE, 'r',
              encoding="utf-8", errors="ignore") as dlc:
        DEFAULT_LOG_CONFIG = dlc.read()
except IOError as ex:
    print(ex)
    print("Failed to load logger configuration. Using built-in config.")
def add_verbose_arguments(parser):
    """Attach the '--verbose' verbosity-level option to *parser*.

    When the option is omitted on the command line, no 'verbose'
    attribute is set on the parsed namespace (argparse.SUPPRESS).
    """
    parser.add_argument(
        '--verbose',
        type=str,
        dest='verbose',
        choices=CMDLINE_LOG_LEVELS,
        default=argparse.SUPPRESS,
        help='Set verbosity level.')
def get_logger(name):
    """Return the logger registered under *name*, creating it on first use."""
    return logging.getLogger(name)
def validate_loglvl(log_level):
    """Return *log_level* upper-cased when it is a known command line
    log level, otherwise fall back to "INFO".
    """
    candidate = log_level.upper()
    known = [lev.upper() for lev in CMDLINE_LOG_LEVELS]
    return candidate if candidate in known else "INFO"
class LOG_CFG_SERVER:
    """
    Initialize a log configuration server for dynamic log configuration.

    The log config server will only be started if the
    'CC_LOG_CONFIG_PORT' environment variable is set.
    """

    def __init__(self, log_level='INFO', workspace=None):
        # Apply the default configuration before the listener starts.
        setup_logger(log_level, workspace=workspace)

        self.log_server = None
        port = os.environ.get('CC_LOG_CONFIG_PORT')
        if port:
            self.log_server = config.listen(int(port))
            self.log_server.start()

    def __enter__(self, *args):
        return self

    def __exit__(self, *args):
        # Stop and join the listener thread if one was started.
        if self.log_server:
            config.stopListening()
            self.log_server.join()
def setup_logger(log_level=None, stream=None, workspace=None):
    """
    Modifies the log configuration.

    Overwrites the log levels for the loggers and handlers in the
    configuration.
    Redirects the output of all handlers to the given stream. Short names can
    be given (stderr -> ext://sys.stderr, 'stdout' -> ext://sys.stdout).
    If 'workspace' is given, a TimedRotatingFileHandler writing
    '<workspace>/store_time.log' is attached to the 'store_time' logger
    (when the active configuration declares one).
    """
    log_config = json.loads(DEFAULT_LOG_CONFIG)

    if log_level:
        log_level = validate_loglvl(log_level)

        for logger_cfg in log_config.get("loggers", {}).values():
            logger_cfg['level'] = log_level

        for handler_cfg in log_config.get("handlers", {}).values():
            handler_cfg['level'] = log_level
            # Verbose modes get the detailed 'precise' formatter.
            if log_level in ('DEBUG', 'DEBUG_ANALYZER'):
                handler_cfg['formatter'] = 'precise'

    if stream:
        if stream == 'stderr':
            stream = 'ext://sys.stderr'
        elif stream == 'stdout':
            stream = 'ext://sys.stdout'

        for handler_cfg in log_config.get("handlers", {}).values():
            if 'stream' in handler_cfg:
                handler_cfg['stream'] = stream

    # If workspace is set, we will log to a file in the workspace.
    # This is added dynamically because the declarative config
    # (config/logger.conf) is not flexible enough, and will always
    # create a log file in weird locations, before we can initialize
    # the filename attribute, to the workspace directories.
    if workspace:
        loggers = log_config.get("loggers", {})
        # The 'store_time' logger (and its 'store_time_formatter') come
        # from the config file; the built-in fallback config declares
        # neither, so attaching the handler there would raise KeyError /
        # make dictConfig() fail. Attach only when the logger exists.
        if "store_time" in loggers:
            loggers["store_time"].setdefault("handlers", []).append(
                'store_time_file_handler')

            log_path = Path(workspace, "store_time.log")
            log_config.setdefault("handlers", {})[
                "store_time_file_handler"] = {
                    'backupCount': 8,
                    'class': 'logging.handlers.TimedRotatingFileHandler',
                    'filename': str(log_path),
                    'formatter': 'store_time_formatter',
                    'interval': 7}

    config.dictConfig(log_config)