@@ -1,3 +1,4 @@
+import json
 import logging
 import os
 import re
@@ -7,7 +8,7 @@
 from logging import FileHandler, Handler, StreamHandler
 from logging.handlers import RotatingFileHandler
 from string import Formatter
-from typing import Optional
+from typing import Dict, Optional
 
 import _string
 import discord
@@ -72,6 +73,71 @@ def line(self, level="info"):
         )
 
 
+class JsonFormatter(logging.Formatter):
+    """
+    Formatter that outputs JSON strings after parsing the LogRecord.
+
+    Parameters
+    ----------
+    fmt_dict : Optional[Dict[str, str]]
+        {key: logging format attribute} pairs. Defaults to {"message": "message"}.
+    time_format : str
+        time.strftime() format string. Default: "%Y-%m-%dT%H:%M:%S"
+    msec_format : str
+        Millisecond formatting. Appended at the end. Default: "%s.%03dZ"
+    """
+
+    def __init__(
+        self,
+        fmt_dict: Optional[Dict[str, str]] = None,
+        time_format: str = "%Y-%m-%dT%H:%M:%S",
+        msec_format: str = "%s.%03dZ",
+    ):
+        self.fmt_dict: Dict[str, str] = fmt_dict if fmt_dict is not None else {"message": "message"}
+        self.default_time_format: str = time_format
+        self.default_msec_format: str = msec_format
+        self.datefmt: Optional[str] = None
+
+    def usesTime(self) -> bool:
+        """
+        Overridden to look for the attribute in the format dict values instead of the fmt string.
+        """
+        return "asctime" in self.fmt_dict.values()
+
+    def formatMessage(self, record) -> Dict[str, str]:
+        """
+        Overridden to return a dictionary of the relevant LogRecord attributes instead of a string.
+        KeyError is raised if an unknown attribute is provided in the fmt_dict.
+        """
+        return {fmt_key: record.__dict__[fmt_val] for fmt_key, fmt_val in self.fmt_dict.items()}
+
+    def format(self, record) -> str:
+        """
+        Mostly the same as the parent class's method, the difference being that a dict is
+        manipulated and dumped as JSON instead of a string.
+        """
+        record.message = record.getMessage()
+
+        if self.usesTime():
+            record.asctime = self.formatTime(record, self.datefmt)
+
+        message_dict = self.formatMessage(record)
+
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+
+        if record.exc_text:
+            message_dict["exc_info"] = record.exc_text
+
+        if record.stack_info:
+            message_dict["stack_info"] = self.formatStack(record.stack_info)
+
+        return json.dumps(message_dict, default=str)
+
+
 class FileFormatter(logging.Formatter):
     ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
 
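Not part of the diff: a minimal usage sketch of the formatter added above, assuming JsonFormatter is exercised directly from this module. The key mapping here is a shortened version of the json_formatter instance defined further down, and the timestamp in the comment is invented.

# Illustrative sketch only (not part of the diff); assumes JsonFormatter as defined above.
import logging

handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter({"level": "levelname", "message": "message", "timestamp": "asctime"}))

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("something happened")
# Each record becomes one JSON object, e.g. (timestamp invented):
# {"level": "WARNING", "message": "something happened", "timestamp": "2024-01-01T12:00:00.000Z"}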
@@ -83,11 +149,25 @@ def format(self, record):
 log_stream_formatter = logging.Formatter(
     "%(asctime)s %(name)s[%(lineno)d] - %(levelname)s: %(message)s", datefmt="%m/%d/%y %H:%M:%S"
 )
+
 log_file_formatter = FileFormatter(
     "%(asctime)s %(name)s[%(lineno)d] - %(levelname)s: %(message)s",
     datefmt="%Y-%m-%d %H:%M:%S",
 )
 
+json_formatter = JsonFormatter(
+    {
+        "level": "levelname",
+        "message": "message",
+        "loggerName": "name",
+        "processName": "processName",
+        "processID": "process",
+        "threadName": "threadName",
+        "threadID": "thread",
+        "timestamp": "asctime",
+    }
+)
+
 
 def create_log_handler(
     filename: Optional[str] = None,
@@ -96,6 +176,7 @@ def create_log_handler(
     level: int = logging.DEBUG,
     mode: str = "a+",
     encoding: str = "utf-8",
+    format: str = "plain",
     maxBytes: int = 28000000,
     backupCount: int = 1,
     **kwargs,
@@ -122,6 +203,9 @@ def create_log_handler(
     encoding : str
         If this keyword argument is specified along with filename, its value is used when the `FileHandler` is created,
         and thus used when opening the output file. Defaults to 'utf-8'.
+    format : str
+        The output format, either 'plain' or 'json'. Applied to whichever handler is created
+        based on the other arguments. Defaults to 'plain'.
     maxBytes : int
         The max file size before the rollover occurs. Defaults to 28000000 (28MB). Rollover occurs whenever the current
         log file is nearly `maxBytes` in length; but if either of `maxBytes` or `backupCount` is zero,
@@ -139,23 +223,28 @@ def create_log_handler(
 
     if filename is None:
         handler = StreamHandler(stream=sys.stdout, **kwargs)
-        handler.setFormatter(log_stream_formatter)
+        formatter = log_stream_formatter
     elif not rotating:
         handler = FileHandler(filename, mode=mode, encoding=encoding, **kwargs)
-        handler.setFormatter(log_file_formatter)
+        formatter = log_file_formatter
     else:
         handler = RotatingFileHandler(
             filename, mode=mode, encoding=encoding, maxBytes=maxBytes, backupCount=backupCount, **kwargs
         )
-        handler.setFormatter(log_file_formatter)
+        formatter = log_file_formatter
+
+    if format == "json":
+        formatter = json_formatter
 
     handler.setLevel(level)
+    handler.setFormatter(formatter)
     return handler
 
 
 logging.setLoggerClass(ModmailLogger)
 log_level = logging.INFO
 loggers = set()
+
 ch = create_log_handler(level=log_level)
 ch_debug: Optional[RotatingFileHandler] = None
 
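Not part of the diff: a sketch of how the reworked handler creation above would be called. The handler type is still chosen by filename and rotating; format="json" only swaps in json_formatter. The file name below is hypothetical.

# Illustrative sketch only (not part of the diff); "modmail.log" is a hypothetical path.
stream_handler = create_log_handler(format="json")  # StreamHandler on stdout, JSON lines
rotating_handler = create_log_handler("modmail.log", rotating=True, format="json")  # RotatingFileHandler, JSON lines
plain_handler = create_log_handler("modmail.log")  # unchanged default: plain FileFormatter output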
@@ -171,7 +260,11 @@ def getLogger(name=None) -> ModmailLogger:
 
 
 def configure_logging(bot) -> None:
-    global ch_debug, log_level
+    global ch_debug, log_level, ch
+
+    stream_log_format, file_log_format = bot.config["stream_log_format"], bot.config["file_log_format"]
+    if stream_log_format == "json":
+        ch.setFormatter(json_formatter)
 
     logger = getLogger(__name__)
     level_text = bot.config["log_level"].upper()
@@ -196,8 +289,15 @@ def configure_logging(bot) -> None:
 
     logger.info("Log file: %s", bot.log_file_path)
     ch_debug = create_log_handler(bot.log_file_path, rotating=True)
+
+    if file_log_format == "json":
+        ch_debug.setFormatter(json_formatter)
+
     ch.setLevel(log_level)
 
+    logger.info("Stream log format: %s", stream_log_format)
+    logger.info("File log format: %s", file_log_format)
+
     for log in loggers:
         log.setLevel(log_level)
         log.addHandler(ch_debug)
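Not part of the diff: assuming the new stream_log_format and file_log_format config options accept "plain" or "json", a record routed through json_formatter serializes with the key mapping defined earlier. A rough sketch with made-up field values and an arbitrary logger name:

# Illustrative sketch only (not part of the diff); field values below are made up.
import logging

record = logging.LogRecord("demo", logging.INFO, __file__, 1, "Connected", None, None)
print(json_formatter.format(record))
# {"level": "INFO", "message": "Connected", "loggerName": "demo",
#  "processName": "MainProcess", "processID": 1234, "threadName": "MainThread",
#  "threadID": 140000000000000, "timestamp": "2024-01-01T12:00:00.000Z"}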