
Commit 7227b41

bug fix
1 parent 955ea8c commit 7227b41

File tree

snippets/decorators.py
snippets/logs.py
snippets/perf.py
snippets/utils.py

4 files changed: +51 −33 lines changed

snippets/decorators.py

+1-1
@@ -167,7 +167,7 @@ def wrapped(data: Iterable, *args, **kwargs):
     # add a thread pool here
     executors = ThreadPoolExecutor(work_num)

-    def _func(x):
+    def _func(x):
         return func(x, *args, **kwargs)
     rs_iter = executors.map(_func, data)
     total = None if not hasattr(data, '__len__') else len(data)
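For orientation, here is a minimal sketch of how a batch_process-style decorator of this shape can be wired end to end. The work_num and return_list parameters match the call site in snippets/perf.py; the surrounding scaffolding and the example function are assumptions for illustration, not the library's actual code.

# Minimal sketch (assumed scaffolding): a parametrized decorator that maps
# `func` over an iterable with a thread pool, mirroring the fragment above.
from concurrent.futures import ThreadPoolExecutor
from typing import Iterable


def batch_process(work_num: int = 1, return_list: bool = False):
    def decorator(func):
        def wrapped(data: Iterable, *args, **kwargs):
            executors = ThreadPoolExecutor(work_num)

            def _func(x):
                return func(x, *args, **kwargs)

            rs_iter = executors.map(_func, data)
            return list(rs_iter) if return_list else rs_iter
        return wrapped
    return decorator


@batch_process(work_num=4, return_list=True)
def square(x):
    return x * x


print(square(data=range(8)))  # [0, 1, 4, 9, 16, 25, 36, 49]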

snippets/logs.py

+32-14
@@ -13,60 +13,78 @@
 logger = logging.getLogger(__name__)


-def getlog(env, name):
+def getlog(env: str, name: str):
     exist = name in logging.Logger.manager.loggerDict
     rs_logger = logging.getLogger(name)
     if not exist:
         logger.info(f"create logger with {env=}, {name=}")
-        if env in ["dev", "local"]:
+        if env.lower() in ["dev", "local"]:
             rs_logger.propagate = False
             rs_logger.setLevel(logging.DEBUG)
-            streamHandler = logging.StreamHandler()
-            streamHandler.setFormatter(fmt=logging.Formatter(
+            stream_handler = logging.StreamHandler()
+            stream_handler.setFormatter(fmt=logging.Formatter(
                 "%(asctime)s [%(levelname)s][%(filename)s:%(lineno)d]:%(message)s", datefmt='%Y-%m-%d %H:%M:%S'))
-            rs_logger.addHandler(streamHandler)
+            rs_logger.addHandler(stream_handler)
         else:
             rs_logger.propagate = False
             rs_logger.setLevel(logging.INFO)
-            streamHandler = logging.StreamHandler()
-            streamHandler.setFormatter(fmt=logging.Formatter(
+            stream_handler = logging.StreamHandler()
+            stream_handler.setFormatter(fmt=logging.Formatter(
                 "%(asctime)s [%(levelname)s]%(message)s", datefmt='%Y-%m-%d-%H:%M:%S'))
-            rs_logger.addHandler(streamHandler)
+            rs_logger.addHandler(stream_handler)

     return rs_logger

+
+get_log = getlog
+

 _FMT_MAP = {
+    "raw": logging.Formatter("%(message)s"),
     "simple": logging.Formatter(
         "%(asctime)s [%(levelname)s]%(message)s", datefmt='%Y-%m-%d-%H:%M:%S'),
     "detail": logging.Formatter(
         "%(asctime)s [%(levelname)s][%(filename)s:%(lineno)d]:%(message)s", datefmt='%Y-%m-%d %H:%M:%S')
+
 }


-def getlog_detail(name, level, format_type: str = "simple", do_print=True, do_file=False, propagate=False,
-                  log_dir: str = None, file_type="time_rotate", file_config=dict(when='d', interval=1, backupCount=7)):
+def getlog_detail(name, level, format_type: str = "simple",
+                  do_print=True, print_format_type=None, print_level=None,
+                  do_file=False, file_format_type=None, file_level=None, file_type="time_rotate", file_config=dict(when='d', interval=1, backupCount=7),
+                  propagate=False, log_dir: str = None):
     exist = name in logging.Logger.manager.loggerDict
     rs_logger = logging.getLogger(name)
     if exist:
         return rs_logger
-    fmt = _FMT_MAP[format_type]
-
+
     rs_logger.propagate = propagate
     rs_logger.setLevel(level)

     if do_print:
         streamHandler = logging.StreamHandler()
-        streamHandler.setFormatter(fmt=fmt)
+        streamHandler.setFormatter(fmt=_FMT_MAP[print_format_type if print_format_type else format_type])
+        streamHandler.setLevel(print_level if print_level else level)
         rs_logger.addHandler(streamHandler)
+
     if do_file:
         log_dir = log_dir or os.environ.get("LOG_DIR", "/tmp/logs")
         file_path = os.path.join(log_dir, name + ".log")
         os.makedirs(log_dir, exist_ok=True)
         if file_type == "time_rotate":
             filehandler = TimedRotatingFileHandler(file_path, **file_config)
             filehandler.suffix = "%Y-%m-%d_%H-%M-%S.log"  # suffix for rotated history files
-            filehandler.setFormatter(fmt)
+            filehandler.setFormatter(_FMT_MAP[file_format_type if file_format_type else format_type])
+            filehandler.setLevel(file_level if file_level else level)
             rs_logger.addHandler(filehandler)
     return rs_logger
+
+
+def get_file_log(name, log_dir):
+    return getlog_detail(name=name, level=logging.DEBUG,  # level is required by getlog_detail
+                         format_type="simple",
+                         do_print=True, print_format_type="raw", print_level=logging.INFO,
+                         do_file=True, file_format_type="detail", file_level=logging.DEBUG, log_dir=log_dir)
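A usage sketch based on the parameters introduced above; the import path snippets.logs and the logger names are placeholders.

# Console handler prints bare messages at INFO, file handler keeps detailed
# DEBUG records; the per-handler *_format_type/*_level arguments override the
# logger-wide defaults.
import logging
from snippets.logs import getlog_detail, get_file_log

svc_logger = getlog_detail("my_service", level=logging.DEBUG, format_type="simple",
                           do_print=True, print_format_type="raw", print_level=logging.INFO,
                           do_file=True, file_format_type="detail", file_level=logging.DEBUG,
                           log_dir="/tmp/logs")
svc_logger.debug("recorded in /tmp/logs/my_service.log only")
svc_logger.info("printed to the console and recorded in the file")

# Or the new convenience wrapper with the same console/file split:
file_logger = get_file_log("my_other_service", log_dir="/tmp/logs")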

snippets/perf.py

+6-6
@@ -5,7 +5,7 @@
 from snippets import SNIPPETS_ENV
 from snippets.decorators import batch_process
 from snippets.logs import getlog
-from snippets.utils import create_dir_path, get_current_time_str, jdump, read2list, split_surfix
+from snippets.utils import create_dir_path, get_current_time_str, jdump, read2list, split_suffix


 logger = getlog(SNIPPETS_ENV, __file__)
@@ -34,25 +34,25 @@ def req_http_service_detail(item, url, build_req_func=default_build_req, build_r
 def perf_test(input_path, url, req_func, output_path=None, work_num=1, max_num=None):
     logger.info("perf starts")
     logger.info(f"input_path: {input_path}, url:{url}, work_num:{work_num}")
-    name, surfix = split_surfix(input_path)
+    name, _ = split_suffix(input_path)
     if not output_path:
         output_path = os.path.join(
             name, f"{get_current_time_str()}.pef{work_num}.jsonl")
     create_dir_path(output_path)

-    querys = read2list(input_path)
+    queries = read2list(input_path)
     st = time.time()
     if max_num:
-        querys = querys[:max_num]
+        queries = queries[:max_num]

     func = batch_process(work_num=work_num, return_list=True)(req_func)
-    rs = func(data=querys, url=url)
+    rs = func(data=queries, url=url)
     # logger.info(rs)
     cost = time.time() - st
     costs = [e['cost'] for e in rs]
     latency = sum(costs)/len(costs)

-    stat = dict(test_cost=cost, latency=latency, test_num=len(querys), qps=len(querys)/cost)
+    stat = dict(test_cost=cost, latency=latency, test_num=len(queries), qps=len(queries)/cost)
     # rs.append(stat)

     logger.info(f"dump to {output_path}")
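A usage sketch for perf_test with the renamed helpers; the input file, URL, and worker count are placeholders, and req_http_service_detail's full signature is only partially visible in this diff.

# Each line of the input file is read via read2list and sent to the service by
# req_func; batch_process fans the requests out over `work_num` threads.
from snippets.perf import perf_test, req_http_service_detail

perf_test(input_path="queries.jsonl",
          url="http://localhost:8080/predict",
          req_func=req_http_service_detail,
          work_num=8,            # thread pool size used by batch_process
          max_num=100)           # benchmark only the first 100 queries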

snippets/utils.py

+12-12
@@ -42,7 +42,7 @@ def default(self, obj):
         if isinstance(obj, set):
             return list(obj)
         if isinstance(obj, BaseModel):
-            return obj.dict(exclude_none=True, exclude_defaults=True)
+            return obj.model_dump(exclude_none=True, exclude_defaults=True)
         if isinstance(obj, datetime):
             return obj.strftime("%Y-%m-%d %H:%M:%S")
         if isinstance(obj, np.integer):
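The BaseModel change above follows the pydantic v2 API, where BaseModel.dict() is deprecated in favor of BaseModel.model_dump(). A minimal illustration with a hypothetical model:

from typing import Optional
from pydantic import BaseModel


class Item(BaseModel):
    name: str
    count: int = 0
    note: Optional[str] = None


item = Item(name="a")
# Default and None fields are dropped, matching the encoder behavior above.
print(item.model_dump(exclude_none=True, exclude_defaults=True))  # {'name': 'a'}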
@@ -111,7 +111,7 @@ def jload(fp):
     return rs


-def split_surfix(filename: str) -> Tuple[str, str]:
+def split_suffix(filename: str) -> Tuple[str, str]:
     name, ext = filename.rsplit(".", 1)
     return name, ext

@@ -154,11 +154,11 @@ def get_gen(f):
 # convert table-style files into a list of dicts


-def table2json(path):
+def table2json(path, **kwargs):
     if path.endswith("csv"):
-        df = pd.read_csv(path)
+        df = pd.read_csv(path, **kwargs)
     if path.endswith("xlsx"):
-        df = pd.read_excel(path)
+        df = pd.read_excel(path, **kwargs)
     df.replace(np.nan, None, inplace=True)
     cols = df.columns.tolist()
     cols = [e for e in cols if not e.startswith("Unnamed")]
@@ -197,17 +197,17 @@ def load_lines(fp, return_generator=False):
 def read2list(file_path: Union[str, List], **kwargs) -> List[Union[str, dict]]:

     def _read2list(file_path, **kwargs):
-        surfix = os.path.splitext(file_path)[-1].lower()
-        if surfix == ".json":
+        suffix = os.path.splitext(file_path)[-1].lower()
+        if suffix == ".json":
             return jload(file_path, **kwargs)
-        if surfix == ".jsonl":
+        if suffix == ".jsonl":
             return jload_lines(file_path, **kwargs)
-        if surfix in [".xlsx", ".csv"]:
-            return table2json(file_path)
-        if surfix in [".txt"]:
+        if suffix in [".xlsx", ".csv"]:
+            return table2json(file_path, **kwargs)
+        if suffix in [".txt"]:
             return load_lines(file_path, **kwargs)
         else:
-            logger.warn(f"unkown surfix:{surfix}, read as txt")
+            logger.warn(f"unknown suffix:{suffix}, read as txt")
             return load_lines(file_path, **kwargs)

     if isinstance(file_path, list):
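A usage sketch of the renamed and extended helpers; the file paths are placeholders.

from snippets.utils import read2list, split_suffix

name, ext = split_suffix("data/queries.jsonl")   # -> ("data/queries", "jsonl")
rows = read2list("data/queries.jsonl")           # .jsonl -> jload_lines

# Keyword arguments are now forwarded through table2json to pandas, e.g. a
# semicolon-separated csv:
rows = read2list("data/table.csv", sep=";")      # -> table2json -> pd.read_csv(path, sep=";")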
