Skip to content

Commit 16ee4b0

Browse files
committed
chore: fix ci lint
Signed-off-by: elijah <f1renze.142857@gmail.com>
1 parent a222b45 commit 16ee4b0

File tree

1 file changed: +6 additions, −6 deletions

benchmarks/backend_request_func.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -423,14 +423,13 @@ def get_tokenizer(
423423
**kwargs,
424424
) -> Union[PreTrainedTokenizer, PreTrainedTokenizerFast]:
425425
if pretrained_model_name_or_path is not None and not os.path.exists(
426-
pretrained_model_name_or_path
427-
):
428-
pretrained_model_name_or_path = get_model(pretrained_model_name_or_path)
426+
pretrained_model_name_or_path):
427+
pretrained_model_name_or_path = get_model(
428+
pretrained_model_name_or_path)
429429
if tokenizer_mode == "slow":
430430
if kwargs.get("use_fast", False):
431431
raise ValueError(
432-
"Cannot use the fast tokenizer in slow tokenizer mode."
433-
)
432+
"Cannot use the fast tokenizer in slow tokenizer mode.")
434433
kwargs["use_fast"] = False
435434
if tokenizer_mode == "mistral":
436435
vllm_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -439,7 +438,8 @@ def get_tokenizer(
439438
MistralTokenizer
440439
)
441440

442-
return MistralTokenizer.from_pretrained(str(pretrained_model_name_or_path))
441+
return MistralTokenizer.from_pretrained(
442+
str(pretrained_model_name_or_path))
443443
else:
444444
return AutoTokenizer.from_pretrained(
445445
pretrained_model_name_or_path,

0 commit comments

Comments (0)