@@ -443,29 +443,28 @@ async def _preprocess_chat(
             tokenizer,
         )
 
+        _chat_template_kwargs: Dict[str, Any] = dict(
+            chat_template=chat_template,
+            add_generation_prompt=add_generation_prompt,
+            continue_final_message=continue_final_message,
+            tools=tool_dicts,
+            documents=documents,
+        )
+        _chat_template_kwargs.update(chat_template_kwargs or {})
+
         request_prompt: Union[str, List[int]]
         is_mistral_tokenizer = isinstance(tokenizer, MistralTokenizer)
         if is_mistral_tokenizer:
             request_prompt = apply_mistral_chat_template(
                 tokenizer,
                 messages=messages,
-                chat_template=chat_template,
-                add_generation_prompt=add_generation_prompt,
-                continue_final_message=continue_final_message,
-                tools=tool_dicts,
-                documents=documents,
-                **(chat_template_kwargs or {}),
+                **_chat_template_kwargs,
             )
         else:
             request_prompt = apply_hf_chat_template(
                 tokenizer,
                 conversation=conversation,
-                chat_template=chat_template,
-                add_generation_prompt=add_generation_prompt,
-                continue_final_message=continue_final_message,
-                tools=tool_dicts,
-                documents=documents,
-                **(chat_template_kwargs or {}),
+                **_chat_template_kwargs,
             )
 
         mm_data = await mm_data_future
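
For context on the change: the shared keyword arguments are now built once as _chat_template_kwargs, and any caller-supplied chat_template_kwargs are merged in last, so per-request overrides win over the defaults before being spread into either apply_*_chat_template call. Below is a minimal standalone sketch of that merge pattern with a reduced parameter list; the helper name is illustrative and not part of this patch.

# Minimal sketch (not part of the patch): defaults are built once, then
# caller-supplied kwargs are merged in last so they override on key clashes.
from typing import Any, Dict, Optional

def build_chat_template_kwargs(
    chat_template: Optional[str],
    add_generation_prompt: bool,
    chat_template_kwargs: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    merged: Dict[str, Any] = dict(
        chat_template=chat_template,
        add_generation_prompt=add_generation_prompt,
    )
    merged.update(chat_template_kwargs or {})  # later keys win
    return merged

# Example: an explicit per-request override takes precedence over the default.
# build_chat_template_kwargs(None, True, {"add_generation_prompt": False})
# -> {"chat_template": None, "add_generation_prompt": False}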