Skip to content

Commit

Permalink
[Frontend] Pass mypy check
Browse files Browse the repository at this point in the history
  • Loading branch information
DIYer22 committed Nov 6, 2024
1 parent 25f8527 commit 28d96cf
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 1 deletion.
8 changes: 8 additions & 0 deletions vllm/entrypoints/chat_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,14 @@ class ConversationMessage(TypedDict, total=False):
"""The tool calls generated by the model, such as function calls."""


class ChatTemplateKwargs(TypedDict, total=False):
    """Typed keyword arguments forwarded to chat-template rendering.

    ``total=False``: every key is optional and may be omitted entirely.
    Note that each value is additionally ``Optional``, so an explicitly
    present key may also carry ``None`` (distinct from being absent).
    """

    # Raw chat template string; None presumably defers to the
    # tokenizer's built-in template — TODO confirm against the renderer.
    chat_template: Optional[str]
    # Whether to append the assistant generation prompt to the rendered
    # conversation.
    add_generation_prompt: Optional[bool]
    # Whether to continue (rather than close) the final message —
    # mutually exclusive with add_generation_prompt in HF templates;
    # NOTE(review): not enforced here, verify at the call site.
    continue_final_message: Optional[bool]
    # Tool/function specs made available to the template; each dict is
    # assumed to follow the OpenAI tool schema — TODO confirm.
    tools: Optional[List[Dict[str, Any]]]
    # Documents for RAG-style templates; str->str dicts (e.g.
    # title/text pairs) — presumably matching HF's `documents` kwarg.
    documents: Optional[List[Dict[str, str]]]


ModalityStr = Literal["image", "audio", "video"]
_T = TypeVar("_T")

Expand Down
3 changes: 2 additions & 1 deletion vllm/entrypoints/openai/serving_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from vllm.engine.protocol import EngineClient
from vllm.entrypoints.chat_utils import (ChatCompletionMessageParam,
ConversationMessage,
ChatTemplateKwargs,
apply_hf_chat_template,
apply_mistral_chat_template,
parse_chat_messages_futures)
Expand Down Expand Up @@ -431,7 +432,7 @@ async def _preprocess_chat(
continue_final_message: bool = False,
tool_dicts: Optional[List[Dict[str, Any]]] = None,
documents: Optional[List[Dict[str, str]]] = None,
chat_template_kwargs: Optional[Dict[str, Any]] = None,
chat_template_kwargs: Optional[ChatTemplateKwargs] = None,
tool_parser: Optional[Callable[[AnyTokenizer], ToolParser]] = None,
truncate_prompt_tokens: Optional[Annotated[int, Field(ge=1)]] = None,
add_special_tokens: bool = False,
Expand Down

0 comments on commit 28d96cf

Please sign in to comment.