Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add function return documentation for LLMService #1721

Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
LLMService docs function returns
  • Loading branch information
acaklovic-nv committed May 24, 2024
commit 9e110bc22eb0b6567c20153e99b78f8c86f6a54d
30 changes: 30 additions & 0 deletions morpheus/llm/services/llm_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,11 @@ class LLMClient(ABC):
def get_input_names(self) -> list[str]:
    """
    Return the names of the inputs accepted by the model.

    Abstract stub — concrete LLM client implementations override this
    to report their expected input keys.

    Returns
    -------
    list[str]
        Names of the model's inputs.
    """
    pass

Expand All @@ -42,6 +47,11 @@ def generate(self, **input_dict) -> str:
----------
input_dict : dict
Input containing prompt data.

Returns
-------
str
Generated response for prompt.
"""
pass

Expand All @@ -54,6 +64,11 @@ async def generate_async(self, **input_dict) -> str:
----------
input_dict : dict
Input containing prompt data.

Returns
-------
str
Generated async response for prompt.
"""
pass

Expand All @@ -80,6 +95,11 @@ def generate_batch(self, inputs: dict[str, list], return_exceptions=False) -> li
Inputs containing prompt data.
return_exceptions : bool
Whether to return exceptions in the output list or raise them immediately.

Returns
-------
list[str] | list[str | BaseException]
List of responses or list of responses and exceptions.
"""
pass

Expand Down Expand Up @@ -110,6 +130,11 @@ async def generate_batch_async(self,
Inputs containing prompt data.
return_exceptions : bool
Whether to return exceptions in the output list or raise them immediately.

Returns
-------
list[str] | list[str | BaseException]
List of responses or list of responses and exceptions.
"""
pass

Expand All @@ -131,5 +156,10 @@ def get_client(self, *, model_name: str, **model_kwargs) -> LLMClient:

model_kwargs : dict[str, typing.Any]
Additional keyword arguments to pass to the model.

Returns
-------
LLMClient
Client for interacting with LLM models.
"""
pass
Loading