Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

genai[patch]: support max_tokens init arg #517

Merged
merged 3 commits
Oct 4, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions libs/genai/langchain_google_genai/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import secret_from_env
from pydantic import BaseModel, Field, SecretStr, model_validator
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
from typing_extensions import Self

from langchain_google_genai._enums import (
Expand Down Expand Up @@ -139,7 +139,7 @@ class _BaseGoogleGenerativeAI(BaseModel):
top_k: Optional[int] = None
"""Decode using top-k sampling: consider the set of top_k most probable tokens.
Must be positive."""
max_output_tokens: Optional[int] = None
max_output_tokens: Optional[int] = Field(default=None, alias="max_tokens")
"""Maximum number of tokens to include in a candidate. Must be greater than zero.
If unset, will default to 64."""
n: int = 1
Expand Down Expand Up @@ -216,6 +216,9 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):
"""

client: Any = None #: :meta private:
model_config = ConfigDict(
populate_by_name=True,
)

@model_validator(mode="after")
def validate_environment(self) -> Self:
Expand Down
4 changes: 2 additions & 2 deletions libs/genai/tests/integration_tests/test_llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@
def test_google_generativeai_call(model_name: str) -> None:
"""Test valid call to Google GenerativeAI text API."""
if model_name:
llm = GoogleGenerativeAI(max_output_tokens=10, model=model_name)
llm = GoogleGenerativeAI(max_tokens=10, model=model_name)
else:
llm = GoogleGenerativeAI(max_output_tokens=10) # type: ignore[call-arg]
llm = GoogleGenerativeAI(max_tokens=10) # type: ignore[call-arg]
output = llm("Say foo:")
assert isinstance(output, str)
assert llm._llm_type == "google_palm"
Expand Down
Loading