Closed as not planned
Description
Question
If I call stream_events() after specifying the function name in tool_choice, as in the code below, I get the following error:

3 validation errors for Response tool_choice.literal['none','auto','required']

What should I do?
import asyncio
import random

from agents import (
    Agent,
    ItemHelpers,
    ModelSettings,
    OpenAIChatCompletionsModel,
    Runner,
    function_tool,
)

# azure_openai_client is an async Azure OpenAI client created elsewhere.


@function_tool
def how_many_jokes() -> int:
    return random.randint(1, 10)


agent = Agent(
    name="Joker",
    instructions="First call the `how_many_jokes` tool, then tell that many jokes.",
    model=OpenAIChatCompletionsModel(
        model="gpt-4o-mini",
        openai_client=azure_openai_client,
    ),
    tools=[how_many_jokes],
    model_settings=ModelSettings(tool_choice="how_many_jokes"),
)


async def main():
    result = Runner.run_streamed(
        agent,
        input="Hello",
    )
    print("=== Run starting ===")
    try:
        async for event in result.stream_events():
            # Ignore the raw response event deltas
            if event.type == "raw_response_event":
                continue
            elif event.type == "agent_updated_stream_event":
                print(f"Agent updated: {event.new_agent.name}")
                continue
            elif event.type == "run_item_stream_event":
                if event.item.type == "tool_call_item":
                    print("-- Tool was called")
                elif event.item.type == "tool_call_output_item":
                    print(f"-- Tool output: {event.item.output}")
                elif event.item.type == "message_output_item":
                    print(f"-- Message output:\n {ItemHelpers.text_message_output(event.item)}")
                else:
                    pass  # Ignore other event types
    except Exception as e:
        print(e)
    print("=== Run complete ===")


asyncio.run(main())