Description
Please read this first
- Have you read the docs? Agents SDK docs
- Have you searched for related issues? Others may have faced similar issues.
Describe the bug
Here is my code:
```python
with trace("my-agents", group_id=context_manager.conversation_id):
    context_manager.input_items.append(EasyInputMessageParam(content=message, role="user"))
    result = Runner.run_streamed(
        context_manager.current_agent,
        context_manager.input_items,
        context=context_manager.context,
        max_turns=1,
        run_config=RunConfig(model_provider=MODEL_PROVIDER)
    )
    try:
        async for event in result.stream_events():
            if event.type == "raw_response_event":
                if isinstance(event.data, ResponseTextDeltaEvent):
                    final_chunk = create_chunk(conversation_id=context_manager.conversation_id,
                                               content=event.data.delta,
                                               role="assistant", model=MODEL_NAME)
                    yield f"data: {json.dumps(final_chunk)}\n\n"
                else:
                    continue
            elif event.type == "agent_updated_stream_event":
                logger.info(f"Handed off to {event.new_agent.name}")
            elif event.type == "run_item_stream_event":
                if event.item.type == "tool_call_item":
                    content = f"tool call: {event.item.raw_item.name} arguments: {event.item.raw_item.arguments}"
                    logger.info(content)
                    block = create_block(conversation_id=context_manager.conversation_id,
                                         content=content,
                                         role="agent",
                                         model=MODEL_NAME)
                    yield f"data: {json.dumps(block)}\n\n"
                elif event.item.type == "tool_call_output_item":
                    content = f"tool call output: {event.item.output}"
                    logger.info(content)
                    block = create_block(conversation_id=context_manager.conversation_id,
                                         content=content,
                                         role="tool",
                                         model=MODEL_NAME)
                    yield f"data: {json.dumps(block)}\n\n"
                elif event.item.type == "message_output_item":
                    logger.info(f"AI: {ItemHelpers.text_message_output(event.item)}")
                else:
                    continue
            else:
                continue
    except MaxTurnsExceeded:
        logger.warning(f"Max turns exceeded in conversation {context_manager.conversation_id}")
        error_chunk = create_chunk(
            conversation_id=context_manager.conversation_id,
            content="The session has exceeded the limit. The session has been reset. Please start again.",
            role="assistant",
            model=MODEL_NAME
        )
        yield f"data: {json.dumps(error_chunk)}\n\n"
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}")
        error_chunk = create_chunk(
            conversation_id=context_manager.conversation_id,
            content="An unknown error occurred during processing. Please try again later.",
            role="assistant",
            model=MODEL_NAME
        )
        yield f"data: {json.dumps(error_chunk)}\n\n"

    context_manager.input_items = result.to_input_list()
    context_manager.current_agent = result.last_agent
    conversation.save_conversation(context_manager)
```
When I start a conversation and the LLM responds with a tool call, the code never enters the `event.item.type == "tool_call_output_item"` branch. The tool call itself is actually invoked, though: I can find the tool call result in `result.to_input_list()`.
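For example, this quick debugging check (my own sketch; it assumes the items returned by `to_input_list()` are Responses-API-style dicts) confirms the tool output is already recorded in the run's history:

```python
# Debugging sketch, run after the stream has ended (or MaxTurnsExceeded was caught).
# Assumption: to_input_list() returns Responses-API-style dict items.
for item in result.to_input_list():
    if isinstance(item, dict) and item.get("type") == "function_call_output":
        # The tool result is here, even though no tool_call_output_item event was streamed.
        logger.info(f"tool output found in to_input_list(): {item.get('output')}")
```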
Debug information
- Agents SDK version: 0.0.10
- Python version: 3.12.0
Repro steps
Use the code I provided above, or the minimal standalone sketch below.
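The sketch below should show the same behavior; the `get_weather` tool and the agent setup are made up for illustration, and the default model/API key are assumed to come from the environment:

```python
import asyncio

from agents import Agent, Runner, function_tool
from agents.exceptions import MaxTurnsExceeded


@function_tool
def get_weather(city: str) -> str:
    """Dummy tool so that the first turn ends in a tool call."""
    return f"The weather in {city} is sunny."


agent = Agent(
    name="Assistant",
    instructions="Always call get_weather before answering.",
    tools=[get_weather],
)


async def main():
    # max_turns=1 matches my code above: the first turn produces the tool call,
    # so the run is cut off before a second turn can happen.
    result = Runner.run_streamed(agent, "What is the weather in Tokyo?", max_turns=1)
    streamed_item_types = []
    try:
        async for event in result.stream_events():
            if event.type == "run_item_stream_event":
                streamed_item_types.append(event.item.type)
    except MaxTurnsExceeded:
        print("MaxTurnsExceeded raised")

    # "tool_call_output_item" is missing here...
    print("streamed item types:", streamed_item_types)
    # ...but the tool result is already present in the run's input list.
    print([i for i in result.to_input_list()
           if isinstance(i, dict) and i.get("type") == "function_call_output"])


asyncio.run(main())
```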
Expected behavior
The `tool_call_output_item` event should be emitted through `stream_events()` before `MaxTurnsExceeded` is raised.