mirror of
https://github.com/HKUDS/nanobot.git
synced 2026-04-30 06:45:55 +00:00
fix(loop): add return_exceptions=True to parallel tool gather
Without this flag, a BaseException (e.g. CancelledError from /stop) in one tool would propagate immediately and discard results from the other concurrent tools, corrupting the OpenAI message format. With return_exceptions=True, all tool results are collected; any exception is converted to an error string for the LLM. Made-with: Cursor
This commit is contained in:
parent
6028b4828b
commit
a2edee145f
@@ -261,12 +261,16 @@ class AgentLoop:

             # Execute all tool calls concurrently — the LLM batches
             # independent calls in a single response on purpose.
+            # return_exceptions=True ensures all results are collected
+            # even if one tool is cancelled or raises BaseException.
             results = await asyncio.gather(*(
                 self.tools.execute(tc.name, tc.arguments)
                 for tc in response.tool_calls
-            ))
+            ), return_exceptions=True)

             for tool_call, result in zip(response.tool_calls, results):
+                if isinstance(result, BaseException):
+                    result = f"Error: {type(result).__name__}: {result}"
                 messages = self.context.add_tool_result(
                     messages, tool_call.id, tool_call.name, result
                 )
Loading…
x
Reference in New Issue
Block a user