Merge pull request #14886 from Constantinople-AI/fix-no-arg-mcp-call

Isolated fix to a single section
Tim Jaeryang Baek 2025-06-16 15:28:48 +04:00 committed by GitHub
commit d6664369a1


@@ -2061,28 +2061,38 @@ async def process_chat_response(
 tools = metadata.get("tools", {})
 results = []
 for tool_call in response_tool_calls:
     tool_call_id = tool_call.get("id", "")
     tool_name = tool_call.get("function", {}).get("name", "")
+    tool_call_args = tool_call.get("function", {}).get("arguments", "{}")
     tool_function_params = {}
     try:
         # json.loads cannot be used because some models do not produce valid JSON
         tool_function_params = ast.literal_eval(
-            tool_call.get("function", {}).get("arguments", "{}")
+            tool_call_args
         )
     except Exception as e:
         log.debug(e)
         # Fallback to JSON parsing
         try:
             tool_function_params = json.loads(
-                tool_call.get("function", {}).get("arguments", "{}")
+                tool_call_args
             )
         except Exception as e:
-            log.debug(
-                f"Error parsing tool call arguments: {tool_call.get('function', {}).get('arguments', '{}')}"
+            log.error(
+                f"Error parsing tool call arguments: {tool_call_args}"
             )
+    # Mutate the original tool call params because they are passed back to the LLM
+    # via the content blocks. If they end up in a json block and are not valid JSON,
+    # downstream LLM integrations (e.g. bedrock gateway) can fail on the invalid
+    # response params.
+    # Main case so far is no args = "" = invalid JSON.
+    log.debug(f"Parsed args from {tool_call_args} to {tool_function_params}")
+    tool_call.setdefault("function", {})["arguments"] = json.dumps(tool_function_params)
     tool_result = None
     if tool_name in tools:
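
To make the net behaviour of the new lines concrete, below is a minimal, self-contained sketch of the same parse-then-normalize flow: try ast.literal_eval, fall back to json.loads, and finally write the parsed params back as valid JSON so a no-argument call ("") ends up as "{}". The helper name normalize_tool_call_arguments and the sample tool_call dict are illustrative assumptions, not code from the repository.

# Standalone sketch (not repository code): helper name and sample data are made up.
import ast
import json
import logging

log = logging.getLogger(__name__)


def normalize_tool_call_arguments(tool_call: dict) -> dict:
    """Leniently parse a tool call's arguments, then rewrite them as valid JSON."""
    tool_call_args = tool_call.get("function", {}).get("arguments", "{}")

    tool_function_params = {}
    try:
        # Tolerates Python-style literals that some models emit instead of JSON.
        tool_function_params = ast.literal_eval(tool_call_args)
    except Exception:
        try:
            tool_function_params = json.loads(tool_call_args)
        except Exception:
            log.error(f"Error parsing tool call arguments: {tool_call_args}")

    # Write the parsed params back so anything consuming the tool call later
    # always sees valid JSON, even for a no-argument call ("" becomes "{}").
    tool_call.setdefault("function", {})["arguments"] = json.dumps(tool_function_params)
    return tool_function_params


# A no-argument tool call: "arguments" is an empty string, which is invalid JSON
# and previously propagated unchanged to downstream integrations.
call = {"id": "call_0", "function": {"name": "get_time", "arguments": ""}}
normalize_tool_call_arguments(call)
print(call["function"]["arguments"])  # -> {}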