my error:
openai.BadRequestError: Error code: 400 - {'error': {'message': 'Invalid JSON payload received. Unknown name "arguments" at \'tools[0].function_declarations[0]\': Cannot find field. (request id: 2025122307592348564637tkUFmagZk)', 'type': '', 'param': '', 'code': 400}}
my tools:
{
    "type": "function",
    "function": {
        "name": "AskUser",
        "description": "Proactively ask the user a question to get more information or to confirm an action.",
        "parameters": {
            "type": "object",
            "properties": {
                "question": {"type": "string", "description": "The question to ask the user"},
                "options": {"type": "array", "description": "Optional answer choices", "items": {"type": "string"}}
            },
            "required": ["question"]
        }
    }
}
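The field path in the error, tools[0].function_declarations[0], looks like Gemini's native tool format rather than the OpenAI-style tools I'm sending, so I assume the endpoint translates my definition into something like the sketch below. This is only my rough understanding of the Gemini function-declaration shape, not something taken from my actual request or logs:

# My assumption of the Gemini-native shape the error path refers to,
# reconstructed by hand; note there is no "arguments" field anywhere in it.
gemini_style_tools = [
    {
        "function_declarations": [
            {
                "name": "AskUser",
                "description": "Proactively ask the user a question to get more information or to confirm an action.",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "question": {"type": "string", "description": "The question to ask the user"},
                        "options": {"type": "array", "description": "Optional answer choices", "items": {"type": "string"}},
                    },
                    "required": ["question"],
                },
            }
        ]
    }
]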
I don't get any errors when I call a GPT model with exactly the same code through the OpenAI SDK, but switching the model to gemini-3-pro-preview produces the error above.
full code:
def chat_async_tool_select(messages):
    # `client` is an OpenAI-compatible client (openai.OpenAI) created elsewhere; see the note below.
    response = client.chat.completions.create(
        model="gemini-3-pro-preview",
        messages=messages,
        tools=[
            {
                "type": "function",
                "function": {
                    "name": "AskUser",
                    "description": "Proactively ask the user a question to get more information or to confirm an action.",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "question": {"type": "string", "description": "The question to ask the user"},
                            "options": {"type": "array", "description": "Optional answer choices", "items": {"type": "string"}},
                        },
                        "required": ["question"],
                    },
                },
            },
        ],
        tool_choice="auto",
    )
    tool_use = response.choices[0].message.tool_calls
    response_content = response.choices[0].message.content
    output_tokens = response.usage.completion_tokens
    input_tokens = response.usage.prompt_tokens
    return tool_use, response_content, input_tokens, output_tokens
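client isn't shown above; it's the regular openai.OpenAI client pointed at an OpenAI-compatible gateway for Gemini. A minimal sketch of the setup, with placeholders instead of my real key and base_url:

from openai import OpenAI

# Placeholder values only; my real base_url points at an OpenAI-compatible
# gateway that serves Gemini models.
client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://your-gateway.example.com/v1",
)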
messages:
messages = [
    {'role': 'system', 'content': 'you are a helpful assistant'},
    {'role': 'user', 'content': 'hello, i want get some informations'},
]
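and this is how I call the function and unpack the result:

tool_use, response_content, input_tokens, output_tokens = chat_async_tool_select(messages)
print(tool_use)
print(response_content)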