Hi there, I'm calling a Gemini model through the OpenAI client, using the code below:
print("!!!")
print({
    "messages": messages,
    "model": self.model_type,
    **self.model_config_dict,
})
print("!!!")
response = self._client.chat.completions.create(
    messages=messages,
    model=self.model_type,
    **self.model_config_dict,
)
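For context, I believe the request reduces to roughly this standalone snippet (tool schema abbreviated to one function; the base_url is Google's OpenAI-compatibility endpoint, which I'm assuming is what the backend points at):

import os

from openai import OpenAI

# Assumption: the backend targets Gemini's OpenAI-compatibility endpoint;
# adjust base_url / api_key if your setup differs.
client = OpenAI(
    api_key=os.environ["GEMINI_API_KEY"],
    base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
)

# Abbreviated version of the search_papers tool from the payload below.
tools = [
    {
        "type": "function",
        "function": {
            "name": "search_papers",
            "description": "Searches for academic papers on arXiv.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The search query string.",
                    },
                },
                "required": ["query"],
            },
        },
    },
]

response = client.chat.completions.create(
    model="gemini-1.5-pro",
    messages=[
        {"role": "system", "content": "You are a helpful assistant"},
        {"role": "user", "content": "Search paper 'attention is all you need' for me"},
    ],
    tools=tools,
    temperature=0.0,
)
print(response.choices[0].message)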
Running this through the agent prints the following payload and then raises an error:
!!!
{'messages': [{'role': 'system', 'content': 'You are a helpful assistant'}, {'role': 'user', 'content': "Search paper 'attention is all you need' for me"}], 'model': <ModelType.GEMINI_1_5_PRO: 'gemini-1.5-pro'>, 'tools': [{'type': 'function', 'function': {'name': 'search_papers', 'description': 'Searches for academic papers on arXiv using a query string and\noptional paper IDs.', 'parameters': {'properties': {'query': {'type': 'string', 'description': 'The search query string.'}, 'paper_ids': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'description': 'A list of specific arXiv paper\nIDs to search for. (default::obj: `None`)'}, 'max_results': {'anyOf': [{'type': 'integer'}, {'type': 'null'}], 'default': 5, 'description': 'The maximum number of search results\nto return. (default::obj: `5`)'}}, 'required': ['query'], 'type': 'object'}}}, {'type': 'function', 'function': {'name': 'download_papers', 'description': 'Downloads PDFs of academic papers from arXiv based on the provided\nquery.', 'parameters': {'properties': {'query': {'type': 'string', 'description': 'The search query string.'}, 'paper_ids': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'description': 'A list of specific arXiv paper\nIDs to download. (default::obj: `None`)'}, 'max_results': {'anyOf': [{'type': 'integer'}, {'type': 'null'}], 'default': 5, 'description': 'The maximum number of search results\nto download. (default::obj: `5`)'}, 'output_dir': {'anyOf': [{'type': 'string'}, {'type': 'null'}], 'default': './', 'description': 'The directory to save the downloaded\nPDFs. Defaults to the current directory.'}}, 'required': ['query'], 'type': 'object'}}}], 'temperature': 0.0, 'top_p': 1.0, 'n': 1, 'stream': False, 'stop': NOT_GIVEN, 'max_tokens': NOT_GIVEN, 'response_format': NOT_GIVEN, 'tool_choice': None}
!!!
Traceback (most recent call last):
  File "/Users/enrei/Desktop/camel_1127/camel/examples/toolkits/arxiv_toolkit.py", line 54, in <module>
    response = camel_agent.step(usr_msg)
               ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Desktop/camel_1127/camel/camel/agents/chat_agent.py", line 631, in step
    ) = self._step_model_response(openai_messages, num_tokens)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Desktop/camel_1127/camel/camel/agents/chat_agent.py", line 947, in _step_model_response
    response = self.model_backend.run(openai_messages)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Desktop/camel_1127/camel/camel/utils/commons.py", line 271, in wrapper
    return func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Desktop/camel_1127/camel/camel/models/gemini_model.py", line 104, in run
    response = self._client.chat.completions.create(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Library/Caches/pypoetry/virtualenvs/camel-ai-gf_ewcJG-py3.12/lib/python3.12/site-packages/openai/_utils/_utils.py", line 275, in wrapper
    return func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Library/Caches/pypoetry/virtualenvs/camel-ai-gf_ewcJG-py3.12/lib/python3.12/site-packages/openai/resources/chat/completions.py", line 829, in create
    return self._post(
           ^^^^^^^^^^^
  File "/Users/enrei/Library/Caches/pypoetry/virtualenvs/camel-ai-gf_ewcJG-py3.12/lib/python3.12/site-packages/openai/_base_client.py", line 1278, in post
    return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/enrei/Library/Caches/pypoetry/virtualenvs/camel-ai-gf_ewcJG-py3.12/lib/python3.12/site-packages/openai/_base_client.py", line 955, in request
    return self._request(
           ^^^^^^^^^^^^^^
  File "/Users/enrei/Library/Caches/pypoetry/virtualenvs/camel-ai-gf_ewcJG-py3.12/lib/python3.12/site-packages/openai/_base_client.py", line 1059, in _request
    raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - [{'error': {'code': 400, 'message': 'Request contains an invalid argument.', 'status': 'INVALID_ARGUMENT'}}]
The same schema works with OpenAI models and other models that support the OpenAI client, so is there some additional restriction on the Gemini side?
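One detail I noticed in the printed payload: the unpacked config still contains NOT_GIVEN sentinels and an explicit tool_choice=None. As far as I understand, the OpenAI client omits NOT_GIVEN values from the request but serializes an explicit None as JSON null, so Gemini may be receiving "tool_choice": null. Here is a minimal sketch of stripping those out before the call (just an idea I could try, not a confirmed fix):

from openai import NOT_GIVEN  # openai-python's "unset" sentinel

# Drop unset sentinels and explicit Nones so neither reaches the
# request body sent to the Gemini endpoint.
clean_config = {
    k: v
    for k, v in self.model_config_dict.items()
    if v is not NOT_GIVEN and v is not None
}

response = self._client.chat.completions.create(
    messages=messages,
    model=self.model_type,
    **clean_config,
)

Thanks in advance!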