Feat: Add external tool support to ChatAgent & Refactor (#830)
Co-authored-by: Wendong-Fan <[email protected]>
Co-authored-by: Wendong <[email protected]>
3 people authored Sep 11, 2024
1 parent 2b39b7f commit 55c6fad
Showing 13 changed files with 492 additions and 412 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug_report.yml
@@ -27,7 +27,7 @@ body:
attributes:
label: What version of camel are you using?
description: Run command `python3 -c 'print(__import__("camel").__version__)'` in your shell and paste the output here.
-placeholder: E.g., 0.1.7.2
+placeholder: E.g., 0.1.8
validations:
required: true

2 changes: 1 addition & 1 deletion README.md
@@ -119,7 +119,7 @@ conda create --name camel python=3.10
conda activate camel
# Clone github repo
-git clone -b v0.1.7.2 https://github.com/camel-ai/camel.git
+git clone -b v0.1.8 https://github.com/camel-ai/camel.git
# Change directory into project directory
cd camel
2 changes: 1 addition & 1 deletion camel/__init__.py
@@ -12,7 +12,7 @@
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

-__version__ = '0.1.7.2'
+__version__ = '0.1.8'

__all__ = [
'__version__',
467 changes: 223 additions & 244 deletions camel/agents/chat_agent.py

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions camel/types/enums.py
@@ -92,6 +92,10 @@ def value_for_tiktoken(self) -> str:
return self.value
return "gpt-4o-mini"

+@property
+def supports_tool_calling(self) -> bool:
+    return any([self.is_openai, self.is_gemini, self.is_mistral])

@property
def is_openai(self) -> bool:
r"""Returns whether this type of models is an OpenAI-released model."""
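A minimal usage sketch of the new `supports_tool_calling` property (the guard shown here is illustrative and not part of this diff):

```python
from camel.types import ModelType

# Hypothetical guard: only attach tool schemas when the backend can call tools.
model_type = ModelType.GPT_3_5_TURBO
if model_type.supports_tool_calling:
    print(f"{model_type.value} supports tool calling (OpenAI, Gemini, or Mistral).")
else:
    print(f"{model_type.value} has no tool-calling support; skip tool configs.")
```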
11 changes: 6 additions & 5 deletions camel/utils/commons.py
@@ -29,6 +29,7 @@
Mapping,
Optional,
Set,
+Type,
TypeVar,
cast,
)
@@ -329,12 +330,12 @@ def get_pydantic_major_version() -> int:
return 0


-def get_pydantic_object_schema(pydantic_params: BaseModel) -> Dict:
+def get_pydantic_object_schema(pydantic_params: Type[BaseModel]) -> Dict:
r"""Get the JSON schema of a Pydantic model.
Args:
-pydantic_params (BaseModel): The Pydantic model to retrieve the schema
-    for.
+pydantic_params (Type[BaseModel]): The Pydantic model class to retrieve
+    the schema for.
Returns:
dict: The JSON schema of the Pydantic model.
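To illustrate the tightened signature, a small usage sketch follows (the `WeatherQuery` model is a hypothetical stand-in, not part of the commit):

```python
from pydantic import BaseModel

from camel.utils.commons import get_pydantic_object_schema


class WeatherQuery(BaseModel):
    # Hypothetical model used only for this illustration.
    city: str
    unit: str = "celsius"


# Pass the model class itself (Type[BaseModel]), not an instance.
schema = get_pydantic_object_schema(WeatherQuery)
print(list(schema["properties"]))  # expected: ['city', 'unit']
```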
@@ -354,7 +355,7 @@ def func_string_to_callable(code: str):
"""
local_vars: Mapping[str, object] = {}
exec(code, globals(), local_vars)
-func = local_vars.get(Constants.FUNC_NAME_FOR_STRUCTURE_OUTPUT)
+func = local_vars.get(Constants.FUNC_NAME_FOR_STRUCTURED_OUTPUT)
return func


@@ -397,7 +398,7 @@ def json_to_function_code(json_obj: Dict) -> str:

# function template
function_code = f'''
-def {Constants.FUNC_NAME_FOR_STRUCTURE_OUTPUT}({args_str}):
+def {Constants.FUNC_NAME_FOR_STRUCTURED_OUTPUT}({args_str}):
r"""Return response with a specified json format.
Args:
{docstring_args_str}
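The renamed constant is the name `func_string_to_callable` looks up after `exec`. A minimal sketch below builds the function string by hand; in practice `json_to_function_code` generates it from a JSON schema, and the field names here are illustrative:

```python
from camel.utils.commons import func_string_to_callable
from camel.utils.constants import Constants

# Hand-written stand-in for the code string that json_to_function_code
# would normally generate from a JSON schema.
code = f'''
def {Constants.FUNC_NAME_FOR_STRUCTURED_OUTPUT}(release_year, founding_year):
    r"""Return response with a specified json format."""
    return {{"release_year": release_year, "founding_year": founding_year}}
'''

fn = func_string_to_callable(code)
print(fn(2007, 1776))  # {'release_year': 2007, 'founding_year': 1776}
```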
2 changes: 1 addition & 1 deletion camel/utils/constants.py
@@ -26,7 +26,7 @@ class Constants:
VIDEO_DEFAULT_PLUG_PYAV = "pyav"

# Return response with json format
-FUNC_NAME_FOR_STRUCTURE_OUTPUT = "return_json_response"
+FUNC_NAME_FOR_STRUCTURED_OUTPUT = "return_json_response"

# Default top k value for RAG
DEFAULT_TOP_K_RESULTS = 1
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -27,7 +27,7 @@
project = 'CAMEL'
copyright = '2023, CAMEL-AI.org'
author = 'CAMEL-AI.org'
-release = '0.1.7.2'
+release = '0.1.8'

html_favicon = (
'https://raw.githubusercontent.com/camel-ai/camel/master/misc/favicon.png'
2 changes: 1 addition & 1 deletion docs/get_started/setup.md
@@ -61,7 +61,7 @@ conda create --name camel python=3.10
conda activate camel
# Clone github repo
-git clone -b v0.1.7.2 https://github.com/camel-ai/camel.git
+git clone -b v0.1.8 https://github.com/camel-ai/camel.git
# Change directory into project directory
cd camel
85 changes: 85 additions & 0 deletions examples/external_tools/use_external_tools.py
@@ -0,0 +1,85 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

from camel.agents import ChatAgent
from camel.configs import ChatGPTConfig
from camel.messages import BaseMessage
from camel.models import ModelFactory
from camel.toolkits import MATH_FUNCS, SEARCH_FUNCS
from camel.types import ModelPlatformType, ModelType


def main():
# Set up the internal tools and the external tools for the agent
internal_tools = SEARCH_FUNCS
external_tools = MATH_FUNCS
tool_list = internal_tools + external_tools

model_config_dict = ChatGPTConfig(
tools=tool_list,
temperature=0.0,
).as_dict()

model = ModelFactory.create(
model_platform=ModelPlatformType.OPENAI,
model_type=ModelType.GPT_3_5_TURBO,
model_config_dict=model_config_dict,
)

# Create an agent that runs internal tools itself and defers external tools to the caller
external_tool_agent = ChatAgent(
system_message=BaseMessage.make_assistant_message(
role_name="Tools calling operator",
content="You are a helpful assistant",
),
model=model,
tools=internal_tools,
external_tools=external_tools,
)

usr_msg = BaseMessage.make_user_message(
role_name="User",
content="When is the release date of the video game Portal?",
)

# This will directly run the internal tool
response = external_tool_agent.step(usr_msg)
print(response.msg.content)

usr_msg = BaseMessage.make_user_message(
role_name="User",
content="What's the result of the release year of Portal subtracted by"
"the year that United States was founded?",
)
# This will first automatically run the internal tool to check the years
# Then it will request the external tool to calculate the difference
response = external_tool_agent.step(usr_msg)
# This should be empty
print(response.msg.content)
external_tool_request = response.info["external_tool_request"]
# This will print the info of the external tool request
print(external_tool_request)


if __name__ == "__main__":
main()


# flake8: noqa :E501
"""
Output:
The video game "Portal" was released in 2007 as part of a bundle called The Orange Box for Windows, Xbox 360, and PlayStation 3.
ChatCompletionMessageToolCall(id='call_U5Xju7vYtAQAEW4D1M8R1kgs', function=Function(arguments='{"a": 2007, "b": 1776}', name='sub'), type='function')
"""
