Commit 28c37f3

Native tool call support for Mistral AI API and topic notebook. (microsoft#2135)

* Support for Mistral AI API and topic notebook.

* formatting

* formatting
ekzhu authored and sharsha315 committed Mar 29, 2024
1 parent ec68207 commit 28c37f3
Showing 4 changed files with 1,042 additions and 13 deletions.
38 changes: 27 additions & 11 deletions autogen/agentchat/conversable_agent.py
@@ -696,8 +696,8 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
                 id_key = "name"
             else:
                 id_key = "tool_call_id"
-
-            func_print = f"***** Response from calling {message['role']} \"{message[id_key]}\" *****"
+            id = message.get(id_key, "No id found")
+            func_print = f"***** Response from calling {message['role']} ({id}) *****"
             print(colored(func_print, "green"), flush=True)
             print(message["content"], flush=True)
             print(colored("*" * len(func_print), "green"), flush=True)
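This hunk swaps the direct message[id_key] lookup for message.get(), so printing a tool response no longer raises KeyError when the provider omits the id, as the Mistral API can. A toy illustration, with an invented message payload:

    message = {"role": "tool", "content": "42"}  # provider returned no "tool_call_id"

    # The old code, message["tool_call_id"], would raise KeyError for this payload.
    id = message.get("tool_call_id", "No id found")
    print(f"***** Response from calling {message['role']} ({id}) *****")
    # prints: ***** Response from calling tool (No id found) *****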
@@ -714,7 +714,7 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
             if "function_call" in message and message["function_call"]:
                 function_call = dict(message["function_call"])
                 func_print = (
-                    f"***** Suggested function Call: {function_call.get('name', '(No function name found)')} *****"
+                    f"***** Suggested function call: {function_call.get('name', '(No function name found)')} *****"
                 )
                 print(colored(func_print, "green"), flush=True)
                 print(
@@ -726,9 +726,9 @@ def _print_received_message(self, message: Union[Dict, str], sender: Agent):
                 print(colored("*" * len(func_print), "green"), flush=True)
             if "tool_calls" in message and message["tool_calls"]:
                 for tool_call in message["tool_calls"]:
-                    id = tool_call.get("id", "(No id found)")
+                    id = tool_call.get("id", "No tool call id found")
                     function_call = dict(tool_call.get("function", {}))
-                    func_print = f"***** Suggested tool Call ({id}): {function_call.get('name', '(No function name found)')} *****"
+                    func_print = f"***** Suggested tool call ({id}): {function_call.get('name', '(No function name found)')} *****"
                     print(colored(func_print, "green"), flush=True)
                     print(
                         "Arguments: \n",
@@ -1309,6 +1309,12 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[
                 )
             for tool_call in extracted_response.get("tool_calls") or []:
                 tool_call["function"]["name"] = self._normalize_name(tool_call["function"]["name"])
+                # Remove id and type if they are not present.
+                # This is to make the tool call object compatible with Mistral API.
+                if tool_call.get("id") is None:
+                    tool_call.pop("id")
+                if tool_call.get("type") is None:
+                    tool_call.pop("type")
         return extracted_response
 
     async def a_generate_oai_reply(
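The added pop calls strip id and type keys whose value is None, which, per the comment in the diff, keeps the tool call object compatible with the Mistral API. A standalone sketch of that normalization (the sample payload is invented for illustration, and pop is given a default here so the sketch also tolerates keys that are missing entirely):

    tool_call = {"function": {"name": "get_weather", "arguments": "{}"}, "id": None, "type": None}

    # Drop keys whose value is None so the serialized tool call carries no
    # null fields that a stricter endpoint such as Mistral's would reject.
    if tool_call.get("id") is None:
        tool_call.pop("id", None)
    if tool_call.get("type") is None:
        tool_call.pop("type", None)

    print(tool_call)  # {'function': {'name': 'get_weather', 'arguments': '{}'}}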
@@ -1525,7 +1531,6 @@ def generate_tool_calls_reply(
         message = messages[-1]
         tool_returns = []
         for tool_call in message.get("tool_calls", []):
-            id = tool_call["id"]
             function_call = tool_call.get("function", {})
             func = self._function_map.get(function_call.get("name", None), None)
             if inspect.iscoroutinefunction(func):
@@ -1543,13 +1548,24 @@ def generate_tool_calls_reply(
                 loop.close()
             else:
                 _, func_return = self.execute_function(function_call)
-            tool_returns.append(
-                {
-                    "tool_call_id": id,
+            content = func_return.get("content", "")
+            if content is None:
+                content = ""
+            tool_call_id = tool_call.get("id", None)
+            if tool_call_id is not None:
+                tool_call_response = {
+                    "tool_call_id": tool_call_id,
                     "role": "tool",
-                    "content": func_return.get("content", ""),
+                    "content": content,
                 }
-            )
+            else:
+                # Do not include tool_call_id if it is not present.
+                # This is to make the tool call object compatible with Mistral API.
+                tool_call_response = {
+                    "role": "tool",
+                    "content": content,
+                }
+            tool_returns.append(tool_call_response)
         if tool_returns:
             return True, {
                 "role": "tool",
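With this change, generate_tool_calls_reply builds the tool-role response conditionally, so tool_call_id appears only when the originating call actually carried an id. A minimal sketch of the resulting shape (the helper name and sample data are illustrative, not part of the commit):

    from typing import Any, Dict, Optional

    def build_tool_response(tool_call: Dict[str, Any], content: Optional[str]) -> Dict[str, Any]:
        """Return a tool-role message, omitting tool_call_id when the call had none."""
        response: Dict[str, Any] = {"role": "tool", "content": content if content is not None else ""}
        if tool_call.get("id") is not None:
            response["tool_call_id"] = tool_call["id"]
        return response

    print(build_tool_response({"id": "call_1", "function": {}}, "sunny"))
    # {'role': 'tool', 'content': 'sunny', 'tool_call_id': 'call_1'}
    print(build_tool_response({"function": {}}, None))
    # {'role': 'tool', 'content': ''}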
2 changes: 2 additions & 0 deletions website/.gitignore
@@ -19,6 +19,8 @@ docs/topics/code-execution/*.mdx
 docs/topics/task_decomposition.mdx
 docs/topics/prompting-and-reasoning/*.mdx
 docs/topics/non-openai-models/*.mdx
+docs/topics/non-openai-models/**/*.py
+docs/topics/non-openai-models/**/*.svg
 
 # Misc
 .DS_Store
@@ -21,7 +21,8 @@ These proxy servers can be cloud-based or running locally within your environment
 By using cloud-based proxy servers, you are able to use models without requiring the hardware
 and software to run them.
 
-These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/),
+These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/)
+and [Mistral AI](https://mistral.ai/),
 or their own closed models.
 
 When cloud-based proxy servers provide an OpenAI-compatible API, using them in AutoGen
@@ -32,7 +33,8 @@ authentication which is usually handled through an API key.
 Examples of using cloud-based proxy server providers that have an OpenAI-compatible API
 are provided below:
 
-- [together.ai example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Together AI example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Mistral AI example](/docs/topics/non-openai-models/cloud-mistralai)
 
 
 ### Locally run proxy servers
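For context, pointing AutoGen at an OpenAI-compatible endpoint such as Mistral AI's typically needs only a model name, a base URL, and an API key. A minimal sketch, assuming the standard pyautogen config-list shape (the model name and endpoint below are placeholders, not taken from this commit):

    import os

    import autogen

    config_list = [
        {
            "model": "mistral-large-latest",           # placeholder model name
            "base_url": "https://api.mistral.ai/v1",   # the provider's OpenAI-compatible endpoint
            "api_key": os.environ["MISTRAL_API_KEY"],  # authentication via API key
        }
    ]

    assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": config_list})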
(The fourth changed file, the Mistral AI topic notebook, is not rendered in this view.)
