Skip to content

Commit

Permalink
better docs
Browse files Browse the repository at this point in the history
  • Loading branch information
bboynton97 committed May 22, 2024
1 parent e73209e commit 3283c1d
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 28 deletions.
2 changes: 1 addition & 1 deletion autogen/agentchat/conversable_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -1357,7 +1357,7 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[

# TODO: #1143 handle token limit exceeded error
response = llm_client.create(
context=messages[-1].pop("context", None), messages=all_messages, cache=cache, source=self
context=messages[-1].pop("context", None), messages=all_messages, cache=cache, agent=self
)
extracted_response = llm_client.extract_text_or_completion_object(response)[0]

Expand Down
9 changes: 5 additions & 4 deletions autogen/oai/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,6 @@ def create(self, params: Dict[str, Any]) -> ChatCompletion:
# If streaming is not enabled, send a regular chat completion request
params = params.copy()
params["stream"] = False
del params['source']
response = completions.create(**params)

return response
Expand Down Expand Up @@ -570,13 +569,15 @@ def yes_or_no_filter(context, response):
for i, client in enumerate(self._clients):
# merge the input config with the i-th config in the config list
full_config = {**config, **self._config_list[i]}
agent = full_config.get("agent")
# separate the config into create_config and extra_kwargs
create_config, extra_kwargs = self._separate_create_config(full_config)
api_type = extra_kwargs.get("api_type")
if api_type and api_type.startswith("azure") and "model" in create_config:
create_config["model"] = create_config["model"].replace(".", "")
# construct the create params
params = self._construct_create_params(create_config, extra_kwargs)
del params['agent']
# get the cache_seed, filter_func and context
cache_seed = extra_kwargs.get("cache_seed", LEGACY_DEFAULT_CACHE_SEED)
cache = extra_kwargs.get("cache")
Expand Down Expand Up @@ -619,7 +620,7 @@ def yes_or_no_filter(context, response):
invocation_id=invocation_id,
client_id=id(client),
wrapper_id=id(self),
source=full_config.get("source"),
agent=agent,
request=params,
response=response,
is_cached=1,
Expand Down Expand Up @@ -652,7 +653,7 @@ def yes_or_no_filter(context, response):
invocation_id=invocation_id,
client_id=id(client),
wrapper_id=id(self),
source=full_config.get("source"),
agent=agent,
request=params,
response=f"error_code:{error_code}, config {i} failed",
is_cached=0,
Expand Down Expand Up @@ -683,7 +684,7 @@ def yes_or_no_filter(context, response):
invocation_id=invocation_id,
client_id=id(client),
wrapper_id=id(self),
source=full_config.get("source"),
agent=agent,
request=params,
response=response,
is_cached=0,
Expand Down
25 changes: 2 additions & 23 deletions notebook/agentchat_agentops.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -84,18 +84,7 @@
"source": [
"import agentops\n",
"\n",
"agentops.init()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d120f777958ff17f",
"metadata": {},
"outputs": [],
"source": [
"# Optional: Set a key manually\n",
"agentops.init(api_key=\"xxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxxx\")"
"agentops.init(api_key='<your-agentops-api-key>') # or agentops.init(api_key=\"...\")"
]
},
{
Expand Down Expand Up @@ -176,9 +165,7 @@
"from autogen import ConversableAgent, config_list_from_json, register_function\n",
"from typing import Annotated, Literal\n",
"\n",
"import agentops\n",
"\n",
"agentops.init(tags=[\"autogen-tool-example\"])\n",
"agentops.start_session(tags=[\"autogen-tool-example\"])\n",
"\n",
"Operator = Literal[\"+\", \"-\", \"*\", \"/\"]\n",
"\n",
Expand Down Expand Up @@ -244,14 +231,6 @@
"- Each use of the `calculator` tool\n",
"- Each call to OpenAI for LLM use"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5f5ba083",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down

0 comments on commit 3283c1d

Please sign in to comment.