Commit 4fa0e3f

Merge remote-tracking branch 'origin/main' into fix-serialize-error

shenchucheng committed Oct 11, 2024
2 parents 1b2be7a + 4c55a97
Showing 19 changed files with 48 additions and 336 deletions.
3 changes: 1 addition & 2 deletions README.md
@@ -59,7 +59,7 @@

### Installation

> Ensure that Python 3.9+ is installed on your system. You can check this by using: `python --version`.
> Ensure that Python 3.9 or later, but less than 3.12, is installed on your system. You can check this by using: `python --version`.
> You can use conda like this: `conda create -n metagpt python=3.9 && conda activate metagpt`
```bash
@@ -185,4 +185,3 @@ To cite [MetaGPT](https://openreview.net/forum?id=VtmBAGCN7o) or [Data Interpret
primaryClass={cs.AI}
}
```

82 changes: 0 additions & 82 deletions examples/sk_agent.py

This file was deleted.

2 changes: 1 addition & 1 deletion metagpt/actions/research.py
@@ -6,11 +6,11 @@
from typing import Any, Callable, Optional, Union

from pydantic import TypeAdapter, model_validator
from metagpt.tools.search_engine import SearchEngine

from metagpt.actions import Action
from metagpt.config2 import config
from metagpt.logs import logger
from metagpt.tools.search_engine import SearchEngine
from metagpt.tools.web_browser_engine import WebBrowserEngine
from metagpt.utils.common import OutputParser
from metagpt.utils.text import generate_prompt_chunk, reduce_message_length
2 changes: 1 addition & 1 deletion metagpt/ext/stanford_town/roles/st_role.py
@@ -266,7 +266,7 @@ async def observe(self) -> list[BasicMemory]:
# We will order our percept based on the distance, with the closest ones
# getting priorities.
percept_events_list = []
# First, we put all events that are occuring in the nearby tiles into the
# First, we put all events that are occurring in the nearby tiles into the
# percept_events_list
for tile in nearby_tiles:
tile_details = self.rc.env.observe(EnvObsParams(obs_type=EnvObsType.GET_TITLE, coord=tile))
2 changes: 1 addition & 1 deletion metagpt/memory/memory.py
@@ -81,7 +81,7 @@ def get(self, k=0) -> list[Message]:
return self.storage[-k:]

def find_news(self, observed: list[Message], k=0) -> list[Message]:
"""find news (previously unseen messages) from the the most recent k memories, from all memories when k=0"""
"""find news (previously unseen messages) from the most recent k memories, from all memories when k=0"""
already_observed = self.get(k)
news: list[Message] = []
for i in observed:
10 changes: 7 additions & 3 deletions metagpt/provider/dashscope_api.py
@@ -48,13 +48,17 @@ def build_api_arequest(
request_timeout,
form,
resources,
base_address,
_,
) = _get_protocol_params(kwargs)
task_id = kwargs.pop("task_id", None)
if api_protocol in [ApiProtocol.HTTP, ApiProtocol.HTTPS]:
if not dashscope.base_http_api_url.endswith("/"):
http_url = dashscope.base_http_api_url + "/"
if base_address is None:
base_address = dashscope.base_http_api_url
if not base_address.endswith("/"):
http_url = base_address + "/"
else:
http_url = dashscope.base_http_api_url
http_url = base_address

if is_service:
http_url = http_url + SERVICE_API_PATH + "/"
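The hunk above lets an explicit `base_address` (unpacked from `_get_protocol_params`) override `dashscope.base_http_api_url`. A minimal standalone sketch of that resolution step, with hypothetical names and placeholder URLs:

```python
from typing import Optional


def resolve_http_url(base_address: Optional[str], default_url: str) -> str:
    """Prefer an explicit base_address over the library default, then ensure a
    trailing slash so path segments like SERVICE_API_PATH can be appended."""
    url = base_address if base_address is not None else default_url
    return url if url.endswith("/") else url + "/"


# placeholder URLs, for illustration only
print(resolve_http_url(None, "https://dashscope.example/api/v1"))       # -> https://dashscope.example/api/v1/
print(resolve_http_url("http://localhost:8080/api", "unused-default"))  # -> http://localhost:8080/api/
```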
4 changes: 3 additions & 1 deletion metagpt/provider/general_api_requestor.py
@@ -81,7 +81,9 @@ async def _interpret_async_response(
self, result: aiohttp.ClientResponse, stream: bool
) -> Tuple[Union[bytes, AsyncGenerator[bytes, None]], bool]:
content_type = result.headers.get("Content-Type", "")
if stream and ("text/event-stream" in content_type or "application/x-ndjson" in content_type):
if stream and (
"text/event-stream" in content_type or "application/x-ndjson" in content_type or content_type == ""
):
# the `Content-Type` of ollama stream resp is "application/x-ndjson"
return (
self._interpret_response_line(line, result.status, result.headers, stream=True)
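The widened condition reads naturally as a predicate; a hedged restatement (the empty-string branch presumably tolerates responses that omit the `Content-Type` header entirely):

```python
def is_stream_response(stream: bool, content_type: str) -> bool:
    """Treat the body as a stream when streaming was requested and the
    Content-Type is SSE, NDJSON, or missing."""
    return stream and (
        "text/event-stream" in content_type
        or "application/x-ndjson" in content_type
        or content_type == ""
    )


assert is_stream_response(True, "application/x-ndjson")    # ollama streaming responses
assert is_stream_response(True, "")                        # missing header: newly accepted
assert not is_stream_response(True, "application/json")    # ordinary JSON body
assert not is_stream_response(False, "text/event-stream")  # caller did not request streaming
```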
16 changes: 16 additions & 0 deletions metagpt/provider/ollama_api.py
@@ -51,9 +51,17 @@ def _decode_and_load(self, chunk: bytes, encoding: str = "utf-8") -> dict:
return json.loads(chunk)

async def _achat_completion(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> dict:
headers = (
None
if not self.config.api_key or self.config.api_key == "sk-"
else {
"Authorization": f"Bearer {self.config.api_key}",
}
)
resp, _, _ = await self.client.arequest(
method=self.http_method,
url=self.suffix_url,
headers=headers,
params=self._const_kwargs(messages),
request_timeout=self.get_timeout(timeout),
)
@@ -66,9 +74,17 @@ async def acompletion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) ->
return await self._achat_completion(messages, timeout=self.get_timeout(timeout))

async def _achat_completion_stream(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> str:
headers = (
None
if not self.config.api_key or self.config.api_key == "sk-"
else {
"Authorization": f"Bearer {self.config.api_key}",
}
)
stream_resp, _, _ = await self.client.arequest(
method=self.http_method,
url=self.suffix_url,
headers=headers,
stream=True,
params=self._const_kwargs(messages, stream=True),
request_timeout=self.get_timeout(timeout),
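Both completion paths now attach the same optional bearer header. A minimal sketch of that selection as a standalone helper (the helper name is hypothetical, and treating `"sk-"` as the unset placeholder is an assumption read directly from the condition); presumably this supports Ollama instances behind an authenticating proxy while leaving plain local setups untouched:

```python
from typing import Optional


def build_auth_headers(api_key: Optional[str]) -> Optional[dict]:
    """Return an Authorization header only when a real API key is configured."""
    if not api_key or api_key == "sk-":
        return None  # local, unauthenticated Ollama server: send no auth header
    return {"Authorization": f"Bearer {api_key}"}


assert build_auth_headers(None) is None
assert build_auth_headers("sk-") is None
assert build_auth_headers("my-proxy-token") == {"Authorization": "Bearer my-proxy-token"}
```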
8 changes: 4 additions & 4 deletions metagpt/provider/qianfan_api.py
@@ -106,21 +106,21 @@ def _update_costs(self, usage: dict):
def get_choice_text(self, resp: JsonBody) -> str:
return resp.get("result", "")

def completion(self, messages: list[dict]) -> JsonBody:
resp = self.aclient.do(**self._const_kwargs(messages=messages, stream=False))
def completion(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> JsonBody:
resp = self.aclient.do(**self._const_kwargs(messages=messages, stream=False), request_timeout=timeout)
self._update_costs(resp.body.get("usage", {}))
return resp.body

async def _achat_completion(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> JsonBody:
resp = await self.aclient.ado(**self._const_kwargs(messages=messages, stream=False))
resp = await self.aclient.ado(**self._const_kwargs(messages=messages, stream=False), request_timeout=timeout)
self._update_costs(resp.body.get("usage", {}))
return resp.body

async def acompletion(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> JsonBody:
return await self._achat_completion(messages, timeout=self.get_timeout(timeout))

async def _achat_completion_stream(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> str:
resp = await self.aclient.ado(**self._const_kwargs(messages=messages, stream=True))
resp = await self.aclient.ado(**self._const_kwargs(messages=messages, stream=True), request_timeout=timeout)
collected_content = []
usage = {}
async for chunk in resp:
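A tiny self-contained sketch of what the added `request_timeout=timeout` arguments accomplish, using a fake client in place of the qianfan SDK's `do`/`ado`: the per-request timeout now reaches the client call instead of being dropped.

```python
import asyncio


class _FakeQianfanClient:
    """Stand-in for the qianfan SDK client, only to show the forwarding."""

    async def ado(self, *, messages, stream, request_timeout=None):
        return {"stream": stream, "request_timeout": request_timeout}


async def demo():
    client = _FakeQianfanClient()
    # mirrors the change above: the caller's timeout is forwarded explicitly
    return await client.ado(
        messages=[{"role": "user", "content": "hi"}], stream=False, request_timeout=60
    )


print(asyncio.run(demo()))  # {'stream': False, 'request_timeout': 60}
```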
4 changes: 2 additions & 2 deletions metagpt/roles/role.py
@@ -422,8 +422,8 @@ async def _observe(self, ignore_memory=False) -> int:
"""Prepare new messages for processing from the message buffer and other sources."""
# Read unprocessed messages from the msg buffer.
news = []
if self.recovered:
news = [self.latest_observed_msg] if self.latest_observed_msg else []
if self.recovered and self.latest_observed_msg:
news = self.rc.memory.find_news(observed=[self.latest_observed_msg], k=10)
if not news:
news = self.rc.msg_buffer.pop_all()
# Store the read messages in your own memory to prevent duplicate processing.
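On recovery the role previously replayed `latest_observed_msg` unconditionally; it is now filtered through memory first, so a message already stored among the last 10 memories is not processed twice. A standalone sketch based on the `find_news` contract shown earlier in this diff (plain strings stand in for `Message` objects):

```python
def find_news(storage, observed, k=10):
    """Return the items of `observed` not present in the last k stored messages."""
    already_observed = storage[-k:] if k else storage
    return [msg for msg in observed if msg not in already_observed]


latest_observed_msg = "latest"

# already remembered -> no news, so the role falls back to msg_buffer.pop_all()
assert find_news(["m1", "m2", "latest"], [latest_observed_msg], k=10) == []

# genuinely unseen -> re-process it after recovery
assert find_news(["m1", "m2"], [latest_observed_msg], k=10) == ["latest"]
```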
87 changes: 0 additions & 87 deletions metagpt/roles/sk_agent.py

This file was deleted.

22 changes: 0 additions & 22 deletions metagpt/tools/search_engine.py
@@ -9,34 +9,12 @@
from typing import Annotated, Callable, Coroutine, Literal, Optional, Union, overload

from pydantic import BaseModel, ConfigDict, Field, model_validator
from semantic_kernel.skill_definition import sk_function

from metagpt.configs.search_config import SearchConfig
from metagpt.logs import logger
from metagpt.tools import SearchEngineType


class SkSearchEngine:
"""A search engine class for executing searches.
Attributes:
search_engine: The search engine instance used for executing searches.
"""

def __init__(self, **kwargs):
self.search_engine = SearchEngine(**kwargs)

@sk_function(
description="searches results from Google. Useful when you need to find short "
"and succinct answers about a specific topic. Input should be a search query.",
name="searchAsync",
input_description="search",
)
async def run(self, query: str) -> str:
result = await self.search_engine.run(query)
return result


class SearchEngine(BaseModel):
"""A model for configuring and executing searches with different search engines.
32 changes: 0 additions & 32 deletions metagpt/utils/make_sk_kernel.py

This file was deleted.

2 changes: 1 addition & 1 deletion setup.py
@@ -109,7 +109,7 @@ def run(self):
license="MIT",
keywords="metagpt multi-agent multi-role programming gpt llm metaprogramming",
packages=find_packages(exclude=["contrib", "docs", "examples", "tests*"]),
python_requires=">=3.9",
python_requires=">=3.9, <3.12",
install_requires=requirements,
extras_require=extras_require,
cmdclass={