Merge branch 'main' into main
whiskyboy authored Apr 24, 2024
2 parents 0d7c22c + 31fe75a commit fa66435
Showing 73 changed files with 8,142 additions and 2,963 deletions.
10 changes: 7 additions & 3 deletions .github/workflows/contrib-openai.yml
@@ -41,19 +41,23 @@ jobs:
pip install -e .
python -c "import autogen"
pip install coverage pytest-asyncio
- name: Install PostgreSQL
run: |
sudo apt install postgresql -y
- name: Start PostgreSQL service
run: sudo service postgresql start
- name: Install packages for test when needed
run: |
pip install docker
pip install qdrant_client[fastembed]
pip install -e .[retrievechat]
pip install -e .[retrievechat-qdrant,retrievechat-pgvector]
- name: Coverage
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
OAI_CONFIG_LIST: ${{ secrets.OAI_CONFIG_LIST }}
run: |
coverage run -a -m pytest test/agentchat/contrib/test_retrievechat.py::test_retrievechat test/agentchat/contrib/test_qdrant_retrievechat.py::test_retrievechat
coverage run -a -m pytest test/agentchat/contrib/test_retrievechat.py::test_retrievechat test/agentchat/contrib/test_qdrant_retrievechat.py::test_retrievechat test/agentchat/contrib/test_pgvector_retrievechat.py::test_retrievechat
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
12 changes: 8 additions & 4 deletions .github/workflows/contrib-tests.yml
@@ -42,16 +42,20 @@ jobs:
- name: Install qdrant_client when python-version is 3.10
if: matrix.python-version == '3.10'
run: |
pip install qdrant_client[fastembed]
pip install .[retrievechat-qdrant]
- name: Install unstructured when python-version is 3.9 and on linux
if: matrix.python-version == '3.9' && matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y tesseract-ocr poppler-utils
pip install unstructured[all-docs]==0.13.0
- name: Install packages and dependencies for RetrieveChat
- name: Install and Start PostgreSQL
if: matrix.os == 'ubuntu-latest'
run: |
pip install -e .[retrievechat]
sudo apt install postgresql -y
sudo service postgresql start
- name: Install packages and dependencies for PGVector
run: |
pip install -e .[retrievechat-pgvector]
- name: Set AUTOGEN_USE_DOCKER based on OS
shell: bash
run: |
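For context, the `retrievechat-pgvector` extra installed above is exercised against the local PostgreSQL instance started in the preceding step. Below is a hypothetical sketch of such a setup; the `retrieve_config` keys (`vector_db`, `db_config`), the docs path, and the connection string are assumptions for illustration, not taken from this diff.

```python
# Hypothetical sketch: RetrieveChat backed by a local PGVector/PostgreSQL instance.
# The retrieve_config keys, docs path, and connection string are illustrative assumptions.
import autogen
from autogen.agentchat.contrib.retrieve_user_proxy_agent import RetrieveUserProxyAgent

config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")

assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": config_list})
ragproxyagent = RetrieveUserProxyAgent(
    "ragproxyagent",
    human_input_mode="NEVER",
    retrieve_config={
        "task": "qa",
        "docs_path": "./docs",  # placeholder path
        "vector_db": "pgvector",  # assumed name of the pgvector backend
        "db_config": {"connection_string": "postgresql://postgres:postgres@localhost:5432/postgres"},
    },
)

# Ask a question grounded in the indexed documents.
ragproxyagent.initiate_chat(assistant, message=ragproxyagent.message_generator, problem="What is AutoGen?")
```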
6 changes: 5 additions & 1 deletion .github/workflows/dotnet-build.yml
@@ -22,13 +22,15 @@ permissions:

jobs:
build:
name: Build
name: Dotnet Build
runs-on: ubuntu-latest
defaults:
run:
working-directory: dotnet
steps:
- uses: actions/checkout@v4
with:
lfs: true
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
@@ -54,6 +56,8 @@ jobs:
needs: build
steps:
- uses: actions/checkout@v4
with:
lfs: true
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
4 changes: 3 additions & 1 deletion .github/workflows/dotnet-release.yml
@@ -27,6 +27,8 @@ jobs:
working-directory: dotnet
steps:
- uses: actions/checkout@v4
with:
lfs: true
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
@@ -66,4 +68,4 @@ jobs:
$version = $metaInfoContent | Select-String -Pattern "<VersionPrefix>(.*)</VersionPrefix>" | ForEach-Object { $_.Matches.Groups[1].Value }
git tag -a "$version" -m "AutoGen.Net release $version"
git push origin --tags
shell: pwsh
shell: pwsh
15 changes: 15 additions & 0 deletions .github/workflows/lfs-check.yml
@@ -0,0 +1,15 @@
name: "Git LFS Check"

on: pull_request
permissions: {}
jobs:
lfs-check:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
lfs: true
- name: Check Git LFS files for consistency
run: |
git lfs fsck
2 changes: 2 additions & 0 deletions .gitignore
@@ -188,3 +188,5 @@ local_cache

notebook/result.png
samples/apps/autogen-studio/autogenstudio/models/test/

notebook/coding
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -43,6 +43,7 @@ repos:
website/static/img/ag.svg |
website/yarn.lock |
website/docs/tutorial/code-executors.ipynb |
website/docs/topics/code-execution/custom-executor.ipynb |
website/docs/topics/non-openai-models/cloud-gemini.ipynb |
notebook/.*
)$
3 changes: 2 additions & 1 deletion OAI_CONFIG_LIST_sample
@@ -5,7 +5,8 @@
[
{
"model": "gpt-4",
"api_key": "<your OpenAI API key here>"
"api_key": "<your OpenAI API key here>",
"tags": ["gpt-4", "tool"]
},
{
"model": "<your Azure OpenAI deployment name>",
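The new `tags` field in the sample config makes entries filterable. As a brief illustration (assuming a local `OAI_CONFIG_LIST` file shaped like the sample above), a tag can be used to select matching entries:

```python
# Illustrative only: filter config entries by the "tags" field added above.
import autogen

# Keep only entries tagged "tool" (e.g. the gpt-4 entry in the sample).
config_list = autogen.config_list_from_json(
    env_or_file="OAI_CONFIG_LIST",
    filter_dict={"tags": ["tool"]},
)
print(len(config_list))
```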
2 changes: 1 addition & 1 deletion README.md
@@ -14,7 +14,7 @@
<img src="https://github.com/microsoft/autogen/blob/main/website/static/img/flaml.svg" width=200>
<br>
</p> -->
:fire: Mar 26, 2024: Andrew Ng gave a shoutout to AutoGen in [What's next for AI agentic workflows](https://youtu.be/sal78ACtGTc?si=JduUzN_1kDnMq0vF) at Sequoia Capital's AI Ascent.
:fire: Apr 17, 2024: Andrew Ng cited AutoGen in [The Batch newsletter](https://www.deeplearning.ai/the-batch/issue-245/) and [What's next for AI agentic workflows](https://youtu.be/sal78ACtGTc?si=JduUzN_1kDnMq0vF) at Sequoia Capital's AI Ascent (Mar 26).

:fire: Mar 3, 2024: What's new in AutoGen? 📰[Blog](https://microsoft.github.io/autogen/blog/2024/03/03/AutoGen-Update); 📺[Youtube](https://www.youtube.com/watch?v=j_mtwQiaLGU).

12 changes: 11 additions & 1 deletion autogen/agentchat/chat.py
@@ -171,6 +171,9 @@ def initiate_chats(chat_queue: List[Dict[str, Any]]) -> List[ChatResult]:
- `"carryover"` - It can be used to specify the carryover information to be passed
to this chat. If provided, we will combine this carryover with the "message" content when
generating the initial chat message in `generate_init_message`.
- `"finished_chat_indexes_to_exclude_from_carryover"` - It can be used by specifying a list of indexes of the finished_chats list,
from which to exclude the summaries for carryover. If 'finished_chat_indexes_to_exclude_from_carryover' is not provided or an empty list,
then summary from all the finished chats will be taken.
Returns:
(list): a list of ChatResult objects corresponding to the finished chats in the chat_queue.
"""
@@ -182,9 +185,16 @@ def initiate_chats(chat_queue: List[Dict[str, Any]]) -> List[ChatResult]:
while current_chat_queue:
chat_info = current_chat_queue.pop(0)
_chat_carryover = chat_info.get("carryover", [])
finished_chat_indexes_to_exclude_from_carryover = chat_info.get(
"finished_chat_indexes_to_exclude_from_carryover", []
)

if isinstance(_chat_carryover, str):
_chat_carryover = [_chat_carryover]
chat_info["carryover"] = _chat_carryover + [r.summary for r in finished_chats]
chat_info["carryover"] = _chat_carryover + [
r.summary for i, r in enumerate(finished_chats) if i not in finished_chat_indexes_to_exclude_from_carryover
]

__post_carryover_processing(chat_info)
sender = chat_info["sender"]
chat_res = sender.initiate_chat(**chat_info)
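A minimal sketch of how the new `finished_chat_indexes_to_exclude_from_carryover` key might be used with `initiate_chats`; the agents and messages are placeholders, not taken from this commit.

```python
# Sketch only: the agents are LLM-free placeholders so the queue shape is the focus.
from autogen import ConversableAgent
from autogen.agentchat.chat import initiate_chats

alice = ConversableAgent("alice", llm_config=False, human_input_mode="NEVER")
bob = ConversableAgent("bob", llm_config=False, human_input_mode="NEVER")
carol = ConversableAgent("carol", llm_config=False, human_input_mode="NEVER")

chat_queue = [
    {"sender": alice, "recipient": bob, "message": "Collect requirements.", "max_turns": 1},
    {"sender": alice, "recipient": carol, "message": "Brainstorm risks.", "max_turns": 1},
    {
        "sender": alice,
        "recipient": bob,
        "message": "Write the final summary.",
        "max_turns": 1,
        # Exclude the summary of the first finished chat (index 0) from the carryover.
        "finished_chat_indexes_to_exclude_from_carryover": [0],
    },
]

results = initiate_chats(chat_queue)
```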
26 changes: 16 additions & 10 deletions autogen/agentchat/contrib/capabilities/teachability.py
@@ -86,7 +86,7 @@ def prepopulate_db(self):
"""Adds a few arbitrary memos to the DB."""
self.memo_store.prepopulate()

def process_last_received_message(self, text):
def process_last_received_message(self, text: Union[Dict, str]):
"""
Appends any relevant memos to the message text, and stores any apparent teachings in new memos.
Uses TextAnalyzerAgent to make decisions about memo storage and retrieval.
@@ -103,7 +103,7 @@ def process_last_received_message(self, text):
# Return the (possibly) expanded message text.
return expanded_text

def _consider_memo_storage(self, comment):
def _consider_memo_storage(self, comment: Union[Dict, str]):
"""Decides whether to store something from one user comment in the DB."""
memo_added = False

@@ -161,7 +161,7 @@ def _consider_memo_storage(self, comment):
# Yes. Save them to disk.
self.memo_store._save_memos()

def _consider_memo_retrieval(self, comment):
def _consider_memo_retrieval(self, comment: Union[Dict, str]):
"""Decides whether to retrieve memos from the DB, and add them to the chat context."""

# First, use the comment directly as the lookup key.
@@ -195,7 +195,7 @@ def _consider_memo_retrieval(self, comment):
# Append the memos to the text of the last message.
return comment + self._concatenate_memo_texts(memo_list)

def _retrieve_relevant_memos(self, input_text):
def _retrieve_relevant_memos(self, input_text: str) -> list:
"""Returns semantically related memos from the DB."""
memo_list = self.memo_store.get_related_memos(
input_text, n_results=self.max_num_retrievals, threshold=self.recall_threshold
@@ -213,7 +213,7 @@ def _retrieve_relevant_memos(self, input_text):
memo_list = [memo[1] for memo in memo_list]
return memo_list

def _concatenate_memo_texts(self, memo_list):
def _concatenate_memo_texts(self, memo_list: list) -> str:
"""Concatenates the memo texts into a single string for inclusion in the chat context."""
memo_texts = ""
if len(memo_list) > 0:
@@ -225,7 +225,7 @@ def _concatenate_memo_texts(self, memo_list):
memo_texts = memo_texts + "\n" + info
return memo_texts

def _analyze(self, text_to_analyze, analysis_instructions):
def _analyze(self, text_to_analyze: Union[Dict, str], analysis_instructions: Union[Dict, str]):
"""Asks TextAnalyzerAgent to analyze the given text according to specific instructions."""
self.analyzer.reset() # Clear the analyzer's list of messages.
self.teachable_agent.send(
@@ -246,10 +246,16 @@ class MemoStore:
Vector embeddings are currently supplied by Chroma's default Sentence Transformers.
"""

def __init__(self, verbosity, reset, path_to_db_dir):
def __init__(
self,
verbosity: Optional[int] = 0,
reset: Optional[bool] = False,
path_to_db_dir: Optional[str] = "./tmp/teachable_agent_db",
):
"""
Args:
- verbosity (Optional, int): 1 to print memory operations, 0 to omit them. 3+ to print memo lists.
- reset (Optional, bool): True to clear the DB before starting. Default False.
- path_to_db_dir (Optional, str): path to the directory where the DB is stored.
"""
self.verbosity = verbosity
@@ -304,7 +310,7 @@ def reset_db(self):
self.uid_text_dict = {}
self._save_memos()

def add_input_output_pair(self, input_text, output_text):
def add_input_output_pair(self, input_text: str, output_text: str):
"""Adds an input-output pair to the vector DB."""
self.last_memo_id += 1
self.vec_db.add(documents=[input_text], ids=[str(self.last_memo_id)])
@@ -321,7 +327,7 @@ def add_input_output_pair(self, input_text, output_text):
if self.verbosity >= 3:
self.list_memos()

def get_nearest_memo(self, query_text):
def get_nearest_memo(self, query_text: str):
"""Retrieves the nearest memo to the given query text."""
results = self.vec_db.query(query_texts=[query_text], n_results=1)
uid, input_text, distance = results["ids"][0][0], results["documents"][0][0], results["distances"][0][0]
@@ -338,7 +344,7 @@ def get_nearest_memo(self, query_text):
)
return input_text, output_text, distance

def get_related_memos(self, query_text, n_results, threshold):
def get_related_memos(self, query_text: str, n_results: int, threshold: Union[int, float]):
"""Retrieves memos that are related to the given query text within the specified distance threshold."""
if n_results > len(self.uid_text_dict):
n_results = len(self.uid_text_dict)
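Since `MemoStore` now ships with defaults for `verbosity`, `reset`, and `path_to_db_dir`, it can be constructed directly. A small sketch exercising the methods shown in this diff (the memo text is made up):

```python
# Sketch: exercise MemoStore with the new default arguments shown above.
from autogen.agentchat.contrib.capabilities.teachability import MemoStore

store = MemoStore(verbosity=1, reset=True)  # path_to_db_dir defaults to ./tmp/teachable_agent_db
store.add_input_output_pair(
    "What is the team's release day?",  # input_text, used as the lookup key
    "Releases ship on Thursdays.",      # output_text stored alongside it
)
input_text, output_text, distance = store.get_nearest_memo("When do releases ship?")
print(output_text, distance)
```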