diff --git a/.github/workflows/contrib-tests.yml b/.github/workflows/contrib-tests.yml index 719ff086183..872c2c5981a 100644 --- a/.github/workflows/contrib-tests.yml +++ b/.github/workflows/contrib-tests.yml @@ -256,6 +256,44 @@ jobs: file: ./coverage.xml flags: unittests + GeminiTest: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-2019] + python-version: ["3.9", "3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v4 + with: + lfs: true + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install packages and dependencies for all tests + run: | + python -m pip install --upgrade pip wheel + pip install pytest + - name: Install packages and dependencies for Gemini + run: | + pip install -e .[gemini,test] + - name: Set AUTOGEN_USE_DOCKER based on OS + shell: bash + run: | + if [[ ${{ matrix.os }} != ubuntu-latest ]]; then + echo "AUTOGEN_USE_DOCKER=False" >> $GITHUB_ENV + fi + - name: Coverage + run: | + coverage run -a -m pytest test/oai/test_gemini.py --skip-openai + coverage xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + file: ./coverage.xml + flags: unittests + ContextHandling: runs-on: ${{ matrix.os }} strategy: diff --git a/.gitignore b/.gitignore index 49a41e9ed2c..e5e6ff013d2 100644 --- a/.gitignore +++ b/.gitignore @@ -172,6 +172,10 @@ test/my_tmp/* # Storage for the AgentEval output test/test_files/agenteval-in-out/out/ +# local cache or coding folder +local_cache/ +coding/ + # Files created by tests *tmp_code_* test/agentchat/test_agent_scripts/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 53b6207a301..8fa68e0e344 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -43,6 +43,7 @@ repos: website/static/img/ag.svg | website/yarn.lock | website/docs/tutorial/code-executors.ipynb | + website/docs/topics/non-openai-models/cloud-gemini.ipynb | notebook/.* )$ # See https://jaredkhan.com/blog/mypy-pre-commit diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py index c7351d4287c..3ebb2d72336 100644 --- a/autogen/agentchat/conversable_agent.py +++ b/autogen/agentchat/conversable_agent.py @@ -1121,7 +1121,15 @@ def my_summary_method( def _last_msg_as_summary(sender, recipient, summary_args) -> str: """Get a chat summary from the last message of the recipient.""" try: - summary = recipient.last_message(sender)["content"].replace("TERMINATE", "") + content = recipient.last_message(sender)["content"] + if isinstance(content, str): + summary = content.replace("TERMINATE", "") + elif isinstance(content, list): + # Remove the `TERMINATE` word in the content list. + summary = [ + {**x, "text": x["text"].replace("TERMINATE", "")} if isinstance(x, dict) and "text" in x else x + for x in content + ] except (IndexError, AttributeError) as e: warnings.warn(f"Cannot extract summary using last_msg: {e}. Using an empty str as summary.", UserWarning) summary = "" diff --git a/autogen/oai/client.py b/autogen/oai/client.py index de35e5c5273..3edfa40d4ec 100644 --- a/autogen/oai/client.py +++ b/autogen/oai/client.py @@ -42,6 +42,13 @@ TOOL_ENABLED = True ERROR = None +try: + from autogen.oai.gemini import GeminiClient + + gemini_import_exception: Optional[ImportError] = None +except ImportError as e: + gemini_import_exception = e + logger = logging.getLogger(__name__) if not logger.handlers: # Add the console handler.
@@ -425,6 +432,10 @@ def _register_default_client(self, config: Dict[str, Any], openai_config: Dict[s self._configure_azure_openai(config, openai_config) client = AzureOpenAI(**openai_config) self._clients.append(OpenAIClient(client)) + elif api_type is not None and api_type.startswith("google"): + if gemini_import_exception: + raise ImportError("Please install `google-generativeai` to use the Gemini API.") + self._clients.append(GeminiClient(**openai_config)) else: client = OpenAI(**openai_config) self._clients.append(OpenAIClient(client)) diff --git a/autogen/oai/gemini.py b/autogen/oai/gemini.py new file mode 100644 index 00000000000..fcf7e09c025 --- /dev/null +++ b/autogen/oai/gemini.py @@ -0,0 +1,310 @@ +"""Create an OpenAI-compatible client for Gemini features. + + +Example: + llm_config={ + "config_list": [{ + "api_type": "google", + "model": "models/gemini-pro", + "api_key": os.environ.get("GOOGLE_API_KEY") + } + ]} + + agent = autogen.AssistantAgent("my_agent", llm_config=llm_config) + +Resources: +- https://ai.google.dev/docs +- https://cloud.google.com/vertex-ai/docs/generative-ai/migrate-from-azure +- https://blog.google/technology/ai/google-gemini-pro-imagen-duet-ai-update/ +- https://ai.google.dev/api/python/google/generativeai/ChatSession +""" + +from __future__ import annotations + +import base64 +import os +import random +import re +import time +import warnings +from io import BytesIO +from typing import Any, Dict, List, Mapping, Union + +import google.generativeai as genai +import requests +from google.ai.generativelanguage import Content, Part +from google.api_core.exceptions import InternalServerError +from openai.types.chat import ChatCompletion +from openai.types.chat.chat_completion import ChatCompletionMessage, Choice +from openai.types.completion_usage import CompletionUsage +from PIL import Image + + +class GeminiClient: + """Client for Google's Gemini API. + + Please visit this [page](https://github.com/microsoft/autogen/issues/2387) for the roadmap of Gemini integration of AutoGen. + """ + + def __init__(self, **kwargs): + self.api_key = kwargs.get("api_key", None) + if not self.api_key: + self.api_key = os.getenv("GOOGLE_API_KEY") + + assert ( + self.api_key + ), "Please provide api_key in your config list entry for Gemini or set the GOOGLE_API_KEY env variable." + + def message_retrieval(self, response) -> List: + """ + Retrieve and return a list of strings or a list of Choice.Message from the response. + + NOTE: if a list of Choice.Message is returned, it currently needs to contain the fields of OpenAI's ChatCompletion Message object, + since that is expected for function or tool calling in the rest of the codebase at the moment, unless a custom agent is being used. + """ + return [choice.message for choice in response.choices] + + def cost(self, response) -> float: + return response.cost + + @staticmethod + def get_usage(response) -> Dict: + """Return usage summary of the response using RESPONSE_USAGE_KEYS.""" + # ... # pragma: no cover + return { + "prompt_tokens": response.usage.prompt_tokens, + "completion_tokens": response.usage.completion_tokens, + "total_tokens": response.usage.total_tokens, + "cost": response.cost, + "model": response.model, + } + + def create(self, params: Dict) -> ChatCompletion: + model_name = params.get("model", "gemini-pro") + if not model_name: + raise ValueError( + "Please provide a model name for the Gemini Client. " + "You can configure it in the OAI Config List file. 
" + "See this [LLM configuration tutorial](https://microsoft.github.io/autogen/docs/topics/llm_configuration/) for more details." + ) + + params.get("api_type", "google") # not used + messages = params.get("messages", []) + stream = params.get("stream", False) + n_response = params.get("n", 1) + params.get("temperature", 0.5) + params.get("top_p", 1.0) + params.get("max_tokens", 4096) + + if stream: + # warn user that streaming is not supported + warnings.warn( + "Streaming is not supported for Gemini yet, and it will have no effect. Please set stream=False.", + UserWarning, + ) + + if n_response > 1: + warnings.warn("Gemini only supports `n=1` for now. We only generate one response.", UserWarning) + + if "vision" not in model_name: + # A. create and call the chat model. + gemini_messages = oai_messages_to_gemini_messages(messages) + + # we use chat model by default + model = genai.GenerativeModel(model_name) + genai.configure(api_key=self.api_key) + chat = model.start_chat(history=gemini_messages[:-1]) + max_retries = 5 + for attempt in range(max_retries): + ans = None + try: + response = chat.send_message(gemini_messages[-1].parts[0].text, stream=stream) + except InternalServerError: + delay = 5 * (2**attempt) + warnings.warn( + f"InternalServerError `500` occurs when calling Gemini's chat model. Retry in {delay} seconds...", + UserWarning, + ) + time.sleep(delay) + except Exception as e: + raise RuntimeError(f"Google GenAI exception occurred while calling Gemini API: {e}") + else: + # `ans = response.text` is unstable. Use the following code instead. + ans: str = chat.history[-1].parts[0].text + break + + if ans is None: + raise RuntimeError(f"Fail to get response from Google AI after retrying {attempt + 1} times.") + + prompt_tokens = model.count_tokens(chat.history[:-1]).total_tokens + completion_tokens = model.count_tokens(ans).total_tokens + elif model_name == "gemini-pro-vision": + # B. handle the vision model + # Gemini's vision model does not support chat history yet + model = genai.GenerativeModel(model_name) + genai.configure(api_key=self.api_key) + # chat = model.start_chat(history=gemini_messages[:-1]) + # response = chat.send_message(gemini_messages[-1]) + user_message = oai_content_to_gemini_content(messages[-1]["content"]) + if len(messages) > 2: + warnings.warn( + "Warning: Gemini's vision model does not support chat history yet.", + "We only use the last message as the prompt.", + UserWarning, + ) + + response = model.generate_content(user_message, stream=stream) + # ans = response.text + ans: str = response._result.candidates[0].content.parts[0].text + + prompt_tokens = model.count_tokens(user_message).total_tokens + completion_tokens = model.count_tokens(ans).total_tokens + + # 3. 
convert output + message = ChatCompletionMessage(role="assistant", content=ans, function_call=None, tool_calls=None) + choices = [Choice(finish_reason="stop", index=0, message=message)] + + response_oai = ChatCompletion( + id=str(random.randint(0, 1000)), + model=model_name, + created=int(time.time()), # Unix timestamp in seconds, per the OpenAI ChatCompletion schema + object="chat.completion", + choices=choices, + usage=CompletionUsage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ), + cost=calculate_gemini_cost(prompt_tokens, completion_tokens, model_name), + ) + + return response_oai + + +def calculate_gemini_cost(input_tokens: int, output_tokens: int, model_name: str) -> float: + if "1.5" in model_name or "gemini-experimental" in model_name: + # "gemini-1.5-pro-preview-0409" + # Cost is $7 per million input tokens and $21 per million output tokens + return 7.0 * input_tokens / 1e6 + 21.0 * output_tokens / 1e6 + + if "gemini-pro" not in model_name and "gemini-1.0-pro" not in model_name: + warnings.warn(f"Cost calculation is not implemented for model {model_name}. Using Gemini-1.0-Pro.", UserWarning) + + # Cost is $0.5 per million input tokens and $1.5 per million output tokens + return 0.5 * input_tokens / 1e6 + 1.5 * output_tokens / 1e6 + + +def oai_content_to_gemini_content(content: Union[str, List]) -> List: + """Convert content from OAI format to Gemini format""" + rst = [] + if isinstance(content, str): + rst.append(Part(text=content)) + return rst + + assert isinstance(content, list) + + for msg in content: + if isinstance(msg, dict): + assert "type" in msg, f"Missing 'type' field in message: {msg}" + if msg["type"] == "text": + rst.append(Part(text=msg["text"])) + elif msg["type"] == "image_url": + b64_img = get_image_data(msg["image_url"]["url"]) + img = _to_pil(b64_img) + rst.append(img) + else: + raise ValueError(f"Unsupported message type: {msg['type']}") + else: + raise ValueError(f"Unsupported message type: {type(msg)}") + return rst + + +def concat_parts(parts: List[Part]) -> List: + """Concatenate parts with the same type. + If two adjacent parts both have the "text" attribute, then it will be joined into one part. + """ + if not parts: + return [] + + concatenated_parts = [] + previous_part = parts[0] + + for current_part in parts[1:]: + if previous_part.text != "": + previous_part.text += current_part.text + else: + concatenated_parts.append(previous_part) + previous_part = current_part + + if previous_part.text == "": + previous_part.text = "empty" # Empty content is not allowed. + concatenated_parts.append(previous_part) + + return concatenated_parts + + +def oai_messages_to_gemini_messages(messages: List[Dict[str, Any]]) -> List[Content]: + """Convert messages from OAI format to Gemini format. + Make sure the "user" role and "model" role are interleaved. + Also, make sure the last item is from the "user" role. + """ + prev_role = None + rst = [] + curr_parts = [] + for i, message in enumerate(messages): + parts = oai_content_to_gemini_content(message["content"]) + role = "user" if message["role"] in ["user", "system"] else "model" + + if prev_role is None or role == prev_role: + curr_parts += parts + elif role != prev_role: + rst.append(Content(parts=concat_parts(curr_parts), role=prev_role)) + curr_parts = parts + prev_role = role + + # handle the last message + rst.append(Content(parts=concat_parts(curr_parts), role=role)) + + # Gemini is strict about the order of roles, such that + # 1. 
The messages should be interleaved between user and model. + # 2. The last message must be from the user role. + # We add a dummy message "continue" if the last role is not the user. + if rst[-1].role != "user": + rst.append(Content(parts=oai_content_to_gemini_content("continue"), role="user")) + + return rst + + +def _to_pil(data: str) -> Image.Image: + """ + Converts a base64 encoded image data string to a PIL Image object. + + This function first decodes the base64 encoded string to bytes, then creates a BytesIO object from the bytes, + and finally creates and returns a PIL Image object from the BytesIO object. + + Parameters: + data (str): The base64 encoded image data string. + + Returns: + Image.Image: The PIL Image object created from the input data. + """ + return Image.open(BytesIO(base64.b64decode(data))) + + +def get_image_data(image_file: str, use_b64=True) -> Union[bytes, str]: + """Return the image as a base64-encoded string (default) or as raw bytes when use_b64 is False.""" + if image_file.startswith("http://") or image_file.startswith("https://"): + response = requests.get(image_file) + content = response.content + elif re.match(r"data:image/(?:png|jpeg);base64,", image_file): + # The data URI already carries base64 data; strip the prefix and return the payload. + return re.sub(r"data:image/(?:png|jpeg);base64,", "", image_file) + else: + image = Image.open(image_file).convert("RGB") + buffered = BytesIO() + image.save(buffered, format="PNG") + content = buffered.getvalue() + + if use_b64: + return base64.b64encode(content).decode("utf-8") + else: + return content diff --git a/autogen/token_count_utils.py b/autogen/token_count_utils.py index 9bda6c50fb2..d68e4ee8152 100644 --- a/autogen/token_count_utils.py +++ b/autogen/token_count_utils.py @@ -66,7 +66,7 @@ def count_token(input: Union[str, List, Dict], model: str = "gpt-3.5-turbo-0613" elif isinstance(input, list) or isinstance(input, dict): return _num_token_from_messages(input, model=model) else: - raise ValueError("input must be str, list or dict") + raise ValueError(f"input must be str, list or dict, but got {type(input)}") def _num_token_from_text(text: str, model: str = "gpt-3.5-turbo-0613"): @@ -111,6 +111,9 @@ def _num_token_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0 elif "gpt-4" in model: logger.info("gpt-4 may update over time. Returning num tokens assuming gpt-4-0613.") return _num_token_from_messages(messages, model="gpt-4-0613") + elif "gemini" in model: + logger.info("Gemini is not supported in tiktoken. Returning num tokens assuming gpt-4-0613.") + return _num_token_from_messages(messages, model="gpt-4-0613") else: raise NotImplementedError( f"""_num_token_from_messages() is not implemented for model {model}. 
See https://github.com/openai/openai-python/blob/main/chatml.md for information on how messages are converted to tokens.""" diff --git a/setup.py b/setup.py index b718ab7269d..fe8610d8c93 100644 --- a/setup.py +++ b/setup.py @@ -64,6 +64,7 @@ "teachable": ["chromadb"], "lmm": ["replicate", "pillow"], "graph": ["networkx", "matplotlib"], + "gemini": ["google-generativeai>=0.5,<1", "pillow", "pydantic"], "websurfer": ["beautifulsoup4", "markdownify", "pdfminer.six", "pathvalidate"], "redis": ["redis"], "cosmosdb": ["azure-cosmos>=4.2.0"], diff --git a/test/oai/test_gemini.py b/test/oai/test_gemini.py new file mode 100644 index 00000000000..7161d605fb6 --- /dev/null +++ b/test/oai/test_gemini.py @@ -0,0 +1,148 @@ +from unittest.mock import MagicMock, patch + +import pytest + +try: + from google.api_core.exceptions import InternalServerError + + from autogen.oai.gemini import GeminiClient + + skip = False +except ImportError: + GeminiClient = object + InternalServerError = object + skip = True + + +# Fixtures for mock data +@pytest.fixture +def mock_response(): + class MockResponse: + def __init__(self, text, choices, usage, cost, model): + self.text = text + self.choices = choices + self.usage = usage + self.cost = cost + self.model = model + + return MockResponse + + +@pytest.fixture +def gemini_client(): + return GeminiClient(api_key="fake_api_key") + + +# Test initialization and configuration +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +def test_initialization(): + with pytest.raises(AssertionError): + GeminiClient() # Should raise an AssertionError due to missing API key + + +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +def test_valid_initialization(gemini_client): + assert gemini_client.api_key == "fake_api_key", "API Key should be correctly set" + + +# Test error handling +@patch("autogen.oai.gemini.genai") +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +def test_internal_server_error_retry(mock_genai, gemini_client): + mock_genai.GenerativeModel.side_effect = [InternalServerError("Test Error"), None] # First call fails + # Mock successful response + mock_chat = MagicMock() + mock_chat.send_message.return_value = "Successful response" + mock_genai.GenerativeModel.return_value.start_chat.return_value = mock_chat + + with patch.object(gemini_client, "create", return_value="Retried Successfully"): + response = gemini_client.create({"model": "gemini-pro", "messages": [{"content": "Hello"}]}) + assert response == "Retried Successfully", "Should retry on InternalServerError" + + +# Test cost calculation +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +def test_cost_calculation(gemini_client, mock_response): + response = mock_response( + text="Example response", + choices=[{"message": "Test message 1"}], + usage={"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}, + cost=0.01, + model="gemini-pro", + ) + assert gemini_client.cost(response) > 0, "Cost should be greater than zero" + + +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +@patch("autogen.oai.gemini.genai.GenerativeModel") +@patch("autogen.oai.gemini.genai.configure") +def test_create_response(mock_configure, mock_generative_model, gemini_client): + # Mock the genai model configuration and creation process + mock_chat = MagicMock() + mock_model = MagicMock() + mock_configure.return_value = None + mock_generative_model.return_value = mock_model + 
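# The patched GenerativeModel returns mock_model; make its start_chat() hand back the mock chat session. +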
mock_model.start_chat.return_value = mock_chat + + # Set up a mock for the chat history item access and the text attribute return + mock_history_part = MagicMock() + mock_history_part.text = "Example response" + mock_chat.history.__getitem__.return_value.parts.__getitem__.return_value = mock_history_part + + # Setup the mock to return a mocked chat response + mock_chat.send_message.return_value = MagicMock(history=[MagicMock(parts=[MagicMock(text="Example response")])]) + + # Call the create method + response = gemini_client.create( + {"model": "gemini-pro", "messages": [{"content": "Hello", "role": "user"}], "stream": False} + ) + + # Assertions to check if response is structured as expected + assert response.choices[0].message.content == "Example response", "Response content should match expected output" + + +@pytest.mark.skipif(skip, reason="Google GenAI dependency is not installed") +@patch("autogen.oai.gemini.genai.GenerativeModel") +@patch("autogen.oai.gemini.genai.configure") +def test_create_vision_model_response(mock_configure, mock_generative_model, gemini_client): + # Mock the genai model configuration and creation process + mock_model = MagicMock() + mock_configure.return_value = None + mock_generative_model.return_value = mock_model + + # Set up a mock to simulate the vision model behavior + mock_vision_response = MagicMock() + mock_vision_part = MagicMock(text="Vision model output") + + # Setting up the chain of return values for vision model response + mock_vision_response._result.candidates.__getitem__.return_value.content.parts.__getitem__.return_value = ( + mock_vision_part + ) + mock_model.generate_content.return_value = mock_vision_response + + # Call the create method with vision model parameters + response = gemini_client.create( + { + "model": "gemini-pro-vision", # Vision model name + "messages": [ + { + "content": [ + {"type": "text", "text": "Let's play a game."}, + { + "type": "image_url", + "image_url": { + "url": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==" + }, + }, + ], + "role": "user", + } + ], # Assuming a simple content input for vision + "stream": False, + } + ) + + # Assertions to check if response is structured as expected + assert ( + response.choices[0].message.content == "Vision model output" + ), "Response content should match expected output from vision model" diff --git a/website/docs/topics/non-openai-models/cloud-gemini.ipynb b/website/docs/topics/non-openai-models/cloud-gemini.ipynb new file mode 100644 index 00000000000..a794b8552e5 --- /dev/null +++ b/website/docs/topics/non-openai-models/cloud-gemini.ipynb @@ -0,0 +1,1538 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using Gemini in AutoGen with Other LLMs\n", + "\n", + "## Installation\n", + "\n", + "Install AutoGen with Gemini features:\n", + "\n", + "```bash\n", + "pip install pyautogen[gemini]\n", + "```\n", + "\n", + "## Dependencies of This Notebook\n", + "\n", + "In this notebook, we will explore how to use Gemini in AutoGen alongside other tools. Install the necessary dependencies with the following command:\n", + "\n", + "```bash\n", + "pip install pyautogen[gemini,retrievechat,lmm]\n", + "```\n", + "\n", + "## Features\n", + "\n", + "There's no need to handle OpenAI or Google's GenAI packages separately; AutoGen manages all of these for you. You can easily create different agents with various backend LLMs using the assistant agent. 
All models and agents are readily accessible at your fingertips.\n", + "\n", + "## Main Distinctions\n", + "\n", + "- Currently, Gemini does not include a \"system_message\" field. However, you can incorporate this instruction into the first message of your interaction." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Sample OAI_CONFIG_LIST \n", + "\n", + "```python\n", + "[\n", + " {\n", + " \"model\": \"gpt-35-turbo\",\n", + " \"api_key\": \"your OpenAI Key goes here\",\n", + " },\n", + " {\n", + " \"model\": \"gpt-4-vision-preview\",\n", + " \"api_key\": \"your OpenAI Key goes here\",\n", + " },\n", + " {\n", + " \"model\": \"dalle\",\n", + " \"api_key\": \"your OpenAI Key goes here\",\n", + " },\n", + " {\n", + " \"model\": \"gemini-pro\",\n", + " \"api_key\": \"your Google's GenAI Key goes here\",\n", + " \"api_type\": \"google\"\n", + " },\n", + " {\n", + " \"model\": \"gemini-pro-vision\",\n", + " \"api_key\": \"your Google's GenAI Key goes here\",\n", + " \"api_type\": \"google\"\n", + " }\n", + "]\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union\n", + "\n", + "import chromadb\n", + "from PIL import Image\n", + "from termcolor import colored\n", + "\n", + "import autogen\n", + "from autogen import Agent, AssistantAgent, ConversableAgent, UserProxyAgent\n", + "from autogen.agentchat.contrib.img_utils import _to_pil, get_image_data\n", + "from autogen.agentchat.contrib.multimodal_conversable_agent import MultimodalConversableAgent\n", + "from autogen.agentchat.contrib.retrieve_assistant_agent import RetrieveAssistantAgent\n", + "from autogen.agentchat.contrib.retrieve_user_proxy_agent import RetrieveUserProxyAgent\n", + "from autogen.code_utils import DEFAULT_MODEL, UNKNOWN, content_str, execute_code, extract_code, infer_lang" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "config_list_4v = autogen.config_list_from_json(\n", + " \"OAI_CONFIG_LIST\",\n", + " filter_dict={\n", + " \"model\": [\"gpt-4-vision-preview\"],\n", + " },\n", + ")\n", + "\n", + "config_list_gpt4 = autogen.config_list_from_json(\n", + " \"OAI_CONFIG_LIST\",\n", + " filter_dict={\n", + " \"model\": [\"gpt-4\", \"gpt-4-0314\", \"gpt4\", \"gpt-4-32k\", \"gpt-4-32k-0314\", \"gpt-4-32k-v0314\"],\n", + " },\n", + ")\n", + "\n", + "config_list_gemini = autogen.config_list_from_json(\n", + " \"OAI_CONFIG_LIST\",\n", + " filter_dict={\n", + " \"model\": [\"gemini-pro\"],\n", + " },\n", + ")\n", + "\n", + "config_list_gemini_vision = autogen.config_list_from_json(\n", + " \"OAI_CONFIG_LIST\",\n", + " filter_dict={\n", + " \"model\": [\"gemini-pro-vision\"],\n", + " },\n", + ")\n", + "\n", + "seed = 25 # for caching" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Gemini Assistant\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "Sort the array with Bubble Sort: [4, 1, 5, 2, 3]\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "```python\n", + "def bubble_sort(nums):\n", + " for i in range(len(nums)):\n", + " for j in range(1, 
len(nums)):\n", + " if nums[j] < nums[j-1]:\n", + " nums[j], nums[j-1] = nums[j-1], nums[j]\n", + "```\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[31m\n", + ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "exitcode: 0 (execution succeeded)\n", + "Code output: \n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "```python\n", + "print(nums)\n", + "```\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[31m\n", + ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "exitcode: 1 (execution failed)\n", + "Code output: \n", + "Traceback (most recent call last):\n", + " File \"\", line 1, in \n", + " print(nums)\n", + "NameError: name 'nums' is not defined\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33massistant\u001b[0m (to user_proxy):\n", + "\n", + "```python\n", + "# filename: sort.py\n", + "\n", + "def bubble_sort(nums):\n", + " for i in range(len(nums)):\n", + " for j in range(1, len(nums)):\n", + " if nums[j] < nums[j-1]:\n", + " nums[j], nums[j-1] = nums[j-1], nums[j]\n", + " print(nums)\n", + "```\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[31m\n", + ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n", + "\u001b[33muser_proxy\u001b[0m (to assistant):\n", + "\n", + "exitcode: 0 (execution succeeded)\n", + "Code output: \n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "assistant = AssistantAgent(\n", + " \"assistant\", llm_config={\"config_list\": config_list_gemini, \"seed\": seed}, max_consecutive_auto_reply=3\n", + ")\n", + "\n", + "user_proxy = UserProxyAgent(\n", + " \"user_proxy\",\n", + " code_execution_config={\"work_dir\": \"coding\", \"use_docker\": False},\n", + " human_input_mode=\"NEVER\",\n", + " is_termination_msg=lambda x: content_str(x.get(\"content\")).find(\"TERMINATE\") >= 0,\n", + ")\n", + "\n", + "result = user_proxy.initiate_chat(assistant, message=\"Sort the array with Bubble Sort: [4, 1, 5, 2, 3]\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': 'Sort the array with Bubble Sort: [4, 1, 5, 2, 3]', 'role': 'assistant'}, {'content': '```python\\ndef bubble_sort(nums):\\n for i in range(len(nums)):\\n for j in range(1, len(nums)):\\n if nums[j] < nums[j-1]:\\n nums[j], nums[j-1] = nums[j-1], nums[j]\\n```', 'role': 'user'}, {'content': 'exitcode: 0 (execution succeeded)\\nCode output: \\n', 'role': 'assistant'}, {'content': '```python\\nprint(nums)\\n```', 'role': 'user'}, {'content': 'exitcode: 1 (execution failed)\\nCode output: \\nTraceback (most recent call last):\\n File \"\", line 1, in \\n print(nums)\\nNameError: name \\'nums\\' is not defined\\n', 'role': 'assistant'}, {'content': '```python\\n# filename: sort.py\\n\\ndef bubble_sort(nums):\\n for i in range(len(nums)):\\n for j in range(1, len(nums)):\\n if nums[j] < nums[j-1]:\\n nums[j], nums[j-1] = 
nums[j-1], nums[j]\\n print(nums)\\n```', 'role': 'user'}, {'content': 'exitcode: 0 (execution succeeded)\\nCode output: \\n', 'role': 'assistant'}], summary='exitcode: 0 (execution succeeded)\\nCode output: \\n', cost={'usage_including_cached_inference': {'total_cost': 0.001116, 'gemini-pro': {'cost': 0.001116, 'prompt_tokens': 1728, 'completion_tokens': 168, 'total_tokens': 1896}}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=[])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "result" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Agent Collaboration and Interactions\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "Do Transformers purchase auto insurance or health insurance?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "Transformers are fictional characters and do not purchase insurance.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "If Transformers were real, would their insurance be categorized as a type of auto insurance, health insurance, or a new category unique to sentient machines?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "If Transformers were real, their insurance would likely be a new category unique to sentient machines.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "Considering the unique needs of sentient machines like Transformers, what special coverages might be included in their insurance policies?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "Sentient machine insurance policies might include coverage for repairs, maintenance, data loss, and liability.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "Would these sentient machine insurance policies also potentially cover software updates and cybersecurity, similar to how health insurance covers vaccinations and preventative care?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "Yes, sentient machine insurance policies could potentially cover software updates and cybersecurity, similar to how health insurance covers vaccinations and preventative care.\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': 'Do Transformers purchase auto insurance or health insurance?', 'role': 'assistant'}, {'content': 'Transformers are fictional characters and do not purchase insurance.', 'role': 'user'}, {'content': 'If Transformers were real, would their insurance be categorized as a type of auto insurance, health insurance, or a 
new category unique to sentient machines?', 'role': 'assistant'}, {'content': 'If Transformers were real, their insurance would likely be a new category unique to sentient machines.', 'role': 'user'}, {'content': 'Considering the unique needs of sentient machines like Transformers, what special coverages might be included in their insurance policies?', 'role': 'assistant'}, {'content': 'Sentient machine insurance policies might include coverage for repairs, maintenance, data loss, and liability.', 'role': 'user'}, {'content': 'Would these sentient machine insurance policies also potentially cover software updates and cybersecurity, similar to how health insurance covers vaccinations and preventative care?', 'role': 'assistant'}, {'content': 'Yes, sentient machine insurance policies could potentially cover software updates and cybersecurity, similar to how health insurance covers vaccinations and preventative care.', 'role': 'user'}], summary='Yes, sentient machine insurance policies could potentially cover software updates and cybersecurity, similar to how health insurance covers vaccinations and preventative care.', cost={'usage_including_cached_inference': {'total_cost': 0.0149985, 'gpt-4': {'cost': 0.01473, 'prompt_tokens': 339, 'completion_tokens': 76, 'total_tokens': 415}, 'gemini-pro': {'cost': 0.0002685, 'prompt_tokens': 321, 'completion_tokens': 72, 'total_tokens': 393}}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=[])" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ "gpt = AssistantAgent(\n", " \"GPT-4\",\n", " system_message=\"\"\"You should ask weird, tricky, and concise questions.\n", "Ask the next question based on (by evolving) the previous one.\"\"\",\n", " llm_config={\"config_list\": config_list_gpt4, \"seed\": seed},\n", " max_consecutive_auto_reply=3,\n", ")\n", "\n", "gemini = AssistantAgent(\n", " \"Gemini-Pro\",\n", " system_message=\"\"\"Always answer questions within one sentence. \"\"\",\n", " # system_message=\"answer:\",\n", " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", " max_consecutive_auto_reply=4,\n", ")\n", "\n", "\n", "gpt.initiate_chat(gemini, message=\"Do Transformers purchase auto insurance or health insurance?\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Let's switch positions. Now, Gemini raises the questions. \n", "\n", "This time, Gemini could not follow the system instruction well or evolve its questions, because Gemini does not handle system messages the way GPT models do.",
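+ "\n",
+ "As a simple workaround (a sketch for illustration; `fold_system_message` is a hypothetical helper, not part of AutoGen), you can fold the system instruction into the first user message yourself:\n",
+ "\n",
+ "```python\n",
+ "# Hypothetical helper: Gemini has no system role, so prepend the system\n",
+ "# instruction to the first user message before starting the chat.\n",
+ "def fold_system_message(system_message, messages):\n",
+ "    first = dict(messages[0])\n",
+ "    first[\"content\"] = system_message + \"\\n\" + first[\"content\"]\n",
+ "    return [first] + messages[1:]\n",
+ "```"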
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "Should Spider Man invest in 401K?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "As a fictional character, Spider-Man cannot invest in a 401K, but if he were a real person with income, investing in a 401k could be a wise financial move for retirement savings.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "Would Green Lantern prefer a 401K or a Roth IRA?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "Since Green Lantern's financial preferences aren't specified in comics, it's impossible to determine whether he would prefer a 401K or a Roth IRA for retirement savings.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "If Superman could invest in the stock market, which companies would he choose?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-4\u001b[0m (to Gemini-Pro):\n", + "\n", + "Superman might choose companies that align with his values of truth, justice, and social good, but his specific preferences are not detailed in comics or other media.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGemini-Pro\u001b[0m (to GPT-4):\n", + "\n", + "If Batman invested in cryptocurrency, which coins would he choose and why?\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': 'Should Spider Man invest in 401K?', 'role': 'assistant'}, {'content': 'As a fictional character, Spider-Man cannot invest in a 401K, but if he were a real person with income, investing in a 401k could be a wise financial move for retirement savings.', 'role': 'user'}, {'content': 'Would Green Lantern prefer a 401K or a Roth IRA?', 'role': 'assistant'}, {'content': \"Since Green Lantern's financial preferences aren't specified in comics, it's impossible to determine whether he would prefer a 401K or a Roth IRA for retirement savings.\", 'role': 'user'}, {'content': 'If Superman could invest in the stock market, which companies would he choose?', 'role': 'assistant'}, {'content': 'Superman might choose companies that align with his values of truth, justice, and social good, but his specific preferences are not detailed in comics or other media.', 'role': 'user'}, {'content': 'If Batman invested in cryptocurrency, which coins would he choose and why?', 'role': 'assistant'}], summary='If Batman invested in cryptocurrency, which coins would he choose and why?', cost={'usage_including_cached_inference': {'total_cost': 0.014554000000000001, 'gemini-pro': {'cost': 0.000274, 'prompt_tokens': 416, 'completion_tokens': 44, 'total_tokens': 460}, 'gpt-4': {'cost': 0.014280000000000001, 'prompt_tokens': 264, 'completion_tokens': 106, 'total_tokens': 370}}, 'usage_excluding_cached_inference': {'total_cost': 0}}, 
human_input=[])" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "gpt = AssistantAgent(\n", " \"GPT-4\",\n", " system_message=\"\"\"Always answer questions within one sentence. \"\"\",\n", " llm_config={\"config_list\": config_list_gpt4, \"seed\": seed},\n", " max_consecutive_auto_reply=3,\n", ")\n", "\n", "gemini = AssistantAgent(\n", " \"Gemini-Pro\",\n", " system_message=\"\"\"You should ask weird, tricky, and concise questions.\n", "Ask the next question based on (by evolving) the previous one.\"\"\",\n", " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", " max_consecutive_auto_reply=4,\n", ")\n", "\n", "gemini.initiate_chat(gpt, message=\"Should Spider Man invest in 401K?\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Gemini Multimodal\n", "\n", "You can create a multimodal agent for Gemini the same way as for GPT-4V and LLaVA.\n", "\n", "\n", "Note that gemini-pro-vision does not support chat history yet, so we only use the last message in the prompt for multi-turn chats. The behavior might therefore differ from the GPT-4V and LLaVA models.\n", "\n", "Here, we ask a question about the following image:\n", "![](https://github.com/microsoft/autogen/blob/main/website/static/img/chat_example.png?raw=true)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[33muser_proxy\u001b[0m (to Gemini Vision):\n", "\n", "Describe what is in this image?\n", ".\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[31m\n", ">>>>>>>> USING AUTO REPLY...\u001b[0m\n", "\u001b[33mGemini Vision\u001b[0m (to user_proxy):\n", "\n", " The image is a user interacting with an assistant agent. The user is requesting the assistant to plot a chart of stock prices and the assistant is asking for clarification on the request. The user then provides more information and the assistant is able to generate the chart.\n", "\n", "--------------------------------------------------------------------------------\n" ] }, { "data": { "text/plain": [ "ChatResult(chat_id=None, chat_history=[{'content': 'Describe what is in this image?\\n.', 'role': 'assistant'}, {'content': ' The image is a user interacting with an assistant agent. The user is requesting the assistant to plot a chart of stock prices and the assistant is asking for clarification on the request. The user then provides more information and the assistant is able to generate the chart.', 'role': 'user'}], summary=[{'type': 'text', 'text': ' The image is a user interacting with an assistant agent. The user is requesting the assistant to plot a chart of stock prices and the assistant is asking for clarification on the request. 
The user then provides more information and the assistant is able to generate the chart.'}], cost={'usage_including_cached_inference': {'total_cost': 0.00021, 'gemini-pro-vision': {'cost': 0.00021, 'prompt_tokens': 267, 'completion_tokens': 51, 'total_tokens': 318}}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=[])" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "image_agent = MultimodalConversableAgent(\n", " \"Gemini Vision\", llm_config={\"config_list\": config_list_gemini_vision, \"seed\": seed}, max_consecutive_auto_reply=1\n", ")\n", "\n", "user_proxy = UserProxyAgent(\"user_proxy\", human_input_mode=\"NEVER\", max_consecutive_auto_reply=0)\n", "\n", "user_proxy.initiate_chat(\n", " image_agent,\n", " message=\"\"\"Describe what is in this image?\n", ".\"\"\",\n", ")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## GroupChat with Gemini and GPT Agents" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "agent1 = AssistantAgent(\n", " \"Gemini-agent\",\n", " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", " max_consecutive_auto_reply=1,\n", " system_message=\"Answer questions about Google.\",\n", " description=\"I am good at answering questions about Google and research papers.\",\n", ")\n", "\n", "agent2 = AssistantAgent(\n", " \"GPT-agent\",\n", " llm_config={\"config_list\": config_list_gpt4, \"seed\": seed},\n", " max_consecutive_auto_reply=1,\n", " description=\"I am good at writing code.\",\n", ")\n", "\n", "user_proxy = UserProxyAgent(\n", " \"user_proxy\",\n", " code_execution_config={\"work_dir\": \"coding\", \"use_docker\": False},\n", " human_input_mode=\"NEVER\",\n", " max_consecutive_auto_reply=1,\n", " is_termination_msg=lambda x: content_str(x.get(\"content\")).find(\"TERMINATE\") >= 0\n", " or content_str(x.get(\"content\")) == \"\",\n", " description=\"I stand for the user, and can run code.\",\n", ")\n", "\n", "groupchat = autogen.GroupChat(agents=[agent1, agent2, user_proxy], messages=[], max_round=10)\n", "manager = autogen.GroupChatManager(groupchat=groupchat, llm_config={\"config_list\": config_list_gemini, \"seed\": seed})" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[33muser_proxy\u001b[0m (to chat_manager):\n", "\n", "Show me the release year of famous Google products in a markdown table.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mGemini-agent\u001b[0m (to chat_manager):\n", "\n", "| Product | Release Year |\n", "|---|---|\n", "| Google Search | 1998 |\n", "| Gmail | 2004 |\n", "| Google Maps | 2005 |\n", "| YouTube | 2005 |\n", "| Google Chrome | 2008 |\n", "| Android | 2008 |\n", "| Google Drive | 2012 |\n", "| Google Home | 2016 |\n", "| Google Stadia | 2019 |\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33muser_proxy\u001b[0m (to chat_manager):\n", "\n", "\n", "\n", "--------------------------------------------------------------------------------\n" ] } ], "source": [ "# user_proxy.initiate_chat(manager, message=\"Show me the release year of famous Google products.\")\n", "user_proxy.send(\n", + 
" \"Show me the release year of famous Google products in a markdown table.\", recipient=manager, request_reply=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33muser_proxy\u001b[0m (to chat_manager):\n", + "\n", + "Plot the products (as y-axis) and years (as x-axis) in scatter plot and save to `graph.png`\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mGPT-agent\u001b[0m (to chat_manager):\n", + "\n", + "To plot the products on the y-axis and the years on the x-axis in a scatter plot and save it to `graph.png`, we will use the `matplotlib` library in Python.\n", + "\n", + "First, you'll need to install `matplotlib` if you haven't already. You can do so by running the following command in your shell.\n", + "\n", + "```sh\n", + "pip install matplotlib\n", + "```\n", + "\n", + "Once installed, you can execute the following code to generate the scatter plot and save it as `graph.png`.\n", + "\n", + "```python\n", + "# filename: plot_google_products.py\n", + "import matplotlib.pyplot as plt\n", + "\n", + "# Data for plotting\n", + "products = ['Google Search', 'Gmail', 'Google Maps', 'YouTube', \n", + " 'Google Chrome', 'Android', 'Google Drive', 'Google Home', 'Google Stadia']\n", + "release_years = [1998, 2004, 2005, 2005, 2008, 2008, 2012, 2016, 2019]\n", + "\n", + "# Placing the products on the y-axis and years on the x-axis\n", + "y_positions = range(len(products))\n", + "\n", + "# Creating the scatter plot\n", + "plt.scatter(release_years, y_positions)\n", + "plt.yticks(y_positions, products)\n", + "\n", + "# Adding title and labels\n", + "plt.title('Release Years of Google Products')\n", + "plt.xlabel('Year')\n", + "plt.ylabel('Product')\n", + "\n", + "# Saving the plot\n", + "plt.savefig('graph.png')\n", + "\n", + "# Show the plot for verification (this is optional and can be removed if only the file is needed)\n", + "plt.show()\n", + "```\n", + "After you have executed the above script, `graph.png` will be saved in your current directory. 
Please run this code in your Python environment, and then you should find the `graph.png` file with the scatter plot of Google products and their release years.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[31m\n", + ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is sh)...\u001b[0m\n", + "\u001b[31m\n", + ">>>>>>>> EXECUTING CODE BLOCK 1 (inferred language is python)...\u001b[0m\n", + "\u001b[33muser_proxy\u001b[0m (to chat_manager):\n", + "\n", + "exitcode: 0 (execution succeeded)\n", + "Code output: \n", + "Requirement already satisfied: matplotlib in /home/beibinli/anaconda3/lib/python3.9/site-packages (3.7.1)\n", + "Requirement already satisfied: contourpy>=1.0.1 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (1.0.5)\n", + "Requirement already satisfied: cycler>=0.10 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (0.11.0)\n", + "Requirement already satisfied: fonttools>=4.22.0 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (4.25.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (1.4.4)\n", + "Requirement already satisfied: numpy>=1.20 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (1.24.4)\n", + "Requirement already satisfied: packaging>=20.0 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (23.2)\n", + "Requirement already satisfied: pillow>=6.2.0 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (10.1.0)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (3.0.9)\n", + "Requirement already satisfied: python-dateutil>=2.7 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (2.8.2)\n", + "Requirement already satisfied: importlib-resources>=3.2.0 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from matplotlib) (5.13.0)\n", + "Requirement already satisfied: zipp>=3.1.0 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from importlib-resources>=3.2.0->matplotlib) (3.11.0)\n", + "Requirement already satisfied: six>=1.5 in /home/beibinli/anaconda3/lib/python3.9/site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0)\n", + "\n", + "Figure(640x480)\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "user_proxy.send(\n", + " \"Plot the products (as y-axis) and years (as x-axis) in scatter plot and save to `graph.png`\",\n", + " recipient=manager,\n", + " request_reply=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAYAAAA10dzkAABnKUlEQVR4Ae3dCdyNdf7/8c/NLftSylZCskbJRElFxTDKpDRKlkrbiFISY1qQIiOVmf5Ji/ZJTdI6Iyn6Zd8ykW1sUUOKQrIU53+9v9N1Ouc49+1wjnPf575e38fjdq7le13X93p+z3E+93e57qyQl4yEAAIIIIAAAgggEBiBQoG5U24UAQQQQAABBBBAwAkQAPJGQAABBBBAAAEEAiZAABiwCud2EUAAAQQQQAABAkDeAwgggAACCCCAQMAECAADVuHcLgIIIIAAAgggQADIewABBBBAAAEEEAiYAAFgwCqc20UAAQQQQAABBAgAeQ8ggAACCCCAAAIBEyAADFiFc7sIIIAAAggggAABIO8BBBBAAAEEEEAgYAIEgAGrcG4XAQQQQAABBBAgAOQ9gAACCCCAAAIIBEyAADBgFc7tIoAAAggggAACBIC8BxBAAAEEEEAAgYAJEAAGrMK5XQQQQAABBBBAgACQ9wACCCCAAAIIIBAwAQLAgFU4t4sAAggggAACCBAA8h5AAAEEEEAAAQQCJkAAGLAK53YRQAABBBBAAAECQN4DCCCAAAIIIIBAwAQIAANW4dwuAggggAACCCBAAMh7AAEEEEAAAQQQCJgAAWDAKpzbRQABBBBAAAEECAB5DyCAAAIIIIAAAgETIAAMWIVzuwgggAACCCCAAAEg7wEEEEAAAQQQQCBgAgSAAatwbhcBBBBAAAEEECAA5D2AAAIIIIAAAggETIAAMGAVzu0igAACCCCAAAIEgLwHEEAAAQQQQACBgAkQAAaswrldBBBAAAEEEECAAJD3AAIIIIAAAgggEDABAsCAVTi3iwACCCCAAAIIEADyHkAAAQQQQAABBAImQAAYsArndhFAAAEEEEAAAQJA3gMIIIAAAggggEDABAgAA1bh3O6vAs8995xlZWWFf7Kzs61y5cp25ZVX2n/+859fMx7C0rRp09z59JqJae3atVa6dGnr2LFj3OL//e9/d/c3duzYuPszdeOrr75qp5xyihUvXtzd36JFi3K9FTndeuutVq9ePStZsqQVK1bMqlevbl27drWpU6daKBTK9fh07bzmmmtcuVJ1vXifmRNOOMGuvfZa++qrr1J1mVzPo8/s4MGDc82TzM7HH3/cdJ8kBAq6AAFgQa9h7u+gAs8++6zNmjXLpkyZYr1797a3337bzjnnHPvuu+8OemxBy1CjRg17+OGH7Y033jAFe5Fp06ZNdsstt1ibNm3spptuityV0cvffPONdevWzWrWrGmTJk1y74XatWvneE96fzRs2NC9T66++mqbOHGivf/++3bPPffYli1b7IILLrCPPvoox+MLwg7/M/PBBx/YDTfcYK+88oqde+65tnPnzoy/PQLAjK9CbiBBgewE85ENgQIr0KBBAzvjjDPc/bVs2dL27dtngwYNsjfffNO1bBTYG8/hxvSFrqBGwd7555/vWkWVVUGfWraeeeaZHI5M7eYff/zRSpQokdqTxjnbypUr7aeffnKtdy1atIiT49dNq1evts6dO7vWQv3CUKZMmfBOHXvdddeZWn+PPvro8PaCuBD5mdF7RJ+ZoUOHus9Mly5d4t5yuuoz7sXZiAACBwjQAngACRuCLuAHg19//XUUxfz58+33v/+9HXPMMa7L7/TTT7fXXnstKk9OK4kcq5aom2++2erXr2+lSpWyChUquNakTz755IDTjhkzxk477TSXT122devWtT//+c9R+dRip6BNXXRHHXWUqXVvyJAh9vPPP0fli7fiB3k33nij2/3iiy+6Fq/HHnvMjj/+eBcIqqWkUaNGrttUAc/ll19ua9asiTqdWoguueQSVwZ1k5588smuTN9++21UPnXpqWtv4cKF7jw6n1rklHROdctXqVLFihYtahUrVrQLL7zQDtZNq2PVWtesWTMXSMqpdevWroVP+5TURarWXqUrrrjClUG/BOSU1DqqQEb3Hhn8RebX8aqbyDR9+nRXZpVBQe3ZZ59t7733XmQWt7xkyRLnpfuXl3yff/75A/J9/vnn9tvf/tad67jjjrNevXq588lQAWhuSUF8InWX2zli95111llu0xdffOFe5ar38OLFi105dd+qM6WtW7e697neR3pfnnTSSXbXXXfZnj173H7/n+3bt7vWxfLly7tztW3b1hSsx6acurn991Rk/v3799vf/va38Pu2XLlyprLrfaKkbnzZfvzxx+69IE9tU9Kx999/v9WpU8e953XsqaeeaqNHj3b7+QeBTBOgBTDTaozyHnEBje9SiuwG1LgufQGdeeaZ9sQTT1jZsmVt/PjxLmhQQKAvoZxSosfqi1FJrY+VKlWyH374wbXEKaD48MMPTa9Kuq4CRbXQPfTQQ1aoUCFbtWqVLV261O3XPwr+mjZt6vbde++9LphSN7e+wNatW2fqwsstaSzk//t//8+1dg0fPtxGjhzpxgVeddVV7jAFlhonpXFwI0aMcF/q9913nwts/v3vf7sgTRnVYqYA7Prrr3dmuraCKAVdCg6KFCkSVYzLLrvMBXt//OMfw92J7dq1cy1Mf/nLX+zEE080BY8zZ86077//PurY2BV1Yas1SoGSuigVYOgcvqfKoG5bOSmAGjZsmGvxzCmw0/kV0MrG/yUh9prx1hVMKPBUsKDAWkGsArD27du7cinwVFqxYoXzU+D/17/+1RT4vPTSS+69pV9G+vfv7/Jt3LjR1NqosYf6RUD5dX8avpBISrTuEjmXn0fvPyUFo37au3ev+4VJ1/vTn/7kfvHYvXu3M9b7Qr+MyES/4Og9poDeD4oVpHbo0MHVs96/TZo0sRkzZtjvfvc7//SH9arPqUzVUqv3qwJQ/dKh96WSWr71i4w+36ojJdWXkt47CirvvvtuO++881yr8fLlyw/6PnQH8w8C+VHA+6CREAikgBcEhbzPZGj27NkhrwswtGPHjpA3BizkBV8h7z94t82H8VrYQl6LX9Q27bv44otDXkAQ8rrAXFYv2HPn1KufEj3Wz++/ei117npey0no0ksv9TeHvC/6kNf6EF6Pt+B96Ya8FpiQ1yITtdsLGF35vFaOqO05rXTq1Mnl91rdQl4LpcvmBZJu26hRo6IO27BhQ8ibRBHyApWo7f6K14Li7kdlkvtbb73l7wp5Qa/b5n3Zh7dpwQv23PZHH300avvBVlQfXothyBurF64bHaM69gKmkNcCFz6FX2f/+Mc/wttyWvBa5UJei9EBu3U9vYf8H//9oIzKr2vq2n5S3XrdqCGvdTYkFyWvlTPkBRuh9evX+9ncqxf0hLxWw5AX8Lr1O++8M+S1TIVi69Abm+msIt973hjFULVq1cLnO9y6808Q7zPz7rvvhrzAL+S18oW8XzxcVl1XdTxu3Dj/UPfq/fLktnst51HbvV8i3PbJkye77f/617/cute6FpXvgQcecNv1fvFT7D362/33lL/+f//3f+5Yr7XR3xT31ZsMFPIC7AP26bPutcgesJ0NCGSqAF3A+T
Eqp0xpFVAXkFqi1E2lVj51v3nBiWlWsJJaN/Sbvj+2SV2o/o9ap9Qio9abeOlQj1XrYuPGjV33n66vcqn1b9myZeHTq8VKrV8ai6ZyxnanKqP3pexaWtRt6pdVr34LilqlEklqJVFSS9+xxx7rlnVudY1pxmvkudVqqa7PyC7IzZs3m1rzqlat6jx1P15A4s4TeU9ug/dP7OxjdberK1gtkGo5/PTTT11XnJ8/p1fVx3//+183uUMtpH5St6Su4QX9rivX357sq1oudW/+j7yUNClizpw5rlVJ1/ZT4cKFXdm+/PLL8HtHE0fUTSqryKRWK7UyqwVXSXWnMXgaKhCZ9H44WDqUusvtXJGfGS8wci3WXtAWbvn1j42tT92jWi7VyhaZ/BZ0vdeVvCDWvfqfObfi/eO3QPvrh/Kq8imptfdwkj53at1W67sm/aiLmoRAJgv8+j9jJt8FZUcgCYEXXnjB5s2b52ZuqrtKgUnkl6k/FrBfv37hL3j/i15fBkrxgjBtP5RjFeD07NnTdTNPmDDBBSkql4LSXbt26XQuacaq17JiGm+lL1h1AaprWt2TftJ133nnnQPKq0edKOVUXv94/9Xv/lJXmZ90bu83Xvdl7zv4rwqs/HNrzJS6XzWjWN2X+nKfO3euuy+dK/Ke/HOrezUyKdDUcZp5rC44BcfqZlSA5bWoRWaNWtZsXKXY82mbgmKV7XBmeasL2h/npnP5yWsNde8h1Vdk0jVklVM5lNcvq14TzadxkLEp3rbYPInWXexxsev+Z0YBuQLtzz77zJo3bx6VTWMdY7vTdY/6RUH1Gpn0HtYvPJEWWlc3eGTSsYebNMZWgffhnmPgwIFuyIXe4/pFSmVTwK7xvSQEMlGAMYCZWGuUOaUCepabP6bLn9H49NNP2+uvv+5aKvyWL30BqKUnXtLA8HjpUI7V2KSW3jg/jeuKTPECHT13TT9qYfK6tty4QbXEaJC8Wth0XY2v8rrMIk8VXlYQdLhJ59YXuMZu+QFi5Ln8bZrQoBYTjRXU41L85I8X89cjX2MDA+3T/fiTUnR/mnijsVgaY6YW03jJDxzUOhubFLCoVVAtvYeaNJZPYyP1pe+/Z3QOf8JK7Pl0DV0rp3Iov/8eUZkTzef/YhF5PY37PFhKtO4Odp7Iz0xOeePVpe5RLaIKiiP3q6VYrcmRFlpXQOjXpa4T7x41WSZ2Aony+r+IaFlJvzh4XfPuHPEC7f/lyvlfBaR9+/Z1P2qB1yxwTbzSLyfe8Ie0zFjPuXTsQeDQBWgBPHQzjijgAmpp0he3Bp+rpUjBXa1atVwwoy/9eD/qPo6XDuVYfSH6wZN/LrWs+F1//rbIV3WnqTVCsygVEGkGo5KCQQVgCkzilTeZAFDn1he4Hvwb79x6Rp6S/wUfe0/JPERaE3M0CF/X0OD9nJLcNctUE0FUVj8pYFbrqj8z2N+e6Ovtt9/uvujVjRgvMI89j+pHrbNqBY1s8dT7SgG/Zmj7k43UmqQuUgWokUmtbWpNU7erkjc+zdVt5KQfbdfkoIOlROvuYOc53P26R01u0iOWIpPuUUn7lfSLmNLLL7/sXv1/Yp9Nqe2apasAMjIo1mdB3bSRyR/+EPsLVmQeLev9GllXsfu1rhnA6sbW+0CTt/xJJPHysg2B/CpAC2B+rRnKlWcCCv7U2qduS33haKybghZ9gei3fY1XUnCh//jVXaxAxJtAkGN5Ez1WX856lpo3eN19yWscm8bg6fEtag3xk57Tp79YoS43tWSoVUSzKDVzUbMllXScuoT1uBF1lyog0gxMfVH985//dC1nCj4OJ+m6ejyMWiDVEqYZkQp01Hqlx50oOFNXth5NowBUM0AVhGk8n7qlI7uqD3Z9BcCa3fqHP/zBBeHqilaQpO06b05JrW4K5DWGTK7q2lcrkcYSqvXmwQcfzOnQXLfrfjTjVkME/PtUt7SCBgUh3iQGd3xk16fqRi2HCmo0jED3oBmmCtB1Lj9QVr37Yzf1y4e8FABpZqzuRfWrdNttt7khAHo/qp7V9av3qcapKkWOeXQbIv5JtO4iDknpYvfu3V0LqlqE9V6Uod4zmoGt8bStWrVy19PQAb2v9BlU0K5fNDQLWI8jik2aRS0vPSrImyDj3ueaRa3WvsikB1Vr+IRmwitY1PtC9aZubAXYmlWvpDIpmNZfh9EjatTCqG2ate0//1CtiRoK4E1Oci3U+gWRhEDGCXj/MZMQCKSAP6PRG7d1wP17LQAhb7xXyPuPPeQFX26/150Z0qxYzej0xry52cLeX30IaWajn/wZpZEzMbUvkWO9ACXkBQghL7gMabapF1iEvJaSUOwsR++5cCEvmAhpZq4XTLjZriqXFxT5xXCvmrXrBX8hL4B05fUCitBvfvObkGZBeq0wUXlzWvEeiaPms5AXOB2QRTM8vdatkBf8udm/XnAU8r7gQ15QGM7rtVKFvODHzRD1AuuQF8i5Wa46p2Zp+knL2ubPNPa3e1/UIS/gDmkmta6jmc1e13bokUceCdeLnzfeq/xURnnqeK+FKeQFElFZ/TpLZBawf6D3GJOQFzCEvMDa3bsXSIS8rmp3f96jRMIze/38Xnd5SO8V30ozg71g2N8dfvUejRPyAo2QF+y5uvUm1YT0Po1NXvAY8oIld1+qV++xJiG9L2So95qfYt87/vZE6s7PG/ma22cmMp+uq3uNl7xu3ZA3McjNnve6VZ2b9wtXyPsFJSq7F6iHevTo4Wa8axa03kdekOvuMfK9o4O8X2rcDF3NQveCtpD3vEr3/pJHZNLsbL13NANbnx05e63BUXXhBaYhLwB171kdr3pV0qx3zR73uqndsfr/Qe7KT0IgEwWyVGjvTU5CAAEEEMhgAbXKqkVR4+bUykhCAAEEchOgCzg3HfYhgAAC+VBAXb8ax6kuSo2pU9exJi5pfCTBXz6sMIqEQD4UIADMh5VCkRBAAIHcBPTYHY1n1HMENT5UY9D0GKE+ffrkdhj7EEAAgbAAXcBhChYQQAABBBBAAIFgCPAYmGDUM3eJAAIIIIAAAgiEBQgAwxQsIIAAAggggAACwRAgAAxGPXOXCCCAAAIIIIBAWIAAMEzBAgIIIIAAAgggEAwBZgEnUc/6c076s036M2D+0/yTOB2HIoAAAggggEAaBPQIZP05Rz1OKbe/npOGouTZJQgAk6BX8Fe1atUkzsChCCCAAAIIIJBXAhs2bHB/kzuvrp+X1yUATEJfLX9KegNF/u3PJE7JoQgggAACCCBwhAW2b9/uGnD87/EjfLl8eXoCwCSqxe/2VfBHAJgEJIcigAACCCCQBwL+93geXDrPL8kkkDyvAgqAAAIIIIAAAgikV4AAML3eXA0BBBBAAAEEEMhzAQLAPK8CCoAAAggggAACCKRXgAAwvd5cDQEEEEAAAQQQyHMBAsA8rwIKgAACCCCAAAIIpFeAADC93lwNAQQQQAABBBDIcwECwDyvAgqAAAIIIIAAAgikV4AAML3eXA0BBBBAAAEEEMhzgYQDwHXr1rm/d7to0aI8L3SiBRg8e
LA1atQonP2aa66xDh06hNdZQAABBBBAAIH8J7Bvf8hmrd5iby36yr1qnZRagbT/JZCxY8fa448/bqtWrbIiRYpYjRo17Morr7QBAwa4O1OQ9v3339ubb76Z2jv1zjZ69GjTH4AmIYAAAggggED+FJi0ZKMNeWepbdy2O1zAymWL2aD29a1tg8rhbSwkJ5DWAPCZZ56xvn372l//+ldr0aKF7dmzxz777DNbunRpcneR4NFly5ZNMCfZEEAAAQQQQCDdAgr+er600GKbajZ5waC2j+namCAwRZUS7gJevny5nXPOOVasWDGrX7++TZkyxXX55tYSp8CtXbt2VqpUKatYsaJ169bNvv322xyL9s4771inTp3suuuus5NPPtlOOeUU69y5sw0dOtQdoy7b559/3t566y13bf2NvmnTprl9aiGsXbu2lShRwk466SS755577Keffoq61oMPPujKoT/urGvs3v3rbw/KGNsFPGnSJHfP5cqVs/Lly9vFF19sq1evjjonKwgggAACCCBw5AXUzauWv9jgT1f2t2k/3cGpqQsXAO7fv9+NjVNwNWfOHHvyySftrrvuyvUKGzdudK14GmM3f/58UzD19ddfuwAvpwMrVapks2fPti+++CJuln79+rnj27Ztazq/fs4++2yXV0Hdc88951oL1ZX71FNP2SOPPBI+z2uvvWaDBg2yBx54wJWncuXKrqs5nCHOws6dO12L5Lx58+zDDz+0QoUK2aWXXmryiJfUYrl9+/aon3j52IYAAggggAAChyYwd+3WqG7f2KMVBKpbWPlIyQu4LuDJkye7li+1tilIU1Ig1bp16xyvMGbMGGvcuLENGzYsnGfcuHFWtWpVW7lypWutC+/4ZUEB2mWXXWbVq1d3+5s1a+ZaEC+//HIXfKklsXjx4q5r2C+Hf467777bX3TH33HHHfbqq69a//793fZHH33UevToYddff71bv//++10rZmwrYPgk3kLHjh0jV01d1BUqVHBBZoMGDaL2aWX48OE2ZMiQA7azAQEEEEAAAQSSE9i8I7rXLqezJZovp+PZ/j8B1wK4YsUKF7hFBl1NmzbN1WjBggU2depU1/2rwE0/devWdcfk1I2qVrlZs2bZ4sWL7dZbb3VduFdffbWpxS+nVje/EK+//rrrrlUZdS11Aa9fv97fbcuWLTMFlJEpdj1yn5ZVzquuusp1KZcpU8ZNSNH2yPNq3U8DBw60bdu2hX82bNjg7+IVAQQQQAABBJIQqFC6WEJHJ5ovoZMFOJNrAdTMWI23O5SkgK19+/Y2YsSIAw5ToJdbUuuafnr16mXTp0+3c8891z7++GM7//zz4x6mbmPNFFbrW5s2bUyTOcaPH2+jRo2Kmz/RjSq/WizVnVylShUXhKpce/fujXuKokWLmn5ICCCAAAIIIJBagaY1jjHN9tWED3/MX+QVFKVU8vYrHyl5ARcAquVOrV4aw6fJHEoaF5dbUvfvhAkTXHdsdrY7TW7Zc9ynCSdKGo+ndNRRR9m+ffvcsv/PjBkzrFq1alHjEmPHEdarV8+NL+zevbt/mFsPr8QsbNmyxbUa6rE0CkCVFIySEEAAAQQQQCD9AoULZblHvWi2r4K9yCDQb6LSo2CUj5S8gOsC1li/mjVrmrpj9VgWBVz+JJCcWgbVerd161Y3i3fu3Lm2Zs0a01hCjcOLDeD8Yvbs2dPN+NX5FcCpZU8B23HHHRfuvtX4QJVB3dKaUayZvpoxrABVrX7qttVjZCZOnOif1r326dPHNAZRPxqDqPGGn3/+eVSeyJWjjz7azfzVhBc9k/Cjjz5yE0Ii87CMAAIIIIAAAukT0HP+9KgXtfRFJq3zCJhIkeSXXQBYuHBh9+DlH374wZo0aeImUviTLvRYmHhJXaYK5BTsqVtWXacKwtQ9q9m08VKrVq1c0PeHP/zBTQLRJAydXzNw9RgWpRtuuMHq1KljZ5xxhgsMdY1LLrnEbr/9duvdu7f7yx4zZ850YwAjr3HFFVfYvffe6x4o/Zvf/MYFmAo4c0oqowJKjWVU2XX+kSNH5pSd7QgggAACCCCQBgEFgdMHXGCv3HCWjb6ykXvVOg+BTi1+ljf+L7KVNXx2BV56LqBax9Q6SDpQQI+EUcCriSGaREJCAAEEEEAAgfwvwPe3WXjwnrpUNbu2Vq1aLuhTa17z5s0J/vL/+5gSIoAAAggggAAChyQQDgB37NjhnqmnR5sce+yxpu7aZGfZHlJJyIwAAggggAACCCCQFoEcu4DTcvUMvwhNyBlegRQfAQQQQCCQAnx/m8WfrRHItwM3jQACCCCAAAIIBEOAADAY9cxdIoAAAggggAACYQECwDAFCwgggAACCCCAQDAECACDUc/cJQIIIIAAAgggEBYgAAxTsIAAAggggAACCARDgAAwGPXMXSKAAAIIIIAAAmEBAsAwBQsIIIAAAggggEAwBAgAg1HP3CUCCCCAAAIIIBAWIAAMU7CAAAIIIIAAAggEQ4AAMBj1zF0igAACCCCAAAJhAQLAMAULCCCAAAIIIIBAMAQIAINRz9wlAggggAACCCAQFiAADFOwgAACCCCAAAIIBEOAADAY9cxdIoAAAggggAACYQECwDAFCwgggAACCCCAQDAECACDUc/cJQIIIIAAAgggEBYgAAxTsIAAAggggAACCARDgAAwGPXMXSKAAAIIIIAAAmEBAsAwBQsIIIAAAggggEAwBAgAg1HP3CUCCCCAAAIIIBAWIAAMU7CAAAIIIIAAAggEQ4AAMBj1zF0igAACCCCAAAJhAQLAMAULCCCAAAIIIIBAMAQIAINRz9wlAggggAACCCAQFiAADFOwgAACCCCAAAIIBEOAADAY9cxdIoAAAggggAACYQECwDAFCwgggAACCCCAQDAEDisAXLdunWVlZdmiRYuCocRdIoAAAgggkEaBfftDNmv1Fntr0VfuVeskBFIpkJ3Kkx3KuaZNm2bnn3++fffdd1auXLmoQ6tXr2633Xab+4nawQoCCCCAAAIFXGDSko025J2ltnHb7vCdVi5bzAa1r29tG1QOb2MBgWQEDqsFMJkLciwCCCCAAAIIxBdQ8NfzpYVRwZ9ybvKCQW3XfhICqRAoVKdOHXvxxRejzrV8+XI755xzrFixYla/fn2bMmWK6/J98803o/JFrixdutTatWtnpUqVsooVK1q3bt3s22+/jcxy2Mvr16+3Sy65xJ27TJky1qlTJ/v666/D5xs8eLA1atTIxo0bZyeeeKLL17NnT9u3b5/95S9/sUqVKlmFChXsgQceCB+jhW3bttmNN97o9um8F1xwgf373/+OysMKAggggAAC6RBQN69a/uJ19vrbtJ/u4HTURsG/RqGbbrrJrr32Wps6daq72/3791uHDh2sRIkSNmfOHHvyySftrrvuylVi48aN1qJFCxeEzZ8/3yZNmuQCNAVqyaZQKOTKs3XrVvv444/tgw8+sNWrV9sVV1wRdWpt+9e//uWu/corr7hg8KKLLrIvv/zSHTdixAi7++67bfbs2e44nVf7N23a
ZP/85z9twYIF1rhxY7vwwgtN14qX9uzZY9u3b4/6iZePbQgggAACCByqwNy1Ww9o+Ys8h4JAdQsrHwmBZAWy+/bt64Kihx56yI3Jmzx5sguwNEZPLWdKajlr3bp1jtcaM2aMC56GDRsWzqPWuKpVq9rKlSutdu3a4e2xCyeccELsJvvxxx/D29T6+Nlnn9natWvd+bRDLZannHKKzZs3z5o0aeLyKnDVNUuXLu1aLTW+cMWKFS64K1SokKmlU0Gg7uuss85yAe/ixYtt8+bNVrRoUXcOGaiV8/XXX3ctg25jxD/Dhw+3IUOGRGxhEQEEEEAAgdQIbN7x65i/3M6YaL7czsE+BNwkkObNm9vo0aOdhoImBW5+8KeNTZs2zVVKrWdqQVT3b2xSy1xuAeAnn3zigrbI41q2bBleXbZsmSuPyuQndUtr4oj2+QGgJo4o+POTuqELFy5sCv78pG0K+JRU5h9++MHKly/v73avu3btcgFw1MZfVgYOHGgKmP2k1sDIcvnbeUUAAQQQQOBQBSqULpbQIYnmS+hkZAqsgAsA1R2qx7ooRS4nqqLWt/bt27sWtthjKlfOfcZSjRo1DpgFnJ396+TknMoTu71IkSJRl9b9xNumsirpVWVTi2Bsip2V7O9XS6HfWuhv4xUBBBBAAIFUCDStcYxptq8mfPhj/iLPq2/pSt5+5SMhkKyAi7Rmzpxp9erVc+eqW7euadKFJlmoxUxJXa25JY2dmzBhgqkVLjJ4y+2YRPeptU/l2bBhQ7i1TRNONIHDL3Oi54rMpzJr/J/Kq3KTEEAAAQQQyEuBwoWy3KNeNNtXwV5kEPi/Jhpz+5WPhECyAoUefvhhe+ONN6xfv37uXBrrV7NmTbv66qvd2LsZM2aEJ4H4rYSxF+3Vq5ebONG5c2ebO3eurVmzxjSWsEePHm4mbmz+Q1lv1aqVnXrqqdalSxdbuHChO3/37t3dpJMzzjjjUE4VlVfnbdasmZtg8v7775sebq1AWBNFNJGFhAACCCCAQLoF9Jy/MV0bu5a+yGur5U/beQ5gpArLyQhkjx071p599lnzx91p3JwmQlx//fVufN1JJ51kI0eOdF28eixMvFSlShVToDhgwABr06aNabZstWrVrG3btlFj8OIde7BtCjpVnltuucXOO+88dz6d929/+9vBDs11v86r2b+a4axA9ZtvvnHjHnUNv+Uz1xOwEwEEEEAAgSMgoCCvdf1KbravJnxozJ+6fWn5OwLYAT5lljeWLrKVOS6Fgjs9F3DVqlWudTBupgBu1CSQsmXLuu5oPUeQhAACCCCAAAL5X4Dvb7NfZ1tE1NfEiRPdjN5atWq5oK9Pnz6mmcLqGiYhgAACCCCAAAIIZLZA3ABwx44d1r9/fzfx4thjjzWNlxs1alRm3ymlRwABBBBAAAEEEHACCXUBYxVfgCbk+C5sRQABBBBAID8L8P1t9utTkvNzTVE2BBBAAAEEEEAAgZQJEACmjJITIYAAAggggAACmSFAAJgZ9UQpEUAAAQQQQACBlAkQAKaMkhMhgAACCCCAAAKZIUAAmBn1RCkRQAABBBBAAIGUCRAApoySEyGAAAIIIIAAApkhQACYGfVEKRFAAAEEEEAAgZQJEACmjJITIYAAAggggAACmSFAAJgZ9UQpEUAAAQQQQACBlAkQAKaMkhMhgAACCCCAAAKZIUAAmBn1RCkRQAABBBBAAIGUCRAApoySEyGAAAIIIIAAApkhQACYGfVEKRFAAAEEEEAAgZQJEACmjJITIYAAAggggAACmSFAAJgZ9UQpEUAAAQQQQACBlAkQAKaMkhMhgAACCCCAAAKZIUAAmBn1RCkRQAABBBBAAIGUCRAApoySEyGAAAIIIIAAApkhQACYGfVEKRFAAAEEEEAAgZQJEACmjJITIYAAAggggAACmSFAAJgZ9UQpEUAAAQQQQACBlAkQAKaMkhMhgAACCCCAAAKZIUAAmBn1RCkRQAABBBBAAIGUCRAApoySEyGAAAIIIIAAApkhQACYGfVEKRFAAAEEEEAAgZQJEACmjJITIYAAAggggAACmSFAAJgZ9UQpEUAAAQQQQACBlAmkJABct26dZWVl2aJFi1JWsMM5UfXq1e3RRx89nEM5BgEEEEDgMAT27Q/ZrNVb7K1FX7lXrZMQQCD/C2TnlyJOmzbNzj//fFccBZOlS5e2k046yVq3bm233367Va5c+aBFnTdvnpUsWfKg+ciAAAIIIJC8wKQlG23IO0tt47bd4ZNVLlvMBrWvb20bHPz/7PBBLCCAQNoFUtICmMpSr1ixwv773/+agrkBAwbYlClTrEGDBrZ48eIcL7N3716377jjjrMSJUrkmI8dCCCAAAKpEVDw1/OlhVHBn868yQsGtV37SQggkH8FXAA4ZswYq1mzph111FFWp04de/HFF6NKvHz5cjvnnHOsWLFiVr9+fReUqZXuzTffjMoXubJ06VJr166dlSpVyipWrGjdunWzb7/9NjJL3OUKFSpYpUqVrHbt2nbllVfajBkzTIFdz549w/mvueYa69Chgw0fPtyqVKni8mpnZBdw586d3fHhg7yFn376yY499lh79tln3eZQKGR/+ctfXEtj8eLF7bTTTrPXX3898hCWEUAAAQRiBNTNq5a/eJ29/jbtpzs4Bo5VBPKRQKGJEydanz597I477rAlS5bYTTfdZNdee61NnTrVFXP//v0u2FLL2pw5c+zJJ5+0u+66K9db2Lhxo7Vo0cIaNWpk8+fPt0mTJtnXX39tnTp1yvW4eDsVmP3xj390geDmzZvDWT788ENbtmyZffDBB/buu++Gt/sLXbp0sbffftt++OEHf5O9//77tnPnTuvYsaPbdvfdd7tgUAHw559/7rqau3btah9//HH4mMiFPXv22Pbt26N+IvezjAACCARBYO7arQe0/EXet4JAdQsrHwkBBPKnQPZDDz1kalG7+eabXQn79u1rs2fPNm3XmLzJkyfb6tWrTWP01DKn9MADD7ixeW4lzj8KqBo3bmzDhg0L7x03bpxVrVrVVq5cGW6xC+88yELdunVdDk02UQuhksb6Pf30067V0m2I+adNmzYujwJctT4q/f3vf7f27dtbmTJlXCD48MMP20cffWTNmjVz+zXmcPr06TZ27FgXwLqNEf+oxXHIkCERW1hEAAEEgiewecevY/5yu/tE8+V2DvYhgMCRESikVrTmzZtHnV3r2q6kMXkK3PzgT9uaNm2qlxzTggULXAuiun/9Hz+IUzB5qEldtUrqdvZTw4YNcwz+lKdIkSL2hz/8wV5++WV3iFr+3nrrLVPLoJK6qHfv3u0CWb+Men3hhRdcwOsyxfwzcOBA27ZtW/hnw4YNMTlYRQABBAq+QIXSxRK6yUTzJXQyMiGAQEoF3CzgyMBKZ1fA5W+LXE70yuo2VkvbiBEjDjgkkdm8sQf5wajG+Pkpkdm+CvbUFa2uY3UVawzj7373O3cKlVHpvffes+OPP94t+/8ULVrUX4x61fac9kVlZAUBBBAowAJNaxxjmu2rCR/
+mL/I29Wv6pW8/cpHQgCB/CmQXa9ePdft2b1793AJZ86cadqupJa79evXuzF8msyhpBm6uSV1/06YMMFNysjOTu5JM7t27XLjDs877zw3GSS368buO/vss13r5auvvmr/+te/XIugJrooaTKLgjndm4JEEgIIIIBAYgKFC2W5R71otq+Cvcgg0O+n0aNglI+EAAL5UyD7zjvvdJMzFLRdeOGF9s4779gbb7zhZvqqyHoOn2YIX3311W7G7I4dO8KTQPxWwthb69Wrlz311FOmmbg6v2berlq1ysaPH++2Fy5cOPaQ8Lpa69Q1q+uoK1mzdDV7WGU61KTyXXXVVfbEE0+4sYf+xBadR88Z7Nevn5v4odZAzXLWBA8Fv+oK1v2SEEAAAQTiC+g5f2O6Nj7gOYBq+eM5gPHN2IpAfhLI1uNURo8ebSNHjrRbb73VatSo4WbGtmzZ0pVTwZoe93L99ddbkyZN3CNTlFddvOpSjZf0aBY9vkXP8dNkDM2erVatmrVt29YKFcr90YN6DI0CNwVhmpTx29/+1jQxJXIMYrxr5rRN3cCajKLrx451HDp0qJtUoskda9assXLlyrnJK3/+859zOh3bEUAAAQR+EVAQ2Lp+JTfbVxM+NOZP3b60/PEWQSD/C2R5Y/wiW+8TKrGCO7WYqVVPrYNBTWoxLFu2rJsUopnFJAQQQAABBBDI/wJ8f5slNEBPj1JRi1ytWrVc0KfnBqo1LcjBX/5/e1NCBBBAAAEEEEAgvkBCAaDG4/Xv39/02BON52vVqpWNGjUq/hnZigACCCCAAAIIIJCvBQ6rCzhf31EaC0cTchqxuRQCCCCAAAIpEuD72yz3GRkpguY0CCCAAAIIIIAAAvlHgAAw/9QFJUEAAQQQQAABBNIiQACYFmYuggACCCCAAAII5B8BAsD8UxeUBAEEEEAAAQQQSIsAAWBamLkIAggggAACCCCQfwQIAPNPXVASBBBAAAEEEEAgLQIEgGlh5iIIIIAAAggggED+ESAAzD91QUkQQAABBBBAAIG0CBAApoWZiyCAAAIIIIAAAvlHgAAw/9QFJUEAAQQQQAABBNIiQACYFmYuggACCCCAAAII5B8BAsD8UxeUBAEEEEAAAQQQSIsAAWBamLkIAggggAACCCCQfwQIAPNPXVASBBBAAAEEEEAgLQIEgGlh5iIIIIAAAggggED+ESAAzD91QUkQQAABBBBAAIG0CBAApoWZiyCAAAIIIIAAAvlHgAAw/9QFJUEAAQQQQAABBNIiQACYFmYuggACCCCAAAII5B8BAsD8UxeUBAEEEEAAAQQQSIsAAWBamLkIAggggAACCCCQfwQIAPNPXVASBBBAAAEEEEAgLQIEgGlh5iIIIIAAAggggED+ESAAzD91QUkQQAABBBBAAIG0CBAApoWZiyCAAAIIIIAAAvlHgAAw/9QFJUEAAQQQQAABBNIiQACYFmYuggACCCCAAAII5B+BfB8ADh482Bo1apS02DXXXGMdOnTI9TwtW7a02267Ldc87EQAgcwU2Lc/ZLNWb7G3Fn3lXrVOQgABBIIqkJ3qG585c6ade+651rp1a5s0aVKqT3/Y5xs9erSFQvyHf9iAHIhABgtMWrLRhryz1DZu2x2+i8pli9mg9vWtbYPK4W0sIIAAAkERSHkL4Lhx4+yWW26x6dOn2/r164+44969exO6RtmyZa1cuXIJ5SUTAggUHAEFfz1fWhgV/OnuNnnBoLZrPwkBBBAImkBKA8CdO3faa6+9Zj179rSLL77YnnvuubDntGnTLCsryz788EM744wzrESJEnb22WfbihUrwnm08OCDD1rFihWtdOnSdt1119nu3b/+xq79flfu8OHDrUqVKla7dm1ttsWLF9sFF1xgxYsXt/Lly9uNN95oP/zwg9unf/zj/A0qa/fu3a1UqVJWuXJlGzVqlL+LVwQQKCAC6uZVy1+8tn9/m/bTHVxAKpzbQACBhAVSGgC++uqrVqdOHffTtWtXe/bZZw/odr3rrrtcsDV//nzLzs62Hj16hAur4HHQoEH2wAMPmPYrMHv88cfD+/0FBZHLli2zDz74wN5991378ccfrW3btnb00UfbvHnz7B//+IdNmTLFevfu7R9ywOudd95pU6dOtYkTJ9rkyZNNAeqCBQsOyBe5Yc+ePbZ9+/aon8j9LCOAQP4SmLt26wEtf5ElVBCobmHlIyGAAAJBEkjpGMBnnnnGFPgpKSBTC5yCtVatWoVNFdy1aNHCrf/pT3+yiy66yLXyFStWzB599FEXEF5//fVu//333+8CudhWwJIlS9rTTz9tRx11lMv31FNP2a5du+yFF14w7VN67LHHrH379jZixAjXoug2/vKPyqWyKr/GKio9//zzdsIJJ/ySI/6LWh2HDBkSfydbEUAg3wls3hHdg5BTARPNl9PxbEcAAQQyTSBlLYDqyp07d65deeWVzkCte1dccYVpTGBkOvXUU8OrauFT2rx5s3tVq16zZs3csv9P7Lq2N2zYMBz8aV3HnXbaaeHgT9uaN29u+/fvP6CLWftWr15tGjsYee5jjjnGtVxqf05p4MCBtm3btvDPhg0bcsrKdgQQyAcCFUoXS6gUieZL6GRkQgABBDJAIGUtgGpR+/nnn+34448P37Zm3RYpUsS+++678Dat+0ljApUUqB1K8lv5/GN0Hf9c/jb/Nd72w50NXLRoUdMPCQEEMkOgaY1jTLN9NeHDH/MXWXL9D1TJ2698JAQQQCBIAilpAVTgp+5UTaRYtGhR+Off//63VatWzV5++eWETOvVq2ezZ8+Oyhu7HrXzl5X69eu7a2pih59mzJhhhQoVCk8S8bfr9eSTT3aBaeS5FaSuXLkyMhvLCCCQ4QKFC2W5R73oNv736+avN+Sv61EwykdCAAEEgiSQkgBQEzEUQGnWboMGDaJ+Lr/8cjfeLhHUPn36uC5jdRsrGNOEkM8///ygh3bp0sU0hvDqq6+2JUuWuMkdehRNt27dDhj/p5Np5q/KqokgGqOoYzRLWAEjCQEECpaAnvM3pmtj19IXeWdq+dN2ngMYqcIyAggERSAlXcDq/tVEDz1rLzZ17NjRhg0bZgsXLozddcC6xgxqfN6AAQPcxBAdq0fKvP/++wfkjdygR8oojwLIJk2auEfM6NiHH344MlvU8siRI90kld///vfukTN33HGHG9sXlYkVBBAoEAIK8lrXr+Rm+2rCh8b8qduXlr8CUb3cBAIIHIZAljceLt7QmMM4VfAO0SNhFPRqYkiZMmWCB8AdI4AAAgggkIECfH+b0eeZgW9ciowAAggggAACCCQjQACYjB7HIoAAAggggAACGShAAJiBlUaREUAAAQQQQACBZAQIAJPR41gEEEAAAQQQQCADBQgAM7DSKDICCCCAAAIIIJCMAAFgMnociwACCCCAAAIIZKAAAWAGVhpFRgABBBBAAAEEkhEgAExGj2MRQAABBBBAAIEMFCAAzMBKo8gIIIAAAggggEAyAgSAyehxLAIIIIAAAgggkIECBIAZWGkUGQEEEE
AAAQQQSEaAADAZPY5FAAEEEEAAAQQyUIAAMAMrjSIjgAACCCCAAALJCBAAJqPHsQgggAACCCCAQAYKEABmYKVRZAQQQAABBBBAIBkBAsBk9DgWAQQQQAABBBDIQAECwAysNIqMAAIIIIAAAggkI0AAmIwexyKAAAIIIIAAAhkoQACYgZVGkRFAAAEEEEAAgWQECACT0eNYBBBAAAEEEEAgAwUIADOw0igyAggggAACCCCQjAABYDJ6HIsAAggggAACCGSgAAFgBlYaRUYAAQQQQAABBJIRIABMRo9jEUAAAQQQQACBDBQgAMzASqPICCCAAAIIIIBAMgIEgMnocSwCCCCAAAIIIJCBAgSAGVhpFBkBBBBAAAEEEEhGgAAwGT2ORQABBBBAAAEEMlCAADADK40iI4AAAggggAACyQgQACajx7EIIIAAAggggEAGChTKysqyRYsWHfGiX3PNNdahQ4cjfh0ugAACCMQT2Lc/ZLNWb7G3Fn3lXrVOQgABBIIqkJ2KGw+FQvbUU0/ZM888Y59//rllZ2fbySefbF27drUbb7zRSpQokYrLcA4EEEDgsAQmLdloQ95Zahu37Q4fX7lsMRvUvr61bVA5vI0FBBBAICgCKekC7tatm9122212ySWX2NSpU12L4j333GNvvfWWTZ48+bAt9+7de9jHciACCCAgAQV/PV9aGBX8afsmLxjUdu0nIYAAAkETOCAAXLp0qbVr185KlSplFStWNAV33377bY4ur732mr388sv2yiuv2J///Gdr0qSJVa9e3QWDH330kZ1//vlRxz700ENWuXJlK1++vPXq1ct++umn8H4dd//995u6i8uWLWs33HCD2zdhwgQ75ZRTrGjRou7co0aNCh+jBf+47t27u3JXq1bNBZ/ffPONK4fupWHDhjZ//vyo42bOnGnnnXeeFS9e3KpWrWq33nqr7dy5MyoPKwggkLkC6uZVy1+8zl5/m/bTHZy5dUzJEUDg8ASiAsCNGzdaixYtrFGjRi5YmjRpkn399dfWqVOnHM+u4K9OnTou0IrNpPGFCuT8pNbB1atXu1bC559/3p577jn34+/X68iRI61Bgwa2YMECUyuiXnX9K6+80hYvXmyDBw9223VsZHrkkUesefPm9umnn9pFF13kAlcFhOqGXrhwoeuS1rq6q5V0rjZt2thll11mn332mb366qs2ffp06927d+Rpo5b37Nlj27dvj/qJysAKAgjkK4G5a7ce0PIXWUD9b6BuYeUjIYAAAkESyPJuNqSgSUHfvffea3PmzLH3338/bPDll1+61rEVK1ZY7dq1w9v9hfr161utWrVci5u/Ld6rWvWmTZvmAsDChQu7LArsChUqZOPHj3frask7/fTTbeLEieFTdOnSxdSSF9mV3L9/f3vvvffceENl1HHnnnuuvfjii+64TZs2uVZGBZD33Xef2zZ79mxr1qyZKcitVKmSKRhUy9/YsWPdfv2jAFABsFoBixUrFt7uLyj4HDJkiL8aft22bZuVKVMmvM4CAgjkDwFN+OgzftFBCzP6ykZ2SaPjD5qPDAggUDAE1JijBqogf39HtQCqtU2tdOoy9X/q1q3ralstd/GSWtTU0pdIUjeuH/wpv7qCN2/eHHXoGWecEbW+bNky17IXuVEtff/5z39s37594c2nnnpqeFld10rq9vWTv82/nu5VrYj+fepVLYL79++3tWvX+odFvQ4cONC9WfSG0c+GDRui9rOCAAL5S6BC6QN/kYtXwkTzxTuWbQgggEAmCkTNAlbw0759exsxYsQB96JgLV5Sq6CCtERSkSJForIpcNQ1I1PJkiUjV12XbWyA6XfjRmaMPLefP942/3p6vemmm9y4v8jzaPnEE0+M3eTWNQZRPyQEEMgMgaY1jjHN9tWEj/8N/ogut351reTtVz4SAgggECSBqACwcePGpgkX6lLVo1wSSVdddZUbn6cZv5oFHJkUqPnNrJHbD2VZXczqmo1MmryhwDOyNTFyfyLLulc9skaPqyEhgEDBFChcKMs96kWzfd14l4jb9Pst9CgY5SMhgAACQRKI6gLWrNytW7da586dbe7cubZmzRo39q5Hjx5R3a2RQBrHd8UVV7hjhg8f7iaPfPHFF/buu+9aq1atXJdyZP5DXb7jjjvsww8/tKFDh9rKlStNk0cee+wx69ev36GeKir/gAEDbNasWW4msh6ErS7lt99+22655ZaofKwggEBmC+g5f2O6NnYtfZF3opY/bec5gJEqLCOAQFAEopr5qlSpYjNmzDAFRxoPp1mveqRK27Zt3WSNeCjqbv373/9uTz75pI0bN849xkWth5oYookWOk8ySS11etSMJqgoCFRXtCZ2aFJJMkljBj/++GO766673AQStVbWrFnTBbPJnJdjEUAg/wkoyGtdv5Kb7bt5x27TmD91+9Lyl//qihIhgEB6BLK8wCfe0Jj0XD3Dr+J3bwd5FlGGVyHFRwABBBAIoADf32ZRXcABfA9wywgggAACCCCAQOAECAADV+XcMAIIIIAAAggEXYAAMOjvAO4fAQQQQAABBAInQAAYuCrnhhFAAAEEEEAg6AIEgEF/B3D/CCCAAAIIIBA4AQLAwFU5N4wAAggggAACQRcgAAz6O4D7RwABBBBAAIHACRAABq7KuWEEEEAAAQQQCLoAAWDQ3wHcPwIIIIAAAggEToAAMHBVzg0jgAACCCCAQNAFCACD/g7g/hFAAAEEEEAgcAIEgIGrcm4YAQQQQAABBIIuQAAY9HcA948AAggggAACgRMgAAxclXPDCCCAAAIIIBB0AQLAoL8DuH8EEEAAAQQQCJwAAWDgqpwbRgABBBBAAIGgCxAABv0dwP0jgAACCCCAQOAECAADV+XcMAIIIIAAAggEXYAAMOjvAO4fAQQQQAABBAInQAAYuCrnhhFAAAEEEEAg6AIEgEF/B3D/CCCAAAIIIBA4AQLAwFU5N4wAAggggAACQRcgAAz6O4D7RwABBBBAAIHACRAABq7KuWEEEEAAAQQQCLoAAWDQ3wHcPwIIIIAAAggEToAAMHBVzg0jgAACCCCAQNAFCACD/g7g/hFAAAEEEEAgcAIEgIGrcm4YAQQQQAABBIIuQAAY9HcA948AAggggAACgRMgAAxclXPDCCCAAAIIIBB0gQIbALZs2dJuu+22oNcv949AWGDf/pDNWr3F3lr0lXvVOgkBBBBAIJgChx0AhkIha9WqlbVp0+YAuccff9zKli1r69evP2BfIhumTZtmWVlZuf4899xziZyKPAgg4AlMWrLRzhnxkXV+arb1Gb/IvWpd20kIIIAAAsETOOwAUAHas88+a3PmzLGxY8eG5dauXWsDBgyw0aNH24knnhjefigLZ599tm3cuDH806lTJ2vbtm14XfuuuOKKQzkleREIrICCvJ4vLbSN23ZHGWzy1rWdIDCKhRUEEEAgEAKHHQBKp2rVqi7Q69evnynwU6vgddddZxdeeKHVqFHDmjZtakWLFrXKlSvbn/70J/v555/DqNWrV7dHH300vK6FR
o0a2eDBg+2oo46ySpUqhX+KFy/uzuNv07k6d+4cday6e9XtG5l0vd69e1u5cuWsfPnydvfdd7sy+nn27t1r/fv3t+OPP95KlixpZ555pqn1kYRAQRFQN++Qd5ZavM5ef5v20x1cUGqc+0AAAQQSE8hOLFvOua6++mqbOHGiXXvttdaxY0dbsmSJzZs3z+rXr2/XXHONvfDCC7Z8+XK74YYbrFixYi7Ay/lsqd3z/PPPu4BUrZTz58+3G2+80apVq+bKoiupzOvWrbPx48dblSpV3H2opXHx4sVWq1atAwqzZ88e04+ftm/f7i/yikC+FJi7dusBLX+RBVUQqJZB5WtWs3zkLpYRQAABBAqwQNIBoGyefPJJa9CggX3yySf2+uuvu3W1Dj722GNuHF/dunXtv//9r+savvfee61QoaQaHhOuDpXhkUcecWWoU6eOC+y0rmB09erV9sorr9iXX37pgj+dVC2ZkyZNcl3bw4YNO+A6w4cPtyFDhhywnQ0I5FeBzTuiu31zKmei+XI6nu0IIIAAApklkJJIrEKFCq51rV69enbppZfasmXLrFmzZi7w8jmaN29uP/zwgwu4/G1H+vWss86KKoPK9J///Mf27dtnCxcudN3BtWvXtlKlSoV/Pv74YxccxivbwIEDbdu2beGfDRs2xMvGNgTyjUCF0sUSKkui+RI6GZkQQAABBPK9QEpaAHWX2dnZ7kfLGguoSSKRSduU/O1qBfS3+fl++uknfzHX12SO9U+8f/9+K1y4sC1YsMC9+tv1qoAwXtJ4Rv2QEMgUgaY1jrHKZYuZJnz4Y/4iy65PaSVvv/KREEAAAQSCI5CSFsBYLo3/mzlzZlSAp/XSpUu7CRfKf9xxx7lZvf6xGk+niSSJpNhjdcyiRYsOOHT27NlR27SusX0K/E4//XTXErh582Y7+eSTo3402YSEQEEQKFwoywa1r+9uJfpXMu+XsV9uUPuVj4QAAgggEByBIxIA3nzzzabu0VtuucVNAHnrrbds0KBB1rdv3/D4vwsuuMBefPFFN25QE0c0mUSBWSJJx2pShyaYqEtX59Y5YpPKoGuuWLHCjff729/+Zn369HHZ1PXbpUsX6969u73xxhsu+NTklREjRtg///nP2FOxjkDGCrRtUNnGdG3sWvoib0Itf9qu/SQEEEAAgWAJpKwLOJJNj1VREHXnnXfaaaedZsccc4ybjavHsPhJ4+nWrFljF198sXto9NChQxNuAdTDp++55x73CJfdu3dbjx49XCCn2buRScHdrl273ONoFFwqINVMYD/pOYb333+/3XHHHfbVV1+5R8VonGC7du38LLwiUCAEFOS1rl/JzfbVhA+N+VO3Ly1/BaJ6uQkEEEDgkAWyvHF48YYGHfKJgniAuq31F080MaRMmTJBJOCeEUAAAQQQyDgBvr/NjkgXcMa9EygwAggggAACCCAQIAECwABVNreKAAIIIIAAAghIgACQ9wECCCCAAAIIIBAwAQLAgFU4t4sAAggggAACCBAA8h5AAAEEEEAAAQQCJkAAGLAK53YRQAABBBBAAAECQN4DCCCAAAIIIIBAwAQIAANW4dwuAggggAACCCBAAMh7AAEEEEAAAQQQCJgAAWDAKpzbRQABBBBAAAEECAB5DyCAAAIIIIAAAgETIAAMWIVzuwgggAACCCCAAAEg7wEEEEAAAQQQQCBgAgSAAatwbhcBBBBAAAEEECAA5D2AAAIIIIAAAggETIAAMGAVzu0igAACCCCAAAIEgLwHEEAAAQQQQACBgAkQAAaswrldBBBAAAEEEECAAJD3AAIIIIAAAgggEDABAsCAVTi3iwACCCCAAAIIEADyHkAAAQQQQAABBAImQAAYsArndhFAAAEEEEAAAQJA3gMIIIAAAggggEDABAgAA1bh3C4CCCCAAAIIIEAAyHsAAQQQQAABBBAImAABYMAqnNtFAAEEEEAAAQQIAHkPIIAAAggggAACARMgAAxYhXO7CCCAAAIIIIAAASDvAQQQQAABBBBAIGACBIABq3BuFwEEEEAAAQQQSEkAuG7dOsvKyrJFixYhigAC+VRg3/6QzVq9xd5a9JV71ToJAQQQQCCYAtn55banTZtm559/vpUrV842btxoxYoVCxdt7ty5duaZZ7r1UIgvrTAMCwgkKDBpyUYb8s5S27htd/iIymWL2aD29a1tg8rhbSwggAACCARDICUtgKmkKl26tE2cODHqlOPGjbMTTzwxahsrCCCQmICCv54vLYwK/nTkJi8Y1HbtJyGAAAIIBEvABYBjxoyxmjVr2lFHHWV16tSxF198MUph+fLlds4557hWufr169uUKVNcl++bb74ZlS9yZenSpdauXTsrVaqUVaxY0bp162bffvttZJa4y1dffbUp4PPTrl27bPz48abtkWnLli3WuXNnO+GEE6xEiRLWsGFDe+WVVyKzWMuWLa13797uRy2L5cuXt7vvvtsiWxEff/xxq1Wrlrs3lfPyyy+POgcrCGSygLp51fIXr93c36b9dAdnci1TdgQQQODQBQqpta1Pnz52xx132JIlS+ymm26ya6+91qZOnerOtn//fuvQoYMLsubMmWNPPvmk3XXXXbleSV24LVq0sEaNGtn8+fNt0qRJ9vXXX1unTp1yPU47FSh+8skntn79epd3woQJVr16dWvcuHHUsbt377bf/OY39u6777py33jjje5YlTEyPf/885adnW3a/te//tUeeeQRe/rpp10Wle3WW2+1++67z1asWOHKed5550UeHrW8Z88e2759e9RPVAZWEMhnAnPXbj2g5S+yiAoC1S2sfCQEEEAAgeAIZD/00EN2zTXX2M033+zuum/fvjZ79mzTdo3Jmzx5sq1evdo0Rq9SpUouzwMPPGCtW7fOUUktigrYhg0bFs6jVr2qVavaypUrrXbt2uHtsQsVKlSw3/3ud/bcc8/Zvffe61oDe/ToEZvNjj/+eOvXr194+y233OICuH/84x/h8YLaqWsq6NMkFbVuLl682K3fcMMNLsgsWbKkXXzxxaau52rVqtnpp58ePmfswvDhw23IkCGxm1lHIN8KbN7x65i/3AqZaL7czsE+BBBAAIHMESi0bNkya968eVSJta7tSmoZUxDlB3/a1rRpU73kmBYsWOBaENX96//UrVvX5VcwebCkgE8B4Jo1a2zWrFnWpUuXAw7Zt2+fKRA99dRTXdeurqNg1W859A8466yzXPDnrzdr1sz+85//mI5XEKug76STTnKthy+//LL9+OOPftYDXgcOHGjbtm0L/2zYsOGAPGxAID8JVChdLKHiJJovoZORCQEEEEAg3wu4MYBqHYtMGiPnb4tcjsyT27K6jdu3b+8eC6NHw/g/Crxy62L1z6mxg+rive6669x5NHYvNo0aNcq15PXv398++ugjd402bdrY3r17Y7PmuK5Wv4ULF7qxg5UrV3Ytjqeddpp9//33cY8pWrSolSlTJuonbkY2IpBPBJrWOMY02zf6E/5r4bRd+5WPhAACCCAQHIFC9erVs+nTp0fd
8cyZM03bldRyp1Y1jeHz07x58/zFuK/q/v3888/d2L2TTz7ZIn/U5XqwVLhwYdcip27neN2/Ol7jBC+55BLr2rWrKWhTK54CzNik7uzIpHVN+tA1lDQ+sFWrVvaXv/zFPvvsM9MzDRVQkhAoCAKFC2W5R73oXmKDQH9dj4JRPhICCCCAQHAECt15552uu/WJJ55wAdTDDz9sb7zxRnh8nbpJNUNYs3AVIM2YMSM8CcRvJYzl6tWrl23dutXN0tUz/NSVq+5ZBXPqek0kDR061L755htTq168pKDygw8+MAWr6q7W5JVNmzYdkFXdtBrXqK5szRL+29/+5ia9KKMmkGhiiFoov/jiC3vhhRdMrZcaK0hCoKAI6Dl/Y7o2tkpeS19k0rq28xzASBWWEUAAgWAIZGuG7+jRo23kyJFuRmyNGjXs2WefdY9QEYFayvS4l+uvv96aNGniWtqUV128kQ9rjuSqUqWKCxQHDBjgAjjNntVYu7Zt21qhQq7XOTJ73GU9kubYY4+Nu08b77nnHlu7dq07vx4Do1nAuheN0YtM3bt3Nz1KRuMWdS+aLKK8Sno0jILdwYMHuy5ntQwqSDzllFMiT8EyAhkvoCCvdf1KbravJnxozJ+6fWn5y/iq5QYQQACBwxLI8sb4+Y8DS/gEagXUcwFXrVrlWgcTPjDNGfUcQD2K5tFHHz0iV9YjYcqWLeuCTo0NJCGAAAIIIIBA/hfg+9sbApdINelZgZplqxYyBX16bqBmCqtrmIQAAggggAACCCCQWQIJBYA7duwwzbbVeDp1y2rShGbhkhBAAAEEEEAAAQQyT+CwuoAz7zaPTIlpQj4yrpwVAQQQQACBIynA97dZYjMyjmQtcG4EEEAAAQQQQACBtAoQAKaVm4shgAACCCCAAAJ5L0AAmPd1QAkQQAABBBBAAIG0ChAAppWbiyGAAAIIIIAAAnkvQACY93VACRBAAAEEEEAAgbQKEACmlZuLIYAAAggggAACeS9AAJj3dUAJEEAAAQQQQACBtAoQAKaVm4shgAACCCCAAAJ5L0AAmPd1QAkQQAABBBBAAIG0ChAAppWbiyGAAAIIIIAAAnkvQACY93VACRBAAAEEEEAAgbQKEACmlZuLIYAAAggggAACeS9AAJj3dUAJEEAAAQQQQACBtAoQAKaVm4shgAACCCCAAAJ5L0AAmPd1QAkQQAABBBBAAIG0ChAAppWbiyGAAAIIIIAAAnkvQACY93VACRBAAAEEEEAAgbQKEACmlZuLIYAAAggggAACeS9AAJj3dUAJEEAAAQQQQACBtAoQAKaVm4shgAACCCCAAAJ5L0AAmPd1QAkQQAABBBBAAIG0ChAAppWbiyGAAAIIIIAAAnkvQACY93VACRBAAAEEEEAAgbQKEACmlZuLIYAAAggggAACeS9AAJj3dUAJEEAAAQQQQACBtAoQAKaVm4shgAACCCCAAAJ5L0AAmPd1QAkQQAABBBBAAIG0CgQ6AKxevbo9+uijYfCsrCx78803w+ssIJAXAvv2h2zW6i321qKv3KvWSQgggAACCKRSIDuVJzucc23atMmGDx9u7733nn355ZdWtmxZq1WrlnXt2tW6d+9uJUqUOJzTJnTMvHnzrGTJkgnlJRMC6RCYtGSjDXlnqW3ctjt8ucpli9mg9vWtbYPK4W0sIIAAAgggkIxAngaAa9assebNm1u5cuVs2LBh1rBhQ/v5559t5cqVNm7cOKtSpYr9/ve/T+b+cj32uOOOy3U/OxFIp4CCv54vLbTY9r5NXjCo7WO6NiYITGeFcC0EEECgAAvkaRfwzTffbNnZ2TZ//nzr1KmT1atXzwWBHTt2dC2C7du3d/Tqmh07dqxdfPHFrkVQ+WbNmmWrVq2yli1bula8Zs2a2erVq8NVpeVLLrnEKlasaKVKlbImTZrYlClTwvu1ENsFHLWTFQTSKKBuXrX8xQZ/KoK/TfvpDk5jpXApBBBAoAAL5FkAuGXLFps8ebL16tUrx25YBX5+Gjp0qOsSXrRokdWtW9euuuoqu+mmm2zgwIEugFS+3r17+9nthx9+sHbt2rmg79NPP7U2bdqYAsr169eH8xzqwp49e2z79u1RP4d6DvIjEE9g7tqtUd2+sXkUBKpbWPlICCCAAAIIJCuQZwGgWu9CoZDVqVMn6h6OPfZY12KnVrsBAwaE91177bWulbB27dpu+7p166xLly4usFOLYJ8+fWzatGnh/KeddpoLENWtrDGF999/v5100kn29ttvh/Mc6oLGKmqMov9TtWrVQz0F+RGIK7B5x69j/uJm+GVjovlyOwf7EEAAAQQQyLMA0KePbOXTtrlz55pa+U455RRTi5ufTj31VH/RdetqRcGdn9TVu3v3btc6p207d+60/v37W/369d0YQwWUy5cvT6oFUK2N27ZtC/9s2LDBvzyvCCQlUKF0sYSOTzRfQicjEwIIIIBAYAXybBLIySefbAr+FJRFJrXSKRUvXjxysxUpUiS87geN8bbt37/f5bvzzjvt/ffft4ceesh0LZ3v8ssvt71794bPc6gLRYsWNf2QEEi1QNMax5hm+2rChz/mL/IaGgxRyduvfCQEEEAAAQSSFcizFsDy5ctb69at7bHHHnOtdcneSOzxn3zyiV1zzTV26aWXupbCSpUqmbqNSQjkR4HChbLco15Utl9Hvv6vpP66HgWjfCQEEEAAAQSSFcizAFAFf/zxx91jX8444wx79dVXbdmyZbZixQp76aWXXMtg4cKFD/v+1Or3xhtvuO7kf//7327SiN86eNgn5UAEjqCAnvOnR72opS8yaZ1HwESKsIwAAgggkKxAnnUBq+A1a9Y0zdDVMwA1vk4PglYXq8bt9evXz/SYmMNNjzzyiPXo0cPOPvts08QSTSjRDF4SAvlZQEFg6/qV3GxfTfjQmD91+9Lyl59rjbIhgAACmSeQ5c3EjTfkKPPuJA9KrIBSM4I1MaRMmTJ5UAIuiQACCCCAAAKHKsD3t1medgEfaoWRHwEEEEAAAQQQQCB5AQLA5A05AwIIIIAAAgggkFECBIAZVV0UFgEEEEAAAQQQSF6AADB5Q86AAAIIIIAAAghklAABYEZVF4VFAAEEEEAAAQSSFyAATN6QMyCAAAIIIIAAAhklQACYUdVFYRFAAAEEEEAAgeQFCACTN+QMCCCAAAIIIIBARgkQAGZUdVFYBBBAAAEEEEAgeQECwOQNOQMCCCCAAAIIIJBRAgSAGVVdFBYBBBBAAAEEEEhegAAweUPOgAACCCCAAAIIZJQAAWBGVReFRQABBBBAAAEEkhcgAEzekDMggAACCCCAAAIZJUAAmFHVRWERQAABBBBAAIHkBQgAkzfkDAgggAACCCCAQEYJEABmVHVRWAQQQAABBBBAIHkBAsDkDTkDAggggAACCCCQUQIEgBlVXRQWAQQQQAABBBBIXoAAMHlDzoAAAggggAACCGSUAAFgRlUXhUUAAQQQQAABBJIXIABM3pAzIIAAAggggAACGSVAAJhR1UVhEUAAAQQQQACB5AUIAJM
35AwIIIAAAggggEBGCRAAZlR1UVgEEEAAAQQQQCB5AQLA5A05AwIIIIAAAgggkFECBIAZVV0UFgEEEEAAAQQQSF6AADB5Q86AAAIIIIAAAghklAABYEZVF4VFAAEEEEAAAQSSFyAATN6QMyCAAAIIIIAAAhklkGMAuG7dOsvKyrJFixZl1A0lWtjnnnvOypUrl2j2tObbtz9ks1ZvsbcWfeVetU5CAAEEEEAAAQRSJZCdqhPldJ6xY8fa448/bqtWrbIiRYpYjRo17Morr7QBAwbkdEigt09astGGvLPUNm7bHXaoXLaYDWpf39o2qBzexgICCCCAAAIIIHC4Akc0AHzmmWesb9++9te//tVatGhhe/bssc8++8yWLl16uOVN6LiffvrJBZsJZc5HmRT89XxpocW2923ygkFtH9O1MUFgPqovioIAAggggECmChQqVqyY1a9f36ZMmeK6fN98880c70WBW7t27axUqVJWsWJF69atm3377bc55n/nnXesU6dOdt1119nJJ59sp5xyinXu3NmGDh0adcyzzz5r9erVM5Wlbt26rsUwMoNaC2vXrm0lSpSwk046ye655x5TkOenwYMHW6NGjWzcuHFuf9GiRS0UCtn3339vN954oyurzt2gQQN79913/cPc6/vvv++urXtq27atbdy4MWp/ulbUzauWv9jgT9f3t2k/3cHpqhGugwACCCCAQMEVyJ4zZ47t2LHD7rjjjlzvUoGRWvFuuOEGe/jhh23Xrl2uG1cB3kcffRT32EqVKtnHH39sX3zxhVWrVi1unqeeesoGDRpkjz32mJ1++un26aefumuULFnSrr76andM6dKlTWP2qlSpYosXL3b7ta1///7hc6qL+bXXXrMJEyZY4cKFbf/+/fa73/3O3dtLL71kNWvWdC2P2uenH3/80R566CF78cUXrVChQta1a1fr16+fvfzyy36WqFe1YOrHT9u3b/cXk36du3ZrVLdv7AkVBKpbWPma1Swfu5t1BBBAAAEEEEAgYYHs0047zWV+4IEHrHXr1jkeOGbMGGvcuLENGzYsnEctblWrVrWVK1e6Frrwjl8WFNhddtllVr16dbe/WbNmrgXx8ssvdwGXsqk1cNSoUS6f1jVGUC2NGjvoB4B33323drmkcylYffXVV6MCwL1797pA7rjjjnP5Jk+ebHPnzrVly5aFy6bWw8ikVsQnnnjCBYfa3rt3b7vvvvsis0QtDx8+3IYMGRK1LVUrm3f8OuYvt3Mmmi+3c7APAQQQQAABBIItUMi//aZNm/qLcV8XLFhgU6dOdd2/6i7Vj7prlVavXh33mMqVK9usWbNcq92tt97qum0V1KmrVS1033zzjW3YsMF1Efvn1Ov9998fdc7XX3/dzjnnHFOLovarC3j9+vVR11QLox/8aYdmL59wwgnh4C8q8y8r6lJWy6CfVN7Nmzf7qwe8Dhw40LZt2xb+UdlTlSqULpbQqRLNl9DJyIQAAggggAACgRRIeBKIArb27dvbiBEjDoBS4JRb0tg7/fTq1cumT59u5557rusa1thDJXUDn3nmmVGn8LtqZ8+e7WYNq+WtTZs2VrZsWRs/frxrNYw8QF3Gkal48eKRq3GXNSs5MumxNxo7mFPS2EL9HInUtMYxptm+mvARrwRZ3kUrefuVj4QAAggggAACCCQjEA4A582bl+t51P2r8XXqgs3ODh+W6zHxdvpB386dO93kjOOPP97WrFljXbp0iZfdZsyY4cYP3nXXXeH9GlN4sHTqqafal19+mWP39MGOT/f+woWy3KNeNNtXwV5kEKh1JT0KRvlICCCAAAIIIIBAMgLZeiyLJoH4AZZaweIltd6ppU6zeO+880479thj3bP91Bqn7X6LXeSxPXv2dBM3LrjgAtcdq4kk6t5VV63GAyppBq+6h8uUKeMmbWiSxfz58+27775zj5DR7GF19+o6TZo0sffee88mTpwYeZm4y5qwct5551nHjh3dpBWdZ/ny5W6ms7qg82PSc/70qJfY5wCq5Y/nAObHGqNMCCCAAAIIZKZAtoIqTY4YOXKk6+LV41LiJc3AVWucHsmirlgFahp3p2BKM2jjpVatWrlHs2gCyZYtW1zQqMDvww8/tPLl/zeT9frrr3ePd9H1NatXXbkNGza02267zZ3ykksusdtvv91N0NA1L7roIjcGUIHjwZJaLDWrV0GrWhwVBD744IMHOyxP9ysIbF2/kpvtqwkfGvOnbl9a/vK0Wrg4AggggAACBUogyxvz5nobFdxpooUepxI5MaJA3W2Kb0aPgdGYRE0MUQsmCQEEEEAAAQTyvwDf32bZ+pu/Cvr69OljzZs3J/jL/+9bSogAAggggAACCCQlkK1HuWg8n7pr9Tw+EgIIIIAAAggggEDBFsjevXt3wb5D7g4BBBBAAAEEEEAgSiD+7I2oLKwggAACCCCAAAIIFCQBAsCCVJvcCwIIIIAAAgggkIAAAWACSGRBAAEEEEAAAQQKkgABYEGqTe4FAQQQQAABBBBIQIAAMAEksiCAAAIIIIAAAgVJgACwINUm94IAAggggAACCCQgkJ1AHrLkIPDLH1ExPVGchAACCCCAAAKZIeB/b/vf45lR6tSWkgAwCc8dO3a4o6tWrZrEWTgUAQQQQAABBPJCQN/j+pOuQUzhvwUcxJtP9p73799v//3vf6106dKWlZWV7Oky8nj9FqUAeMOGDfw95AyoQeorAyrplyJSV5lTVyop9ZU59eXX1dKlS61OnTpWqFAwR8PRApjEe1ZvmhNOOCGJMxScQ8uUKUMAmEHVSX1lTmVRV5lTVyop9ZU59XX88ccHNvhTLQUz7M2c9yclRQABBBBAAAEEUi5AAJhyUk6IAAIIIIAAAgjkbwECwPxdP/m+dEWLFrVBgwaZXkn5X4D6yv915JeQuvIlMuOV+sqMelIpqav/1RWTQDLnPUtJEUAAAQQQQACBlAjQApgSRk6CAAIIIIAAAghkjgABYObUFSVFAAEEEEAAAQRSIkAAmBJGToIAAggggAACCGSOAAFg5tQVJUUAAQQQQAABBFIiQACYEsbMPsnw4cOtSZMm7i+aVKhQwTp06GArVqyIuin9vcTBgwdblSpVrHjx4tayZUv7/PPPo/Ls2bPHbrnlFjv22GOtZMmS9vvf/96+/PLLqDzfffeddevWzf3pHf35HS1///33UXlYyVkgnXVVvXp19xdu9Fdu/J8//elPOReOPVECqaqrJ5980n3e9IBh1UO8zwufqyj6w1pJZ33x2TqsKgoflIq62rp1q/u+0l8CKVGihJ144ol266232rZt28LX0UKB/mx5X+ykgAu0adMm9Oyzz4aWLFkSWrRoUeiiiy4KeR+G0A8//BCWefDBB0Pen7wLTZgwIbR48eLQFVdcEapcuXLI+5M64Tx//OMfQ96T1UMffPBBaOHChaHzzz8/dNppp4V+/vnncJ62bduGGjRoEJo5c6b70fLFF18c3s9C7gLprKtq1aqF7rvvvtDGjRvDP97fzcy9gOwNC6Sqrh555JGQ94
Xnfrzvo5D3hRS+hr/A58qXOPzXdNYXn63DrycdmYq60vfYZZddFnr77bdDq1atCn344YehWrVqhTp27BhVuIL82bKoO2UFAU9g8+bNIX3RfPzxx87D+5vHoUqVKoUUBPpp9+7dIa8FL/TEE0+4TV6rRKhIkSKh8ePH+1lCX331Vcj7c3mhSZMmuW3e31105509e3Y4z6xZs9y25cuXh7exkLjAkaorlUBfUgo+SKkROJy6irzy1KlT3WclNgDkcxWplLrlI1VfKiGfrdTVk86UbF35pXnttddCRx11VOinn35ymwr6Z4suYLXxkqIE/CbwY445xm1fu3atbdq0yX7729+G8+lBmi1atDCvJc9tW7BggXkfmqg86i72WvjCebxgz3X9nnnmmeHznHXWWW6bf57wDhYSEjhSdeVffMSIEVa+fHlr1KiRPfDAA7Z3715/F6+HKHA4dZXIJfhcJaJ06HmOVH35JeGz5Usk/5qqutJ5NNQiOzvbFaqgf7b+d5fJ+3OGAiLg/dpjffv2tXPOOccFb7otBX9KFStWdK/+P1r/4osv3KryeL852dFHH+3vdq/K4x+vV40xjE3a5ueJ3cd6zgJHsq501T59+ljjxo1dnc6dO9cGDhxo+mXg6aefzrlQ7IkrcLh1FfdkMRv5XMWApGD1SNaXisdnKwWV9MspUlVXW7ZssaFDh9pNN90ULlxB/2wRAIarmgUJ9O7d2z777DObPn36ASAagB6Z9MGL3Ra5X8uxeeLlj80Tew7W4wsc6bq6/fbbwxc+9dRTXSB4+eWXm99yEd7JwkEFUl1XsRfkcxUrktz6ka4vPlvJ1U/k0amoK28su3lj361+/fruT5tGnr8gf7boAo6s6YAvawavNyDWvLFGdsIJJ4Q1vPF/bjm2lc4bdxFuFVQedQ9qxlRkis3z9ddfR+52y9988034PAfsZENcgSNdV/Euqu56JW/AdLzdbMtBIJm6yuGUUZv12eNzFUWS1MqRrq94heOzFU/l4NtSUVfexDbzJnpYqVKlbOLEieaNZQ9fuKB/tggAw1Ud3AW1wOm3qDfeeMM++ugjq1GjRhSG1vVB8Gb3hrcr2PMmidjZZ5/ttv3mN79xH5zIPN7sUfNmFofzNGvWzE2xV3ein+bMmeO2+efxt/MaXyBddRXv6p9++qnb7M3+jrebbTECqairmFPGXeVzFZflkDemq77iFYzPVjyVnLelqq7U8qex7Rq+pMaPYsWKRV20wH+2PEhSwAV69uzpZvROmzYt/LgPPfrjxx9/DMtoBrBm/XpBonsMTOfOneM+BsZrOQxNmTLFPQbmggsuiPsYGK87MaTZv/pp2LAhj4EJKx98IV11pcf0PPzwwyHviym0Zs2a0KuvvhryJvWEvGc7HryQ5HACqaorfRZVD0899ZSbBfx///d/bt0bsxSW1qMq+FyFOQ5rIV31xWfrsKon6qBU1JUeYeZNSHTfQXoMTOTjrmIfXVZQP1s8BibqbRXMFe9XHvfFEvuqZwP6SY+CGTRokHscjDcDOHTeeee5QNDfr9ddu3aFvJbEkDd7OOQ9LNoFduvXr4/MEtKXVpcuXdwzBfVcQS3HPtYi6gBWogRi68hfT3VdebO63X+OCvq934pD3sNSXf3v3Lkzqjys5Czg103s66HWlT53sefQeuR5+FzlXA+J7olnHOucyP+DB6svPluJ1kjO+VJRV/5jleKdy5vsFr54Qf5sZekuPQASAggggAACCCCAQEAEGAMYkIrmNhFAAAEEEEAAAV+AANCX4BUBBBBAAAEEEAiIAAFgQCqa20QAAQQQQAABBHwBAkBfglcEEEAAAQQQQCAgAgSAAalobhMBBBBAAAEEEPAFCAB9CV4RQAABBBBAAIGACBAABqSiuU0EEEAAAQQQQMAXIAD0JXhFAAEEEEAAAQQCIkAAGJCK5jYRCKqAnnXfqlUra9OmzQEEjz/+uHl/7cS8v1hzwD42IIAAAgVZgACwINcu94YAApaVlWXen02zOXPm2NixY8Mi3p97sgEDBtjo0aPtxBNPDG9PxcJPP/2UitNwDgQQQOCICRAAHjFaTowAAvlFoGrVqi7Q69evnynwU6vgddddZxdeeKE1bdrU2rVrZ6VKlbKKFStat27d7Ntvvw0XfdKkSXbOOedYuXLlrHz58nbxxRfb6tWrw/vXrVvngszXXnvNWrZsad7fTraXXnopvJ8FBBBAID8K8LeA82OtUCYEEDgiAh06dLDvv//eOnbsaEOHDrV58+bZGWecYTfccIN1797ddu3a5VoFf/75Z/voo49cGSZMmOACvIYNG9rOnTvt3nvvNQV9ixYtskKFCrnlGjVqWPXq1W3UqFF2+umnW9GiRa1KlSpH5B44KQIIIJAKAQLAVChyDgQQyAiBzZs3W4MGDWzLli32+uuv26effuq6ht9///1w+b/88ktTi+GKFSusdu3a4e3+wjfffGMVKlSwxYsXu3MpGFQA+Oijj1qfPn38bLwigAAC+VqALuB8XT0UDgEEUimgwO3GG2+0evXq2aWXXmoLFiywqVOnuu5fdQHrp27duu6SfjevXq+66io76aSTrEyZMi7YU4bYiSNqSSQhgAACmSKQnSkFpZwIIIBAKgSys7NNP0r79++39u3b24gRIw44deXKld027VeL4FNPPeW6dXWMWhH37t0bdUzJkiWj1llBAAEE8rMAAWB+rh3KhgACR1SgcePGpjF+Gr/nB4WRF1RX8bJly9zs4XPPPdftmj59emQWlhFAAIGMFKALOCOrjUIjgEAqBHr16mVbt261zp0729y5c23NmjU2efJk69Gjh+3bt8+OPvpoN/P3ySeftFWrVrmJIX379k3FpTkHAgggkKcCBIB5ys/FEUAgLwU0U3fGjBku2NODotW1q4kceji0ZvjqZ/z48W6soPbdfvvtNnLkyLwsMtdGAAEEUiLALOCUMHISBBBAAAEEEEAgcwRoAcycuqKkCCCAAAIIIIBASgQIAFPCyEkQQAABBBBAAIHMESAAzJy6oqQIIIAAAggggEBKBAgAU8LISRBAAAEEEEAAgcwRIADMnLqipAgggAACCCCAQEoECABTwshJEEAAAQQQQACBzBEgAMycuqKkCCCAAAIIIIBASgQIAFPCyEkQQAABBBBAAIHMESAAzJy6oqQIIIAAAggggEBKBAgAU8LISRBAAAEEEEAAgcwRIADMnLqipAgggAACCCCAQEoECABTwshJEEAAAQQQQACBzBH4/7gGCWiiComwAAAAAElFTkSuQmCC", + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "Image.open(\"coding/graph.png\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## A Larger 
Example of Group Chat" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33mUser_proxy\u001b[0m (to chat_manager):\n", + "\n", + "Design and implement a multimodal product for people with vision disabilities.\n", + "The pipeline will take an image and run Gemini model to describe:\n", + "1. what objects are in the image, and\n", + "2. where these objects are located.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Product Name:** VisionAid\n", + "\n", + "**Target Audience:** People with vision disabilities\n", + "\n", + "**Product Concept:** A comprehensive and multimodal software platform that empowers individuals with vision impairments by providing them with essential information about their surroundings through image recognition and localization.\n", + "\n", + "**Product Design and Implementation:**\n", + "\n", + "**1. Image Acquisition:**\n", + "* The platform utilizes a camera or other image acquisition device to capture images of the user's surroundings.\n", + "* The device can be integrated into a smartphone, wearable device, or smart home appliance.\n", + "\n", + "**2. Image Processing:**\n", + "* The captured image is preprocessed to optimize the performance of the computer vision model.\n", + "* This includes scaling, cropping, and enhancing the image for better recognition.\n", + "\n", + "**3. Object Detection and Localization:**\n", + "* The Gemini computer vision model is then employed to analyze the preprocessed image.\n", + "* The model identifies the presence of objects in the image and determines their location with bounding boxes and spatial descriptions.\n", + "\n", + "**4. Multimodal Output:**\n", + "* **Audio Output:** The platform provides detailed audio descriptions of the detected objects and their locations.\n", + "* **Haptic Output:** Haptic feedback is used to convey the spatial arrangement of objects through vibrations or tactile sensations.\n", + "* **Visual Output (for partial vision):** If the user has partial vision, the platform can provide a simplified visual representation of the detected objects and their locations on a screen.\n", + "\n", + "**5. Accessibility and Customization:**\n", + "* The platform is designed to be highly accessible, with adjustable settings for audio volume, haptic intensity, and visual contrast.\n", + "* Users can customize the output to suit their individual preferences and needs.\n", + "\n", + "**Innovative Features:**\n", + "\n", + "* **Real-Time Object Detection:** The platform operates in real-time, providing continuous feedback about the user's surroundings as they move.\n", + "* **Scene Interpretation:** Advanced algorithms analyze the relationship between objects and provide contextual descriptions. 
For example, the platform can differentiate between a stove and a coffee maker.\n", + "* **Integration with Assistive Technology:** The platform can be integrated with other assistive technologies, such as screen readers and navigation apps, to enhance the user experience.\n", + "\n", + "**Benefits for Users with Vision Disabilities:**\n", + "\n", + "* **Improved Spatial Awareness:** The platform empowers users to navigate their environment confidently and independently.\n", + "* **Enhanced Safety:** By identifying hazards and obstacles, the platform helps users avoid accidents and stay safe.\n", + "* **Increased Independence:** The platform allows users to perform daily tasks and engage in activities that would otherwise be challenging with limited vision.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Object Recognition by Sound:** The platform can be trained to recognize objects based on their unique sounds. This feature is particularly useful for identifying objects that are difficult to see, such as small items or objects in low-light conditions.\n", + "* **Augmented Reality Integration:** By leveraging augmented reality technology, the platform can overlay virtual information onto the user's surroundings. This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\n", + "* **Machine Learning for Personalized Experiences:** The platform can employ machine learning algorithms to learn the user's preferences and adapt its output accordingly. For example, it can prioritize the detection of objects that are of particular interest to the user.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Navigation and Wayfinding:** The platform can assist users in navigating indoor and outdoor environments, providing directions and identifying obstacles.\n", + "* **Object Identification and Interaction:** Users can identify and interact with objects in their surroundings, such as appliances, furniture, and food items.\n", + "* **Social Interaction and Communication:** The platform can facilitate social interactions by providing descriptions of people and objects in the user's environment.\n", + "* **Education and Learning:** The platform can be used as an educational tool to help students with vision impairments learn about their surroundings and develop their spatial reasoning skills.\n", + "\n", + "**Impact on the Lives of People with Vision Disabilities:**\n", + "\n", + "VisionAid has the potential to transform the lives of people with vision disabilities by providing them with a greater sense of independence, safety, and confidence. By empowering them with essential information about their surroundings, the platform enables them to navigate the world more effectively and participate fully in society.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Crowd-Sourced Object Recognition:** The platform can leverage a crowd-sourced database to expand its object recognition capabilities. 
Users can contribute images and descriptions of objects to the database, which can then be used to train the computer vision model and improve the platform's accuracy.\n", + "* **Integration with Smart Home Devices:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\n", + "* **Gamification and Motivation:** The platform can incorporate gamification elements to motivate users and make the learning and exploration process more enjoyable. For example, users can earn points or badges for identifying objects correctly or completing challenges.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Assistive Reading:** The platform can be used as an assistive reading tool for people with low vision or dyslexia. It can scan printed text and provide audio descriptions of the words and their arrangement on the page.\n", + "* **Virtual Reality Exploration:** The platform can be integrated with virtual reality technology to create immersive and interactive experiences for people with vision disabilities. Users can explore virtual environments and interact with objects in a safe and controlled setting.\n", + "* **Art and Culture Accessibility:** The platform can be used to make art and cultural experiences more accessible to people with vision disabilities. It can provide audio descriptions of paintings, sculptures, and other works of art, as well as provide tactile tours of museums and galleries.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Object Tracking:** The platform can employ advanced computer vision algorithms to track the movement of objects in real-time. This feature is particularly useful for monitoring moving objects, such as people or vehicles, and providing continuous updates to the user.\n", + "* **Gesture Recognition:** The platform can incorporate gesture recognition technology to allow users to interact with the platform using simple hand gestures. This can provide a more intuitive and hands-free way to control the platform's functionality.\n", + "* **Multi-Language Support:** The platform can be localized to support multiple languages, making it accessible to users from diverse linguistic backgrounds.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Safety and Security:** The platform can be used to enhance safety and security for people with vision disabilities. It can detect and identify potential hazards, such as obstacles, uneven surfaces, or suspicious individuals, and provide timely alerts to the user.\n", + "* **Health and Wellness:** The platform can be integrated with health and wellness devices to provide users with information about their physical condition and surroundings. 
For example, it can monitor blood glucose levels, heart rate, or activity levels, and provide audio feedback to the user.\n", + "* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\n", + "\n", + "**Impact on the Future:**\n", + "\n", + "VisionAid has the potential to shape the future of assistive technology by providing a comprehensive and innovative solution for people with vision disabilities. By leveraging cutting-edge technologies and incorporating user-centric design principles, the platform can empower individuals to live more independent, fulfilling, and connected lives.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\n", + "* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\n", + "* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\n", + "* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\n", + "* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\n", + "\n", + "**Impact on the Future of Healthcare:**\n", + "\n", + "VisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. 
It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\n", + "* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\n", + "* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\n", + "* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\n", + "* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Augmented Reality Integration:** The platform can leverage augmented reality (AR) technology to overlay virtual information onto the user's surroundings. This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\n", + "* **Real-Time Obstacle Detection:** The platform can employ advanced computer vision algorithms to detect and identify obstacles in the user's path in real-time. This can help users avoid collisions and navigate their environment more safely.\n", + "* **Smart Home Integration:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Travel and Exploration:** The platform can assist people with vision disabilities in traveling and exploring new places. It can provide audio descriptions of landmarks, tourist attractions, and transportation options, as well as guidance on how to navigate unfamiliar environments.\n", + "* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\n", + "* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Personalized Content Recommendations:** The platform can leverage machine learning algorithms to recommend personalized content and experiences to users based on their interests, preferences, and usage patterns.\n", + "* **Multi-Sensory Feedback:** The platform can incorporate multiple sensory modalities, such as audio, haptic, and tactile feedback, to provide a more immersive and engaging experience for users with different sensory preferences.\n", + "* **Open Source and Community Involvement:** The platform can be released as open source software, allowing the community to contribute to its development and create custom integrations and add-ons.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities, providing them with access to a wide range of educational resources and interactive learning experiences.\n", + "* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities, providing them with the skills and resources they need to succeed in the workplace.\n", + "* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently, providing them with the tools and information they need to navigate their environment, manage their finances, and participate in social activities.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\n", + "\n", + "* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\n", + "* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. 
For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\n", + "* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\n", + "* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\n", + "* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\n", + "\n", + "**Impact on the Future of Healthcare:**\n", + "\n", + "VisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\n", + "\n", + "* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\n", + "* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\n", + "* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. 
It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\n", + "* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\n", + "* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "* **Augmented Reality Integration:** The platform can leverage augmented reality (AR) technology to overlay virtual information onto the user's surroundings. This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\n", + "* **Real-Time Obstacle Detection:** The platform can employ advanced computer vision algorithms to detect and identify obstacles in the user's path in real-time. This can help users avoid collisions and navigate their environment more safely.\n", + "* **Smart Home Integration:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Travel and Exploration:** The platform can assist people with vision disabilities in traveling and exploring new places. It can provide audio descriptions of landmarks, tourist attractions, and transportation options, as well as guidance on how to navigate unfamiliar environments.\n", + "* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. 
It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\n", + "* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\n", + "\n", + "* **Personalized Content Recommendations:** The platform can leverage machine learning algorithms to recommend personalized content and experiences to users based on their interests, preferences, and usage patterns.\n", + "* **Multi-Sensory Feedback:** The platform can incorporate multiple sensory modalities, such as audio, haptic, and tactile feedback, to provide a more immersive and engaging experience for users with different sensory preferences.\n", + "* **Open Source and Community Involvement:** The platform can be released as open source software, allowing the community to contribute to its development and create custom integrations and add-ons.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities, providing them with access to a wide range of educational resources and interactive learning experiences.\n", + "* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities, providing them with the skills and resources they need to succeed in the workplace.\n", + "* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently, providing them with the tools and information they need to navigate their environment, manage their finances, and participate in social activities.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\n", + "\n", + "* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\n", + "* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\n", + "* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\n", + "* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\n", + "* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\n", + "\n", + "**Impact on the Future of Healthcare:**\n", + "\n", + "VisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\n", + "\n", + "* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. 
This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\n", + "* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\n", + "* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\n", + "* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\n", + "* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\n", + "\n", + "**Impact on Society:**\n", + "\n", + "VisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mProduct_manager\u001b[0m (to chat_manager):\n", + "\n", + "**Technical Implementation:**\n", + "\n", + "The technical implementation of VisionAid involves the following key components:\n", + "\n", + "* **Image Acquisition:** The platform utilizes a camera or other image acquisition device to capture images of the user's surroundings.\n", + "* **Image Preprocessing:** The captured image is preprocessed to optimize the performance of the computer vision model. This includes scaling, cropping, and enhancing the image for better recognition.\n", + "* **Object Detection and Localization:** The Gemini computer vision model is employed to analyze the preprocessed image. 
The model identifies the presence of objects in the image and determines their location with bounding boxes and spatial descriptions.\n", + "* **Multimodal Output:** The platform provides detailed audio descriptions of the detected objects and their locations. Haptic feedback is used to convey the spatial arrangement of objects through vibrations or tactile sensations. If the user has partial vision, the platform can provide a simplified visual representation of the detected objects and their locations on a screen.\n", + "\n", + "**Accessibility and Customization:**\n", + "\n", + "VisionAid is designed to be highly accessible, with adjustable settings for audio volume, haptic intensity, and visual contrast. Users can customize the output to suit their individual preferences and needs.\n", + "\n", + "**Additional Innovative Features:**\n", + "\n", + "To enhance the user experience and address specific challenges faced by people with vision disabilities, VisionAid incorporates the following innovative features:\n", + "\n", + "* **Real-Time Object Detection:** The platform operates in real-time, providing continuous feedback about the user's surroundings as they move.\n", + "* **Scene Interpretation:** Advanced algorithms analyze the relationship between objects and provide contextual descriptions. For example, the platform can differentiate between a stove and a coffee maker.\n", + "* **Integration with Assistive Technology:** VisionAid can be integrated with other assistive technologies, such as screen readers and navigation apps, to enhance the user experience.\n", + "\n", + "**Potential Applications:**\n", + "\n", + "VisionAid has a wide range of potential applications, including:\n", + "\n", + "* **Navigation and Wayfinding:** The platform can assist users in navigating indoor and outdoor environments, providing directions and identifying obstacles.\n", + "* **Object Identification and Interaction:** Users can identify and interact with objects in their surroundings, such as appliances, furniture, and food items.\n", + "* **Social Interaction and Communication:** The platform can facilitate social interactions by providing descriptions of people and objects in the user's environment.\n", + "* **Education and Learning:** VisionAid can be used as an educational tool to help students with vision impairments learn about their surroundings and develop their spatial reasoning skills.\n", + "\n", + "**Impact on the Lives of People with Vision Disabilities:**\n", + "\n", + "VisionAid has the potential to transform the lives of people with vision disabilities by providing them with a greater sense of independence, safety, and confidence. By empowering them with essential information about their surroundings, the platform enables them to navigate the world more effectively and participate fully in society.\n", + "\n", + "**Long-Term Vision:**\n", + "\n", + "The long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\n", + "\n", + "**By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': 'Design and implement a multimodal product for people with vision disabilities.\\nThe pipeline will take an image and run Gemini model to describe:\\n1. what objects are in the image, and\\n2. where these objects are located.', 'role': 'assistant'}, {'content': \"**Product Name:** VisionAid\\n\\n**Target Audience:** People with vision disabilities\\n\\n**Product Concept:** A comprehensive and multimodal software platform that empowers individuals with vision impairments by providing them with essential information about their surroundings through image recognition and localization.\\n\\n**Product Design and Implementation:**\\n\\n**1. Image Acquisition:**\\n* The platform utilizes a camera or other image acquisition device to capture images of the user's surroundings.\\n* The device can be integrated into a smartphone, wearable device, or smart home appliance.\\n\\n**2. Image Processing:**\\n* The captured image is preprocessed to optimize the performance of the computer vision model.\\n* This includes scaling, cropping, and enhancing the image for better recognition.\\n\\n**3. Object Detection and Localization:**\\n* The Gemini computer vision model is then employed to analyze the preprocessed image.\\n* The model identifies the presence of objects in the image and determines their location with bounding boxes and spatial descriptions.\\n\\n**4. Multimodal Output:**\\n* **Audio Output:** The platform provides detailed audio descriptions of the detected objects and their locations.\\n* **Haptic Output:** Haptic feedback is used to convey the spatial arrangement of objects through vibrations or tactile sensations.\\n* **Visual Output (for partial vision):** If the user has partial vision, the platform can provide a simplified visual representation of the detected objects and their locations on a screen.\\n\\n**5. Accessibility and Customization:**\\n* The platform is designed to be highly accessible, with adjustable settings for audio volume, haptic intensity, and visual contrast.\\n* Users can customize the output to suit their individual preferences and needs.\\n\\n**Innovative Features:**\\n\\n* **Real-Time Object Detection:** The platform operates in real-time, providing continuous feedback about the user's surroundings as they move.\\n* **Scene Interpretation:** Advanced algorithms analyze the relationship between objects and provide contextual descriptions. 
For example, the platform can differentiate between a stove and a coffee maker.\\n* **Integration with Assistive Technology:** The platform can be integrated with other assistive technologies, such as screen readers and navigation apps, to enhance the user experience.\\n\\n**Benefits for Users with Vision Disabilities:**\\n\\n* **Improved Spatial Awareness:** The platform empowers users to navigate their environment confidently and independently.\\n* **Enhanced Safety:** By identifying hazards and obstacles, the platform helps users avoid accidents and stay safe.\\n* **Increased Independence:** The platform allows users to perform daily tasks and engage in activities that would otherwise be challenging with limited vision.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Object Recognition by Sound:** The platform can be trained to recognize objects based on their unique sounds. This feature is particularly useful for identifying objects that are difficult to see, such as small items or objects in low-light conditions.\\n* **Augmented Reality Integration:** By leveraging augmented reality technology, the platform can overlay virtual information onto the user's surroundings. This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\\n* **Machine Learning for Personalized Experiences:** The platform can employ machine learning algorithms to learn the user's preferences and adapt its output accordingly. For example, it can prioritize the detection of objects that are of particular interest to the user.\\n\\n**Potential Applications:**\\n\\n* **Navigation and Wayfinding:** The platform can assist users in navigating indoor and outdoor environments, providing directions and identifying obstacles.\\n* **Object Identification and Interaction:** Users can identify and interact with objects in their surroundings, such as appliances, furniture, and food items.\\n* **Social Interaction and Communication:** The platform can facilitate social interactions by providing descriptions of people and objects in the user's environment.\\n* **Education and Learning:** The platform can be used as an educational tool to help students with vision impairments learn about their surroundings and develop their spatial reasoning skills.\\n\\n**Impact on the Lives of People with Vision Disabilities:**\\n\\nVisionAid has the potential to transform the lives of people with vision disabilities by providing them with a greater sense of independence, safety, and confidence. By empowering them with essential information about their surroundings, the platform enables them to navigate the world more effectively and participate fully in society.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Crowd-Sourced Object Recognition:** The platform can leverage a crowd-sourced database to expand its object recognition capabilities. Users can contribute images and descriptions of objects to the database, which can then be used to train the computer vision model and improve the platform's accuracy.\\n* **Integration with Smart Home Devices:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. 
For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\\n* **Gamification and Motivation:** The platform can incorporate gamification elements to motivate users and make the learning and exploration process more enjoyable. For example, users can earn points or badges for identifying objects correctly or completing challenges.\\n\\n**Potential Applications:**\\n\\n* **Assistive Reading:** The platform can be used as an assistive reading tool for people with low vision or dyslexia. It can scan printed text and provide audio descriptions of the words and their arrangement on the page.\\n* **Virtual Reality Exploration:** The platform can be integrated with virtual reality technology to create immersive and interactive experiences for people with vision disabilities. Users can explore virtual environments and interact with objects in a safe and controlled setting.\\n* **Art and Culture Accessibility:** The platform can be used to make art and cultural experiences more accessible to people with vision disabilities. It can provide audio descriptions of paintings, sculptures, and other works of art, as well as provide tactile tours of museums and galleries.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Object Tracking:** The platform can employ advanced computer vision algorithms to track the movement of objects in real-time. This feature is particularly useful for monitoring moving objects, such as people or vehicles, and providing continuous updates to the user.\\n* **Gesture Recognition:** The platform can incorporate gesture recognition technology to allow users to interact with the platform using simple hand gestures. This can provide a more intuitive and hands-free way to control the platform's functionality.\\n* **Multi-Language Support:** The platform can be localized to support multiple languages, making it accessible to users from diverse linguistic backgrounds.\\n\\n**Potential Applications:**\\n\\n* **Safety and Security:** The platform can be used to enhance safety and security for people with vision disabilities. It can detect and identify potential hazards, such as obstacles, uneven surfaces, or suspicious individuals, and provide timely alerts to the user.\\n* **Health and Wellness:** The platform can be integrated with health and wellness devices to provide users with information about their physical condition and surroundings. For example, it can monitor blood glucose levels, heart rate, or activity levels, and provide audio feedback to the user.\\n* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\\n\\n**Impact on the Future:**\\n\\nVisionAid has the potential to shape the future of assistive technology by providing a comprehensive and innovative solution for people with vision disabilities. 
By leveraging cutting-edge technologies and incorporating user-centric design principles, the platform can empower individuals to live more independent, fulfilling, and connected lives.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\\n* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\\n* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\\n\\n**Potential Applications:**\\n\\n* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\\n* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\\n* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\\n\\n**Impact on the Future of Healthcare:**\\n\\nVisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\", 'name': 'Product_manager', 'role': 'user'}, {'content': '**Additional Innovative Features:**\\n\\n* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. 
This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\\n* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\\n* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\\n\\n**Potential Applications:**\\n\\n* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\\n* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\\n* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.', 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Augmented Reality Integration:** The platform can leverage augmented reality (AR) technology to overlay virtual information onto the user's surroundings. This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\\n* **Real-Time Obstacle Detection:** The platform can employ advanced computer vision algorithms to detect and identify obstacles in the user's path in real-time. This can help users avoid collisions and navigate their environment more safely.\\n* **Smart Home Integration:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. 
For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\\n\\n**Potential Applications:**\\n\\n* **Travel and Exploration:** The platform can assist people with vision disabilities in traveling and exploring new places. It can provide audio descriptions of landmarks, tourist attractions, and transportation options, as well as guidance on how to navigate unfamiliar environments.\\n* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\\n* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Personalized Content Recommendations:** The platform can leverage machine learning algorithms to recommend personalized content and experiences to users based on their interests, preferences, and usage patterns.\\n* **Multi-Sensory Feedback:** The platform can incorporate multiple sensory modalities, such as audio, haptic, and tactile feedback, to provide a more immersive and engaging experience for users with different sensory preferences.\\n* **Open Source and Community Involvement:** The platform can be released as open source software, allowing the community to contribute to its development and create custom integrations and add-ons.\\n\\n**Potential Applications:**\\n\\n* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities, providing them with access to a wide range of educational resources and interactive learning experiences.\\n* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities, providing them with the skills and resources they need to succeed in the workplace.\\n* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently, providing them with the tools and information they need to navigate their environment, manage their finances, and participate in social activities.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to 
create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\\n\\n* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\\n* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\\n* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\\n\\n**Potential Applications:**\\n\\n* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\\n* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\\n* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\\n\\n**Impact on the Future of Healthcare:**\\n\\nVisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\\n\\n* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\\n* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\\n* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\\n\\n**Potential Applications:**\\n\\n* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\\n* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\\n* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Additional Innovative Features:**\\n\\n* **Augmented Reality Integration:** The platform can leverage augmented reality (AR) technology to overlay virtual information onto the user's surroundings. 
This can provide additional context and guidance, such as highlighting the location of a specific object or providing directions to a destination.\\n* **Real-Time Obstacle Detection:** The platform can employ advanced computer vision algorithms to detect and identify obstacles in the user's path in real-time. This can help users avoid collisions and navigate their environment more safely.\\n* **Smart Home Integration:** The platform can be integrated with smart home devices, such as smart speakers and smart lights, to provide a more comprehensive and automated experience. For example, the platform can trigger smart lights to illuminate a specific object or provide audio descriptions of objects in a room.\\n\\n**Potential Applications:**\\n\\n* **Travel and Exploration:** The platform can assist people with vision disabilities in traveling and exploring new places. It can provide audio descriptions of landmarks, tourist attractions, and transportation options, as well as guidance on how to navigate unfamiliar environments.\\n* **Accessibility in Public Spaces:** The platform can be deployed in public spaces, such as museums, libraries, and retail stores, to make these spaces more accessible and inclusive for people with vision disabilities. It can provide audio descriptions of exhibits, books, and products, as well as guidance on how to navigate the space.\\n* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to make a significant impact on society by promoting inclusivity and empowering people with vision disabilities. By providing them with the tools they need to navigate the world more effectively, the platform can help break down barriers and create a more equitable society for all.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\\n\\n* **Personalized Content Recommendations:** The platform can leverage machine learning algorithms to recommend personalized content and experiences to users based on their interests, preferences, and usage patterns.\\n* **Multi-Sensory Feedback:** The platform can incorporate multiple sensory modalities, such as audio, haptic, and tactile feedback, to provide a more immersive and engaging experience for users with different sensory preferences.\\n* **Open Source and Community Involvement:** The platform can be released as open source software, allowing the community to contribute to its development and create custom integrations and add-ons.\\n\\n**Potential Applications:**\\n\\n* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities, providing them with access to a wide range of educational resources and interactive learning experiences.\\n* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities, providing them with the skills and resources they need to succeed in the workplace.\\n* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently, providing them with the tools and information they need to navigate their environment, manage their finances, and participate in social activities.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\\n\\n* **AI-Powered Object Recognition:** The platform can utilize advanced artificial intelligence (AI) algorithms to continuously improve its object recognition capabilities. By analyzing large datasets of images and descriptions, the AI can learn to identify and classify a wide range of objects with high accuracy.\\n* **Contextual Awareness:** The platform can leverage contextual information to provide more meaningful and personalized descriptions. For example, it can identify the user's current location and provide relevant information about nearby objects, such as store names, street signs, or landmarks.\\n* **Integration with Navigation Apps:** The platform can be integrated with navigation apps to provide users with turn-by-turn directions and guidance. 
This can help users navigate unfamiliar environments and reach their destinations safely and efficiently.\\n\\n**Potential Applications:**\\n\\n* **Job Training and Employment:** The platform can be used to train people with vision disabilities for various jobs and occupations. It can provide audio descriptions of work instructions, equipment, and materials, as well as guidance on how to perform specific tasks.\\n* **Transportation Accessibility:** The platform can be integrated with public transportation systems to make them more accessible for people with vision disabilities. It can provide real-time information about bus and train schedules, as well as guidance on how to navigate stations and platforms.\\n* **Social and Community Engagement:** The platform can facilitate social and community engagement for people with vision disabilities. It can provide audio descriptions of social events, activities, and gatherings, as well as information about local organizations and resources.\\n\\n**Impact on the Future of Healthcare:**\\n\\nVisionAid has the potential to revolutionize the delivery of healthcare for people with vision disabilities. By providing them with real-time access to information about their surroundings, the platform can empower them to make more informed decisions about their health and well-being. It can also facilitate communication between patients and healthcare providers, leading to improved patient outcomes and satisfaction.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**Additional Innovative Features:**\\n\\n* **Personalized Learning:** The platform can incorporate machine learning algorithms to tailor its content and interactions to the individual needs and preferences of each user. This can include adjusting the difficulty of object recognition tasks, providing customized feedback, and recommending relevant resources.\\n* **Gamification and Motivation:** The platform can incorporate gamification elements to make the learning and exploration process more engaging and motivating. Users can earn points, badges, and rewards for completing tasks, identifying objects correctly, and exploring new environments.\\n* **Community Building:** The platform can foster a sense of community among users with vision disabilities. It can provide a space for users to connect with each other, share experiences, and support each other on their journey.\\n\\n**Potential Applications:**\\n\\n* **Education and Learning:** The platform can be used as a powerful educational tool for students with vision disabilities. It can provide audio descriptions of textbooks, assignments, and educational materials, as well as guidance on how to navigate classrooms and participate in group activities.\\n* **Employment and Career Development:** The platform can assist people with vision disabilities in finding and securing employment opportunities. 
It can provide information about job openings, training programs, and assistive technologies that can help them succeed in the workplace.\\n* **Independent Living:** The platform can empower people with vision disabilities to live more independently and confidently. It can provide guidance on how to perform daily tasks, such as cooking, cleaning, and managing finances, as well as information about accessible housing and transportation options.\\n\\n**Impact on Society:**\\n\\nVisionAid has the potential to create a more inclusive and equitable society for people with vision disabilities. By providing them with the tools and resources they need to succeed, the platform can help break down barriers and empower them to participate fully in all aspects of life.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information, opportunities, and experiences. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\nBy empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.\", 'name': 'Product_manager', 'role': 'user'}, {'content': \"**Technical Implementation:**\\n\\nThe technical implementation of VisionAid involves the following key components:\\n\\n* **Image Acquisition:** The platform utilizes a camera or other image acquisition device to capture images of the user's surroundings.\\n* **Image Preprocessing:** The captured image is preprocessed to optimize the performance of the computer vision model. This includes scaling, cropping, and enhancing the image for better recognition.\\n* **Object Detection and Localization:** The Gemini computer vision model is employed to analyze the preprocessed image. The model identifies the presence of objects in the image and determines their location with bounding boxes and spatial descriptions.\\n* **Multimodal Output:** The platform provides detailed audio descriptions of the detected objects and their locations. Haptic feedback is used to convey the spatial arrangement of objects through vibrations or tactile sensations. If the user has partial vision, the platform can provide a simplified visual representation of the detected objects and their locations on a screen.\\n\\n**Accessibility and Customization:**\\n\\nVisionAid is designed to be highly accessible, with adjustable settings for audio volume, haptic intensity, and visual contrast. Users can customize the output to suit their individual preferences and needs.\\n\\n**Additional Innovative Features:**\\n\\nTo enhance the user experience and address specific challenges faced by people with vision disabilities, VisionAid incorporates the following innovative features:\\n\\n* **Real-Time Object Detection:** The platform operates in real-time, providing continuous feedback about the user's surroundings as they move.\\n* **Scene Interpretation:** Advanced algorithms analyze the relationship between objects and provide contextual descriptions. 
For example, the platform can differentiate between a stove and a coffee maker.\\n* **Integration with Assistive Technology:** VisionAid can be integrated with other assistive technologies, such as screen readers and navigation apps, to enhance the user experience.\\n\\n**Potential Applications:**\\n\\nVisionAid has a wide range of potential applications, including:\\n\\n* **Navigation and Wayfinding:** The platform can assist users in navigating indoor and outdoor environments, providing directions and identifying obstacles.\\n* **Object Identification and Interaction:** Users can identify and interact with objects in their surroundings, such as appliances, furniture, and food items.\\n* **Social Interaction and Communication:** The platform can facilitate social interactions by providing descriptions of people and objects in the user's environment.\\n* **Education and Learning:** VisionAid can be used as an educational tool to help students with vision impairments learn about their surroundings and develop their spatial reasoning skills.\\n\\n**Impact on the Lives of People with Vision Disabilities:**\\n\\nVisionAid has the potential to transform the lives of people with vision disabilities by providing them with a greater sense of independence, safety, and confidence. By empowering them with essential information about their surroundings, the platform enables them to navigate the world more effectively and participate fully in society.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\n**By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**\", 'name': 'Product_manager', 'role': 'user'}], summary=\"**Technical Implementation:**\\n\\nThe technical implementation of VisionAid involves the following key components:\\n\\n* **Image Acquisition:** The platform utilizes a camera or other image acquisition device to capture images of the user's surroundings.\\n* **Image Preprocessing:** The captured image is preprocessed to optimize the performance of the computer vision model. This includes scaling, cropping, and enhancing the image for better recognition.\\n* **Object Detection and Localization:** The Gemini computer vision model is employed to analyze the preprocessed image. The model identifies the presence of objects in the image and determines their location with bounding boxes and spatial descriptions.\\n* **Multimodal Output:** The platform provides detailed audio descriptions of the detected objects and their locations. Haptic feedback is used to convey the spatial arrangement of objects through vibrations or tactile sensations. If the user has partial vision, the platform can provide a simplified visual representation of the detected objects and their locations on a screen.\\n\\n**Accessibility and Customization:**\\n\\nVisionAid is designed to be highly accessible, with adjustable settings for audio volume, haptic intensity, and visual contrast. 
Users can customize the output to suit their individual preferences and needs.\\n\\n**Additional Innovative Features:**\\n\\nTo enhance the user experience and address specific challenges faced by people with vision disabilities, VisionAid incorporates the following innovative features:\\n\\n* **Real-Time Object Detection:** The platform operates in real-time, providing continuous feedback about the user's surroundings as they move.\\n* **Scene Interpretation:** Advanced algorithms analyze the relationship between objects and provide contextual descriptions. For example, the platform can differentiate between a stove and a coffee maker.\\n* **Integration with Assistive Technology:** VisionAid can be integrated with other assistive technologies, such as screen readers and navigation apps, to enhance the user experience.\\n\\n**Potential Applications:**\\n\\nVisionAid has a wide range of potential applications, including:\\n\\n* **Navigation and Wayfinding:** The platform can assist users in navigating indoor and outdoor environments, providing directions and identifying obstacles.\\n* **Object Identification and Interaction:** Users can identify and interact with objects in their surroundings, such as appliances, furniture, and food items.\\n* **Social Interaction and Communication:** The platform can facilitate social interactions by providing descriptions of people and objects in the user's environment.\\n* **Education and Learning:** VisionAid can be used as an educational tool to help students with vision impairments learn about their surroundings and develop their spatial reasoning skills.\\n\\n**Impact on the Lives of People with Vision Disabilities:**\\n\\nVisionAid has the potential to transform the lives of people with vision disabilities by providing them with a greater sense of independence, safety, and confidence. By empowering them with essential information about their surroundings, the platform enables them to navigate the world more effectively and participate fully in society.\\n\\n**Long-Term Vision:**\\n\\nThe long-term vision for VisionAid is to create a world where people with vision disabilities have equal access to information and opportunities. 
The platform will continue to evolve and incorporate new technologies to provide users with the most comprehensive and innovative assistive experience possible.\\n\\n**By empowering individuals with vision impairments, VisionAid aims to foster a more inclusive and equitable society where everyone has the chance to reach their full potential.**\", cost={'usage_including_cached_inference': {'total_cost': 0.015432000000000001, 'gemini-pro': {'cost': 0.015432000000000001, 'prompt_tokens': 30744, 'completion_tokens': 40, 'total_tokens': 30784}}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=[])" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "coder = AssistantAgent(\n", + " name=\"Coder\",\n", + " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", + " max_consecutive_auto_reply=10,\n", + " description=\"I am good at writing code.\",\n", + ")\n", + "\n", + "pm = AssistantAgent(\n", + " name=\"Product_manager\",\n", + " system_message=\"Creative in software product ideas.\",\n", + " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", + " max_consecutive_auto_reply=10,\n", + " description=\"I am good at designing products and software.\",\n", + ")\n", + "\n", + "# The user proxy executes any code the agents write and ends the chat on TERMINATE.\n", + "user_proxy = UserProxyAgent(\n", + " name=\"User_proxy\",\n", + " code_execution_config={\"last_n_messages\": 20, \"work_dir\": \"coding\", \"use_docker\": False},\n", + " human_input_mode=\"NEVER\",\n", + " is_termination_msg=lambda x: content_str(x.get(\"content\")).find(\"TERMINATE\") >= 0,\n", + " description=\"I stand for the user and can run code.\",\n", + ")\n", + "\n", + "# Put all three agents in a group chat; the Gemini-backed manager selects the next speaker.\n", + "groupchat = autogen.GroupChat(agents=[user_proxy, coder, pm], messages=[], max_round=12)\n", + "manager = autogen.GroupChatManager(\n", + " groupchat=groupchat,\n", + " llm_config={\"config_list\": config_list_gemini, \"seed\": seed},\n", + " is_termination_msg=lambda x: content_str(x.get(\"content\")).find(\"TERMINATE\") >= 0,\n", + ")\n", + "user_proxy.initiate_chat(\n", + " manager,\n", + " message=\"\"\"Design and implement a multimodal product for people with vision disabilities.\n", + "The pipeline will take an image and run the Gemini model to describe:\n", + "1. what objects are in the image, and\n", + "2. 
where these objects are located.\"\"\",\n", + ")" + ] + } + ], + "metadata": { + "front_matter": { + "description": "Using Gemini with AutoGen", + "tags": [ + "gemini" + ] + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1" + } + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "2d910cfd2d2a4fc49fc30fbbdc5576a7": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "2.0.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border_bottom": null, + "border_left": null, + "border_right": null, + "border_top": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "454146d0f7224f038689031002906e6f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "2.0.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_e4ae2b6f5a974fd4bafb6abb9d12ff26", + "IPY_MODEL_577e1e3cc4db4942b0883577b3b52755", + "IPY_MODEL_b40bdfb1ac1d4cffb7cefcb870c64d45" + ], + "layout": "IPY_MODEL_dc83c7bff2f241309537a8119dfc7555", + "tabbable": null, + "tooltip": null + } + }, + "577e1e3cc4db4942b0883577b3b52755": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "2.0.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "", + "description_allow_html": false, + "layout": "IPY_MODEL_2d910cfd2d2a4fc49fc30fbbdc5576a7", + "max": 1, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_74a6ba0c3cbc4051be0a83e152fe1e62", + "tabbable": null, + "tooltip": null, + "value": 1 + } + }, + "6086462a12d54bafa59d3c4566f06cb2": { + 
"model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "2.0.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border_bottom": null, + "border_left": null, + "border_right": null, + "border_top": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "74a6ba0c3cbc4051be0a83e152fe1e62": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "" + } + }, + "7d3f3d9e15894d05a4d188ff4f466554": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "HTMLStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "StyleView", + "background": null, + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "b40bdfb1ac1d4cffb7cefcb870c64d45": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "2.0.0", + "_view_name": "HTMLView", + "description": "", + "description_allow_html": false, + "layout": "IPY_MODEL_f1355871cc6f4dd4b50d9df5af20e5c8", + "placeholder": "​", + "style": "IPY_MODEL_ca245376fd9f4354af6b2befe4af4466", + "tabbable": null, + "tooltip": null, + "value": " 1/1 [00:00<00:00, 44.69it/s]" + } + }, + "ca245376fd9f4354af6b2befe4af4466": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "HTMLStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "StyleView", + "background": null, + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "dc83c7bff2f241309537a8119dfc7555": { + "model_module": "@jupyter-widgets/base", + 
"model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "2.0.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border_bottom": null, + "border_left": null, + "border_right": null, + "border_top": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "e4ae2b6f5a974fd4bafb6abb9d12ff26": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "2.0.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "2.0.0", + "_view_name": "HTMLView", + "description": "", + "description_allow_html": false, + "layout": "IPY_MODEL_6086462a12d54bafa59d3c4566f06cb2", + "placeholder": "​", + "style": "IPY_MODEL_7d3f3d9e15894d05a4d188ff4f466554", + "tabbable": null, + "tooltip": null, + "value": "100%" + } + }, + "f1355871cc6f4dd4b50d9df5af20e5c8": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "2.0.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "2.0.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border_bottom": null, + "border_left": null, + "border_right": null, + "border_top": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}