From 0df123d38143681731c24e3128b2ce7efbc18133 Mon Sep 17 00:00:00 2001 From: Maxim Saplin Date: Sat, 13 Jan 2024 23:59:46 +0300 Subject: [PATCH 1/9] Switched to AzureOpenAI for api_type=="azure" --- autogen/oai/client.py | 58 ++++++++++++------------------------------- 1 file changed, 16 insertions(+), 42 deletions(-) diff --git a/autogen/oai/client.py b/autogen/oai/client.py index 1bdfd835d1e..d067c35816e 100644 --- a/autogen/oai/client.py +++ b/autogen/oai/client.py @@ -11,7 +11,7 @@ from autogen.oai import completion -from autogen.oai.openai_utils import get_key, OAI_PRICE1K +from autogen.oai.openai_utils import DEFAULT_AZURE_API_VERSION, get_key, OAI_PRICE1K from autogen.token_count_utils import count_token from autogen._pydantic import model_dump @@ -23,7 +23,7 @@ OpenAI = object else: # raises exception if openai>=1 is installed and something is wrong with imports - from openai import OpenAI, APIError, __version__ as OPENAIVERSION + from openai import OpenAI, AzureOpenAI, APIError, __version__ as OPENAIVERSION from openai.resources import Completions from openai.types.chat import ChatCompletion from openai.types.chat.chat_completion import ChatCompletionMessage, Choice # type: ignore [attr-defined] @@ -105,46 +105,10 @@ def __init__(self, *, config_list: Optional[List[Dict[str, Any]]] = None, **base self._clients = [self._client(extra_kwargs, openai_config)] self._config_list = [extra_kwargs] - def _process_for_azure( - self, config: Dict[str, Any], extra_kwargs: Dict[str, Any], segment: str = "default" - ) -> None: - # deal with api_version - query_segment = f"{segment}_query" - headers_segment = f"{segment}_headers" - api_version = extra_kwargs.get("api_version") - if api_version is not None and query_segment not in config: - config[query_segment] = {"api-version": api_version} - if segment == "default": - # remove the api_version from extra_kwargs - extra_kwargs.pop("api_version") - if segment == "extra": - return - # deal with api_type - api_type = extra_kwargs.get("api_type") - if api_type is not None and api_type.startswith("azure") and headers_segment not in config: - api_key = config.get("api_key", os.environ.get("AZURE_OPENAI_API_KEY")) - config[headers_segment] = {"api-key": api_key} - # remove the api_type from extra_kwargs - extra_kwargs.pop("api_type") - # deal with model - model = extra_kwargs.get("model") - if model is None: - return - if "gpt-3.5" in model: - # hack for azure gpt-3.5 - extra_kwargs["model"] = model = model.replace("gpt-3.5", "gpt-35") - base_url = config.get("base_url") - if base_url is None: - raise ValueError("to use azure openai api, base_url must be specified.") - suffix = f"/openai/deployments/{model}" - if not base_url.endswith(suffix): - config["base_url"] += suffix[1:] if base_url.endswith("/") else suffix - def _separate_openai_config(self, config: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]: """Separate the config into openai_config and extra_kwargs.""" openai_config = {k: v for k, v in config.items() if k in self.openai_kwargs} extra_kwargs = {k: v for k, v in config.items() if k not in self.openai_kwargs} - self._process_for_azure(openai_config, extra_kwargs) return openai_config, extra_kwargs def _separate_create_config(self, config: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]: @@ -158,8 +122,19 @@ def _client(self, config: Dict[str, Any], openai_config: Dict[str, Any]) -> Open after removing extra kwargs. 
""" openai_config = {**openai_config, **{k: v for k, v in config.items() if k in self.openai_kwargs}} - self._process_for_azure(openai_config, config) - client = OpenAI(**openai_config) + api_type = config.get("api_type") + if api_type is not None and api_type.startswith("azure"): + api_key = config.get("api_key", os.environ.get("AZURE_OPENAI_API_KEY")) + api_version = config.get("api_version", DEFAULT_AZURE_API_VERSION) + model = config.get("model") + base_url = config.get("base_url") + if base_url is None: + raise ValueError("to use azure openai api, base_url must be specified.") + client = AzureOpenAI( + azure_deployment=model, api_version=api_version, api_key=api_key, azure_endpoint=base_url + ) + else: + client = OpenAI(**openai_config) return client @classmethod @@ -242,8 +217,6 @@ def yes_or_no_filter(context, response): full_config = {**config, **self._config_list[i]} # separate the config into create_config and extra_kwargs create_config, extra_kwargs = self._separate_create_config(full_config) - # process for azure - self._process_for_azure(create_config, extra_kwargs, "extra") # construct the create params params = self._construct_create_params(create_config, extra_kwargs) # get the cache_seed, filter_func and context @@ -540,6 +513,7 @@ def _completions_create(self, client: OpenAI, params: Dict[str, Any]) -> ChatCom # If streaming is not enabled, send a regular chat completion request params = params.copy() params["stream"] = False + params.pop("api_type", None) response = completions.create(**params) return response From 3cb411749ba0bd69b9c486b4290f91d2f65d28a8 Mon Sep 17 00:00:00 2001 From: Maxim Saplin Date: Sun, 14 Jan 2024 00:27:13 +0300 Subject: [PATCH 2/9] Setting AzureOpenAI to empty object if no `openai` --- autogen/oai/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/autogen/oai/client.py b/autogen/oai/client.py index d067c35816e..6e433b93c4f 100644 --- a/autogen/oai/client.py +++ b/autogen/oai/client.py @@ -21,6 +21,7 @@ except ImportError: ERROR: Optional[ImportError] = ImportError("Please install openai>=1 and diskcache to use autogen.OpenAIWrapper.") OpenAI = object + AzureOpenAI = object else: # raises exception if openai>=1 is installed and something is wrong with imports from openai import OpenAI, AzureOpenAI, APIError, __version__ as OPENAIVERSION From 9155cc433e48d317f2ae29026c392a672fedbec2 Mon Sep 17 00:00:00 2001 From: Maxim Saplin Date: Sun, 14 Jan 2024 16:04:59 +0300 Subject: [PATCH 3/9] extra_ and openai_ kwargs --- autogen/oai/client.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/autogen/oai/client.py b/autogen/oai/client.py index 6e433b93c4f..9ae98c9bb61 100644 --- a/autogen/oai/client.py +++ b/autogen/oai/client.py @@ -53,8 +53,10 @@ class OpenAIWrapper: """A wrapper class for openai client.""" cache_path_root: str = ".cache" - extra_kwargs = {"cache_seed", "filter_func", "allow_format_str_template", "context", "api_version"} + extra_kwargs = {"cache_seed", "filter_func", "allow_format_str_template", "context", "api_version", "api_type"} openai_kwargs = set(inspect.getfullargspec(OpenAI.__init__).kwonlyargs) + aopenai_kwargs = set(inspect.getfullargspec(AzureOpenAI.__init__).kwonlyargs) + openai_kwargs = openai_kwargs & aopenai_kwargs total_usage_summary: Optional[Dict[str, Any]] = None actual_usage_summary: Optional[Dict[str, Any]] = None @@ -514,7 +516,6 @@ def _completions_create(self, client: OpenAI, params: Dict[str, Any]) -> ChatCom # If streaming is not enabled, send a regular chat completion request 
params = params.copy() params["stream"] = False - params.pop("api_type", None) response = completions.create(**params) return response From e08d5b67fa3da5d551a7cccafe97ea8dd57ac743 Mon Sep 17 00:00:00 2001 From: Maxim Saplin Date: Sun, 14 Jan 2024 16:31:25 +0300 Subject: [PATCH 4/9] test_client, support for Azure and "gpt-35-turbo-instruct" --- test/oai/test_client.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/test/oai/test_client.py b/test/oai/test_client.py index 26a05396160..f3a935b331f 100644 --- a/test/oai/test_client.py +++ b/test/oai/test_client.py @@ -93,21 +93,25 @@ def test_chat_completion(): def test_completion(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - response = client.create(prompt="1+1=", model="gpt-3.5-turbo-instruct") + # Azure can't have dot in model/deployment name + model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + response = client.create(prompt="1+1=", model=model) print(response) print(client.extract_text_or_completion_object(response)) @pytest.mark.skipif(skip, reason="openai>=1 not installed") @pytest.mark.parametrize( - "cache_seed, model", + "cache_seed", [ - (None, "gpt-3.5-turbo-instruct"), - (42, "gpt-3.5-turbo-instruct"), + None, + 42, ], ) -def test_cost(cache_seed, model): +def test_cost(cache_seed): config_list = config_list_openai_aoai(KEY_LOC) + # Azure can't have dot in model/deployment name + model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" client = OpenAIWrapper(config_list=config_list, cache_seed=cache_seed) response = client.create(prompt="1+3=", model=model) print(response.cost) @@ -117,7 +121,9 @@ def test_cost(cache_seed, model): def test_usage_summary(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - response = client.create(prompt="1+3=", model="gpt-3.5-turbo-instruct", cache_seed=None) + # Azure can't have dot in model/deployment name + model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + response = client.create(prompt="1+3=", model=model, cache_seed=None) # usage should be recorded assert client.actual_usage_summary["total_cost"] > 0, "total_cost should be greater than 0" @@ -138,7 +144,7 @@ def test_usage_summary(): assert client.total_usage_summary is None, "total_usage_summary should be None" # actual usage and all usage should be different - response = client.create(prompt="1+3=", model="gpt-3.5-turbo-instruct", cache_seed=42) + response = client.create(prompt="1+3=", model=model, cache_seed=42) assert client.total_usage_summary["total_cost"] > 0, "total_cost should be greater than 0" assert client.actual_usage_summary is None, "No actual cost should be recorded" From 8005b9ef1235f63891bb7e3dd5674d3a6d9371b3 Mon Sep 17 00:00:00 2001 From: Maxim Saplin Date: Sun, 14 Jan 2024 17:04:51 +0300 Subject: [PATCH 5/9] instruct/azure model in test_client_stream --- test/oai/test_client.py | 4 ---- test/oai/test_client_stream.py | 4 +++- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/test/oai/test_client.py b/test/oai/test_client.py index f3a935b331f..0ac3aae0f6e 100644 --- a/test/oai/test_client.py +++ b/test/oai/test_client.py @@ -31,10 +31,6 @@ def test_aoai_chat_completion(): filter_dict={"api_type": ["azure"], "model": ["gpt-3.5-turbo", "gpt-35-turbo"]}, ) client = OpenAIWrapper(config_list=config_list) - # 
for config in config_list:
-    #     print(config)
-    #     client = OpenAIWrapper(**config)
-    #     response = client.create(messages=[{"role": "user", "content": "2+2="}], cache_seed=None)
     response = client.create(messages=[{"role": "user", "content": "2+2="}], cache_seed=None)
     print(response)
     print(client.extract_text_or_completion_object(response))

diff --git a/test/oai/test_client_stream.py b/test/oai/test_client_stream.py
index 6a20c4ffa21..63ee782f68e 100644
--- a/test/oai/test_client_stream.py
+++ b/test/oai/test_client_stream.py
@@ -286,7 +286,9 @@ def test_chat_tools_stream() -> None:
 def test_completion_stream() -> None:
     config_list = config_list_openai_aoai(KEY_LOC)
     client = OpenAIWrapper(config_list=config_list)
-    response = client.create(prompt="1+1=", model="gpt-3.5-turbo-instruct", stream=True)
+    # Azure can't have dot in model/deployment name
+    model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct"
+    response = client.create(prompt="1+1=", model=model, stream=True)
     print(response)
     print(client.extract_text_or_completion_object(response))

From 68b3990568b6bef258d5362cfae9a091e037a1bb Mon Sep 17 00:00:00 2001
From: Chi Wang
Date: Mon, 15 Jan 2024 10:53:00 -0800
Subject: [PATCH 6/9] generalize aoai support (#1)

* generalize aoai support

* Null check, fixing tests

* cleanup test

---------

Co-authored-by: Maxim Saplin
---
 autogen/oai/client.py   | 21 +++++++++++----------
 test/oai/test_client.py | 28 +++++++++++++++++-----------
 2 files changed, 28 insertions(+), 21 deletions(-)

diff --git a/autogen/oai/client.py b/autogen/oai/client.py
index 9ae98c9bb61..3796ef7fe92 100644
--- a/autogen/oai/client.py
+++ b/autogen/oai/client.py
@@ -56,7 +56,7 @@ class OpenAIWrapper:
     extra_kwargs = {"cache_seed", "filter_func", "allow_format_str_template", "context", "api_version", "api_type"}
     openai_kwargs = set(inspect.getfullargspec(OpenAI.__init__).kwonlyargs)
     aopenai_kwargs = set(inspect.getfullargspec(AzureOpenAI.__init__).kwonlyargs)
-    openai_kwargs = openai_kwargs & aopenai_kwargs
+    openai_kwargs = openai_kwargs | aopenai_kwargs
 
     total_usage_summary: Optional[Dict[str, Any]] = None
     actual_usage_summary: Optional[Dict[str, Any]] = None
@@ -123,19 +123,20 @@ def _separate_create_config(self, config: Dict[str, Any]) -> Tuple[Dict[str, Any
     def _client(self, config: Dict[str, Any], openai_config: Dict[str, Any]) -> OpenAI:
         """Create a client with the given config to override openai_config,
         after removing extra kwargs.
+
+        For Azure models/deployment names there is a convenience modification of the model value:
+        dots are removed from it, since Azure deployment names can't have dots. E.g. if you have an
+        Azure deployment named "gpt-35-turbo" and define model "gpt-3.5-turbo" in the config, the
+        function will remove the dot and create a client that connects to the "gpt-35-turbo" deployment.
""" openai_config = {**openai_config, **{k: v for k, v in config.items() if k in self.openai_kwargs}} api_type = config.get("api_type") if api_type is not None and api_type.startswith("azure"): - api_key = config.get("api_key", os.environ.get("AZURE_OPENAI_API_KEY")) - api_version = config.get("api_version", DEFAULT_AZURE_API_VERSION) - model = config.get("model") - base_url = config.get("base_url") - if base_url is None: - raise ValueError("to use azure openai api, base_url must be specified.") - client = AzureOpenAI( - azure_deployment=model, api_version=api_version, api_key=api_key, azure_endpoint=base_url - ) + openai_config["azure_deployment"] = openai_config.get("azure_deployment", config.get("model")) + if openai_config["azure_deployment"] is not None: + openai_config["azure_deployment"] = openai_config["azure_deployment"].replace(".", "") + openai_config["azure_endpoint"] = openai_config.get("azure_endpoint", openai_config.pop("base_url", None)) + client = AzureOpenAI(**openai_config) else: client = OpenAI(**openai_config) return client diff --git a/test/oai/test_client.py b/test/oai/test_client.py index 0ac3aae0f6e..e1f443095b9 100644 --- a/test/oai/test_client.py +++ b/test/oai/test_client.py @@ -35,6 +35,15 @@ def test_aoai_chat_completion(): print(response) print(client.extract_text_or_completion_object(response)) + # test dialect + config = config_list[0] + config["azure_deployment"] = config["model"] + config["azure_endpoint"] = config.pop("base_url") + client = OpenAIWrapper(**config) + response = client.create(messages=[{"role": "user", "content": "2+2="}], cache_seed=None) + print(response) + print(client.extract_text_or_completion_object(response)) + @pytest.mark.skipif(skip or not TOOL_ENABLED, reason="openai>=1.1.0 not installed") def test_oai_tool_calling_extraction(): @@ -89,8 +98,7 @@ def test_chat_completion(): def test_completion(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" response = client.create(prompt="1+1=", model=model) print(response) print(client.extract_text_or_completion_object(response)) @@ -106,8 +114,7 @@ def test_completion(): ) def test_cost(cache_seed): config_list = config_list_openai_aoai(KEY_LOC) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" client = OpenAIWrapper(config_list=config_list, cache_seed=cache_seed) response = client.create(prompt="1+3=", model=model) print(response.cost) @@ -117,8 +124,7 @@ def test_cost(cache_seed): def test_usage_summary(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" response = client.create(prompt="1+3=", model=model, cache_seed=None) # usage should be recorded @@ -147,8 +153,8 @@ def test_usage_summary(): if __name__ == "__main__": test_aoai_chat_completion() - test_oai_tool_calling_extraction() - test_chat_completion() - test_completion() - # test_cost() - test_usage_summary() + # test_oai_tool_calling_extraction() + # test_chat_completion() + # test_completion() + # # test_cost() + # 
test_usage_summary()

From f2e50f75f7ca186720e4a4fd5ac2de5469748bbf Mon Sep 17 00:00:00 2001
From: Maxim Saplin
Date: Mon, 15 Jan 2024 23:26:18 +0300
Subject: [PATCH 7/9] Returning back model names for instruct

---
 autogen/oai/client.py   | 10 +++++++++-
 test/oai/test_client.py |  9 ++++++---
 2 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/autogen/oai/client.py b/autogen/oai/client.py
index 2a353cc51f8..dc908d0528d 100644
--- a/autogen/oai/client.py
+++ b/autogen/oai/client.py
@@ -53,7 +53,15 @@ class OpenAIWrapper:
     """A wrapper class for openai client."""
 
     cache_path_root: str = ".cache"
-    extra_kwargs = {"cache_seed", "filter_func", "allow_format_str_template", "context", "api_version", "api_type", "tags"}
+    extra_kwargs = {
+        "cache_seed",
+        "filter_func",
+        "allow_format_str_template",
+        "context",
+        "api_version",
+        "api_type",
+        "tags",
+    }
     openai_kwargs = set(inspect.getfullargspec(OpenAI.__init__).kwonlyargs)
     aopenai_kwargs = set(inspect.getfullargspec(AzureOpenAI.__init__).kwonlyargs)
     openai_kwargs = openai_kwargs | aopenai_kwargs
diff --git a/test/oai/test_client.py b/test/oai/test_client.py
index e1f443095b9..a25e9fd2875 100644
--- a/test/oai/test_client.py
+++ b/test/oai/test_client.py
@@ -98,7 +98,8 @@ def test_chat_completion():
 def test_completion():
     config_list = config_list_openai_aoai(KEY_LOC)
     client = OpenAIWrapper(config_list=config_list)
-    model = "gpt-3.5-turbo-instruct"
+    # Azure can't have dot in model/deployment name
+    model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct"
     response = client.create(prompt="1+1=", model=model)
     print(response)
     print(client.extract_text_or_completion_object(response))
@@ -114,7 +115,8 @@
 )
 def test_cost(cache_seed):
     config_list = config_list_openai_aoai(KEY_LOC)
-    model = "gpt-3.5-turbo-instruct"
+    # Azure can't have dot in model/deployment name
+    model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct"
     client = OpenAIWrapper(config_list=config_list, cache_seed=cache_seed)
     response = client.create(prompt="1+3=", model=model)
     print(response.cost)
@@ -124,7 +126,8 @@
 def test_usage_summary():
     config_list = config_list_openai_aoai(KEY_LOC)
     client = OpenAIWrapper(config_list=config_list)
-    model = "gpt-3.5-turbo-instruct"
+    # Azure can't have dot in model/deployment name
+    model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct"
     response = client.create(prompt="1+3=", model=model, cache_seed=None)
 
     # usage should be recorded

From c567f5967c9a4250ff3cb405fa9a59c47b5dd23a Mon Sep 17 00:00:00 2001
From: Chi Wang
Date: Mon, 15 Jan 2024 22:55:43 +0000
Subject: [PATCH 8/9] process model in create

---
 autogen/oai/client.py   |  2 ++
 test/oai/test_client.py | 13 +++++--------
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/autogen/oai/client.py b/autogen/oai/client.py
index dc908d0528d..5b4c575ec2b 100644
--- a/autogen/oai/client.py
+++ b/autogen/oai/client.py
@@ -229,6 +229,8 @@ def yes_or_no_filter(context, response):
         full_config = {**config, **self._config_list[i]}
         # separate the config into create_config and extra_kwargs
         create_config, extra_kwargs = self._separate_create_config(full_config)
+        if extra_kwargs.get("api_type").startswith("azure") and "model" in create_config:
+            create_config["model"] = create_config["model"].replace(".", "")
         # construct the create params
         params = self._construct_create_params(create_config, 
extra_kwargs) # get the cache_seed, filter_func and context diff --git a/test/oai/test_client.py b/test/oai/test_client.py index a25e9fd2875..7f561187d49 100644 --- a/test/oai/test_client.py +++ b/test/oai/test_client.py @@ -98,8 +98,7 @@ def test_chat_completion(): def test_completion(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" response = client.create(prompt="1+1=", model=model) print(response) print(client.extract_text_or_completion_object(response)) @@ -115,8 +114,7 @@ def test_completion(): ) def test_cost(cache_seed): config_list = config_list_openai_aoai(KEY_LOC) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" client = OpenAIWrapper(config_list=config_list, cache_seed=cache_seed) response = client.create(prompt="1+3=", model=model) print(response.cost) @@ -126,8 +124,7 @@ def test_cost(cache_seed): def test_usage_summary(): config_list = config_list_openai_aoai(KEY_LOC) client = OpenAIWrapper(config_list=config_list) - # Azure can't have dot in model/deployment name - model = "gpt-35-turbo-instruct" if config_list[0].get("api_type") == "azure" else "gpt-3.5-turbo-instruct" + model = "gpt-3.5-turbo-instruct" response = client.create(prompt="1+3=", model=model, cache_seed=None) # usage should be recorded @@ -155,9 +152,9 @@ def test_usage_summary(): if __name__ == "__main__": - test_aoai_chat_completion() + # test_aoai_chat_completion() # test_oai_tool_calling_extraction() # test_chat_completion() - # test_completion() + test_completion() # # test_cost() # test_usage_summary() From 58150a7a8fe7337d06f8107aa8a79f8777023aea Mon Sep 17 00:00:00 2001 From: Chi Wang Date: Mon, 15 Jan 2024 23:06:14 +0000 Subject: [PATCH 9/9] None check --- autogen/oai/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/autogen/oai/client.py b/autogen/oai/client.py index 5b4c575ec2b..65ad1425409 100644 --- a/autogen/oai/client.py +++ b/autogen/oai/client.py @@ -229,7 +229,8 @@ def yes_or_no_filter(context, response): full_config = {**config, **self._config_list[i]} # separate the config into create_config and extra_kwargs create_config, extra_kwargs = self._separate_create_config(full_config) - if extra_kwargs.get("api_type").startswith("azure") and "model" in create_config: + api_type = extra_kwargs.get("api_type") + if api_type and api_type.startswith("azure") and "model" in create_config: create_config["model"] = create_config["model"].replace(".", "") # construct the create params params = self._construct_create_params(create_config, extra_kwargs)
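
For reference, below is a minimal, illustrative sketch (not part of the patch series) of how an Azure OpenAI configuration can be driven through OpenAIWrapper once these patches are applied. The endpoint URL, API key, and deployment names are placeholders, and the api_version string is only an example; substitute your own resource details.

    from autogen import OpenAIWrapper

    # Style used throughout the tests above: api_type / base_url / model.
    # The wrapper maps "model" to the Azure deployment name and strips dots
    # (e.g. "gpt-3.5-turbo" is sent to a deployment named "gpt-35-turbo").
    config_list = [
        {
            "model": "gpt-3.5-turbo",
            "api_type": "azure",
            "api_key": "<AZURE_OPENAI_API_KEY>",
            "base_url": "https://<your-resource>.openai.azure.com/",
            "api_version": "2023-12-01-preview",
        }
    ]
    client = OpenAIWrapper(config_list=config_list)
    response = client.create(messages=[{"role": "user", "content": "2+2="}])
    print(client.extract_text_or_completion_object(response))

    # Native AzureOpenAI style, exercised by the "test dialect" case in PATCH 6/9:
    # azure_deployment / azure_endpoint are passed straight through to AzureOpenAI.
    client = OpenAIWrapper(
        model="gpt-35-turbo",
        api_type="azure",
        api_key="<AZURE_OPENAI_API_KEY>",
        azure_deployment="gpt-35-turbo",
        azure_endpoint="https://<your-resource>.openai.azure.com/",
        api_version="2023-12-01-preview",
    )
    response = client.create(messages=[{"role": "user", "content": "2+2="}])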