From c99749f7968214cd5d724ec6b6ae01df36414336 Mon Sep 17 00:00:00 2001 From: Umer Mansoor Date: Thu, 18 Jul 2024 22:50:45 -0700 Subject: [PATCH] Update token_count_utils.py - Replace `print` with `logger.warning` for consistency The code was using both `logger.warning` and `print` for showing warnings. This commit fixes this inconsistency, which can be an issue in production environments / logging systems --- autogen/token_count_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/autogen/token_count_utils.py b/autogen/token_count_utils.py index 2842a749453..365285e0955 100644 --- a/autogen/token_count_utils.py +++ b/autogen/token_count_utils.py @@ -95,7 +95,7 @@ def _num_token_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0 try: encoding = tiktoken.encoding_for_model(model) except KeyError: - print("Warning: model not found. Using cl100k_base encoding.") + logger.warning(f"Model {model} not found. Using cl100k_base encoding.") encoding = tiktoken.get_encoding("cl100k_base") if model in { "gpt-3.5-turbo-0613", @@ -166,7 +166,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int: try: encoding = tiktoken.encoding_for_model(model) except KeyError: - print("Warning: model not found. Using cl100k_base encoding.") + logger.warning(f"Model {model} not found. Using cl100k_base encoding.") encoding = tiktoken.get_encoding("cl100k_base") num_tokens = 0 @@ -193,7 +193,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int: function_tokens += 3 function_tokens += len(encoding.encode(o)) else: - print(f"Warning: not supported field {field}") + logger.warning(f"Not supported field {field}") function_tokens += 11 if len(parameters["properties"]) == 0: function_tokens -= 2