diff --git a/autogen/token_count_utils.py b/autogen/token_count_utils.py
index 2842a749453..365285e0955 100644
--- a/autogen/token_count_utils.py
+++ b/autogen/token_count_utils.py
@@ -95,7 +95,7 @@ def _num_token_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0
     try:
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
-        print("Warning: model not found. Using cl100k_base encoding.")
+        logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
         encoding = tiktoken.get_encoding("cl100k_base")
     if model in {
         "gpt-3.5-turbo-0613",
@@ -166,7 +166,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
     try:
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
-        print("Warning: model not found. Using cl100k_base encoding.")
+        logger.warning(f"Model {model} not found. Using cl100k_base encoding.")
         encoding = tiktoken.get_encoding("cl100k_base")
 
     num_tokens = 0
@@ -193,7 +193,7 @@ def num_tokens_from_functions(functions, model="gpt-3.5-turbo-0613") -> int:
                                 function_tokens += 3
                                 function_tokens += len(encoding.encode(o))
                         else:
-                            print(f"Warning: not supported field {field}")
+                            logger.warning(f"Not supported field {field}")
                 function_tokens += 11
                 if len(parameters["properties"]) == 0:
                     function_tokens -= 2
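
The new `logger.warning(...)` calls rely on a module-level `logger` that is not defined in any of the hunks shown here. A minimal sketch of one way `token_count_utils.py` could provide it, assuming the standard-library `logging` module is used:

```python
import logging

# Hypothetical module-level logger for token_count_utils.py; the diff above
# assumes a name `logger` is already in scope for the logger.warning(...) calls.
logger = logging.getLogger(__name__)
```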