From 8ba1e1c6acb3388210cac70fd371facee595a3d9 Mon Sep 17 00:00:00 2001
From: Xander Song
Date: Fri, 19 Jan 2024 09:37:17 -0800
Subject: [PATCH] chore: remove tokenizer warnings (#2108)

---
 src/phoenix/experimental/evals/models/anthropic.py | 4 ----
 src/phoenix/experimental/evals/models/bedrock.py   | 1 -
 2 files changed, 5 deletions(-)

diff --git a/src/phoenix/experimental/evals/models/anthropic.py b/src/phoenix/experimental/evals/models/anthropic.py
index 5b6e485486..496e864481 100644
--- a/src/phoenix/experimental/evals/models/anthropic.py
+++ b/src/phoenix/experimental/evals/models/anthropic.py
@@ -1,4 +1,3 @@
-import logging
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any, Dict, List, Optional
 
@@ -8,8 +7,6 @@
 if TYPE_CHECKING:
     from tiktoken import Encoding
 
-logger = logging.getLogger(__name__)
-
 MODEL_TOKEN_LIMIT_MAPPING = {
     "claude-2.1": 200000,
     "claude-2.0": 100000,
@@ -80,7 +77,6 @@ def _init_tiktoken(self) -> None:
         try:
             encoding = self._tiktoken.encoding_for_model(self.model)
         except KeyError:
-            logger.warning("Warning: model not found. Using cl100k_base encoding.")
             encoding = self._tiktoken.get_encoding("cl100k_base")
         self._tiktoken_encoding = encoding
 
diff --git a/src/phoenix/experimental/evals/models/bedrock.py b/src/phoenix/experimental/evals/models/bedrock.py
index 88f063272e..337e68f468 100644
--- a/src/phoenix/experimental/evals/models/bedrock.py
+++ b/src/phoenix/experimental/evals/models/bedrock.py
@@ -87,7 +87,6 @@ def _init_tiktoken(self) -> None:
         try:
             encoding = self._tiktoken.encoding_for_model(self.model_id)
         except KeyError:
-            logger.warning("Warning: model not found. Using cl100k_base encoding.")
             encoding = self._tiktoken.get_encoding("cl100k_base")
         self._tiktoken_encoding = encoding
 
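
For reference, a minimal sketch of the tiktoken fallback that both files keep after this change: encoding_for_model is tried first, and an unknown model name now falls back to the cl100k_base encoding silently rather than logging a warning. The standalone helper below is illustrative only (the name get_encoding_for is not from the patch) and assumes tiktoken is installed.

# Illustrative sketch, not part of the patch: the fallback path retained in
# anthropic.py and bedrock.py once the logger.warning call is removed.
# The helper name get_encoding_for is hypothetical; requires tiktoken.
import tiktoken

def get_encoding_for(model: str) -> tiktoken.Encoding:
    try:
        # Use the encoding registered for this model name, if tiktoken knows it.
        return tiktoken.encoding_for_model(model)
    except KeyError:
        # Unknown model: fall back to cl100k_base without emitting a warning
        # (this except branch previously also logged a warning).
        return tiktoken.get_encoding("cl100k_base")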