Fallback to gpt-4o for tiktoken if openai model unrecognized
whoisarpit committed Feb 5, 2025
1 parent a4cfca0 commit d368576
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion patchwork/common/client/llm/openai_.py
@@ -14,6 +14,7 @@
 from typing_extensions import Dict, Iterable, List, Optional, Union
 
 from patchwork.common.client.llm.protocol import NOT_GIVEN, LlmClient, NotGiven
+from patchwork.logger import logger
 
 
 @functools.lru_cache
@@ -87,7 +88,12 @@ def is_prompt_supported(
 
         model_limit = self.__get_model_limits(model)
         token_count = 0
-        encoding = tiktoken.encoding_for_model(model)
+        encoding = None
+        try:
+            encoding = tiktoken.encoding_for_model(model)
+        except Exception as e:
+            logger.error(f"Error getting encoding for model {model}: {e}, using gpt-4o as fallback")
+            encoding = tiktoken.encoding_for_model("gpt-4o")
         for message in messages:
             message_token_count = len(encoding.encode(message.get("content")))
             token_count = token_count + message_token_count
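For reference, a minimal standalone sketch of the fallback behaviour this commit introduces, assuming a tiktoken version that recognizes gpt-4o; the helper name encoding_with_fallback and the example model name are hypothetical and not part of the patchwork codebase. tiktoken.encoding_for_model raises an error for model names it does not recognize, so the change catches that and counts tokens with the gpt-4o encoding instead of failing.

# Hypothetical sketch of the fallback logic, outside the client class in openai_.py.
# Assumes a tiktoken version that maps "gpt-4o" to an encoding.
import tiktoken


def encoding_with_fallback(model: str) -> tiktoken.Encoding:
    try:
        return tiktoken.encoding_for_model(model)
    except Exception:
        # Unrecognized model name: fall back to the gpt-4o encoding so
        # token counting still works.
        return tiktoken.encoding_for_model("gpt-4o")


# Usage: token counting keeps working even for a model tiktoken does not know.
encoding = encoding_with_fallback("my-custom-proxy-model")
print(len(encoding.encode("Hello, world!")))

The trade-off is that an unrecognized model's token count is only an approximation (gpt-4o's tokenizer may split text differently), but that is sufficient for the prompt-size check in is_prompt_supported and avoids a hard failure.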
