
Commit

Fix typo in OpenAI model.
norpadon committed Nov 21, 2024
1 parent 48353b5 commit d163c9c
Showing 5 changed files with 14 additions and 14 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "wanga"
version = "0.1.1"
version = "0.2.1"
description = "A library for interacting with Large Language Models."
authors = [{ name = "Artur Chakhvadze", email = "norpadon@gmail.com" }]
license = { text = "MIT" }
4 changes: 2 additions & 2 deletions tests/test_ai_function_integration.py
@@ -5,10 +5,10 @@
import pytest

from wanga.function import ai_function
-from wanga.models.openai import OpenaAIModel
+from wanga.models.openai import OpenAIModel
from wanga.runtime import Runtime

-model = OpenaAIModel("gpt-4o-mini")
+model = OpenAIModel("gpt-4o-mini")


@pytest.fixture(scope="module")
14 changes: 7 additions & 7 deletions tests/test_models/test_openai.py
@@ -9,12 +9,12 @@
from wanga.models.messages import parse_messages
from wanga.models.model import FinishReason, ModelResponse, ToolParams
from wanga.models.model import RateLimitError as WangaRateLimitError
-from wanga.models.openai import OpenaAIModel
+from wanga.models.openai import OpenAIModel
from wanga.schema import DEFAULT_SCHEMA_EXTRACTOR


def test_reply():
-model = OpenaAIModel("gpt-4o-mini")
+model = OpenAIModel("gpt-4o-mini")
prompt = r"""
[|system|]
You are a helpful assistant.
@@ -36,12 +36,12 @@ def test_reply():


def test_context_size():
assert OpenaAIModel("gpt-4-turbo").context_length == 128000
assert OpenaAIModel("gpt-4").context_length == 8192
assert OpenAIModel("gpt-4-turbo").context_length == 128000
assert OpenAIModel("gpt-4").context_length == 8192


def test_num_tokens():
-model = OpenaAIModel("gpt-4o-mini")
+model = OpenAIModel("gpt-4o-mini")
prompt = r"""
[|system|]
You are a helpful assistant.
@@ -61,12 +61,12 @@ def tool(x: int, y: str):

@pytest.fixture
def model():
return OpenaAIModel("gpt-4o-mini", num_retries=2, retry_on_request_limit=True)
return OpenAIModel("gpt-4o-mini", num_retries=2, retry_on_request_limit=True)


@pytest.fixture
def vision_model():
return OpenaAIModel("gpt-4o", num_retries=2, retry_on_request_limit=True)
return OpenAIModel("gpt-4o", num_retries=2, retry_on_request_limit=True)


def test_retry_on_rate_limit(model):
4 changes: 2 additions & 2 deletions wanga/__init__.py
@@ -1,10 +1,10 @@
from importlib.util import find_spec

from .function import ai_function
-from .models.openai import OpenaAIModel
+from .models.openai import OpenAIModel
from .runtime import Runtime

__all__ = ["ai_function", "OpenaAIModel", "Runtime"]
__all__ = ["ai_function", "OpenAIModel", "Runtime"]

_PIL_INSTALLED = find_spec("Pillow") is None
_OPENAI_INSTALLED = find_spec("openai") is None
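
With the export fixed, the class can be imported under its corrected name through the package root. A minimal usage sketch (not part of the commit; it assumes the package is installed with an OpenAI API key configured, and it reuses the constructor arguments and context-length check visible in the test diffs above):

from wanga import OpenAIModel  # re-exported via wanga/__init__.py after this fix

model = OpenAIModel("gpt-4o-mini", num_retries=2, retry_on_request_limit=True)
assert OpenAIModel("gpt-4").context_length == 8192  # mirrors tests/test_models/test_openai.py
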
4 changes: 2 additions & 2 deletions wanga/models/openai.py
@@ -28,10 +28,10 @@
_NUM_TOKENS_ERR_RE = re.compile(r"\((?P<messages>\d+) in the messages(, (?P<functions>\d+) in the functions,)?")


__all__ = ["OpenaAIModel"]
__all__ = ["OpenAIModel"]


-class OpenaAIModel(Model):
+class OpenAIModel(Model):
_NAME_PREFIX_TO_CONTEXT_LENGTH = {
"gpt-3.5-turbo": 16538,
"gpt-4": 8192,
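
The hunk above shows the renamed class and the first entries of its _NAME_PREFIX_TO_CONTEXT_LENGTH table. As a rough illustration of how a prefix-based lookup can resolve a context length, here is a hypothetical helper (not the library's actual implementation; only the two entries visible in the diff are used):

# Hypothetical sketch of a longest-prefix lookup; the entries are the ones visible above.
_NAME_PREFIX_TO_CONTEXT_LENGTH = {
    "gpt-3.5-turbo": 16538,
    "gpt-4": 8192,
}

def context_length_for(model_name: str) -> int:
    # Pick the longest matching prefix so a more specific name wins over a shorter one.
    matches = [p for p in _NAME_PREFIX_TO_CONTEXT_LENGTH if model_name.startswith(p)]
    if not matches:
        raise ValueError(f"Unknown model: {model_name}")
    return _NAME_PREFIX_TO_CONTEXT_LENGTH[max(matches, key=len)]
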

