From 5161f626a0bec757b96217dc0f81e8908546f29a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 18 Jul 2024 16:52:57 +0000 Subject: [PATCH 1/9] chore(docs): document how to do per-request http client customization (#603) --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index fcacca0f..0fd0f870 100644 --- a/README.md +++ b/README.md @@ -508,6 +508,12 @@ client = Anthropic( ) ``` +You can also customize the client on a per-request basis by using `with_options()`: + +```python +client.with_options(http_client=DefaultHttpxClient(...)) +``` + ### Managing HTTP resources By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. From 6b7707f62788fca2e166209e82935a2a2fa8204a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 18 Jul 2024 21:29:35 +0000 Subject: [PATCH 2/9] chore: sync spec (#605) --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 3ad8fd53..4f03232f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 2 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-e2a51f04a202c13736b6fa2061a89a0c443f99ab166d965d702baf371eb1ca8f.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-e38cd52aed438cef6e0a25eeeab8ff6000583c3cf152a10f0c3610ceb3da7b4e.yml From 1797dc6139ffaca6436ed897972471e67ba1b828 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 23 Jul 2024 10:32:34 +0000 Subject: [PATCH 3/9] chore(tests): update prism version (#607) --- scripts/mock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/mock b/scripts/mock index fe89a1d0..f5861576 100755 --- a/scripts/mock +++ b/scripts/mock @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}" # Run prism mock on the given spec if [ "$1" == "--daemon" ]; then - npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL" &> .prism.log & + npm exec --package=@stainless-api/prism-cli@5.8.4 -- prism mock "$URL" &> .prism.log & # Wait for server to come online echo -n "Waiting for server" @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then echo else - npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL" + npm exec --package=@stainless-api/prism-cli@5.8.4 -- prism mock "$URL" fi From c53efc786fa95831a398f37740a81b42f7b64c94 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 24 Jul 2024 20:14:11 +0000 Subject: [PATCH 4/9] refactor: extract model out to a named type and rename partialjson (#612) --- .stats.yml | 2 +- api.md | 3 +- src/anthropic/resources/completions.py | 49 +++---- src/anthropic/resources/messages.py | 135 +++--------------- src/anthropic/types/__init__.py | 4 +- src/anthropic/types/completion.py | 9 +- .../types/completion_create_params.py | 10 +- src/anthropic/types/input_json_delta.py | 4 +- src/anthropic/types/message.py | 9 +- src/anthropic/types/message_create_params.py | 22 +-- src/anthropic/types/model.py | 19 +++ src/anthropic/types/model_param.py | 21 +++ 
.../types/raw_content_block_delta_event.py | 4 +- tests/api_resources/test_completions.py | 32 ++--- 14 files changed, 134 insertions(+), 189 deletions(-) create mode 100644 src/anthropic/types/model.py create mode 100644 src/anthropic/types/model_param.py diff --git a/.stats.yml b/.stats.yml index 4f03232f..ff2805b6 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 2 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-e38cd52aed438cef6e0a25eeeab8ff6000583c3cf152a10f0c3610ceb3da7b4e.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-5903ec2fd4efd7f261908bc4ec8ecd6b19cb9efa79637ad273583f1b763f80fd.yml diff --git a/api.md b/api.md index 5ac54252..63896541 100644 --- a/api.md +++ b/api.md @@ -9,7 +9,7 @@ from anthropic.types import ( ContentBlockStartEvent, ContentBlockStopEvent, ImageBlockParam, - InputJsonDelta, + InputJSONDelta, Message, MessageDeltaEvent, MessageDeltaUsage, @@ -17,6 +17,7 @@ from anthropic.types import ( MessageStartEvent, MessageStopEvent, MessageStreamEvent, + Model, RawContentBlockDeltaEvent, RawContentBlockStartEvent, RawContentBlockStopEvent, diff --git a/src/anthropic/resources/completions.py b/src/anthropic/resources/completions.py index f0c9afaf..d34a42f2 100644 --- a/src/anthropic/resources/completions.py +++ b/src/anthropic/resources/completions.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Union, overload +from typing import List, overload from typing_extensions import Literal import httpx @@ -21,6 +21,7 @@ from .._streaming import Stream, AsyncStream from .._base_client import make_request_options from ..types.completion import Completion +from ..types.model_param import ModelParam __all__ = ["Completions", "AsyncCompletions"] @@ -39,7 +40,7 @@ def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -71,9 +72,8 @@ def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. @@ -144,7 +144,7 @@ def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, stream: Literal[True], metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, @@ -176,9 +176,8 @@ def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. 
@@ -249,7 +248,7 @@ def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, stream: bool, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, @@ -281,9 +280,8 @@ def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. @@ -354,7 +352,7 @@ def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -408,7 +406,7 @@ async def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -440,9 +438,8 @@ async def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. @@ -513,7 +510,7 @@ async def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, stream: Literal[True], metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, @@ -545,9 +542,8 @@ async def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. @@ -618,7 +614,7 @@ async def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, stream: bool, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, @@ -650,9 +646,8 @@ async def create( Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. prompt: The prompt that you want Claude to complete. 
@@ -723,7 +718,7 @@ async def create( self, *, max_tokens_to_sample: int, - model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]], + model: ModelParam, prompt: str, metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, diff --git a/src/anthropic/resources/messages.py b/src/anthropic/resources/messages.py index c7d3639f..dd7f7ef5 100644 --- a/src/anthropic/resources/messages.py +++ b/src/anthropic/resources/messages.py @@ -24,6 +24,7 @@ from ..lib.streaming import MessageStreamManager, AsyncMessageStreamManager from ..types.message import Message from ..types.tool_param import ToolParam +from ..types.model_param import ModelParam from ..types.message_param import MessageParam from ..types.text_block_param import TextBlockParam from ..types.raw_message_stream_event import RawMessageStreamEvent @@ -46,18 +47,7 @@ def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, stream: Literal[False] | NotGiven = NOT_GIVEN, @@ -178,9 +168,8 @@ def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. metadata: An object describing metadata about the request. @@ -321,18 +310,7 @@ def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, stream: Literal[True], metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -453,9 +431,8 @@ def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. stream: Whether to incrementally stream the response using server-sent events. @@ -596,18 +573,7 @@ def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, stream: bool, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -728,9 +694,8 @@ def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. 
- - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. stream: Whether to incrementally stream the response using server-sent events. @@ -871,18 +836,7 @@ def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN, @@ -1008,18 +962,7 @@ async def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, stream: Literal[False] | NotGiven = NOT_GIVEN, @@ -1140,9 +1083,8 @@ async def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. metadata: An object describing metadata about the request. @@ -1283,18 +1225,7 @@ async def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, stream: Literal[True], metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -1415,9 +1346,8 @@ async def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. stream: Whether to incrementally stream the response using server-sent events. @@ -1558,18 +1488,7 @@ async def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, stream: bool, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, @@ -1690,9 +1609,8 @@ async def create( the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API. - model: The model that will complete your prompt. 
- - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. stream: Whether to incrementally stream the response using server-sent events. @@ -1833,18 +1751,7 @@ async def create( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN, diff --git a/src/anthropic/types/__init__.py b/src/anthropic/types/__init__.py index f6165eca..9839b5a2 100644 --- a/src/anthropic/types/__init__.py +++ b/src/anthropic/types/__init__.py @@ -2,16 +2,18 @@ from __future__ import annotations +from .model import Model as Model from .usage import Usage as Usage from .message import Message as Message from .completion import Completion as Completion from .text_block import TextBlock as TextBlock from .text_delta import TextDelta as TextDelta from .tool_param import ToolParam as ToolParam +from .model_param import ModelParam as ModelParam from .content_block import ContentBlock as ContentBlock from .message_param import MessageParam as MessageParam from .tool_use_block import ToolUseBlock as ToolUseBlock -from .input_json_delta import InputJsonDelta as InputJsonDelta +from .input_json_delta import InputJSONDelta as InputJSONDelta from .text_block_param import TextBlockParam as TextBlockParam from .image_block_param import ImageBlockParam as ImageBlockParam from .message_stop_event import MessageStopEvent as MessageStopEvent diff --git a/src/anthropic/types/completion.py b/src/anthropic/types/completion.py index d55a0fe6..e6293210 100644 --- a/src/anthropic/types/completion.py +++ b/src/anthropic/types/completion.py @@ -3,6 +3,7 @@ from typing import Optional from typing_extensions import Literal +from .model import Model from .._models import BaseModel __all__ = ["Completion"] @@ -18,8 +19,12 @@ class Completion(BaseModel): completion: str """The resulting completion up to and excluding the stop sequences.""" - model: str - """The model that handled the request.""" + model: Model + """ + The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional + details and options. + """ stop_reason: Optional[str] = None """The reason that we stopped. diff --git a/src/anthropic/types/completion_create_params.py b/src/anthropic/types/completion_create_params.py index be824ebf..5c9ed7a6 100644 --- a/src/anthropic/types/completion_create_params.py +++ b/src/anthropic/types/completion_create_params.py @@ -5,6 +5,8 @@ from typing import List, Union, Optional from typing_extensions import Literal, Required, TypedDict +from .model_param import ModelParam + __all__ = [ "CompletionRequestStreamingMetadata", "CompletionRequestNonStreamingMetadata", @@ -25,10 +27,10 @@ class CompletionCreateParamsBase(TypedDict, total=False): only specifies the absolute maximum number of tokens to generate. """ - model: Required[Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]]] - """The model that will complete your prompt. 
- - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: Required[ModelParam] + """ + The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. """ diff --git a/src/anthropic/types/input_json_delta.py b/src/anthropic/types/input_json_delta.py index 6391d4bf..14a33feb 100644 --- a/src/anthropic/types/input_json_delta.py +++ b/src/anthropic/types/input_json_delta.py @@ -4,10 +4,10 @@ from .._models import BaseModel -__all__ = ["InputJsonDelta"] +__all__ = ["InputJSONDelta"] -class InputJsonDelta(BaseModel): +class InputJSONDelta(BaseModel): partial_json: str type: Literal["input_json_delta"] diff --git a/src/anthropic/types/message.py b/src/anthropic/types/message.py index 9ef967ea..3ddbf20b 100644 --- a/src/anthropic/types/message.py +++ b/src/anthropic/types/message.py @@ -3,6 +3,7 @@ from typing import List, Optional from typing_extensions import Literal +from .model import Model from .usage import Usage from .._models import BaseModel from .content_block import ContentBlock, ContentBlock as ContentBlock @@ -52,8 +53,12 @@ class Message(BaseModel): ``` """ - model: str - """The model that handled the request.""" + model: Model + """ + The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional + details and options. + """ role: Literal["assistant"] """Conversational role of the generated message. diff --git a/src/anthropic/types/message_create_params.py b/src/anthropic/types/message_create_params.py index a76bc0f7..a50e5946 100644 --- a/src/anthropic/types/message_create_params.py +++ b/src/anthropic/types/message_create_params.py @@ -6,6 +6,7 @@ from typing_extensions import Literal, Required, TypedDict from .tool_param import ToolParam +from .model_param import ModelParam from .message_param import MessageParam from .text_block_param import TextBlockParam @@ -120,23 +121,10 @@ class MessageCreateParamsBase(TypedDict, total=False): messages in the Messages API. """ - model: Required[ - Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ] - ] - """The model that will complete your prompt. - - See [models](https://docs.anthropic.com/en/docs/models-overview) for additional + model: Required[ModelParam] + """ + The model that will complete your prompt.\n\nSee + [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. """ diff --git a/src/anthropic/types/model.py b/src/anthropic/types/model.py new file mode 100644 index 00000000..0ada4aeb --- /dev/null +++ b/src/anthropic/types/model.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union +from typing_extensions import Literal + +__all__ = ["Model"] + +Model = Union[ + str, + Literal[ + "claude-3-5-sonnet-20240620", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + "claude-2.1", + "claude-2.0", + "claude-instant-1.2", + ], +] diff --git a/src/anthropic/types/model_param.py b/src/anthropic/types/model_param.py new file mode 100644 index 00000000..d932500f --- /dev/null +++ b/src/anthropic/types/model_param.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal + +__all__ = ["ModelParam"] + +ModelParam = Union[ + str, + Literal[ + "claude-3-5-sonnet-20240620", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + "claude-2.1", + "claude-2.0", + "claude-instant-1.2", + ], +] diff --git a/src/anthropic/types/raw_content_block_delta_event.py b/src/anthropic/types/raw_content_block_delta_event.py index e1370fdb..845d502a 100644 --- a/src/anthropic/types/raw_content_block_delta_event.py +++ b/src/anthropic/types/raw_content_block_delta_event.py @@ -6,11 +6,11 @@ from .._utils import PropertyInfo from .._models import BaseModel from .text_delta import TextDelta -from .input_json_delta import InputJsonDelta +from .input_json_delta import InputJSONDelta __all__ = ["RawContentBlockDeltaEvent", "Delta"] -Delta = Annotated[Union[TextDelta, InputJsonDelta], PropertyInfo(discriminator="type")] +Delta = Annotated[Union[TextDelta, InputJSONDelta], PropertyInfo(discriminator="type")] class RawContentBlockDeltaEvent(BaseModel): diff --git a/tests/api_resources/test_completions.py b/tests/api_resources/test_completions.py index f46b7bb7..cf87d9fe 100644 --- a/tests/api_resources/test_completions.py +++ b/tests/api_resources/test_completions.py @@ -21,7 +21,7 @@ class TestCompletions: def test_method_create_overload_1(self, client: Anthropic) -> None: completion = client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) assert_matches_type(Completion, completion, path=["response"]) @@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None: def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> None: completion = client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string", "string", "string"], @@ -45,7 +45,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No def test_raw_response_create_overload_1(self, client: Anthropic) -> None: response = client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) @@ -58,7 +58,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None: def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: with client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) as response: assert not response.is_closed @@ -73,7 +73,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: def test_method_create_overload_2(self, client: Anthropic) -> None: completion_stream = client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -83,7 +83,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None: def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> None: completion_stream = client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, metadata={"user_id": 
"13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, @@ -98,7 +98,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No def test_raw_response_create_overload_2(self, client: Anthropic) -> None: response = client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -111,7 +111,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None: def test_streaming_response_create_overload_2(self, client: Anthropic) -> None: with client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) as response: @@ -131,7 +131,7 @@ class TestAsyncCompletions: async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> None: completion = await async_client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) assert_matches_type(Completion, completion, path=["response"]) @@ -140,7 +140,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N async def test_method_create_with_all_params_overload_1(self, async_client: AsyncAnthropic) -> None: completion = await async_client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string", "string", "string"], @@ -155,7 +155,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic) -> None: response = await async_client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) @@ -168,7 +168,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic async def test_streaming_response_create_overload_1(self, async_client: AsyncAnthropic) -> None: async with async_client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) as response: assert not response.is_closed @@ -183,7 +183,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> None: completion_stream = await async_client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -193,7 +193,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N async def test_method_create_with_all_params_overload_2(self, async_client: AsyncAnthropic) -> None: completion_stream = await async_client.completions.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, @@ -208,7 +208,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic) -> None: response = await async_client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="claude-2.1", + 
model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -221,7 +221,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic async def test_streaming_response_create_overload_2(self, async_client: AsyncAnthropic) -> None: async with async_client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="claude-2.1", + model="string", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) as response: From 25a5b6c81ffb5996ef697aab22a22d8be5751bc1 Mon Sep 17 00:00:00 2001 From: Young-Jin Park Date: Wed, 24 Jul 2024 16:20:54 -0400 Subject: [PATCH 5/9] feat: add back compat alias for InputJsonDelta --- src/anthropic/types/input_json_delta.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/anthropic/types/input_json_delta.py b/src/anthropic/types/input_json_delta.py index 14a33feb..3028d7ac 100644 --- a/src/anthropic/types/input_json_delta.py +++ b/src/anthropic/types/input_json_delta.py @@ -4,10 +4,12 @@ from .._models import BaseModel -__all__ = ["InputJSONDelta"] - +__all__ = ["InputJSONDelta", "InputJsonDelta"] class InputJSONDelta(BaseModel): partial_json: str type: Literal["input_json_delta"] + +InputJsonDelta = InputJSONDelta + From c9eb11b1f9656202ee88e9869e59160bc37f5434 Mon Sep 17 00:00:00 2001 From: Young-Jin Park Date: Wed, 24 Jul 2024 16:30:41 -0400 Subject: [PATCH 6/9] fix: change signatures for the stream function --- src/anthropic/_utils/_reflection.py | 2 +- src/anthropic/resources/messages.py | 26 ++------------------------ tests/lib/streaming/test_messages.py | 2 +- 3 files changed, 4 insertions(+), 26 deletions(-) diff --git a/src/anthropic/_utils/_reflection.py b/src/anthropic/_utils/_reflection.py index 9a53c7bd..89aa712a 100644 --- a/src/anthropic/_utils/_reflection.py +++ b/src/anthropic/_utils/_reflection.py @@ -34,7 +34,7 @@ def assert_signatures_in_sync( if custom_param.annotation != source_param.annotation: errors.append( - f"types for the `{name}` param are do not match; source={repr(source_param.annotation)} checking={repr(source_param.annotation)}" + f"types for the `{name}` param are do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" ) continue diff --git a/src/anthropic/resources/messages.py b/src/anthropic/resources/messages.py index dd7f7ef5..21fd4285 100644 --- a/src/anthropic/resources/messages.py +++ b/src/anthropic/resources/messages.py @@ -885,18 +885,7 @@ def stream( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, system: Union[str, Iterable[TextBlockParam]] | NotGiven = NOT_GIVEN, @@ -1800,18 +1789,7 @@ def stream( *, max_tokens: int, messages: Iterable[MessageParam], - model: Union[ - str, - Literal[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", - "claude-2.0", - "claude-instant-1.2", - ], - ], + model: ModelParam, metadata: message_create_params.Metadata | NotGiven = NOT_GIVEN, stop_sequences: List[str] | NotGiven = NOT_GIVEN, system: Union[str, Iterable[TextBlockParam]] | NotGiven = NOT_GIVEN, diff --git a/tests/lib/streaming/test_messages.py 
b/tests/lib/streaming/test_messages.py index 408546de..e719516e 100644 --- a/tests/lib/streaming/test_messages.py +++ b/tests/lib/streaming/test_messages.py @@ -192,7 +192,7 @@ def test_stream_method_definition_in_sync(sync: bool) -> None: if custom_param.annotation != generated_param.annotation: errors.append( - f"types for the `{name}` param are do not match; generated={repr(generated_param.annotation)} custom={repr(generated_param.annotation)}" + f"types for the `{name}` param are do not match; generated={repr(generated_param.annotation)} custom={repr(custom_param.annotation)}" ) continue From 5e36940a42e401c3f0c1e42aa248d431fdf7192c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:54:40 +0000 Subject: [PATCH 7/9] chore(internal): add type construction helper (#613) --- src/anthropic/_models.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/anthropic/_models.py b/src/anthropic/_models.py index eb7ce3bd..5148d5a7 100644 --- a/src/anthropic/_models.py +++ b/src/anthropic/_models.py @@ -406,6 +406,15 @@ def build( return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) +def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: + """Loose coercion to the expected type with construction of nested values. + + Note: the returned value from this function is not guaranteed to match the + given type. + """ + return cast(_T, construct_type(value=value, type_=type_)) + + def construct_type(*, value: object, type_: object) -> object: """Loose coercion to the expected type with construction of nested values. From 5f8d88f6fcc2ba05cd9fc6f8ae7aa8c61dc6b0d0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 19:46:08 +0000 Subject: [PATCH 8/9] fix(client): correctly apply client level timeout for messages (#615) --- src/anthropic/resources/completions.py | 22 ++++++++++++++-------- src/anthropic/resources/messages.py | 22 ++++++++++++++-------- 2 files changed, 28 insertions(+), 16 deletions(-) diff --git a/src/anthropic/resources/completions.py b/src/anthropic/resources/completions.py index d34a42f2..7369f485 100644 --- a/src/anthropic/resources/completions.py +++ b/src/anthropic/resources/completions.py @@ -11,6 +11,7 @@ from ..types import completion_create_params from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven from .._utils import ( + is_given, required_args, maybe_transform, async_maybe_transform, @@ -18,6 +19,7 @@ from .._compat import cached_property from .._resource import SyncAPIResource, AsyncAPIResource from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._constants import DEFAULT_TIMEOUT from .._streaming import Stream, AsyncStream from .._base_client import make_request_options from ..types.completion import Completion @@ -53,7 +55,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion: """[Legacy] Create a Text Completion. 
@@ -157,7 +159,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Stream[Completion]: """[Legacy] Create a Text Completion. @@ -261,7 +263,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion | Stream[Completion]: """[Legacy] Create a Text Completion. @@ -365,8 +367,10 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion | Stream[Completion]: + if not is_given(timeout) and self._client.timeout == DEFAULT_TIMEOUT: + timeout = 600 return self._post( "/v1/complete", body=maybe_transform( @@ -419,7 +423,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion: """[Legacy] Create a Text Completion. @@ -523,7 +527,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> AsyncStream[Completion]: """[Legacy] Create a Text Completion. @@ -627,7 +631,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion | AsyncStream[Completion]: """[Legacy] Create a Text Completion. 
@@ -731,8 +735,10 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Completion | AsyncStream[Completion]: + if not is_given(timeout) and self._client.timeout == DEFAULT_TIMEOUT: + timeout = 600 return await self._post( "/v1/complete", body=await async_maybe_transform( diff --git a/src/anthropic/resources/messages.py b/src/anthropic/resources/messages.py index 21fd4285..8b6ab106 100644 --- a/src/anthropic/resources/messages.py +++ b/src/anthropic/resources/messages.py @@ -12,6 +12,7 @@ from ..types import message_create_params from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven from .._utils import ( + is_given, required_args, maybe_transform, async_maybe_transform, @@ -19,6 +20,7 @@ from .._compat import cached_property from .._resource import SyncAPIResource, AsyncAPIResource from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper +from .._constants import DEFAULT_TIMEOUT from .._streaming import Stream, AsyncStream from .._base_client import make_request_options from ..lib.streaming import MessageStreamManager, AsyncMessageStreamManager @@ -62,7 +64,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message: """ Create a Message. @@ -325,7 +327,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Stream[RawMessageStreamEvent]: """ Create a Message. @@ -588,7 +590,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message | Stream[RawMessageStreamEvent]: """ Create a Message. @@ -851,8 +853,10 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message | Stream[RawMessageStreamEvent]: + if not is_given(timeout) and self._client.timeout == DEFAULT_TIMEOUT: + timeout = 600 return self._post( "/v1/messages", body=maybe_transform( @@ -966,7 +970,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message: """ Create a Message. @@ -1229,7 +1233,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> AsyncStream[RawMessageStreamEvent]: """ Create a Message. 
@@ -1492,7 +1496,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message | AsyncStream[RawMessageStreamEvent]: """ Create a Message. @@ -1755,8 +1759,10 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, ) -> Message | AsyncStream[RawMessageStreamEvent]: + if not is_given(timeout) and self._client.timeout == DEFAULT_TIMEOUT: + timeout = 600 return await self._post( "/v1/messages", body=await async_maybe_transform( From adf57c5122457dfb2d19d7741635dd15a346e9e9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 19:46:31 +0000 Subject: [PATCH 9/9] release: 0.32.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 27 +++++++++++++++++++++++++++ pyproject.toml | 2 +- src/anthropic/_version.py | 2 +- 4 files changed, 30 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 61d831bd..f04d0896 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.31.2" + ".": "0.32.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index eb6ddbf4..516cd35c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## 0.32.0 (2024-07-29) + +Full Changelog: [v0.31.2...v0.32.0](https://github.com/anthropics/anthropic-sdk-python/compare/v0.31.2...v0.32.0) + +### Features + +* add back compat alias for InputJsonDelta ([25a5b6c](https://github.com/anthropics/anthropic-sdk-python/commit/25a5b6c81ffb5996ef697aab22a22d8be5751bc1)) + + +### Bug Fixes + +* change signatures for the stream function ([c9eb11b](https://github.com/anthropics/anthropic-sdk-python/commit/c9eb11b1f9656202ee88e9869e59160bc37f5434)) +* **client:** correctly apply client level timeout for messages ([#615](https://github.com/anthropics/anthropic-sdk-python/issues/615)) ([5f8d88f](https://github.com/anthropics/anthropic-sdk-python/commit/5f8d88f6fcc2ba05cd9fc6f8ae7aa8c61dc6b0d0)) + + +### Chores + +* **docs:** document how to do per-request http client customization ([#603](https://github.com/anthropics/anthropic-sdk-python/issues/603)) ([5161f62](https://github.com/anthropics/anthropic-sdk-python/commit/5161f626a0bec757b96217dc0f81e8908546f29a)) +* **internal:** add type construction helper ([#613](https://github.com/anthropics/anthropic-sdk-python/issues/613)) ([5e36940](https://github.com/anthropics/anthropic-sdk-python/commit/5e36940a42e401c3f0c1e42aa248d431fdf7192c)) +* sync spec ([#605](https://github.com/anthropics/anthropic-sdk-python/issues/605)) ([6b7707f](https://github.com/anthropics/anthropic-sdk-python/commit/6b7707f62788fca2e166209e82935a2a2fa8204a)) +* **tests:** update prism version ([#607](https://github.com/anthropics/anthropic-sdk-python/issues/607)) ([1797dc6](https://github.com/anthropics/anthropic-sdk-python/commit/1797dc6139ffaca6436ed897972471e67ba1b828)) + + +### Refactors + +* extract model out to a named type and rename partialjson ([#612](https://github.com/anthropics/anthropic-sdk-python/issues/612)) 
([c53efc7](https://github.com/anthropics/anthropic-sdk-python/commit/c53efc786fa95831a398f37740a81b42f7b64c94)) + ## 0.31.2 (2024-07-17) Full Changelog: [v0.31.1...v0.31.2](https://github.com/anthropics/anthropic-sdk-python/compare/v0.31.1...v0.31.2) diff --git a/pyproject.toml b/pyproject.toml index 10921bf7..ac92a41f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "anthropic" -version = "0.31.2" +version = "0.32.0" description = "The official Python library for the anthropic API" dynamic = ["readme"] license = "MIT" diff --git a/src/anthropic/_version.py b/src/anthropic/_version.py index 040dba2e..722453a0 100644 --- a/src/anthropic/_version.py +++ b/src/anthropic/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "anthropic" -__version__ = "0.31.2" # x-release-please-version +__version__ = "0.32.0" # x-release-please-version
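-- 
A quick end-to-end check of the user-facing changes in this series — an
illustrative sketch, not part of the patches themselves. It exercises the
back-compat alias from 25a5b6c, a `ModelParam` value from c53efc7, and
per-request client customization via `with_options()` as documented in
5161f62; the prompt text and timeout value are placeholders.

    import httpx
    from anthropic import Anthropic
    from anthropic.types.input_json_delta import InputJSONDelta, InputJsonDelta

    # the deprecated name still resolves to the renamed class
    assert InputJsonDelta is InputJSONDelta

    client = Anthropic()

    # override the timeout for a single request without mutating the shared
    # client; with_options() returns a copied client (see README, patch 1/9)
    message = client.with_options(timeout=httpx.Timeout(60.0)).messages.create(
        max_tokens=1024,
        model="claude-3-5-sonnet-20240620",  # one of the ModelParam literals
        messages=[{"role": "user", "content": "Hello, world"}],
    )
    print(message.content)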