Skip to content

Commit

Permalink
refactor: extract model out to a named type and rename partialjson (#612)
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-app[bot] committed Jul 24, 2024
1 parent b534dc8 commit 83f4f75
Show file tree
Hide file tree
Showing 14 changed files with 134 additions and 189 deletions.
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
configured_endpoints: 2
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-e38cd52aed438cef6e0a25eeeab8ff6000583c3cf152a10f0c3610ceb3da7b4e.yml
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-5903ec2fd4efd7f261908bc4ec8ecd6b19cb9efa79637ad273583f1b763f80fd.yml
3 changes: 2 additions & 1 deletion api.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,15 @@ from anthropic.types import (
ContentBlockStartEvent,
ContentBlockStopEvent,
ImageBlockParam,
InputJsonDelta,
InputJSONDelta,
Message,
MessageDeltaEvent,
MessageDeltaUsage,
MessageParam,
MessageStartEvent,
MessageStopEvent,
MessageStreamEvent,
Model,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
Expand Down
49 changes: 22 additions & 27 deletions src/anthropic/resources/completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from __future__ import annotations

from typing import List, Union, overload
from typing import List, overload
from typing_extensions import Literal

import httpx
Expand All @@ -21,6 +21,7 @@
from .._streaming import Stream, AsyncStream
from .._base_client import make_request_options
from ..types.completion import Completion
from ..types.model_param import ModelParam

__all__ = ["Completions", "AsyncCompletions"]

Expand All @@ -39,7 +40,7 @@ def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
stop_sequences: List[str] | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -71,9 +72,8 @@ def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -144,7 +144,7 @@ def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
stream: Literal[True],
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -176,9 +176,8 @@ def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -249,7 +248,7 @@ def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
stream: bool,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -281,9 +280,8 @@ def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -354,7 +352,7 @@ def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
stop_sequences: List[str] | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -408,7 +406,7 @@ async def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
stop_sequences: List[str] | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -440,9 +438,8 @@ async def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -513,7 +510,7 @@ async def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
stream: Literal[True],
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -545,9 +542,8 @@ async def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -618,7 +614,7 @@ async def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
stream: bool,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -650,9 +646,8 @@ async def create(
Note that our models may stop _before_ reaching this maximum. This parameter
only specifies the absolute maximum number of tokens to generate.
model: The model that will complete your prompt.
See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
model: The model that will complete your prompt.\n\nSee
[models](https://docs.anthropic.com/en/docs/models-overview) for additional
details and options.
prompt: The prompt that you want Claude to complete.
Expand Down Expand Up @@ -723,7 +718,7 @@ async def create(
self,
*,
max_tokens_to_sample: int,
model: Union[str, Literal["claude-2.0", "claude-2.1", "claude-instant-1.2"]],
model: ModelParam,
prompt: str,
metadata: completion_create_params.Metadata | NotGiven = NOT_GIVEN,
stop_sequences: List[str] | NotGiven = NOT_GIVEN,
Expand Down
Loading

0 comments on commit 83f4f75

Please sign in to comment.