refactor(api): remove deprecated endpoints (#1067)
The fine-tunes and edits APIs are no longer
provided by OpenAI.
This is not a breaking change: attempting to
call these APIs, even from older SDK versions,
already results in an error at runtime.
stainless-bot committed Jan 12, 2024
1 parent 745b9e0 commit 199ddcd
Showing 24 changed files with 59 additions and 2,032 deletions.
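For anyone still calling the removed resources, a minimal migration sketch (not part of the diff; the model name and file ID are the placeholder values used in the README change below):

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# Before this commit (now removed):
#   client.fine_tunes.create(training_file="file-XGinujblHPwGLSztz8cPS8XY")
# Replacement: the fine-tuning jobs API.
job = client.fine_tuning.jobs.create(
    model="gpt-3.5-turbo",
    training_file="file-abc123",  # placeholder file ID from the README example
)
print(job.id, job.status)
```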
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 57
+configured_endpoints: 51
5 changes: 3 additions & 2 deletions README.md
@@ -296,8 +296,9 @@ from openai import OpenAI
client = OpenAI()

try:
-    client.fine_tunes.create(
-        training_file="file-XGinujblHPwGLSztz8cPS8XY",
+    client.fine_tuning.jobs.create(
+        model="gpt-3.5-turbo",
+        training_file="file-abc123",
    )
except openai.APIConnectionError as e:
    print("The server could not be reached")
28 changes: 0 additions & 28 deletions api.md
@@ -50,18 +50,6 @@ Methods:

- <code title="post /chat/completions">client.chat.completions.<a href="./src/openai/resources/chat/completions.py">create</a>(\*\*<a href="src/openai/types/chat/completion_create_params.py">params</a>) -> <a href="./src/openai/types/chat/chat_completion.py">ChatCompletion</a></code>

# Edits

Types:

```python
from openai.types import Edit
```

Methods:

- <code title="post /edits">client.edits.<a href="./src/openai/resources/edits.py">create</a>(\*\*<a href="src/openai/types/edit_create_params.py">params</a>) -> <a href="./src/openai/types/edit.py">Edit</a></code>

# Embeddings

Types:
@@ -182,22 +170,6 @@ Methods:
- <code title="post /fine_tuning/jobs/{fine_tuning_job_id}/cancel">client.fine_tuning.jobs.<a href="./src/openai/resources/fine_tuning/jobs.py">cancel</a>(fine_tuning_job_id) -> <a href="./src/openai/types/fine_tuning/fine_tuning_job.py">FineTuningJob</a></code>
- <code title="get /fine_tuning/jobs/{fine_tuning_job_id}/events">client.fine_tuning.jobs.<a href="./src/openai/resources/fine_tuning/jobs.py">list_events</a>(fine_tuning_job_id, \*\*<a href="src/openai/types/fine_tuning/job_list_events_params.py">params</a>) -> <a href="./src/openai/types/fine_tuning/fine_tuning_job_event.py">SyncCursorPage[FineTuningJobEvent]</a></code>

# FineTunes

Types:

```python
from openai.types import FineTune, FineTuneEvent, FineTuneEventsListResponse
```

Methods:

- <code title="post /fine-tunes">client.fine_tunes.<a href="./src/openai/resources/fine_tunes.py">create</a>(\*\*<a href="src/openai/types/fine_tune_create_params.py">params</a>) -> <a href="./src/openai/types/fine_tune.py">FineTune</a></code>
- <code title="get /fine-tunes/{fine_tune_id}">client.fine_tunes.<a href="./src/openai/resources/fine_tunes.py">retrieve</a>(fine_tune_id) -> <a href="./src/openai/types/fine_tune.py">FineTune</a></code>
- <code title="get /fine-tunes">client.fine_tunes.<a href="./src/openai/resources/fine_tunes.py">list</a>() -> <a href="./src/openai/types/fine_tune.py">SyncPage[FineTune]</a></code>
- <code title="post /fine-tunes/{fine_tune_id}/cancel">client.fine_tunes.<a href="./src/openai/resources/fine_tunes.py">cancel</a>(fine_tune_id) -> <a href="./src/openai/types/fine_tune.py">FineTune</a></code>
- <code title="get /fine-tunes/{fine_tune_id}/events">client.fine_tunes.<a href="./src/openai/resources/fine_tunes.py">list_events</a>(fine_tune_id, \*\*<a href="src/openai/types/fine_tune_list_events_params.py">params</a>) -> <a href="./src/openai/types/fine_tune_events_list_response.py">FineTuneEventsListResponse</a></code>

# Beta

## Assistants
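The removed `/edits` endpoint has no one-to-one replacement in this SDK; a rough, hedged equivalent is to phrase the edit as an instruction to the chat completions API listed above (prompt wording and model choice here are illustrative):

```python
from openai import OpenAI

client = OpenAI()

# Rough stand-in for the removed client.edits.create(...) call:
# express the edit instruction as a system message instead.
completion = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "Fix the spelling mistakes in the user's text."},
        {"role": "user", "content": "What day of the wek is it?"},
    ],
)
print(completion.choices[0].message.content)
```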
2 changes: 0 additions & 2 deletions src/openai/__init__.py
@@ -316,12 +316,10 @@ def _reset_client() -> None:  # type: ignore[reportUnusedFunction]
beta as beta,
chat as chat,
audio as audio,
edits as edits,
files as files,
images as images,
models as models,
embeddings as embeddings,
fine_tunes as fine_tunes,
completions as completions,
fine_tuning as fine_tuning,
moderations as moderations,
12 changes: 0 additions & 12 deletions src/openai/_client.py
@@ -49,15 +49,13 @@
class OpenAI(SyncAPIClient):
completions: resources.Completions
chat: resources.Chat
edits: resources.Edits
embeddings: resources.Embeddings
files: resources.Files
images: resources.Images
audio: resources.Audio
moderations: resources.Moderations
models: resources.Models
fine_tuning: resources.FineTuning
fine_tunes: resources.FineTunes
beta: resources.Beta
with_raw_response: OpenAIWithRawResponse

@@ -125,15 +123,13 @@ def __init__(

self.completions = resources.Completions(self)
self.chat = resources.Chat(self)
self.edits = resources.Edits(self)
self.embeddings = resources.Embeddings(self)
self.files = resources.Files(self)
self.images = resources.Images(self)
self.audio = resources.Audio(self)
self.moderations = resources.Moderations(self)
self.models = resources.Models(self)
self.fine_tuning = resources.FineTuning(self)
self.fine_tunes = resources.FineTunes(self)
self.beta = resources.Beta(self)
self.with_raw_response = OpenAIWithRawResponse(self)

@@ -249,15 +245,13 @@ def _make_status_error(
class AsyncOpenAI(AsyncAPIClient):
completions: resources.AsyncCompletions
chat: resources.AsyncChat
edits: resources.AsyncEdits
embeddings: resources.AsyncEmbeddings
files: resources.AsyncFiles
images: resources.AsyncImages
audio: resources.AsyncAudio
moderations: resources.AsyncModerations
models: resources.AsyncModels
fine_tuning: resources.AsyncFineTuning
fine_tunes: resources.AsyncFineTunes
beta: resources.AsyncBeta
with_raw_response: AsyncOpenAIWithRawResponse

@@ -325,15 +319,13 @@ def __init__(

self.completions = resources.AsyncCompletions(self)
self.chat = resources.AsyncChat(self)
self.edits = resources.AsyncEdits(self)
self.embeddings = resources.AsyncEmbeddings(self)
self.files = resources.AsyncFiles(self)
self.images = resources.AsyncImages(self)
self.audio = resources.AsyncAudio(self)
self.moderations = resources.AsyncModerations(self)
self.models = resources.AsyncModels(self)
self.fine_tuning = resources.AsyncFineTuning(self)
self.fine_tunes = resources.AsyncFineTunes(self)
self.beta = resources.AsyncBeta(self)
self.with_raw_response = AsyncOpenAIWithRawResponse(self)

@@ -450,31 +442,27 @@ class OpenAIWithRawResponse:
def __init__(self, client: OpenAI) -> None:
self.completions = resources.CompletionsWithRawResponse(client.completions)
self.chat = resources.ChatWithRawResponse(client.chat)
self.edits = resources.EditsWithRawResponse(client.edits)
self.embeddings = resources.EmbeddingsWithRawResponse(client.embeddings)
self.files = resources.FilesWithRawResponse(client.files)
self.images = resources.ImagesWithRawResponse(client.images)
self.audio = resources.AudioWithRawResponse(client.audio)
self.moderations = resources.ModerationsWithRawResponse(client.moderations)
self.models = resources.ModelsWithRawResponse(client.models)
self.fine_tuning = resources.FineTuningWithRawResponse(client.fine_tuning)
self.fine_tunes = resources.FineTunesWithRawResponse(client.fine_tunes)
self.beta = resources.BetaWithRawResponse(client.beta)


class AsyncOpenAIWithRawResponse:
def __init__(self, client: AsyncOpenAI) -> None:
self.completions = resources.AsyncCompletionsWithRawResponse(client.completions)
self.chat = resources.AsyncChatWithRawResponse(client.chat)
self.edits = resources.AsyncEditsWithRawResponse(client.edits)
self.embeddings = resources.AsyncEmbeddingsWithRawResponse(client.embeddings)
self.files = resources.AsyncFilesWithRawResponse(client.files)
self.images = resources.AsyncImagesWithRawResponse(client.images)
self.audio = resources.AsyncAudioWithRawResponse(client.audio)
self.moderations = resources.AsyncModerationsWithRawResponse(client.moderations)
self.models = resources.AsyncModelsWithRawResponse(client.models)
self.fine_tuning = resources.AsyncFineTuningWithRawResponse(client.fine_tuning)
self.fine_tunes = resources.AsyncFineTunesWithRawResponse(client.fine_tunes)
self.beta = resources.AsyncBetaWithRawResponse(client.beta)


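The `*WithRawResponse` wrappers shown above mirror each resource and expose the raw HTTP response; a brief sketch of typical usage (the header name read below is illustrative):

```python
from openai import OpenAI

client = OpenAI()

# Each resource has a .with_raw_response variant that returns the HTTP
# response wrapper instead of the parsed model directly.
response = client.chat.completions.with_raw_response.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response.headers.get("x-request-id"))  # illustrative header lookup
completion = response.parse()  # parsed ChatCompletion
print(completion.choices[0].message.content)
```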
14 changes: 0 additions & 14 deletions src/openai/_module_client.py
@@ -18,12 +18,6 @@ def __load__(self) -> resources.Beta:
return _load_client().beta


class EditsProxy(LazyProxy[resources.Edits]):
@override
def __load__(self) -> resources.Edits:
return _load_client().edits


class FilesProxy(LazyProxy[resources.Files]):
@override
def __load__(self) -> resources.Files:
@@ -54,12 +48,6 @@ def __load__(self) -> resources.Embeddings:
return _load_client().embeddings


class FineTunesProxy(LazyProxy[resources.FineTunes]):
@override
def __load__(self) -> resources.FineTunes:
return _load_client().fine_tunes


class CompletionsProxy(LazyProxy[resources.Completions]):
@override
def __load__(self) -> resources.Completions:
@@ -80,13 +68,11 @@ def __load__(self) -> resources.FineTuning:

chat: resources.Chat = ChatProxy().__as_proxied__()
beta: resources.Beta = BetaProxy().__as_proxied__()
edits: resources.Edits = EditsProxy().__as_proxied__()
files: resources.Files = FilesProxy().__as_proxied__()
audio: resources.Audio = AudioProxy().__as_proxied__()
images: resources.Images = ImagesProxy().__as_proxied__()
models: resources.Models = ModelsProxy().__as_proxied__()
embeddings: resources.Embeddings = EmbeddingsProxy().__as_proxied__()
fine_tunes: resources.FineTunes = FineTunesProxy().__as_proxied__()
completions: resources.Completions = CompletionsProxy().__as_proxied__()
moderations: resources.Moderations = ModerationsProxy().__as_proxied__()
fine_tuning: resources.FineTuning = FineTuningProxy().__as_proxied__()
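These module-level attributes are lazy proxies onto a default client, so the deleted `edits` and `fine_tunes` entries also remove the corresponding `openai.edits` / `openai.fine_tunes` shortcuts. A small sketch of using a surviving proxy, assuming `OPENAI_API_KEY` is set:

```python
import openai

# Attribute access on the module goes through the proxies above; the first
# use lazily instantiates a default client under the hood.
models = openai.models.list()
print([m.id for m in models.data][:3])
```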
10 changes: 0 additions & 10 deletions src/openai/resources/__init__.py
@@ -3,12 +3,10 @@
from .beta import Beta, AsyncBeta, BetaWithRawResponse, AsyncBetaWithRawResponse
from .chat import Chat, AsyncChat, ChatWithRawResponse, AsyncChatWithRawResponse
from .audio import Audio, AsyncAudio, AudioWithRawResponse, AsyncAudioWithRawResponse
from .edits import Edits, AsyncEdits, EditsWithRawResponse, AsyncEditsWithRawResponse
from .files import Files, AsyncFiles, FilesWithRawResponse, AsyncFilesWithRawResponse
from .images import Images, AsyncImages, ImagesWithRawResponse, AsyncImagesWithRawResponse
from .models import Models, AsyncModels, ModelsWithRawResponse, AsyncModelsWithRawResponse
from .embeddings import Embeddings, AsyncEmbeddings, EmbeddingsWithRawResponse, AsyncEmbeddingsWithRawResponse
from .fine_tunes import FineTunes, AsyncFineTunes, FineTunesWithRawResponse, AsyncFineTunesWithRawResponse
from .completions import Completions, AsyncCompletions, CompletionsWithRawResponse, AsyncCompletionsWithRawResponse
from .fine_tuning import FineTuning, AsyncFineTuning, FineTuningWithRawResponse, AsyncFineTuningWithRawResponse
from .moderations import Moderations, AsyncModerations, ModerationsWithRawResponse, AsyncModerationsWithRawResponse
@@ -22,10 +20,6 @@
"AsyncChat",
"ChatWithRawResponse",
"AsyncChatWithRawResponse",
"Edits",
"AsyncEdits",
"EditsWithRawResponse",
"AsyncEditsWithRawResponse",
"Embeddings",
"AsyncEmbeddings",
"EmbeddingsWithRawResponse",
@@ -54,10 +48,6 @@
"AsyncFineTuning",
"FineTuningWithRawResponse",
"AsyncFineTuningWithRawResponse",
"FineTunes",
"AsyncFineTunes",
"FineTunesWithRawResponse",
"AsyncFineTunesWithRawResponse",
"Beta",
"AsyncBeta",
"BetaWithRawResponse",
12 changes: 6 additions & 6 deletions src/openai/resources/chat/completions.py
@@ -185,7 +185,7 @@ def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
@@ -371,7 +371,7 @@ def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
@@ -557,7 +557,7 @@ def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
@@ -833,7 +833,7 @@ async def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
@@ -1019,7 +1019,7 @@ async def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
@@ -1205,7 +1205,7 @@ async def create(
will not call a function and instead generates a message. `auto` means the model
can pick between generating a message or calling a function. Specifying a
particular function via
`{"type: "function", "function": {"name": "my_function"}}` forces the model to
`{"type": "function", "function": {"name": "my_function"}}` forces the model to
call that function.
`none` is the default when no functions are present. `auto` is the default if
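The corrected docstring describes forcing a particular function; a minimal sketch of that usage (the function name and schema are illustrative, reusing the docstring's `my_function` placeholder):

```python
from openai import OpenAI

client = OpenAI()

tools = [
    {
        "type": "function",
        "function": {
            "name": "my_function",  # placeholder name from the docstring
            "description": "Example function the model is forced to call.",
            "parameters": {
                "type": "object",
                "properties": {"query": {"type": "string"}},
                "required": ["query"],
            },
        },
    }
]

completion = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Look something up for me."}],
    tools=tools,
    # The corrected literal: forces the model to call my_function.
    tool_choice={"type": "function", "function": {"name": "my_function"}},
)
print(completion.choices[0].message.tool_calls)
```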