
Commit 5c723ec
Merge pull request #256 from Portkey-AI/fix/retrieveCalls
extra_body
csgulati09 authored Dec 11, 2024
2 parents 142d951 + f3eb5d4 commit 5c723ec
Showing 7 changed files with 255 additions and 95 deletions.
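
Every changed method follows the same pattern: the retrieve/content calls used to forward caller kwargs to the underlying OpenAI client unconditionally (as **kwargs or as extra_body=kwargs); they now attach extra_body only when kwargs is non-empty, so a plain call no longer sends an empty extra_body with the request. A minimal caller-side sketch of the resulting behaviour (the client setup and the extra field name are illustrative assumptions, not taken from this commit):

from portkey_ai import Portkey

# Assumed credentials/config; adjust for your environment.
portkey = Portkey(api_key="PORTKEY_API_KEY", virtual_key="OPENAI_VIRTUAL_KEY")

# No extra kwargs: the wrapper now calls the OpenAI client without extra_body.
model = portkey.models.retrieve("gpt-4o")

# Extra kwargs: they are collected into **kwargs and forwarded as extra_body.
file_obj = portkey.files.retrieve(
    "file-abc123",
    custom_field="value",  # hypothetical extra field, for illustration only
)
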
26 changes: 20 additions & 6 deletions portkey_ai/api_resources/apis/assistants.py
@@ -48,9 +48,14 @@ def create(
         return data

     def retrieve(self, assistant_id, **kwargs) -> Assistant:
-        response = self.openai_client.with_raw_response.beta.assistants.retrieve(
-            assistant_id=assistant_id, **kwargs
-        )
+        if kwargs:
+            response = self.openai_client.with_raw_response.beta.assistants.retrieve(
+                assistant_id=assistant_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.with_raw_response.beta.assistants.retrieve(
+                assistant_id=assistant_id
+            )
         data = Assistant(**json.loads(response.text))
         data._headers = response.headers

@@ -156,9 +161,18 @@ async def create(
         return data

     async def retrieve(self, assistant_id, **kwargs) -> Assistant:
-        response = await self.openai_client.with_raw_response.beta.assistants.retrieve(
-            assistant_id=assistant_id, **kwargs
-        )
+        if kwargs:
+            response = (
+                await self.openai_client.with_raw_response.beta.assistants.retrieve(
+                    assistant_id=assistant_id, extra_body=kwargs
+                )
+            )
+        else:
+            response = (
+                await self.openai_client.with_raw_response.beta.assistants.retrieve(
+                    assistant_id=assistant_id
+                )
+            )
         data = Assistant(**json.loads(response.text))
         data._headers = response.headers

22 changes: 16 additions & 6 deletions portkey_ai/api_resources/apis/batches.py
@@ -36,9 +36,14 @@ def create(
         return data

     def retrieve(self, batch_id, **kwargs) -> Batch:
-        response = self.openai_client.with_raw_response.batches.retrieve(
-            batch_id=batch_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = self.openai_client.with_raw_response.batches.retrieve(
+                batch_id=batch_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.with_raw_response.batches.retrieve(
+                batch_id=batch_id
+            )
         data = Batch(**json.loads(response.text))
         data._headers = response.headers

@@ -97,9 +102,14 @@ async def create(
         return data

     async def retrieve(self, batch_id, **kwargs) -> Batch:
-        response = await self.openai_client.with_raw_response.batches.retrieve(
-            batch_id=batch_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = await self.openai_client.with_raw_response.batches.retrieve(
+                batch_id=batch_id, extra_body=kwargs
+            )
+        else:
+            response = await self.openai_client.with_raw_response.batches.retrieve(
+                batch_id=batch_id
+            )
         data = Batch(**json.loads(response.text))
         data._headers = response.headers

27 changes: 21 additions & 6 deletions portkey_ai/api_resources/apis/fine_tuning.py
@@ -56,9 +56,14 @@ def create(
         return data

     def retrieve(self, fine_tuning_job_id: str, **kwargs) -> FineTuningJob:
-        response = self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
-            fine_tuning_job_id=fine_tuning_job_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
+                fine_tuning_job_id=fine_tuning_job_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
+                fine_tuning_job_id=fine_tuning_job_id
+            )
         data = FineTuningJob(**json.loads(response.text))
         data._headers = response.headers

@@ -179,9 +184,19 @@ async def create(
         return data

     async def retrieve(self, fine_tuning_job_id: str, **kwargs) -> FineTuningJob:
-        response = await self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
-            fine_tuning_job_id=fine_tuning_job_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = (
+                await self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
+                    fine_tuning_job_id=fine_tuning_job_id, extra_body=kwargs
+                )
+            )
+        else:
+            response = (
+                await self.openai_client.with_raw_response.fine_tuning.jobs.retrieve(
+                    fine_tuning_job_id=fine_tuning_job_id
+                )
+            )
+
         data = FineTuningJob(**json.loads(response.text))
         data._headers = response.headers

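The same three-line guard recurs in every method above and below. Purely as a design note (not part of this commit), the guard could be hoisted into a small shared helper; a sketch under that assumption:

from typing import Any, Dict

def extra_body_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Hypothetical helper: wrap kwargs as extra_body only when non-empty."""
    return {"extra_body": kwargs} if kwargs else {}

# A retrieve method could then make a single call, e.g.:
# response = self.openai_client.with_raw_response.batches.retrieve(
#     batch_id=batch_id, **extra_body_kwargs(kwargs)
# )
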
54 changes: 40 additions & 14 deletions portkey_ai/api_resources/apis/main_files.py
@@ -35,9 +35,14 @@ def list(self, purpose: Union[str, NotGiven] = NOT_GIVEN, **kwargs) -> FileList:
         return data

     def retrieve(self, file_id, **kwargs) -> FileObject:
-        response = self.openai_client.with_raw_response.files.retrieve(
-            file_id=file_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = self.openai_client.with_raw_response.files.retrieve(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.with_raw_response.files.retrieve(
+                file_id=file_id
+            )
         data = FileObject(**json.loads(response.text))
         data._headers = response.headers

@@ -53,11 +58,21 @@ def delete(self, file_id, **kwargs) -> FileDeleted:
         return data

     def content(self, file_id, **kwargs) -> Any:
-        response = self.openai_client.files.content(file_id=file_id, extra_body=kwargs)
+        if kwargs:
+            response = self.openai_client.files.content(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.files.content(file_id=file_id)
         return response

     def retrieve_content(self, file_id, **kwargs) -> Any:
-        response = self.openai_client.files.content(file_id=file_id, extra_body=kwargs)
+        if kwargs:
+            response = self.openai_client.files.content(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.files.content(file_id=file_id)
         return response

     def wait_for_processing(
@@ -102,9 +117,14 @@ async def list(
         return data

     async def retrieve(self, file_id, **kwargs) -> FileObject:
-        response = await self.openai_client.with_raw_response.files.retrieve(
-            file_id=file_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = await self.openai_client.with_raw_response.files.retrieve(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = await self.openai_client.with_raw_response.files.retrieve(
+                file_id=file_id
+            )
         data = FileObject(**json.loads(response.text))
         data._headers = response.headers

@@ -120,15 +140,21 @@ async def delete(self, file_id, **kwargs) -> FileDeleted:
         return data

     async def content(self, file_id, **kwargs) -> Any:
-        response = await self.openai_client.files.content(
-            file_id=file_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = await self.openai_client.files.content(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = await self.openai_client.files.content(file_id=file_id)
         return response

     async def retrieve_content(self, file_id, **kwargs) -> Any:
-        response = await self.openai_client.files.content(
-            file_id=file_id, extra_body=kwargs
-        )
+        if kwargs:
+            response = await self.openai_client.files.content(
+                file_id=file_id, extra_body=kwargs
+            )
+        else:
+            response = await self.openai_client.files.content(file_id=file_id)
         return response

     async def wait_for_processing(
22 changes: 16 additions & 6 deletions portkey_ai/api_resources/apis/models.py
@@ -20,9 +20,14 @@ def list(self, **kwargs) -> ModelList:
     def retrieve(
         self, model: str, *, timeout: Union[float, NotGiven] = NOT_GIVEN, **kwargs
     ) -> Model:
-        response = self.openai_client.with_raw_response.models.retrieve(
-            model=model, timeout=timeout, extra_body=kwargs
-        )
+        if kwargs:
+            response = self.openai_client.with_raw_response.models.retrieve(
+                model=model, timeout=timeout, extra_body=kwargs
+            )
+        else:
+            response = self.openai_client.with_raw_response.models.retrieve(
+                model=model, timeout=timeout
+            )
         data = Model(**json.loads(response.text))
         data._headers = response.headers
         return data
@@ -52,9 +57,14 @@ async def list(self, **kwargs) -> ModelList:
     async def retrieve(
         self, model: str, *, timeout: Union[float, NotGiven] = NOT_GIVEN, **kwargs
     ) -> Model:
-        response = await self.openai_client.with_raw_response.models.retrieve(
-            model=model, timeout=timeout, extra_body=kwargs
-        )
+        if kwargs:
+            response = await self.openai_client.with_raw_response.models.retrieve(
+                model=model, timeout=timeout, extra_body=kwargs
+            )
+        else:
+            response = await self.openai_client.with_raw_response.models.retrieve(
+                model=model, timeout=timeout
+            )
         data = Model(**json.loads(response.text))
         data._headers = response.headers
         return data
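
The async classes mirror the sync changes exactly; only the await (and, where the call no longer fits on one line, an extra pair of parentheses) differs. A minimal async usage sketch, assuming the AsyncPortkey client with the usual OpenAI-style attribute paths and illustrative IDs:

import asyncio

from portkey_ai import AsyncPortkey

async def main() -> None:
    # Assumed credentials/config; adjust for your environment.
    portkey = AsyncPortkey(api_key="PORTKEY_API_KEY", virtual_key="OPENAI_VIRTUAL_KEY")

    # Without extra kwargs, no extra_body is attached to the request.
    batch = await portkey.batches.retrieve("batch_abc123")

    # With extra kwargs, they are forwarded to the provider as extra_body.
    job = await portkey.fine_tuning.jobs.retrieve(
        "ftjob-abc123",
        custom_field="value",  # hypothetical extra field, for illustration only
    )
    print(batch, job)

asyncio.run(main())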