feat(api): add batch API (#768)
stainless-bot authored Apr 15, 2024
1 parent 52bcc47 commit 7fe34f2
Showing 6 changed files with 319 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 52
+configured_endpoints: 55
14 changes: 14 additions & 0 deletions api.md
@@ -337,3 +337,17 @@ Methods:

- <code title="get /threads/{thread_id}/messages/{message_id}/files/{file_id}">client.beta.threads.messages.files.<a href="./src/resources/beta/threads/messages/files.ts">retrieve</a>(threadId, messageId, fileId) -> MessageFile</code>
- <code title="get /threads/{thread_id}/messages/{message_id}/files">client.beta.threads.messages.files.<a href="./src/resources/beta/threads/messages/files.ts">list</a>(threadId, messageId, { ...params }) -> MessageFilesPage</code>

# Batches

Types:

- <code><a href="./src/resources/batches.ts">Batch</a></code>
- <code><a href="./src/resources/batches.ts">BatchError</a></code>
- <code><a href="./src/resources/batches.ts">BatchRequestCounts</a></code>

Methods:

- <code title="post /batches">client.batches.<a href="./src/resources/batches.ts">create</a>({ ...params }) -> Batch</code>
- <code title="get /batches/{batch_id}">client.batches.<a href="./src/resources/batches.ts">retrieve</a>(batchId) -> Batch</code>
- <code title="post /batches/{batch_id}/cancel">client.batches.<a href="./src/resources/batches.ts">cancel</a>(batchId) -> Batch</code>
7 changes: 7 additions & 0 deletions src/index.ts
@@ -150,6 +150,7 @@ export class OpenAI extends Core.APIClient {
models: API.Models = new API.Models(this);
fineTuning: API.FineTuning = new API.FineTuning(this);
beta: API.Beta = new API.Beta(this);
batches: API.Batches = new API.Batches(this);

protected override defaultQuery(): Core.DefaultQuery | undefined {
return this._options.defaultQuery;
@@ -285,6 +286,12 @@ export namespace OpenAI {

export import Beta = API.Beta;

export import Batches = API.Batches;
export import Batch = API.Batch;
export import BatchError = API.BatchError;
export import BatchRequestCounts = API.BatchRequestCounts;
export import BatchCreateParams = API.BatchCreateParams;

export import ErrorObject = API.ErrorObject;
export import FunctionDefinition = API.FunctionDefinition;
export import FunctionParameters = API.FunctionParameters;
225 changes: 225 additions & 0 deletions src/resources/batches.ts
@@ -0,0 +1,225 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

import * as Core from 'openai/core';
import { APIResource } from 'openai/resource';
import * as BatchesAPI from 'openai/resources/batches';

export class Batches extends APIResource {
/**
* Creates and executes a batch from an uploaded file of requests
*/
create(body: BatchCreateParams, options?: Core.RequestOptions): Core.APIPromise<Batch> {
return this._client.post('/batches', { body, ...options });
}

/**
* Retrieves a batch.
*/
retrieve(batchId: string, options?: Core.RequestOptions): Core.APIPromise<Batch> {
return this._client.get(`/batches/${batchId}`, options);
}

/**
* Cancels an in-progress batch.
*/
cancel(batchId: string, options?: Core.RequestOptions): Core.APIPromise<Batch> {
return this._client.post(`/batches/${batchId}/cancel`, options);
}
}

export interface Batch {
id: string;

/**
* The time frame within which the batch should be processed.
*/
completion_window: string;

/**
* The Unix timestamp (in seconds) for when the batch was created.
*/
created_at: number;

/**
* The OpenAI API endpoint used by the batch.
*/
endpoint: string;

/**
* The ID of the input file for the batch.
*/
input_file_id: string;

/**
* The object type, which is always `batch`.
*/
object: 'batch';

/**
* The current status of the batch.
*/
status:
| 'validating'
| 'failed'
| 'in_progress'
| 'finalizing'
| 'completed'
| 'expired'
| 'cancelling'
| 'cancelled';

/**
* The Unix timestamp (in seconds) for when the batch was cancelled.
*/
cancelled_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch started cancelling.
*/
cancelling_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch was completed.
*/
completed_at?: number;

/**
* The ID of the file containing the outputs of requests with errors.
*/
error_file_id?: string;

errors?: Batch.Errors;

/**
* The Unix timestamp (in seconds) for when the batch expired.
*/
expired_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch will expire.
*/
expires_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch failed.
*/
failed_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch started finalizing.
*/
finalizing_at?: number;

/**
* The Unix timestamp (in seconds) for when the batch started processing.
*/
in_progress_at?: number;

/**
* Set of 16 key-value pairs that can be attached to an object. This can be useful
* for storing additional information about the object in a structured format. Keys
can be a maximum of 64 characters long and values can be a maximum of 512
* characters long.
*/
metadata?: unknown | null;

/**
* The ID of the file containing the outputs of successfully executed requests.
*/
output_file_id?: string;

/**
* The request counts for different statuses within the batch.
*/
request_counts?: BatchRequestCounts;
}

export namespace Batch {
export interface Errors {
data?: Array<BatchesAPI.BatchError>;

/**
* The object type, which is always `list`.
*/
object?: string;
}
}

export interface BatchError {
/**
* An error code identifying the error type.
*/
code?: string;

/**
* The line number of the input file where the error occurred, if applicable.
*/
line?: number | null;

/**
* A human-readable message providing more details about the error.
*/
message?: string;

/**
* The name of the parameter that caused the error, if applicable.
*/
param?: string | null;
}

/**
* The request counts for different statuses within the batch.
*/
export interface BatchRequestCounts {
/**
* Number of requests that have been completed successfully.
*/
completed: number;

/**
* Number of requests that have failed.
*/
failed: number;

/**
* Total number of requests in the batch.
*/
total: number;
}

export interface BatchCreateParams {
/**
* The time frame within which the batch should be processed. Currently only `24h`
* is supported.
*/
completion_window: '24h';

/**
* The endpoint to be used for all requests in the batch. Currently only
* `/v1/chat/completions` is supported.
*/
endpoint: '/v1/chat/completions';

/**
* The ID of an uploaded file that contains requests for the new batch.
*
* See [upload file](https://platform.openai.com/docs/api-reference/files/create)
* for how to upload a file.
*
* Your input file must be formatted as a JSONL file, and must be uploaded with the
* purpose `batch`.
*/
input_file_id: string;

/**
* Optional custom metadata for the batch.
*/
metadata?: Record<string, string> | null;
}

export namespace Batches {
export import Batch = BatchesAPI.Batch;
export import BatchError = BatchesAPI.BatchError;
export import BatchRequestCounts = BatchesAPI.BatchRequestCounts;
export import BatchCreateParams = BatchesAPI.BatchCreateParams;
}
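
A hedged end-to-end sketch of how the new resource composes with the Files API. The JSONL request-line shape and the `purpose: 'batch'` upload value come from the platform's Batch documentation rather than from this diff, and the `toFile` helper and `client.files.content` are assumed to be available in this SDK version, so treat this as illustrative rather than canonical:

```ts
import OpenAI, { toFile } from 'openai';

const client = new OpenAI();

async function runBatch() {
  // One JSON object per line; `custom_id` ties each output line back to its input.
  const requests = [
    {
      custom_id: 'request-1',
      method: 'POST',
      url: '/v1/chat/completions',
      body: { model: 'gpt-3.5-turbo', messages: [{ role: 'user', content: 'Say hello.' }] },
    },
  ];
  const jsonl = requests.map((r) => JSON.stringify(r)).join('\n');

  // Upload the JSONL with purpose `batch`, as BatchCreateParams requires.
  const file = await client.files.create({
    file: await toFile(Buffer.from(jsonl), 'requests.jsonl'),
    purpose: 'batch',
  });

  let batch = await client.batches.create({
    completion_window: '24h',
    endpoint: '/v1/chat/completions',
    input_file_id: file.id,
  });

  // Poll until the batch reaches a terminal status.
  const terminal = new Set(['completed', 'failed', 'expired', 'cancelled']);
  while (!terminal.has(batch.status)) {
    await new Promise((resolve) => setTimeout(resolve, 60_000));
    batch = await client.batches.retrieve(batch.id);
    console.log(batch.status, batch.request_counts);
  }

  // Successful outputs land in a separate file referenced by `output_file_id`.
  if (batch.output_file_id) {
    const output = await client.files.content(batch.output_file_id);
    console.log(await output.text());
  }
}

runBatch();
```

In real use you would likely persist `batch.id` and poll from a scheduled job rather than looping in-process, since a batch can take up to the full `completion_window` to finish.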
1 change: 1 addition & 0 deletions src/resources/index.ts
@@ -3,6 +3,7 @@
export * from './chat/index';
export * from './shared';
export { Audio } from './audio/audio';
export { Batch, BatchError, BatchRequestCounts, BatchCreateParams, Batches } from './batches';
export { Beta } from './beta/beta';
export {
Completion,
71 changes: 71 additions & 0 deletions tests/api-resources/batches.test.ts
@@ -0,0 +1,71 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

import OpenAI from 'openai';
import { Response } from 'node-fetch';

const openai = new OpenAI({
apiKey: 'My API Key',
baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
});

describe('resource batches', () => {
test('create: only required params', async () => {
const responsePromise = openai.batches.create({
completion_window: '24h',
endpoint: '/v1/chat/completions',
input_file_id: 'string',
});
const rawResponse = await responsePromise.asResponse();
expect(rawResponse).toBeInstanceOf(Response);
const response = await responsePromise;
expect(response).not.toBeInstanceOf(Response);
const dataAndResponse = await responsePromise.withResponse();
expect(dataAndResponse.data).toBe(response);
expect(dataAndResponse.response).toBe(rawResponse);
});

test('create: required and optional params', async () => {
const response = await openai.batches.create({
completion_window: '24h',
endpoint: '/v1/chat/completions',
input_file_id: 'string',
metadata: { foo: 'string' },
});
});

test('retrieve', async () => {
const responsePromise = openai.batches.retrieve('string');
const rawResponse = await responsePromise.asResponse();
expect(rawResponse).toBeInstanceOf(Response);
const response = await responsePromise;
expect(response).not.toBeInstanceOf(Response);
const dataAndResponse = await responsePromise.withResponse();
expect(dataAndResponse.data).toBe(response);
expect(dataAndResponse.response).toBe(rawResponse);
});

test('retrieve: request options instead of params are passed correctly', async () => {
// ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
await expect(openai.batches.retrieve('string', { path: '/_stainless_unknown_path' })).rejects.toThrow(
OpenAI.NotFoundError,
);
});

test('cancel', async () => {
const responsePromise = openai.batches.cancel('string');
const rawResponse = await responsePromise.asResponse();
expect(rawResponse).toBeInstanceOf(Response);
const response = await responsePromise;
expect(response).not.toBeInstanceOf(Response);
const dataAndResponse = await responsePromise.withResponse();
expect(dataAndResponse.data).toBe(response);
expect(dataAndResponse.response).toBe(rawResponse);
});

test('cancel: request options instead of params are passed correctly', async () => {
// ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
await expect(openai.batches.cancel('string', { path: '/_stainless_unknown_path' })).rejects.toThrow(
OpenAI.NotFoundError,
);
});
});
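
The generated tests above lean on the SDK's lazy `APIPromise`: awaiting it yields the parsed model, while `.asResponse()` and `.withResponse()` expose the underlying fetch `Response`. A short sketch of the same pattern in application code (the batch ID and header name are just examples):

```ts
import OpenAI from 'openai';

const client = new OpenAI();

async function inspect() {
  // `.withResponse()` resolves to both the parsed Batch and the raw Response.
  const { data: batch, response } = await client.batches
    .retrieve('batch_abc123') // hypothetical batch ID
    .withResponse();

  console.log(response.status, response.headers.get('x-request-id'));
  console.log(batch.status);
}

inspect();
```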
