diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 485bcd4e9..a2b09ee37 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "4.31.0"
+ ".": "4.32.0"
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6a28f8d3c..3be8b4c02 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,29 @@
# Changelog
+## 4.32.0 (2024-04-01)
+
+Full Changelog: [v4.31.0...v4.32.0](https://github.com/openai/openai-node/compare/v4.31.0...v4.32.0)
+
+### Features
+
+* **api:** add support for filtering messages by run_id ([#747](https://github.com/openai/openai-node/issues/747)) ([9a397ac](https://github.com/openai/openai-node/commit/9a397acffa9f10c3f48e86e3bdb3851770f87b42))
+* **api:** run polling helpers ([#749](https://github.com/openai/openai-node/issues/749)) ([02920ae](https://github.com/openai/openai-node/commit/02920ae082480fc7a7ffe9fa583d053a40dc7120))
+
+
+### Chores
+
+* **deps:** remove unused dependency digest-fetch ([#748](https://github.com/openai/openai-node/issues/748)) ([5376837](https://github.com/openai/openai-node/commit/537683734d39dd956a7dcef4339c1167ce6fe13c))
+
+
+### Documentation
+
+* **readme:** change undocumented params wording ([#744](https://github.com/openai/openai-node/issues/744)) ([8796691](https://github.com/openai/openai-node/commit/87966911045275db86844dfdcde59653edaef264))
+
+
+### Refactors
+
+* rename createAndStream to stream ([02920ae](https://github.com/openai/openai-node/commit/02920ae082480fc7a7ffe9fa583d053a40dc7120))
+
## 4.31.0 (2024-03-30)
Full Changelog: [v4.30.0...v4.31.0](https://github.com/openai/openai-node/compare/v4.30.0...v4.31.0)
diff --git a/README.md b/README.md
index 787dd25ae..2adc81afc 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ You can import in Deno via:
```ts
-import OpenAI from 'https://deno.land/x/openai@v4.31.0/mod.ts';
+import OpenAI from 'https://deno.land/x/openai@v4.32.0/mod.ts';
```
@@ -100,13 +100,30 @@ Documentation for each method, request param, and response field are available i
> [!IMPORTANT]
> Previous versions of this SDK used a `Configuration` class. See the [v3 to v4 migration guide](https://github.com/openai/openai-node/discussions/217).
+### Polling Helpers
+
+When interacting with the API some actions such as starting a Run may take time to complete. The SDK includes
+helper functions which will poll the status until it reaches a terminal state and then return the resulting object.
+If an API method results in an action which could benefit from polling there will be a corresponding version of the
+method ending in 'AndPoll'.
+
+For instance to create a Run and poll until it reaches a terminal state you can run:
+
+```ts
+const run = await openai.beta.threads.runs.createAndPoll(thread.id, {
+ assistant_id: assistantId,
+});
+```
+
+More information on the lifecycle of a Run can be found in the [Run Lifecycle Documentation](https://platform.openai.com/docs/assistants/how-it-works/run-lifecycle)
+
### Streaming Helpers
The SDK also includes helpers to process streams and handle the incoming events.
```ts
const run = openai.beta.threads.runs
- .createAndStream(thread.id, {
+ .stream(thread.id, {
assistant_id: assistant.id,
})
.on('textCreated', (text) => process.stdout.write('\nassistant > '))
@@ -454,7 +471,7 @@ await client.post('/some/path', {
});
```
-#### Undocumented params
+#### Undocumented request params
To make requests using undocumented parameters, you may use `// @ts-expect-error` on the undocumented
parameter. This library doesn't validate at runtime that the request matches the type, so any extra values you
@@ -475,7 +492,7 @@ extra param in the body.
If you want to explicitly send an extra argument, you can do so with the `query`, `body`, and `headers` request
options.
-#### Undocumented properties
+#### Undocumented response properties
To access undocumented response properties, you may access the response object with `// @ts-expect-error` on
the response object, or cast the response object to the requisite type. Like the request params, we do not
diff --git a/api.md b/api.md
index 504a103c7..2f82dd17b 100644
--- a/api.md
+++ b/api.md
@@ -224,6 +224,7 @@ Methods:
- client.beta.threads.update(threadId, { ...params }) -> Thread
- client.beta.threads.del(threadId) -> ThreadDeleted
- client.beta.threads.createAndRun({ ...params }) -> Run
+- client.beta.threads.createAndRunPoll(body, options?) -> Promise<Threads.Run>
- client.beta.threads.createAndRunStream(body, options?) -> AssistantStream
### Runs
@@ -242,7 +243,11 @@ Methods:
- client.beta.threads.runs.list(threadId, { ...params }) -> RunsPage
- client.beta.threads.runs.cancel(threadId, runId) -> Run
- client.beta.threads.runs.submitToolOutputs(threadId, runId, { ...params }) -> Run
+- client.beta.threads.runs.createAndPoll(threadId, body, options?) -> Promise<Run>
- client.beta.threads.runs.createAndStream(threadId, body, options?) -> AssistantStream
+- client.beta.threads.runs.poll(threadId, runId, options?) -> Promise<Run>
+- client.beta.threads.runs.stream(threadId, body, options?) -> AssistantStream
+- client.beta.threads.runs.submitToolOutputsAndPoll(threadId, runId, body, options?) -> Promise<Run>
- client.beta.threads.runs.submitToolOutputsStream(threadId, runId, body, options?) -> AssistantStream
#### Steps
diff --git a/build-deno b/build-deno
index 66639030f..19eefa7c3 100755
--- a/build-deno
+++ b/build-deno
@@ -14,7 +14,7 @@ This is a build produced from https://github.com/openai/openai-node – please g
Usage:
\`\`\`ts
-import OpenAI from "https://deno.land/x/openai@v4.31.0/mod.ts";
+import OpenAI from "https://deno.land/x/openai@v4.32.0/mod.ts";
const client = new OpenAI();
\`\`\`
diff --git a/examples/assistant-stream-raw.ts b/examples/assistant-stream-raw.ts
old mode 100644
new mode 100755
diff --git a/examples/assistant-stream.ts b/examples/assistant-stream.ts
old mode 100644
new mode 100755
index 36c4ed152..6c71bf23b
--- a/examples/assistant-stream.ts
+++ b/examples/assistant-stream.ts
@@ -31,7 +31,7 @@ async function main() {
console.log('Created thread with Id: ' + threadId);
const run = openai.beta.threads.runs
- .createAndStream(threadId, {
+ .stream(threadId, {
assistant_id: assistantId,
})
//Subscribe to streaming events and log them
diff --git a/examples/assistants.ts b/examples/assistants.ts
old mode 100644
new mode 100755
index bbc2f80ce..40238ac86
--- a/examples/assistants.ts
+++ b/examples/assistants.ts
@@ -1,7 +1,6 @@
#!/usr/bin/env -S npm run tsn -T
import OpenAI from 'openai';
-import { sleep } from 'openai/core';
/**
* Example of polling for a complete response from an assistant
@@ -32,24 +31,17 @@ async function main() {
let threadId = thread.id;
console.log('Created thread with Id: ' + threadId);
- const run = await openai.beta.threads.runs.create(thread.id, {
+ const run = await openai.beta.threads.runs.createAndPoll(thread.id, {
assistant_id: assistantId,
additional_instructions: 'Please address the user as Jane Doe. The user has a premium account.',
});
- console.log('Created run with Id: ' + run.id);
-
- while (true) {
- const result = await openai.beta.threads.runs.retrieve(thread.id, run.id);
- if (result.status == 'completed') {
- const messages = await openai.beta.threads.messages.list(thread.id);
- for (const message of messages.getPaginatedItems()) {
- console.log(message);
- }
- break;
- } else {
- console.log('Waiting for completion. Current status: ' + result.status);
- await sleep(5000);
+ console.log('Run finished with status: ' + run.status);
+
+ if (run.status == 'completed') {
+ const messages = await openai.beta.threads.messages.list(thread.id);
+ for (const message of messages.getPaginatedItems()) {
+ console.log(message);
}
}
}
diff --git a/helpers.md b/helpers.md
index 9a94a618e..7a34c3023 100644
--- a/helpers.md
+++ b/helpers.md
@@ -13,7 +13,7 @@ More information can be found in the documentation: [Assistant Streaming](https:
```ts
const run = openai.beta.threads.runs
- .createAndStream(thread.id, {
+ .stream(thread.id, {
assistant_id: assistant.id,
})
.on('textCreated', (text) => process.stdout.write('\nassistant > '))
@@ -41,7 +41,7 @@ const run = openai.beta.threads.runs
There are three helper methods for creating streams:
```ts
-openai.beta.threads.runs.createAndStream();
+openai.beta.threads.runs.stream();
```
This method can be used to start and stream the response to an existing run with an associated thread
diff --git a/package.json b/package.json
index 250e0939a..11fa0c5e2 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "openai",
- "version": "4.31.0",
+ "version": "4.32.0",
"description": "The official TypeScript library for the OpenAI API",
"author": "OpenAI <support@openai.com>",
"types": "dist/index.d.ts",
@@ -29,7 +29,6 @@
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
- "digest-fetch": "^1.3.0",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7",
diff --git a/src/resources/beta/beta.ts b/src/resources/beta/beta.ts
index 43ee8c7e7..7d4457319 100644
--- a/src/resources/beta/beta.ts
+++ b/src/resources/beta/beta.ts
@@ -37,5 +37,6 @@ export namespace Beta {
export import ThreadCreateAndRunParams = ThreadsAPI.ThreadCreateAndRunParams;
export import ThreadCreateAndRunParamsNonStreaming = ThreadsAPI.ThreadCreateAndRunParamsNonStreaming;
export import ThreadCreateAndRunParamsStreaming = ThreadsAPI.ThreadCreateAndRunParamsStreaming;
+ export import ThreadCreateAndRunPollParams = ThreadsAPI.ThreadCreateAndRunPollParams;
export import ThreadCreateAndRunStreamParams = ThreadsAPI.ThreadCreateAndRunStreamParams;
}
diff --git a/src/resources/beta/index.ts b/src/resources/beta/index.ts
index 7f35730fb..e43ff7315 100644
--- a/src/resources/beta/index.ts
+++ b/src/resources/beta/index.ts
@@ -28,6 +28,7 @@ export {
ThreadCreateAndRunParams,
ThreadCreateAndRunParamsNonStreaming,
ThreadCreateAndRunParamsStreaming,
+ ThreadCreateAndRunPollParams,
ThreadCreateAndRunStreamParams,
Threads,
} from './threads/index';
diff --git a/src/resources/beta/threads/index.ts b/src/resources/beta/threads/index.ts
index 097a52819..ac2f9a4fa 100644
--- a/src/resources/beta/threads/index.ts
+++ b/src/resources/beta/threads/index.ts
@@ -36,10 +36,13 @@ export {
RunCreateParamsStreaming,
RunUpdateParams,
RunListParams,
+ RunCreateAndPollParams,
RunCreateAndStreamParams,
+ RunStreamParams,
RunSubmitToolOutputsParams,
RunSubmitToolOutputsParamsNonStreaming,
RunSubmitToolOutputsParamsStreaming,
+ RunSubmitToolOutputsAndPollParams,
RunSubmitToolOutputsStreamParams,
RunsPage,
Runs,
@@ -52,6 +55,7 @@ export {
ThreadCreateAndRunParams,
ThreadCreateAndRunParamsNonStreaming,
ThreadCreateAndRunParamsStreaming,
+ ThreadCreateAndRunPollParams,
ThreadCreateAndRunStreamParams,
Threads,
} from './threads';
diff --git a/src/resources/beta/threads/messages/messages.ts b/src/resources/beta/threads/messages/messages.ts
index 1c37eb2ff..28026f3ff 100644
--- a/src/resources/beta/threads/messages/messages.ts
+++ b/src/resources/beta/threads/messages/messages.ts
@@ -551,6 +551,11 @@ export interface MessageListParams extends CursorPageParams {
* order and `desc` for descending order.
*/
order?: 'asc' | 'desc';
+
+ /**
+ * Filter messages by the run ID that generated them.
+ */
+ run_id?: string;
}
export namespace Messages {
diff --git a/src/resources/beta/threads/runs/index.ts b/src/resources/beta/threads/runs/index.ts
index 636b5d850..c9b2d1ef5 100644
--- a/src/resources/beta/threads/runs/index.ts
+++ b/src/resources/beta/threads/runs/index.ts
@@ -31,10 +31,13 @@ export {
RunCreateParamsStreaming,
RunUpdateParams,
RunListParams,
+ RunCreateAndPollParams,
RunCreateAndStreamParams,
+ RunStreamParams,
RunSubmitToolOutputsParams,
RunSubmitToolOutputsParamsNonStreaming,
RunSubmitToolOutputsParamsStreaming,
+ RunSubmitToolOutputsAndPollParams,
RunSubmitToolOutputsStreamParams,
RunsPage,
Runs,
diff --git a/src/resources/beta/threads/runs/runs.ts b/src/resources/beta/threads/runs/runs.ts
index 54c671131..5dfc7d595 100644
--- a/src/resources/beta/threads/runs/runs.ts
+++ b/src/resources/beta/threads/runs/runs.ts
@@ -5,6 +5,7 @@ import { APIPromise } from 'openai/core';
import { APIResource } from 'openai/resource';
import { isRequestOptions } from 'openai/core';
import { AssistantStream, RunCreateParamsBaseStream } from 'openai/lib/AssistantStream';
+import { sleep } from 'openai/core';
import { RunSubmitToolOutputsParamsStream } from 'openai/lib/AssistantStream';
import * as RunsAPI from 'openai/resources/beta/threads/runs/runs';
import * as AssistantsAPI from 'openai/resources/beta/assistants/assistants';
@@ -102,8 +103,24 @@ export class Runs extends APIResource {
});
}
+ /**
+ * A helper to create a run and poll for a terminal state. More information on Run
+ * lifecycles can be found here:
+ * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
+ */
+ async createAndPoll(
+ threadId: string,
+ body: RunCreateParamsNonStreaming,
+ options?: Core.RequestOptions & { pollIntervalMs?: number },
+ ): Promise<Run> {
+ const run = await this.create(threadId, body, options);
+ return await this.poll(threadId, run.id, options);
+ }
+
/**
* Create a Run stream
+ *
+ * @deprecated use `stream` instead
*/
createAndStream(
threadId: string,
@@ -113,6 +130,66 @@ export class Runs extends APIResource {
return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
}
+ /**
+ * A helper to poll a run status until it reaches a terminal state. More
+ * information on Run lifecycles can be found here:
+ * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
+ */
+ async poll(
+ threadId: string,
+ runId: string,
+ options?: Core.RequestOptions & { pollIntervalMs?: number },
+ ): Promise<Run> {
+ const headers: { [key: string]: string } = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
+
+ if (options?.pollIntervalMs) {
+ headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
+ }
+
+ while (true) {
+ const { data: run, response } = await this.retrieve(threadId, runId, {
+ ...options,
+ headers: { ...options?.headers, ...headers },
+ }).withResponse();
+
+ switch (run.status) {
+ //If we are in any sort of intermediate state we poll
+ case 'queued':
+ case 'in_progress':
+ case 'cancelling':
+ let sleepInterval = 5000;
+
+ if (options?.pollIntervalMs) {
+ sleepInterval = options.pollIntervalMs;
+ } else {
+ const headerInterval = response.headers.get('openai-poll-after-ms');
+ if (headerInterval) {
+ const headerIntervalMs = parseInt(headerInterval);
+ if (!isNaN(headerIntervalMs)) {
+ sleepInterval = headerIntervalMs;
+ }
+ }
+ }
+ await sleep(sleepInterval);
+ break;
+ //We return the run in any terminal state.
+ case 'requires_action':
+ case 'cancelled':
+ case 'completed':
+ case 'failed':
+ case 'expired':
+ return run;
+ }
+ }
+ }
+
+ /**
+ * Create a Run stream
+ */
+ stream(threadId: string, body: RunCreateParamsBaseStream, options?: Core.RequestOptions): AssistantStream {
+ return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
+ }
+
/**
* When a run has the `status: "requires_action"` and `required_action.type` is
* `submit_tool_outputs`, this endpoint can be used to submit the outputs from the
@@ -151,9 +228,25 @@ export class Runs extends APIResource {
}) as APIPromise<Run> | APIPromise<Stream<AssistantStreamEvent>>;
}
+ /**
+ * A helper to submit a tool output to a run and poll for a terminal run state.
+ * More information on Run lifecycles can be found here:
+ * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
+ */
+ async submitToolOutputsAndPoll(
+ threadId: string,
+ runId: string,
+ body: RunSubmitToolOutputsParamsNonStreaming,
+ options?: Core.RequestOptions & { pollIntervalMs?: number },
+ ): Promise<Run> {
+ const run = await this.submitToolOutputs(threadId, runId, body, options);
+ return await this.poll(threadId, run.id, options);
+ }
+
/**
* Submit the tool outputs from a previous run and stream the run to a terminal
- * state.
+ * state. More information on Run lifecycles can be found here:
+ * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
*/
submitToolOutputsStream(
threadId: string,
@@ -529,6 +622,58 @@ export interface RunListParams extends CursorPageParams {
order?: 'asc' | 'desc';
}
+export interface RunCreateAndPollParams {
+ /**
+ * The ID of the
+ * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
+ * execute this run.
+ */
+ assistant_id: string;
+
+ /**
+ * Appends additional instructions at the end of the instructions for the run. This
+ * is useful for modifying the behavior on a per-run basis without overriding other
+ * instructions.
+ */
+ additional_instructions?: string | null;
+
+ /**
+ * Overrides the
+ * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant)
+ * of the assistant. This is useful for modifying the behavior on a per-run basis.
+ */
+ instructions?: string | null;
+
+ /**
+ * Set of 16 key-value pairs that can be attached to an object. This can be useful
+ * for storing additional information about the object in a structured format. Keys
+ * can be a maximum of 64 characters long and values can be a maximum of 512
+ * characters long.
+ */
+ metadata?: unknown | null;
+
+ /**
+ * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
+ * be used to execute this run. If a value is provided here, it will override the
+ * model associated with the assistant. If not, the model associated with the
+ * assistant will be used.
+ */
+ model?: string | null;
+
+ /**
+ * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ * make the output more random, while lower values like 0.2 will make it more
+ * focused and deterministic.
+ */
+ temperature?: number | null;
+
+ /**
+ * Override the tools the assistant can use for this run. This is useful for
+ * modifying the behavior on a per-run basis.
+ */
+ tools?: Array<AssistantsAPI.CodeInterpreterTool | AssistantsAPI.RetrievalTool | AssistantsAPI.FunctionTool> | null;
+}
+
export interface RunCreateAndStreamParams {
/**
* The ID of the
@@ -581,6 +726,58 @@ export interface RunCreateAndStreamParams {
tools?: Array | null;
}
+export interface RunStreamParams {
+ /**
+ * The ID of the
+ * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
+ * execute this run.
+ */
+ assistant_id: string;
+
+ /**
+ * Appends additional instructions at the end of the instructions for the run. This
+ * is useful for modifying the behavior on a per-run basis without overriding other
+ * instructions.
+ */
+ additional_instructions?: string | null;
+
+ /**
+ * Overrides the
+ * [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant)
+ * of the assistant. This is useful for modifying the behavior on a per-run basis.
+ */
+ instructions?: string | null;
+
+ /**
+ * Set of 16 key-value pairs that can be attached to an object. This can be useful
+ * for storing additional information about the object in a structured format. Keys
+ * can be a maximum of 64 characters long and values can be a maximum of 512
+ * characters long.
+ */
+ metadata?: unknown | null;
+
+ /**
+ * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
+ * be used to execute this run. If a value is provided here, it will override the
+ * model associated with the assistant. If not, the model associated with the
+ * assistant will be used.
+ */
+ model?: string | null;
+
+ /**
+ * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ * make the output more random, while lower values like 0.2 will make it more
+ * focused and deterministic.
+ */
+ temperature?: number | null;
+
+ /**
+ * Override the tools the assistant can use for this run. This is useful for
+ * modifying the behavior on a per-run basis.
+ */
+ tools?: Array<AssistantsAPI.CodeInterpreterTool | AssistantsAPI.RetrievalTool | AssistantsAPI.FunctionTool> | null;
+}
+
export type RunSubmitToolOutputsParams =
| RunSubmitToolOutputsParamsNonStreaming
| RunSubmitToolOutputsParamsStreaming;
@@ -635,6 +832,28 @@ export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutput
stream: true;
}
+export interface RunSubmitToolOutputsAndPollParams {
+ /**
+ * A list of tools for which the outputs are being submitted.
+ */
+ tool_outputs: Array<RunSubmitToolOutputsAndPollParams.ToolOutput>;
+}
+
+export namespace RunSubmitToolOutputsAndPollParams {
+ export interface ToolOutput {
+ /**
+ * The output of the tool call to be submitted to continue the run.
+ */
+ output?: string;
+
+ /**
+ * The ID of the tool call in the `required_action` object within the run object
+ * the output is being submitted for.
+ */
+ tool_call_id?: string;
+ }
+}
+
export interface RunSubmitToolOutputsStreamParams {
/**
* A list of tools for which the outputs are being submitted.
@@ -667,10 +886,13 @@ export namespace Runs {
export import RunCreateParamsStreaming = RunsAPI.RunCreateParamsStreaming;
export import RunUpdateParams = RunsAPI.RunUpdateParams;
export import RunListParams = RunsAPI.RunListParams;
+ export import RunCreateAndPollParams = RunsAPI.RunCreateAndPollParams;
export import RunCreateAndStreamParams = RunsAPI.RunCreateAndStreamParams;
+ export import RunStreamParams = RunsAPI.RunStreamParams;
export import RunSubmitToolOutputsParams = RunsAPI.RunSubmitToolOutputsParams;
export import RunSubmitToolOutputsParamsNonStreaming = RunsAPI.RunSubmitToolOutputsParamsNonStreaming;
export import RunSubmitToolOutputsParamsStreaming = RunsAPI.RunSubmitToolOutputsParamsStreaming;
+ export import RunSubmitToolOutputsAndPollParams = RunsAPI.RunSubmitToolOutputsAndPollParams;
export import RunSubmitToolOutputsStreamParams = RunsAPI.RunSubmitToolOutputsStreamParams;
export import Steps = StepsAPI.Steps;
export import CodeInterpreterLogs = StepsAPI.CodeInterpreterLogs;
diff --git a/src/resources/beta/threads/threads.ts b/src/resources/beta/threads/threads.ts
index 9b4785850..1b4b3f7d5 100644
--- a/src/resources/beta/threads/threads.ts
+++ b/src/resources/beta/threads/threads.ts
@@ -92,6 +92,19 @@ export class Threads extends APIResource {
}) as APIPromise<Run> | APIPromise<Stream<AssistantStreamEvent>>;
}
+ /**
+ * A helper to create a thread, start a run and then poll for a terminal state.
+ * More information on Run lifecycles can be found here:
+ * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
+ */
+ async createAndRunPoll(
+ body: ThreadCreateAndRunParamsNonStreaming,
+ options?: Core.RequestOptions & { pollIntervalMs?: number },
+ ): Promise<Run> {
+ const run = await this.createAndRun(body, options);
+ return await this.runs.poll(run.thread_id, run.id, options);
+ }
+
/**
* Create a thread and stream the run back
*/
@@ -340,6 +353,113 @@ export interface ThreadCreateAndRunParamsStreaming extends ThreadCreateAndRunPar
stream: true;
}
+export interface ThreadCreateAndRunPollParams {
+ /**
+ * The ID of the
+ * [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
+ * execute this run.
+ */
+ assistant_id: string;
+
+ /**
+ * Override the default system message of the assistant. This is useful for
+ * modifying the behavior on a per-run basis.
+ */
+ instructions?: string | null;
+
+ /**
+ * Set of 16 key-value pairs that can be attached to an object. This can be useful
+ * for storing additional information about the object in a structured format. Keys
+ * can be a maximum of 64 characters long and values can be a maximum of 512
+ * characters long.
+ */
+ metadata?: unknown | null;
+
+ /**
+ * The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
+ * be used to execute this run. If a value is provided here, it will override the
+ * model associated with the assistant. If not, the model associated with the
+ * assistant will be used.
+ */
+ model?: string | null;
+
+ /**
+ * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ * make the output more random, while lower values like 0.2 will make it more
+ * focused and deterministic.
+ */
+ temperature?: number | null;
+
+ /**
+ * If no thread is provided, an empty thread will be created.
+ */
+ thread?: ThreadCreateAndRunPollParams.Thread;
+
+ /**
+ * Override the tools the assistant can use for this run. This is useful for
+ * modifying the behavior on a per-run basis.
+ */
+ tools?: Array<
+ AssistantsAPI.CodeInterpreterTool | AssistantsAPI.RetrievalTool | AssistantsAPI.FunctionTool
+ > | null;
+}
+
+export namespace ThreadCreateAndRunPollParams {
+ /**
+ * If no thread is provided, an empty thread will be created.
+ */
+ export interface Thread {
+ /**
+ * A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
+ * start the thread with.
+ */
+ messages?: Array<Thread.Message>;
+
+ /**
+ * Set of 16 key-value pairs that can be attached to an object. This can be useful
+ * for storing additional information about the object in a structured format. Keys
+ * can be a maximum of 64 characters long and values can be a maximum of 512
+ * characters long.
+ */
+ metadata?: unknown | null;
+ }
+
+ export namespace Thread {
+ export interface Message {
+ /**
+ * The content of the message.
+ */
+ content: string;
+
+ /**
+ * The role of the entity that is creating the message. Allowed values include:
+ *
+ * - `user`: Indicates the message is sent by an actual user and should be used in
+ * most cases to represent user-generated messages.
+ * - `assistant`: Indicates the message is generated by the assistant. Use this
+ * value to insert messages from the assistant into the conversation.
+ */
+ role: 'user' | 'assistant';
+
+ /**
+ * A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
+ * the message should use. There can be a maximum of 10 files attached to a
+ * message. Useful for tools like `retrieval` and `code_interpreter` that can
+ * access and use files.
+ */
+ file_ids?: Array<string>;
+
+ /**
+ * Set of 16 key-value pairs that can be attached to an object. This can be useful
+ * for storing additional information about the object in a structured format. Keys
+ * can be a maximum of 64 characters long and values can be a maximum of 512
+ * characters long.
+ */
+ metadata?: unknown | null;
+ }
+ }
+}
+
export interface ThreadCreateAndRunStreamParams {
/**
* The ID of the
@@ -455,6 +575,7 @@ export namespace Threads {
export import ThreadCreateAndRunParams = ThreadsAPI.ThreadCreateAndRunParams;
export import ThreadCreateAndRunParamsNonStreaming = ThreadsAPI.ThreadCreateAndRunParamsNonStreaming;
export import ThreadCreateAndRunParamsStreaming = ThreadsAPI.ThreadCreateAndRunParamsStreaming;
+ export import ThreadCreateAndRunPollParams = ThreadsAPI.ThreadCreateAndRunPollParams;
export import ThreadCreateAndRunStreamParams = ThreadsAPI.ThreadCreateAndRunStreamParams;
export import Runs = RunsAPI.Runs;
export import RequiredActionFunctionToolCall = RunsAPI.RequiredActionFunctionToolCall;
@@ -466,10 +587,13 @@ export namespace Threads {
export import RunCreateParamsStreaming = RunsAPI.RunCreateParamsStreaming;
export import RunUpdateParams = RunsAPI.RunUpdateParams;
export import RunListParams = RunsAPI.RunListParams;
+ export import RunCreateAndPollParams = RunsAPI.RunCreateAndPollParams;
export import RunCreateAndStreamParams = RunsAPI.RunCreateAndStreamParams;
+ export import RunStreamParams = RunsAPI.RunStreamParams;
export import RunSubmitToolOutputsParams = RunsAPI.RunSubmitToolOutputsParams;
export import RunSubmitToolOutputsParamsNonStreaming = RunsAPI.RunSubmitToolOutputsParamsNonStreaming;
export import RunSubmitToolOutputsParamsStreaming = RunsAPI.RunSubmitToolOutputsParamsStreaming;
+ export import RunSubmitToolOutputsAndPollParams = RunsAPI.RunSubmitToolOutputsAndPollParams;
export import RunSubmitToolOutputsStreamParams = RunsAPI.RunSubmitToolOutputsStreamParams;
export import Messages = MessagesAPI.Messages;
export import Annotation = MessagesAPI.Annotation;
diff --git a/src/version.ts b/src/version.ts
index 8eb5423f5..7e04c79b5 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '4.31.0'; // x-release-please-version
+export const VERSION = '4.32.0'; // x-release-please-version
diff --git a/tests/api-resources/beta/threads/messages/messages.test.ts b/tests/api-resources/beta/threads/messages/messages.test.ts
index 3a80bfe1e..7f62944e0 100644
--- a/tests/api-resources/beta/threads/messages/messages.test.ts
+++ b/tests/api-resources/beta/threads/messages/messages.test.ts
@@ -81,7 +81,7 @@ describe('resource messages', () => {
await expect(
openai.beta.threads.messages.list(
'string',
- { after: 'string', before: 'string', limit: 0, order: 'asc' },
+ { after: 'string', before: 'string', limit: 0, order: 'asc', run_id: 'string' },
{ path: '/_stainless_unknown_path' },
),
).rejects.toThrow(OpenAI.NotFoundError);
diff --git a/tsconfig.build.json b/tsconfig.build.json
index 6adad0d06..45811cb8b 100644
--- a/tsconfig.build.json
+++ b/tsconfig.build.json
@@ -7,7 +7,6 @@
"paths": {
"openai/*": ["dist/src/*"],
"openai": ["dist/src/index.ts"],
- "digest-fetch": ["./typings/digest-fetch"]
},
"noEmit": false,
"declaration": true,
diff --git a/tsconfig.deno.json b/tsconfig.deno.json
index 5d6467665..d0e9473d9 100644
--- a/tsconfig.deno.json
+++ b/tsconfig.deno.json
@@ -9,7 +9,6 @@
"openai/_shims/auto/*": ["deno/_shims/auto/*-deno"],
"openai/*": ["deno/*"],
"openai": ["deno/index.ts"],
- "digest-fetch": ["./typings/digest-fetch"]
},
"noEmit": true,
"declaration": true,
diff --git a/tsconfig.json b/tsconfig.json
index 9908b2c80..5f99085fc 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -12,7 +12,6 @@
"openai/_shims/auto/*": ["src/_shims/auto/*-node"],
"openai/*": ["src/*"],
"openai": ["src/index.ts"],
- "digest-fetch": ["./typings/digest-fetch"]
},
"noEmit": true,
diff --git a/typings/digest-fetch/index.d.ts b/typings/digest-fetch/index.d.ts
deleted file mode 100644
index f6bcbfda9..000000000
--- a/typings/digest-fetch/index.d.ts
+++ /dev/null
@@ -1,33 +0,0 @@
-declare module 'digest-fetch';
-
-import type { RequestInfo, RequestInit, Response } from 'node-fetch';
-
-type Algorithm = 'MD5' | 'MD5-sess';
-
-type Options = {
- algorithm?: Algorithm;
- statusCode?: number;
- cnonceSize?: number;
- basic?: boolean;
- precomputeHash?: boolean;
- logger?: typeof console;
-};
-
-class DigestClient {
- user: string;
- password: string;
-
- private nonceRaw: string;
- private logger?: typeof console;
- private precomputedHash?: boolean;
- private statusCode?: number;
- private basic: boolean;
- private cnonceSize: number;
- private hasAuth: boolean;
- private digest: { nc: number; algorithm: Algorithm; realm: string };
-
- constructor(user: string, password: string, options: Options = {});
- async fetch(url: RequestInfo, options: RequestInit = {}): Promise<Response>;
-}
-
-export default DigestClient;
diff --git a/yarn.lock b/yarn.lock
index a79485a26..9cef21d9b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1076,11 +1076,6 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
-base-64@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/base-64/-/base-64-0.1.0.tgz#780a99c84e7d600260361511c4877613bf24f6bb"
- integrity sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==
-
big-integer@^1.6.44:
version "1.6.52"
resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85"
@@ -1193,11 +1188,6 @@ char-regex@^1.0.2:
resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
-charenc@0.0.2:
- version "0.0.2"
- resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
- integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==
-
ci-info@^3.2.0:
version "3.9.0"
resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4"
@@ -1305,11 +1295,6 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3:
shebang-command "^2.0.0"
which "^2.0.1"
-crypt@0.0.2:
- version "0.0.2"
- resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
- integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
-
debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
@@ -1380,14 +1365,6 @@ diff@^4.0.1:
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
-digest-fetch@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/digest-fetch/-/digest-fetch-1.3.0.tgz#898e69264d00012a23cf26e8a3e40320143fc661"
- integrity sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==
- dependencies:
- base-64 "^0.1.0"
- md5 "^2.3.0"
-
dir-glob@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
@@ -1934,11 +1911,6 @@ is-arrayish@^0.2.1:
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
-is-buffer@~1.1.6:
- version "1.1.6"
- resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
- integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
-
is-core-module@^2.13.0:
version "2.13.1"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384"
@@ -2553,15 +2525,6 @@ makeerror@1.0.12:
dependencies:
tmpl "1.0.5"
-md5@^2.3.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
- integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
- dependencies:
- charenc "0.0.2"
- crypt "0.0.2"
- is-buffer "~1.1.6"
-
merge-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"