From 88253e613b9c166fcbbc97ce7081275ea3a2803a Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Mon, 18 Dec 2023 22:32:38 -0500
Subject: [PATCH] docs: upgrade models in examples to latest version (#585)

---
 ecosystem-tests/vercel-edge/src/pages/api/response.ts  | 2 +-
 ecosystem-tests/vercel-edge/src/pages/api/streaming.ts | 2 +-
 examples/raw-response.ts                                | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/ecosystem-tests/vercel-edge/src/pages/api/response.ts b/ecosystem-tests/vercel-edge/src/pages/api/response.ts
index 4599bba78..9c2d2d26c 100644
--- a/ecosystem-tests/vercel-edge/src/pages/api/response.ts
+++ b/ecosystem-tests/vercel-edge/src/pages/api/response.ts
@@ -16,7 +16,7 @@ export default async (request: NextRequest) => {
 
   const result = await openai.completions.create({
     prompt: 'Say this is a test',
-    model: 'text-davinci-003',
+    model: 'gpt-3.5-turbo-instruct',
   });
   return NextResponse.json(result);
 };
diff --git a/ecosystem-tests/vercel-edge/src/pages/api/streaming.ts b/ecosystem-tests/vercel-edge/src/pages/api/streaming.ts
index 42259c479..33d3f15c0 100644
--- a/ecosystem-tests/vercel-edge/src/pages/api/streaming.ts
+++ b/ecosystem-tests/vercel-edge/src/pages/api/streaming.ts
@@ -18,7 +18,7 @@ export default async (request: NextRequest) => {
 
   const stream = await openai.completions.create({
     prompt: 'Say this is a test',
-    model: 'text-davinci-003',
+    model: 'gpt-3.5-turbo-instruct',
     stream: true,
   });
 
diff --git a/examples/raw-response.ts b/examples/raw-response.ts
index 6b1df31e8..eb991ae78 100644
--- a/examples/raw-response.ts
+++ b/examples/raw-response.ts
@@ -11,7 +11,7 @@ async function main() {
   const response = await client.completions
     .create({
       prompt: 'Say this is a test',
-      model: 'text-davinci-003',
+      model: 'gpt-3.5-turbo-instruct',
     })
     .asResponse();
   console.log(`response headers: `, Object.fromEntries(response.headers.entries()));
@@ -23,7 +23,7 @@
   const { data: completion, response } = await client.completions
     .create({
       prompt: 'Say this is a test',
-      model: 'text-davinci-003',
+      model: 'gpt-3.5-turbo-instruct',
     })
     .withResponse();
   console.log(`response headers: `, Object.fromEntries(response.headers.entries()));