feat(api): add token logprobs to chat completions #576

Merged 1 commit on Dec 17, 2023
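This PR threads the API's `logprobs` option through the chat completion types, the streaming helper, and the examples. For orientation (not part of this diff), here is a minimal sketch of requesting token log probabilities on a non-streaming call; it assumes this SDK version also exposes the REST API's `top_logprobs` parameter alongside `logprobs`:

```ts
import OpenAI from 'openai';

// Reads the API key from the OPENAI_API_KEY environment variable.
const openai = new OpenAI();

async function main() {
  const completion = await openai.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    logprobs: true,
    top_logprobs: 2, // assumed available: also return the 2 most likely alternatives per token
  });

  // Each entry is a ChatCompletionTokenLogprob: { token, logprob, bytes, top_logprobs }.
  for (const entry of completion.choices[0]?.logprobs?.content ?? []) {
    console.log(entry.token, entry.logprob);
  }
}

main();
```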
1 change: 1 addition & 0 deletions api.md
@@ -37,6 +37,7 @@ Types:
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionNamedToolChoice</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionRole</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionSystemMessageParam</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionTokenLogprob</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionTool</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionToolChoiceOption</a></code>
- <code><a href="./src/resources/chat/completions.ts">ChatCompletionToolMessageParam</a></code>
23 changes: 23 additions & 0 deletions examples/logprobs.ts
@@ -0,0 +1,23 @@
#!/usr/bin/env -S npm run tsn -T

import OpenAI from 'openai';

// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();

async function main() {
  const stream = await openai.beta.chat.completions
    .stream({
      model: 'gpt-4',
      messages: [{ role: 'user', content: 'Say this is a test' }],
      stream: true,
      logprobs: true,
    })
    .on('logprob', (logprob) => {
      console.log(logprob);
    });

  console.dir(await stream.finalChatCompletion(), { depth: null });
}

main();
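Not part of this diff: building on the example above, the `ChatCompletionTokenLogprob` entries accumulated on the final completion can be used to score the whole response. A sketch, assuming each entry carries `token` and `logprob` as in the REST API:

```ts
import OpenAI from 'openai';

const openai = new OpenAI();

async function score() {
  const stream = openai.beta.chat.completions.stream({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test' }],
    stream: true,
    logprobs: true,
  });

  const completion = await stream.finalChatCompletion();
  const tokens = completion.choices[0]?.logprobs?.content ?? [];

  // Total log probability of the sampled response, plus its per-token geometric mean.
  const totalLogprob = tokens.reduce((sum, t) => sum + t.logprob, 0);
  console.log(`tokens: ${tokens.length}`);
  console.log(`total logprob: ${totalLogprob.toFixed(3)}`);
  console.log(`avg per-token probability: ${Math.exp(totalLogprob / Math.max(tokens.length, 1)).toFixed(3)}`);
}

score();
```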
1 change: 1 addition & 0 deletions src/index.ts
@@ -242,6 +242,7 @@ export namespace OpenAI {
export import ChatCompletionNamedToolChoice = API.ChatCompletionNamedToolChoice;
export import ChatCompletionRole = API.ChatCompletionRole;
export import ChatCompletionSystemMessageParam = API.ChatCompletionSystemMessageParam;
export import ChatCompletionTokenLogprob = API.ChatCompletionTokenLogprob;
export import ChatCompletionTool = API.ChatCompletionTool;
export import ChatCompletionToolChoiceOption = API.ChatCompletionToolChoiceOption;
export import ChatCompletionToolMessageParam = API.ChatCompletionToolMessageParam;
15 changes: 15 additions & 0 deletions src/lib/ChatCompletionRunFunctions.test.ts
@@ -146,6 +146,7 @@ function* contentChoiceDeltas(
yield {
index,
finish_reason: i === deltas.length - 1 ? 'stop' : null,
logprobs: null,
delta: {
role,
content: deltas[i] ? `${deltas[i]}${i === deltas.length - 1 ? '' : ' '}` : null,
@@ -593,6 +594,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -645,6 +647,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'stop',
logprobs: null,
message: {
role: 'assistant',
content: `it's raining`,
@@ -716,6 +719,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -808,6 +812,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -867,6 +872,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'stop',
logprobs: null,
message: {
role: 'assistant',
content: `there are 3 properties in {"a": 1, "b": 2, "c": 3}`,
@@ -953,6 +959,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -1006,6 +1013,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -1078,6 +1086,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'stop',
logprobs: null,
message: {
role: 'assistant',
content: `there are 3 properties in {"a": 1, "b": 2, "c": 3}`,
@@ -1164,6 +1173,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -1241,6 +1251,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -1291,6 +1302,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
message: {
role: 'assistant',
content: null,
@@ -1360,6 +1372,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'stop',
logprobs: null,
message: {
role: 'assistant',
content: `it's raining`,
@@ -1436,6 +1449,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
delta: {
role: 'assistant',
content: null,
@@ -2071,6 +2085,7 @@ describe('resource completions', () => {
{
index: 0,
finish_reason: 'function_call',
logprobs: null,
delta: {
role: 'assistant',
content: null,
30 changes: 25 additions & 5 deletions src/lib/ChatCompletionStream.ts
@@ -153,13 +153,22 @@ export class ChatCompletionStream
Object.assign(snapshot, rest);
}

for (const { delta, finish_reason, index, ...other } of chunk.choices) {
for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) {
let choice = snapshot.choices[index];
if (!choice) {
snapshot.choices[index] = { finish_reason, index, message: delta, ...other };
snapshot.choices[index] = { finish_reason, index, message: delta, logprobs, ...other };
continue;
}

if (logprobs) {
if (!choice.logprobs) {
choice.logprobs = logprobs;
} else if (logprobs.content) {
choice.logprobs.content ??= [];
choice.logprobs.content.push(...logprobs.content);
}
}

if (finish_reason) choice.finish_reason = finish_reason;
Object.assign(choice, other);
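
The block above merges per-chunk log probabilities into the running snapshot: the first chunk's `logprobs` seeds the choice, and later chunks append their token entries to `content`. A condensed, standalone restatement of that rule (illustration only, using a simplified local type rather than the SDK's own):

```ts
// Simplified stand-in for the SDK's chunk-level logprobs shape.
interface Logprobs {
  content: Array<{ token: string; logprob: number }> | null;
}

// Mirrors the snapshot-accumulation rule above: the first chunk's logprobs seed the
// snapshot, and later chunks append their token entries to `content`.
function mergeLogprobs(snapshot: Logprobs | null, incoming: Logprobs | null): Logprobs | null {
  if (!incoming) return snapshot;
  if (!snapshot) return incoming;
  if (incoming.content) {
    snapshot.content ??= [];
    snapshot.content.push(...incoming.content);
  }
  return snapshot;
}
```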

@@ -242,7 +251,7 @@ function finalizeChatCompletion(snapshot: ChatCompletionSnapshot): ChatCompletio
const { id, choices, created, model } = snapshot;
return {
id,
choices: choices.map(({ message, finish_reason, index }): ChatCompletion.Choice => {
choices: choices.map(({ message, finish_reason, index, logprobs }): ChatCompletion.Choice => {
if (!finish_reason) throw new OpenAIError(`missing finish_reason for choice ${index}`);
const { content = null, function_call, tool_calls } = message;
const role = message.role as 'assistant'; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine.
@@ -251,12 +260,18 @@ function finalizeChatCompletion(snapshot: ChatCompletionSnapshot): ChatCompletio
const { arguments: args, name } = function_call;
if (args == null) throw new OpenAIError(`missing function_call.arguments for choice ${index}`);
if (!name) throw new OpenAIError(`missing function_call.name for choice ${index}`);
return { message: { content, function_call: { arguments: args, name }, role }, finish_reason, index };
return {
message: { content, function_call: { arguments: args, name }, role },
finish_reason,
index,
logprobs,
};
}
if (tool_calls) {
return {
index,
finish_reason,
logprobs,
message: {
role,
content,
@@ -281,7 +296,7 @@ function finalizeChatCompletion(snapshot: ChatCompletionSnapshot): ChatCompletio
},
};
}
return { message: { content: content, role }, finish_reason, index };
return { message: { content: content, role }, finish_reason, index, logprobs };
}),
created,
model,
@@ -336,6 +351,11 @@ export namespace ChatCompletionSnapshot {
*/
finish_reason: ChatCompletion.Choice['finish_reason'] | null;

/**
* Log probability information for the choice.
*/
logprobs: ChatCompletion.Choice.Logprobs | null;

/**
* The index of the choice in the list of choices.
*/
4 changes: 2 additions & 2 deletions src/resources/beta/threads/runs/steps.ts
@@ -180,7 +180,7 @@ export interface MessageCreationStepDetails {
message_creation: MessageCreationStepDetails.MessageCreation;

/**
* Always `message_creation``.
* Always `message_creation`.
* Always `message_creation`.
*/
type: 'message_creation';
}
@@ -269,7 +269,7 @@ export interface RunStep {
metadata: unknown | null;

/**
* The object type, which is always `thread.run.step``.
* The object type, which is always `thread.run.step`.
* The object type, which is always `thread.run.step`.
*/
object: 'thread.run.step';

1 change: 1 addition & 0 deletions src/resources/chat/chat.ts
@@ -23,6 +23,7 @@ export namespace Chat {
export import ChatCompletionNamedToolChoice = CompletionsAPI.ChatCompletionNamedToolChoice;
export import ChatCompletionRole = CompletionsAPI.ChatCompletionRole;
export import ChatCompletionSystemMessageParam = CompletionsAPI.ChatCompletionSystemMessageParam;
export import ChatCompletionTokenLogprob = CompletionsAPI.ChatCompletionTokenLogprob;
export import ChatCompletionTool = CompletionsAPI.ChatCompletionTool;
export import ChatCompletionToolChoiceOption = CompletionsAPI.ChatCompletionToolChoiceOption;
export import ChatCompletionToolMessageParam = CompletionsAPI.ChatCompletionToolMessageParam;