-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathOpenAIAdapter.ts
131 lines (125 loc) · 3.79 KB
/
OpenAIAdapter.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
import { assert, type Falsy } from "@std/assert"
import type Openai from "openai"
import type { ChatModel } from "openai/resources/chat/chat"
import type {
ChatCompletion,
ChatCompletionMessage,
ChatCompletionMessageParam,
} from "openai/resources/chat/completions"
import { DescriptionContext, L } from "../../core/mod.ts"
import type { Adapter, AdapterDefaults, LoadSession, SaveSession } from "../Adapter.ts"
import { DEFAULT_INSTRUCTIONS } from "../constants.ts"
/**
 * Type-level descriptor that binds the generic `Adapter` machinery to
 * OpenAI's chat-completion wire types.
 */
export interface OpenAIAdapterDescriptor {
  /** Wire format of a single chat message sent to the API. */
  message: ChatCompletionMessageParam
  /** Roles this adapter emits when formatting messages. */
  role: "system" | "user"
  /**
   * Model identifier. The `(string & {})` union keeps IDE autocomplete for
   * known `ChatModel` literals while still accepting any string.
   */
  model: (string & {}) | ChatModel
  /** Extra per-message parameters: an optional prompt string. */
  messageParams: [prompt?: string]
}
/** Configuration accepted by {@link OpenAIAdapter}. */
export interface OpenAIAdapterConfig {
  /** Pre-configured OpenAI client used for all completion requests. */
  openai: Openai
  /** Model used when an individual call does not specify one. */
  defaultModel: (string & {}) | ChatModel
  /** System instructions; falls back to `DEFAULT_INSTRUCTIONS` when omitted. */
  defaultInstructions?: string
  /** Optional override for producing the initial session message history. */
  loadSession?: LoadSession<OpenAIAdapterDescriptor>
  /** Optional hook invoked to persist the session message history. */
  saveSession?: SaveSession<OpenAIAdapterDescriptor>
}
/**
 * Builds an {@link Adapter} backed by OpenAI's chat-completions API.
 *
 * @param openai Pre-configured OpenAI client.
 * @param defaultModel Model used when a call supplies none.
 * @param defaultInstructions System instructions; `DEFAULT_INSTRUCTIONS` when omitted.
 * @param loadSession Optional session bootstrap; defaults to a single system message.
 * @param saveSession Optional session persistence hook, passed through verbatim.
 */
export function OpenAIAdapter({
  openai,
  defaultModel,
  defaultInstructions,
  loadSession,
  saveSession,
}: OpenAIAdapterConfig): Adapter<OpenAIAdapterDescriptor> {
  const defaults: AdapterDefaults<OpenAIAdapterDescriptor> = {
    model: defaultModel,
    instructions: defaultInstructions ?? DEFAULT_INSTRUCTIONS,
    role: "user",
  }
  return {
    // Models that do not support structured (JSON-schema) output.
    unstructured: ["o1-mini"],
    defaults,
    // Default session: one system message carrying the instructions.
    loadSession: loadSession ?? (() => [{
      role: "system",
      content: [{
        type: "text",
        text: defaults.instructions,
      }],
    }]),
    saveSession,
    formatMessage,
    unwrapMessage: ({ content }) => {
      assert(typeof content === "string")
      return content
    },
    completeText,
    completeValue: async ({ messages, name, description, type, model }) => {
      const descriptionCtx = new DescriptionContext()
      const rootTypeDescription = descriptionCtx.format(type)
      // Plain strings need no JSON-schema constraint: run a text completion
      // steered by a system message describing the target type.
      if (type.declaration.factory === L.string) {
        return completeText([
          ...messages ?? [],
          formatMessage([description, rootTypeDescription], "system"),
        ], model) // FIX: forward the caller's model override (was dropped, silently falling back to the default model).
      }
      // OpenAI structured output requires a top-level *object* schema; wrap
      // non-object types in `{ value }` and unwrap on the way back out.
      const Root = type.declaration.jsonType === "object"
        ? type
        : L.transform(L.object({ value: type }), ({ value }) => value)
      if (!name) {
        name = await type.signatureHash()
      }
      // An empty history still needs the system instructions up front.
      messages = !messages?.length
        ? [{
          role: "system",
          content: defaults.instructions,
        }]
        : messages
      return await openai.chat.completions
        .create({
          model: model ?? defaults.model,
          messages,
          response_format: {
            type: "json_schema",
            json_schema: {
              name,
              description,
              schema: Root.toJSON(),
              strict: true,
            },
          },
        })
        .then(unwrapChoice)
    },
  }

  /**
   * Assembles a chat message from text fragments, dropping falsy entries.
   * @param texts Candidate text parts; falsy values are filtered out.
   * @param role Message role; defaults to the adapter's default role.
   */
  function formatMessage(
    texts: Array<string | Falsy>,
    role?: OpenAIAdapterDescriptor["role"],
  ): OpenAIAdapterDescriptor["message"] {
    return {
      role: role ?? defaults.role,
      content: texts.filter((v): v is string => !!v).map((text) => ({
        type: "text",
        text,
      })),
    }
  }

  /**
   * Runs an unconstrained (free-text) chat completion.
   * @param messages Full message history to send.
   * @param model Optional model override; defaults to the adapter's model.
   */
  function completeText(
    messages: Array<ChatCompletionMessageParam>,
    model?: OpenAIAdapterDescriptor["model"],
  ): Promise<ChatCompletionMessage> {
    return openai.chat.completions.create({
      model: model ?? defaults.model,
      messages,
    }).then(unwrapChoice)
  }
}
/**
 * Extracts the message from a completion's first choice, asserting the
 * request finished normally.
 *
 * @param completion Raw completion response from the chat API.
 * @returns The first choice's message.
 * @throws AssertionError when there are no choices, the finish reason is
 *   not `"stop"`, or the model refused the request.
 */
function unwrapChoice(completion: ChatCompletion): ChatCompletionMessage {
  const { choices: [firstChoice] } = completion
  assert(firstChoice, "No choices contained within the completion response.")
  const { finish_reason, message } = firstChoice
  assert(
    finish_reason === "stop",
    // FIX: `${message}` rendered as "[object Object]"; serialize it so the
    // error actually shows the offending message.
    `Completion responded with "${finish_reason}" as finish reason; ${
      JSON.stringify(message)
    }.`,
  )
  const { refusal } = message
  assert(!refusal, `Openai refused to fulfill completion request; ${refusal}.`)
  return message
}