diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 846fecbe..0108be00 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -1,6 +1,6 @@
 [versions]
-kotlin = "1.8.22"
-coroutines = "1.7.1"
+kotlin = "1.8.20"
+coroutines = "1.7.0"
 serialization = "1.5.1"
 ktor = "2.3.1"
 okio = "3.3.0"
diff --git a/guides/ChatFunctionCall.md b/guides/ChatFunctionCall.md
new file mode 100644
index 00000000..d2fcf803
--- /dev/null
+++ b/guides/ChatFunctionCall.md
@@ -0,0 +1,257 @@
+# Chat with Function Call
+
+This guide demonstrates how to use the OpenAI API from Kotlin to execute a chat completion request with function calls.
+
+## Prerequisites
+
+- The OpenAI Kotlin SDK installed.
+- An API key from OpenAI.
+
+## Overview
+
+We'll use the `gpt-3.5-turbo-0613` model to create a chatbot that responds to a user's query about the weather in Boston. We'll send a chat completion request that includes a function `currentWeather`, which the model can call.
+
+## Step-by-step Instructions
+
+### Setting Up the OpenAI Instance
+
+First, initialize an instance of the `OpenAI` class, providing it with the OpenAI API key.
+
+```kotlin
+val token = System.getenv("OPENAI_API_KEY")
+val openAI = OpenAI(token)
+```
+
+### Defining the Model
+
+Specify the model to use for the chat request.
+
+```kotlin
+val modelId = ModelId("gpt-3.5-turbo-0613")
+```
+
+### Defining the Function
+
+Define a dummy function `currentWeather` that the model might call. This function returns hardcoded weather information.
+
+```kotlin
+@Serializable
+data class WeatherInfo(val location: String, val temperature: String, val unit: String, val forecast: List<String>)
+
+/**
+ * Example dummy function hard-coded to return the same weather.
+ * In production, this could be your backend API or an external API.
+ */
+fun currentWeather(location: String, unit: String): String {
+    val weatherInfo = WeatherInfo(location, "72", unit, listOf("sunny", "windy"))
+    return Json.encodeToString(weatherInfo)
+}
+```
+
+### Defining Function Parameters
+
+Define the parameters for the function the model might call.
+
+```kotlin
+val params = FunctionParameters.buildJson {
+    put("type", "object")
+    putJsonObject("properties") {
+        putJsonObject("location") {
+            put("type", "string")
+            put("description", "The city and state, e.g. San Francisco, CA")
+        }
+        putJsonObject("unit") {
+            put("type", "string")
+            putJsonArray("enum") {
+                add("celsius")
+                add("fahrenheit")
+            }
+        }
+    }
+    putJsonArray("required") {
+        add("location")
+    }
+}
+```
+
+### Setting Up the Chat Messages
+
+Start with a user message inquiring about the weather in Boston.
+
+```kotlin
+val chatMessages = mutableListOf(
+    ChatMessage(
+        role = ChatRole.User,
+        content = "What's the weather like in Boston?"
+    )
+)
+```
+
+### Creating the Chat Completion Request
+
+Create a chat completion request that includes the model, messages, functions, and function call mode.
+
+```kotlin
+val request = chatCompletionRequest {
+    model = modelId
+    messages = chatMessages
+    functions {
+        function {
+            name = "currentWeather"
+            description = "Get the current weather in a given location"
+            parameters = params
+        }
+    }
+    functionCall = FunctionMode.Auto
+}
+```
+
+### Sending the Request and Handling the Response
+
+Send the chat request and handle the response. If the response contains a function call, execute it and send a second chat completion request that includes the function's result.
+
+```kotlin
+val response = openAI.chatCompletion(request)
+val message = response.choices.first().message ?: error("no response found!")
+message.functionCall?.let { functionCall ->
+    val availableFunctions = mapOf("currentWeather" to ::currentWeather)
+    val functionToCall = availableFunctions[functionCall.name] ?: error("Function ${functionCall.name} not found")
+    val functionArgs = functionCall.argumentsAsJson() ?: error("arguments field is missing")
+    val functionResponse = functionToCall(
+        functionArgs.getValue("location").jsonPrimitive.content,
+        functionArgs["unit"]?.jsonPrimitive?.content ?: "fahrenheit"
+    )
+    chatMessages.add(
+        ChatMessage(
+            role = message.role,
+            content = message.content ?: "", // required to not be empty in this case
+            functionCall = message.functionCall
+        )
+    )
+    chatMessages.add(
+        ChatMessage(
+            role = ChatRole.Function,
+            name = functionCall.name,
+            content = functionResponse
+        )
+    )
+
+    val secondRequest = chatCompletionRequest {
+        model = modelId
+        messages = chatMessages
+    }
+
+    val secondResponse = openAI.chatCompletion(secondRequest)
+    println(secondResponse.choices.first().message?.content)
+} ?: println(message.content)
+```
+
+#### Notes
+
+- Validate and handle function calls made by the model carefully: the model can generate invalid JSON and may hallucinate parameters.
+- It's recommended to implement user confirmation flows before executing actions that have real-world impact.
+
+### Complete Example
+
+Below is a complete Kotlin example following the guide:
+
+```kotlin
+import com.aallam.openai.api.BetaOpenAI
+import com.aallam.openai.api.chat.*
+import com.aallam.openai.api.model.ModelId
+import com.aallam.openai.client.OpenAI
+import kotlinx.serialization.*
+import kotlinx.serialization.json.*
+
+suspend fun main() {
+    val token = System.getenv("OPENAI_API_KEY")
+    val openAI = OpenAI(token)
+
+    val modelId = ModelId("gpt-3.5-turbo-0613")
+    val chatMessages = mutableListOf(
+        ChatMessage(
+            role = ChatRole.User,
+            content = "What's the weather like in Boston?"
+        )
+    )
+
+    val params = FunctionParameters.buildJson {
+        put("type", "object")
+        putJsonObject("properties") {
+            putJsonObject("location") {
+                put("type", "string")
+                put("description", "The city and state, e.g. San Francisco, CA")
+            }
San Francisco, CA") + } + putJsonObject("unit") { + put("type", "string") + putJsonArray("enum") { + add("celsius") + add("fahrenheit") + } + } + } + putJsonArray("required") { + add("location") + } + } + + val request = chatCompletionRequest { + model = modelId + messages = chatMessages + functions { + function { + name = "currentWeather" + description = "Get the current weather in a given location" + parameters = params + } + } + functionCall = FunctionMode.Auto + } + + val response = openAI.chatCompletion(request) + val message = response.choices.first().message ?: error("no response found!") + message.functionCall?.let { functionCall -> + val availableFunctions = mapOf("currentWeather" to ::currentWeather) + val functionToCall = availableFunctions[functionCall.name] ?: error("Function ${functionCall.name} not found") + val functionArgs = functionCall.argumentsAsJson() ?: error("arguments field is missing") + val functionResponse = functionToCall( + functionArgs.getValue("location").jsonPrimitive.content, + functionArgs["unit"]?.jsonPrimitive?.content ?: "fahrenheit" + ) + + chatMessages.add( + ChatMessage( + role = message.role, + content = message.content ?: "", + functionCall = message.functionCall + ) + ) + + chatMessages.add( + ChatMessage( + role = ChatRole.Function, + name = functionCall.name, + content = functionResponse + ) + ) + + val secondRequest = chatCompletionRequest { + model = modelId + messages = chatMessages + } + + val secondResponse = openAI.chatCompletion(secondRequest) + println(secondResponse.choices.first().message?.content) + } ?: println(message.content) +} + +@Serializable +data class WeatherInfo(val location: String, val temperature: String, val unit: String, val forecast: List) + +fun currentWeather(location: String, unit: String): String { + val weatherInfo = WeatherInfo(location, "72", unit, listOf("sunny", "windy")) + return Json.encodeToString(weatherInfo) +} +``` + +This completes the guide for executing a chat completion request with a function call. Happy coding! \ No newline at end of file diff --git a/guides/GettingStarted.md b/guides/GettingStarted.md index 979095c8..0800b6a0 100644 --- a/guides/GettingStarted.md +++ b/guides/GettingStarted.md @@ -16,7 +16,7 @@ Use your `OpenAI` instance to make API requests. - [Completions](#completions) - [Create completion](#create-completion) - [Chat](#chat) - - [Create chat completion](#create-chat-completion) + - [Create chat completion](#create-chat-completion-beta) - [Edits](#edits) - [Create edits](#create-edits) - [Images](#images) @@ -97,10 +97,14 @@ Creates a completion for the chat message. val chatCompletionRequest = ChatCompletionRequest( model = ModelId("gpt-3.5-turbo"), messages = listOf( + ChatMessage( + role = ChatRole.System, + content = "You are a helpful assistant!" + ), ChatMessage( role = ChatRole.User, content = "Hello!" - ) + ) ) ) val completion: ChatCompletion = openAI.chatCompletion(chatCompletionRequest) @@ -108,7 +112,7 @@ val completion: ChatCompletion = openAI.chatCompletion(chatCompletionRequest) val completions: Flow = openAI.chatCompletions(chatCompletionRequest) ``` -## Edits +## Edits Given a prompt and an instruction, the model will return an edited version of the prompt. 
diff --git a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestChatCompletions.kt b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestChatCompletions.kt
index 558c164a..aab37321 100644
--- a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestChatCompletions.kt
+++ b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestChatCompletions.kt
@@ -1,11 +1,13 @@
 package com.aallam.openai.client
 
-import com.aallam.openai.api.chat.ChatCompletionChunk
-import com.aallam.openai.api.chat.ChatRole
-import com.aallam.openai.api.chat.chatCompletionRequest
+import com.aallam.openai.api.chat.*
 import com.aallam.openai.api.model.ModelId
 import kotlinx.coroutines.flow.launchIn
 import kotlinx.coroutines.flow.onEach
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.jsonPrimitive
 import kotlin.test.Test
 import kotlin.test.assertNotEquals
 import kotlin.test.assertTrue
@@ -44,4 +46,87 @@ class TestChatCompletions : TestOpenAI() {
 
         assertNotEquals(0, results.size)
     }
+
+    @Test
+    fun chatCompletionsFunction() = test {
+        val modelId = ModelId("gpt-3.5-turbo-0613")
+        val chatMessages = mutableListOf(
+            ChatMessage(
+                role = ChatRole.User,
+                content = "What's the weather like in Boston?"
+            )
+        )
+
+        val request = chatCompletionRequest {
+            model = modelId
+            messages = chatMessages
+            functions {
+                function {
+                    name = "currentWeather"
+                    description = "Get the current weather in a given location"
+                    parameters = FunctionParameters.fromJsonString(
+                        """
+                        {
+                          "type": "object",
+                          "properties": {
+                            "location": {
+                              "type": "string",
+                              "description": "The city and state, e.g. San Francisco, CA"
+                            },
+                            "unit": {
+                              "type": "string",
+                              "enum": [
+                                "celsius",
+                                "fahrenheit"
+                              ]
+                            }
+                          },
+                          "required": [
+                            "location"
+                          ]
+                        }
+                        """
+                    )
+                }
+            }
+            functionCall = FunctionMode.Auto
+        }
+
+        val response = openAI.chatCompletion(request)
+        val message = response.choices.first().message ?: error("No chat response found!")
+
+        message.functionCall?.let { functionCall ->
+            val availableFunctions = mapOf("currentWeather" to ::currentWeather)
+
+            val functionToCall = availableFunctions[functionCall.name] ?: return@let
+            val functionArgs = functionCall.argumentsAsJson() ?: error("arguments field is missing")
+
+            val functionResponse = functionToCall(
+                functionArgs.getValue("location").jsonPrimitive.content,
+                functionArgs["unit"]?.jsonPrimitive?.content ?: "fahrenheit"
+            )
+
+            chatMessages.add(message.copy(content = "")) // OpenAI throws an error in this case if the content is null, although it's optional!
+ chatMessages.add( + ChatMessage(role = ChatRole.Function, name = functionCall.name, content = functionResponse) + ) + + val secondResponse = openAI.chatCompletion( + request = ChatCompletionRequest( + model = modelId, + messages = chatMessages, + ) + ) + + print(secondResponse) + } + } + + @Serializable + data class WeatherInfo(val location: String, val temperature: String, val unit: String, val forecast: List) + + fun currentWeather(location: String, unit: String): String { + val weatherInfo = WeatherInfo(location, "72", unit, listOf("sunny", "windy")) + return Json.encodeToString(weatherInfo) + } } diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChoice.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChoice.kt index eec65c0b..58bcfd92 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChoice.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChoice.kt @@ -11,7 +11,7 @@ import kotlinx.serialization.Serializable */ @BetaOpenAI @Serializable -public data class ChatChoice internal constructor( +public data class ChatChoice( /** * Chat choice index. */ diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChunk.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChunk.kt index 8585f44a..536fb693 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChunk.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatChunk.kt @@ -11,7 +11,7 @@ import kotlinx.serialization.Serializable */ @BetaOpenAI @Serializable -public data class ChatChunk internal constructor( +public data class ChatChunk( /** * Chat choice index. */ diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletion.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletion.kt index 74fd4099..90c6f0a6 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletion.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletion.kt @@ -13,7 +13,7 @@ import kotlinx.serialization.Serializable */ @BetaOpenAI @Serializable -public data class ChatCompletion internal constructor( +public data class ChatCompletion( /** * A unique id assigned to this completion */ diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionChunk.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionChunk.kt index 37681766..03a2192d 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionChunk.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionChunk.kt @@ -13,7 +13,7 @@ import kotlinx.serialization.Serializable */ @BetaOpenAI @Serializable -public data class ChatCompletionChunk internal constructor( +public data class ChatCompletionChunk( /** * A unique id assigned to this completion */ diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionFunction.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionFunction.kt index 08655ba3..8ad8f696 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionFunction.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionFunction.kt @@ -1,8 +1,11 @@ package com.aallam.openai.api.chat +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.OpenAIDsl import kotlinx.serialization.SerialName import 
kotlinx.serialization.Serializable +@BetaOpenAI @Serializable public data class ChatCompletionFunction( /** @@ -18,5 +21,44 @@ public data class ChatCompletionFunction( * The parameters the functions accepts, described as a JSON Schema object. See the guide for examples and the * JSON Schema reference for documentation about the format. */ - @SerialName("parameters") val parameters: JsonData? = null, + @SerialName("parameters") val parameters: FunctionParameters? = null, ) + +/** + * Builder of [ChatCompletionFunction] instances. + */ +@BetaOpenAI +@OpenAIDsl +public class ChatCompletionFunctionBuilder { + + /** + * The name of the function to be called. + */ + public var name: String? = null + + /** + * The description of what the function does. + */ + public var description: String? = null + + /** + * The parameters the function accepts. + */ + public var parameters: FunctionParameters? = null + + /** + * Create [ChatCompletionFunction] instance. + */ + public fun build(): ChatCompletionFunction = ChatCompletionFunction( + name = requireNotNull(name) { "name is required" }, + description = description, + parameters = parameters + ) +} + +/** + * The function to generate chat completion function instances. + */ +@BetaOpenAI +public fun chatCompletionFunction(block: ChatCompletionFunctionBuilder.() -> Unit): ChatCompletionFunction = + ChatCompletionFunctionBuilder().apply(block).build() diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionRequest.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionRequest.kt index c3991a23..faefb530 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionRequest.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatCompletionRequest.kt @@ -92,14 +92,15 @@ public class ChatCompletionRequest( @SerialName("functions") public val functions: List? = null, /** - * Controls how the model responds to function calls. "none" means the model does not call a function, and responds - * to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a - * particular function via {"name":\ "my_function"} forces the model to call that function. "none" is the default - * when no functions are present. "auto" is the default if functions are present. + * Controls how the model responds to function calls. [FunctionMode.None] means the model does not call a function, + * and responds to the end-user. + * [FunctionMode.Auto] means the model can pick between an end-user or calling a function. + * Specifying a particular function via [FunctionMode.Named] forces the model to call that function. + * [FunctionMode.None] is the default when no functions are present. + * [FunctionMode.Auto] is the default if functions are present. */ - @SerialName("function_call") public val functionCall: FunctionCall? = null, - - ) + @SerialName("function_call") public val functionCall: FunctionMode? = null, +) /** * The messages to generate chat completions for. @@ -188,6 +189,21 @@ public class ChatCompletionRequestBuilder { */ public var user: String? = null + /** + * A list of functions the model may generate JSON inputs for. + */ + public var functions: List? = null + + /** + * Controls how the model responds to function calls. [FunctionMode.None] means the model does not call a function, + * and responds to the end-user. + * [FunctionMode.Auto] means the model can pick between an end-user or calling a function. 
+ * Specifying a particular function via [FunctionMode.Named] forces the model to call that function. + * [FunctionMode.None] is the default when no functions are present. + * [FunctionMode.Auto] is the default if functions are present. + */ + public var functionCall: FunctionMode? = null + /** * The messages to generate chat completions for. */ @@ -195,6 +211,13 @@ public class ChatCompletionRequestBuilder { messages = ChatMessagesBuilder().apply(block).messages } + /** + * A list of functions the model may generate JSON inputs for. + */ + public fun functions(block: FunctionsBuilder.() -> Unit) { + functions = FunctionsBuilder().apply(block).functions + } + /** * Builder of [ChatCompletionRequest] instances. */ @@ -210,6 +233,8 @@ public class ChatCompletionRequestBuilder { frequencyPenalty = frequencyPenalty, logitBias = logitBias, user = user, + functions = functions, + functionCall = functionCall, ) } @@ -227,3 +252,18 @@ public class ChatMessagesBuilder { messages += ChatMessageBuilder().apply(block).build() } } + +/** + * Creates a list of [ChatCompletionFunction]. + */ +@BetaOpenAI +public class FunctionsBuilder { + internal val functions = mutableListOf() + + /** + * Creates a [ChatMessage] instance. + */ + public fun function(block: ChatCompletionFunctionBuilder.() -> Unit) { + functions += ChatCompletionFunctionBuilder().apply(block).build() + } +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatDelta.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatDelta.kt index 51311a60..a1a556fb 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatDelta.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatDelta.kt @@ -9,7 +9,7 @@ import kotlinx.serialization.Serializable */ @Serializable @BetaOpenAI -public data class ChatDelta internal constructor( +public data class ChatDelta( /** * The role of the author of this message. */ @@ -21,7 +21,7 @@ public data class ChatDelta internal constructor( @SerialName("content") val content: String? = null, /** - * The name of the user in a multi-user chat. + * The name and arguments of a function that should be called, as generated by the model. */ - @SerialName("name") public val name: String? = null + @SerialName("function_call") public val functionCall: FunctionCall? = null ) diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatMessage.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatMessage.kt index fd2ca449..00e01746 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatMessage.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/ChatMessage.kt @@ -22,14 +22,16 @@ public data class ChatMessage( @SerialName("content") public val content: String? = null, /** - * The name of the user in a multi-user chat. + * The name of the author of this message. + * [name] is required if role is `[ChatRole.Function], and it should be the name of the function whose response is + * in the [content]. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. */ @SerialName("name") public val name: String? = null, /** * The name and arguments of a function that should be called, as generated by the model. */ - @SerialName("function_call") public val functionCall: JsonData? = null + @SerialName("function_call") public val functionCall: FunctionCall? = null ) /** @@ -57,16 +59,24 @@ public class ChatMessageBuilder { public var content: String? 
= null /** - * The name of the user in a multi-user chat. + * The name of the author of this message. + * [name] is required if role is `[ChatRole.Function], and it should be the name of the function whose response is + * in the [content]. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. */ public var name: String? = null + /** + * The name and arguments of a function that should be called, as generated by the model. + */ + public var functionCall: FunctionCall? = null + /** * Create [ChatMessageBuilder] instance. */ public fun build(): ChatMessage = ChatMessage( role = requireNotNull(role) { "role is required" }, - content = requireNotNull(content) { "content is required" }, + content = content, name = name, + functionCall = functionCall, ) } diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionCall.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionCall.kt index 69a1859c..45197451 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionCall.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionCall.kt @@ -1,62 +1,34 @@ package com.aallam.openai.api.chat -import com.aallam.openai.api.BetaOpenAI -import kotlinx.serialization.KSerializer +import kotlinx.serialization.SerialName import kotlinx.serialization.Serializable -import kotlinx.serialization.descriptors.SerialDescriptor -import kotlinx.serialization.descriptors.buildClassSerialDescriptor -import kotlinx.serialization.encoding.Decoder -import kotlinx.serialization.encoding.Encoder -import kotlinx.serialization.json.JsonDecoder -import kotlinx.serialization.json.JsonEncoder +import kotlinx.serialization.json.Json import kotlinx.serialization.json.JsonObject -import kotlinx.serialization.json.JsonPrimitive -import kotlin.jvm.JvmInline -@BetaOpenAI -@Serializable(with = FunctionCallSerializer::class) -public sealed interface FunctionCall { - public val name: String - - public companion object { - public val Auto: FunctionCall = FunctionCallString("auto") - public val None: FunctionCall = FunctionCallString("none") - public fun forceCall(name: String): FunctionCall = FunctionCallObject(name) - } -} -@OptIn(BetaOpenAI::class) -internal object FunctionCallSerializer: KSerializer{ - override val descriptor: SerialDescriptor = buildClassSerialDescriptor("FunctionCall") {} - private val objectSerializer = FunctionCallObject.serializer() - override fun deserialize(decoder: Decoder): FunctionCall { - if(decoder is JsonDecoder){ - return when(val json = decoder.decodeJsonElement()){ - is JsonPrimitive -> FunctionCallString(json.content) - is JsonObject -> objectSerializer.deserialize(decoder) - else -> throw UnsupportedOperationException("Cannot deserialize Parameters") - } - } - throw UnsupportedOperationException("Cannot deserialize Parameters") - } - - override fun serialize(encoder: Encoder, value: FunctionCall) { - if(encoder is JsonEncoder){ - when(value){ - is FunctionCallString -> encoder.encodeString(value.name) - is FunctionCallObject -> objectSerializer.serialize(encoder, value) - } - return - } - throw UnsupportedOperationException("Cannot deserialize Parameters") - } -} - -@OptIn(BetaOpenAI::class) -@JvmInline +/** + * The name and arguments of a function that should be called, as generated by the model. + */ @Serializable -internal value class FunctionCallString(override val name: String): FunctionCall +public data class FunctionCall( + /** + * The name of the function to call. 
+ */ + @SerialName("name") val name: String? = null, -@OptIn(BetaOpenAI::class) -@Serializable -internal data class FunctionCallObject(override val name: String): FunctionCall + /** + * The arguments to call the function with, as generated by the model in JSON format. + * Note that the model does not always generate valid JSON, and may hallucinate parameters + * not defined by your function schema. + * Validate the arguments in your code before calling your function. + */ + @SerialName("arguments") val arguments: String? = null +) { + /** + * Decodes the [arguments] JSON string into a JsonObject. + * If [arguments] is null, the function will return null. + * + * @param json The Json object to be used for decoding, defaults to a default Json instance + */ + public fun argumentsAsJson(json: Json = Json): JsonObject? = arguments?.let { args -> json.decodeFromString(args) } +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionMode.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionMode.kt new file mode 100644 index 00000000..f8c53e17 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionMode.kt @@ -0,0 +1,77 @@ +package com.aallam.openai.api.chat + +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.chat.FunctionMode.Companion.Auto +import com.aallam.openai.api.chat.FunctionMode.Companion.None +import com.aallam.openai.api.chat.FunctionMode.Default +import com.aallam.openai.api.chat.FunctionMode.Named +import kotlinx.serialization.KSerializer +import kotlinx.serialization.Serializable +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.buildClassSerialDescriptor +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.json.JsonDecoder +import kotlinx.serialization.json.JsonEncoder +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.JsonPrimitive +import kotlin.jvm.JvmInline + +/** + * This interface determines how the model handles function calls. + * + * There are several modes of operation: + * - [Default]: In this mode, the model does not invoke any function [None] or decides itself [Auto] on calling a function or responding directly to the end-user. This mode becomes default if any functions are specified. + * - [Named]: In this mode, the model will call a specific function, denoted by the `name` attribute. + */ +@BetaOpenAI +@Serializable(with = FunctionModeSerializer::class) +public sealed interface FunctionMode { + + /** + * Represents a function call mode. + * The value can be any string representing a specific function call mode. + */ + @JvmInline + public value class Default(public val value: String) : FunctionMode + + /** + * Represents a named function call mode. + * The name indicates a specific function that the model will call. + * + * @property name the name of the function to call. + */ + @Serializable + public data class Named(public val name: String) : FunctionMode + + /** Provides default function call modes. */ + public companion object { + /** Represents the `auto` mode. */ + public val Auto: FunctionMode = Default("auto") + + /** Represents the `none` mode. 
*/ + public val None: FunctionMode = Default("none") + } +} + +@BetaOpenAI +internal object FunctionModeSerializer : KSerializer { + override val descriptor: SerialDescriptor = buildClassSerialDescriptor("FunctionCall") + + override fun deserialize(decoder: Decoder): FunctionMode { + require(decoder is JsonDecoder) { "This decoder is not a JsonDecoder. Cannot deserialize `FunctionCall`" } + return when (val json = decoder.decodeJsonElement()) { + is JsonPrimitive -> Default(json.content) + is JsonObject -> Named.serializer().deserialize(decoder) + else -> throw UnsupportedOperationException("Cannot deserialize FunctionMode. Unsupported JSON element.") + } + } + + override fun serialize(encoder: Encoder, value: FunctionMode) { + require(encoder is JsonEncoder) { "This encoder is not a JsonEncoder. Cannot serialize `FunctionCall`" } + when (value) { + is Default -> encoder.encodeString(value.value) + is Named -> Named.serializer().serialize(encoder, value) + } + } +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionParameters.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionParameters.kt new file mode 100644 index 00000000..d2978afa --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/FunctionParameters.kt @@ -0,0 +1,63 @@ +package com.aallam.openai.api.chat + +import kotlinx.serialization.KSerializer +import kotlinx.serialization.Serializable +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.buildClassSerialDescriptor +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.json.* +import kotlin.jvm.JvmInline + +/** + * Represents parameters that a function accepts, described as a JSON Schema object. + * + * @property schema Json Schema object. + */ +@JvmInline +@Serializable(with = FunctionParameters.JsonDataSerializer::class) +public value class FunctionParameters(public val schema: JsonElement) { + + /** + * Custom serializer for the [FunctionParameters] class. + */ + public object JsonDataSerializer : KSerializer { + override val descriptor: SerialDescriptor = buildClassSerialDescriptor("FunctionParameters") + + /** + * Deserializes [FunctionParameters] from JSON format. + */ + override fun deserialize(decoder: Decoder): FunctionParameters { + require(decoder is JsonDecoder) { "This decoder is not a JsonDecoder. Cannot deserialize `FunctionParameters`." } + return FunctionParameters(decoder.decodeJsonElement()) + } + + /** + * Serializes [FunctionParameters] to JSON format. + */ + override fun serialize(encoder: Encoder, value: FunctionParameters) { + require(encoder is JsonEncoder) { "This encoder is not a JsonEncoder. Cannot serialize `FunctionParameters`." } + encoder.encodeJsonElement(value.schema) + } + } + + public companion object { + + /** + * Creates a [FunctionParameters] instance from a JSON string. + * + * @param json The JSON string to parse. + */ + public fun fromJsonString(json: String): FunctionParameters = FunctionParameters(Json.parseToJsonElement(json)) + + /** + * Creates a [FunctionParameters] instance using a [JsonObjectBuilder]. + * + * @param block The [JsonObjectBuilder] to use. 
+ */ + public fun buildJson(block: JsonObjectBuilder.() -> Unit): FunctionParameters { + val json = buildJsonObject(block) + return FunctionParameters(json) + } + } +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/JsonData.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/JsonData.kt deleted file mode 100644 index 21aba680..00000000 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/chat/JsonData.kt +++ /dev/null @@ -1,50 +0,0 @@ -package com.aallam.openai.api.chat - -import kotlinx.serialization.KSerializer -import kotlinx.serialization.Serializable -import kotlinx.serialization.descriptors.SerialDescriptor -import kotlinx.serialization.descriptors.buildClassSerialDescriptor -import kotlinx.serialization.encoding.Decoder -import kotlinx.serialization.encoding.Encoder -import kotlinx.serialization.json.Json -import kotlinx.serialization.json.JsonDecoder -import kotlinx.serialization.json.JsonElement -import kotlinx.serialization.json.JsonEncoder -import kotlinx.serialization.json.JsonObjectBuilder -import kotlinx.serialization.json.buildJsonObject -import kotlin.jvm.JvmInline - -@JvmInline -@Serializable(with = JsonData.JsonDataSerializer::class) -public value class JsonData(public val json: JsonElement){ - public object JsonDataSerializer: KSerializer{ - override val descriptor: SerialDescriptor = buildClassSerialDescriptor("JsonData") {} - - override fun deserialize(decoder: Decoder): JsonData { - if(decoder is JsonDecoder){ - return JsonData(decoder.decodeJsonElement()) - } - throw UnsupportedOperationException("Cannot deserialize Parameters") - } - - override fun serialize(encoder: Encoder, value: JsonData) { - if(encoder is JsonEncoder){ - encoder.encodeJsonElement(value.json) - return - } - } - } - public companion object{ - public fun fromString(json: String): JsonData = fromJsonElement(Json.parseToJsonElement(json)) - - public fun fromJsonElement(json: JsonElement): JsonData = JsonData(json) - - public fun builder(block: JsonObjectBuilder.() -> Unit): JsonData{ - val json = buildJsonObject ( - block - ) - return fromJsonElement(json) - } - - } -} \ No newline at end of file diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Usage.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Usage.kt index ce5ea975..bbc7d672 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Usage.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Usage.kt @@ -4,7 +4,7 @@ import kotlinx.serialization.SerialName import kotlinx.serialization.Serializable @Serializable -public class Usage( +public data class Usage( /** * Count of prompts tokens. 
*/ diff --git a/sample/jvm/build.gradle.kts b/sample/jvm/build.gradle.kts index 0c42f233..32913c3a 100644 --- a/sample/jvm/build.gradle.kts +++ b/sample/jvm/build.gradle.kts @@ -1,5 +1,6 @@ plugins { id("org.jetbrains.kotlin.jvm") + kotlin("plugin.serialization") application } diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/App.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/App.kt index 25a067fc..5ec387ba 100644 --- a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/App.kt +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/App.kt @@ -1,195 +1,42 @@ package com.aallam.openai.sample.jvm -import com.aallam.openai.api.BetaOpenAI -import com.aallam.openai.api.audio.TranscriptionRequest -import com.aallam.openai.api.audio.TranslationRequest -import com.aallam.openai.api.chat.ChatCompletionRequest -import com.aallam.openai.api.chat.ChatMessage -import com.aallam.openai.api.chat.ChatRole -import com.aallam.openai.api.chat.FunctionCall -import com.aallam.openai.api.chat.ChatCompletionFunction -import com.aallam.openai.api.chat.JsonData -import com.aallam.openai.api.completion.CompletionRequest -import com.aallam.openai.api.file.FileSource -import com.aallam.openai.api.image.ImageCreation -import com.aallam.openai.api.image.ImageEdit -import com.aallam.openai.api.image.ImageSize import com.aallam.openai.api.logging.LogLevel -import com.aallam.openai.api.model.ModelId -import com.aallam.openai.api.moderation.ModerationRequest import com.aallam.openai.client.LoggingConfig import com.aallam.openai.client.OpenAI -import kotlinx.coroutines.flow.launchIn -import kotlinx.coroutines.flow.onCompletion -import kotlinx.coroutines.flow.onEach import kotlinx.coroutines.runBlocking -import okio.FileSystem -import okio.Path.Companion.toPath -import kotlinx.serialization.json.put -@OptIn(BetaOpenAI::class) fun main() = runBlocking { val apiKey = System.getenv("OPENAI_API_KEY") val token = requireNotNull(apiKey) { "OPENAI_API_KEY environment variable must be set." 
} val openAI = OpenAI(token = token, logging = LoggingConfig(LogLevel.All)) - println("> Getting available engines...") - openAI.models().forEach(::println) - - println("\n> Getting ada engine...") - - val ada = openAI.model(modelId = ModelId("text-ada-001")) - println(ada) - - println("\n>️ Creating completion...") - val completionRequest = CompletionRequest( - model = ada.id, - prompt = "Somebody once told me the world is gonna roll me" - ) - openAI.completion(completionRequest).choices.forEach(::println) - - println("\n>️ Creating completion stream...") - openAI.completions(completionRequest) - .onEach { print(it.choices[0].text) } - .onCompletion { println() } - .launchIn(this) - .join() - - println("\n> Read files...") - val files = openAI.files() - println(files) - - println("\n> Create moderations...") - val moderation = openAI.moderations( - request = ModerationRequest( - input = listOf("I want to kill them.") - ) - ) - println(moderation) - - println("\n> Create images...") - val images = openAI.imageURL( - creation = ImageCreation( - prompt = "A cute baby sea otter", - n = 2, - size = ImageSize.is1024x1024 - ) - ) - println(images) - - println("\n> Edit images...") - val imageEdit = ImageEdit( - image = FileSource(path = "image.png".toPath(), fileSystem = FileSystem.RESOURCES), - mask = FileSource(path = "image.png".toPath(), fileSystem = FileSystem.RESOURCES), - prompt = "a sunlit indoor lounge area with a pool containing a flamingo", - n = 1, - size = ImageSize.is1024x1024, - ) - - val imageEdits = openAI.imageURL(imageEdit) - println(imageEdits) - - println("\n> Create chat completions...") - val chatCompletionRequest = ChatCompletionRequest( - model = ModelId("gpt-3.5-turbo"), - messages = listOf( - ChatMessage( - role = ChatRole.System, - content = "You are a helpful assistant that translates English to French." - ), - ChatMessage( - role = ChatRole.User, - content = "Translate the following English text to French: “OpenAI is awesome!”" - ) - ) - ) - openAI.chatCompletion(chatCompletionRequest).choices.forEach(::println) - - println("> Create Chat Completion function call...") - val chatCompletionCreateFunctionCall = ChatCompletionRequest( - model = ModelId("gpt-3.5-turbo-0613"), - messages = listOf( - ChatMessage( - role = ChatRole.System, - content = "You are a helpful assistant that translates English to French." - ), - ChatMessage( - role = ChatRole.User, - content = "Translate the following English text to French: “OpenAI is awesome!”" - ) - ), - functionCall = FunctionCall.forceCall("translate"), - functions = listOf( - ChatCompletionFunction( - name = "translate", - description = "Translate English to French", - parameters = JsonData.fromString( - """ - { - "type": "object", - "properties": { - "text": { - "type": "string" - } - } - } - """ - ), - ) - ) - ) - openAI.chatCompletion(chatCompletionCreateFunctionCall).choices.forEach(::println) - println("> Process Chat Completion function call...") - val chatFunctionReturn = ChatCompletionRequest( - model = ModelId("gpt-3.5-turbo-0613"), - messages = listOf( - ChatMessage( - role = ChatRole.System, - content = "You are a helpful assistant that uses a function to translates English to French.\n" + - "Use only the result of the function call as the response." 
- ), - ChatMessage( - role = ChatRole.User, - content = "Translate the following English text to French: “OpenAI is awesome!”" - ), - ChatMessage( - role = ChatRole.Assistant, - content = "None", - functionCall = JsonData.builder { - put("name", "translate") - put("arguments", """{"text": "OpenAI is awesome!"}""") - } - - ), - ChatMessage( - role = ChatRole.Function, - content = "openai est super !", - name = "translate", - ) - ), - ) - openAI.chatCompletion(chatFunctionReturn).choices.forEach(::println) - - println("\n>️ Creating chat completions stream...") - openAI.chatCompletions(chatCompletionRequest) - .onEach { print(it.choices.first().delta?.content.orEmpty()) } - .onCompletion { println() } - .launchIn(this) - .join() - - println("\n>️ Create transcription...") - val transcriptionRequest = TranscriptionRequest( - audio = FileSource(path = "micro-machines.wav".toPath(), fileSystem = FileSystem.RESOURCES), - model = ModelId("whisper-1"), - ) - val transcription = openAI.transcription(transcriptionRequest) - println(transcription) - - println("\n>️ Create translation...") - val translationRequest = TranslationRequest( - audio = FileSource(path = "multilingual.wav".toPath(), fileSystem = FileSystem.RESOURCES), - model = ModelId("whisper-1"), - ) - val translation = openAI.translation(translationRequest) - println(translation) + while (true) { + println("Select an option:") + println("1 - Engines") + println("2 - Completion") + println("3 - Files") + println("4 - Moderations") + println("5 - Images") + println("6 - Chat") + println("7 - Chat (w/ Function)") + println("8 - Whisper") + println("0 - Quit") + + when (readlnOrNull()?.toIntOrNull()) { + 1 -> engines(openAI) + 2 -> completion(openAI) + 3 -> files(openAI) + 4 -> moderations(openAI) + 5 -> images(openAI) + 6 -> chat(openAI) + 7 -> chatFunctionCall(openAI) + 8 -> whisper(openAI) + 0 -> { + println("Exiting...") + return@runBlocking + } + else -> println("Invalid option selected") + } + println("\n----------\n") // for readability + } } diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Chat.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Chat.kt new file mode 100644 index 00000000..6c862715 --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Chat.kt @@ -0,0 +1,38 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.chat.ChatCompletionRequest +import com.aallam.openai.api.chat.ChatMessage +import com.aallam.openai.api.chat.ChatRole +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.OpenAI +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.flow.launchIn +import kotlinx.coroutines.flow.onCompletion +import kotlinx.coroutines.flow.onEach + +@OptIn(BetaOpenAI::class) +suspend fun CoroutineScope.chat(openAI: OpenAI) { + println("\n> Create chat completions...") + val chatCompletionRequest = ChatCompletionRequest( + model = ModelId("gpt-3.5-turbo"), + messages = listOf( + ChatMessage( + role = ChatRole.System, + content = "You are a helpful assistant that translates English to French." 
+ ), + ChatMessage( + role = ChatRole.User, + content = "Translate the following English text to French: “OpenAI is awesome!”" + ) + ) + ) + openAI.chatCompletion(chatCompletionRequest).choices.forEach(::println) + + println("\n>️ Creating chat completions stream...") + openAI.chatCompletions(chatCompletionRequest) + .onEach { print(it.choices.first().delta?.content.orEmpty()) } + .onCompletion { println() } + .launchIn(this) + .join() +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/ChatFunctionCall.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/ChatFunctionCall.kt new file mode 100644 index 00000000..7fceb20e --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/ChatFunctionCall.kt @@ -0,0 +1,168 @@ +@file:OptIn(BetaOpenAI::class) + +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.chat.* +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.OpenAI +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.flow.launchIn +import kotlinx.coroutines.flow.onCompletion +import kotlinx.coroutines.flow.onEach +import kotlinx.serialization.Serializable +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.* + +@OptIn(BetaOpenAI::class) +suspend fun CoroutineScope.chatFunctionCall(openAI: OpenAI) { + // *** Chat Completion with Function Call *** // + + println("\n> Create Chat Completion function call...") + val modelId = ModelId("gpt-3.5-turbo-0613") + val chatMessages = mutableListOf( + ChatMessage( + role = ChatRole.User, + content = "What's the weather like in Boston?" + ) + ) + + val params = FunctionParameters.buildJson { + put("type", "object") + putJsonObject("properties") { + putJsonObject("location") { + put("type", "string") + put("description", "The city and state, e.g. 
San Francisco, CA") + } + putJsonObject("unit") { + put("type", "string") + putJsonArray("enum") { + add("celsius") + add("fahrenheit") + } + } + } + putJsonArray("required") { + add("location") + } + } + val request = chatCompletionRequest { + model = modelId + messages = chatMessages + functions { + function { + name = "currentWeather" + description = "Get the current weather in a given location" + parameters = params + } + } + functionCall = FunctionMode.Auto + } + + val response = openAI.chatCompletion(request) + val message = response.choices.first().message ?: error("No chat response found!") + message.functionCall?.let { functionCall -> + val functionResponse = callFunction(functionCall) + updateChatMessages(chatMessages, message, functionCall, functionResponse) + val secondResponse = openAI.chatCompletion( + request = ChatCompletionRequest( + model = modelId, + messages = chatMessages, + ) + ) + print(secondResponse) + } + + // *** Chat Completion Stream with Function Call *** // + + println("\n> Create Chat Completion function call (stream)...") + val chunks = mutableListOf() + openAI.chatCompletions(request) + .onEach { chunks += it.choices.first() } + .onCompletion { + val chatMessage = chatMessageOf(chunks) + chatMessage.functionCall?.let { + val functionResponse = callFunction(it) + updateChatMessages(chatMessages, message, it, functionResponse) + } + } + .launchIn(this) + .join() + + openAI.chatCompletions( + ChatCompletionRequest( + model = modelId, + messages = chatMessages, + ) + ) + .onEach { print(it.choices.first().delta?.content.orEmpty()) } + .onCompletion { println() } + .launchIn(this) + .join() +} + +@Serializable +data class WeatherInfo(val location: String, val temperature: String, val unit: String, val forecast: List) + +/** + * Example dummy function hard coded to return the same weather + * In production, this could be your backend API or an external API + */ +fun currentWeather(location: String, unit: String): String { + val weatherInfo = WeatherInfo(location, "72", unit, listOf("sunny", "windy")) + return Json.encodeToString(weatherInfo) +} + +private fun callFunction(functionCall: FunctionCall): String { + val availableFunctions = mapOf("currentWeather" to ::currentWeather) + val functionToCall = availableFunctions[functionCall.name] ?: error("Function ${functionCall.name} not found") + val functionArgs = functionCall.argumentsAsJson() ?: error("arguments field is missing") + + return functionToCall( + functionArgs.getValue("location").jsonPrimitive.content, + functionArgs["unit"]?.jsonPrimitive?.content ?: "fahrenheit" + ) +} + +private fun updateChatMessages( + chatMessages: MutableList, + message: ChatMessage, + functionCall: FunctionCall, + functionResponse: String +) { + chatMessages.add( + ChatMessage( + role = message.role, + content = message.content ?: "", // required to not be empty in this case + functionCall = message.functionCall + ) + ) + chatMessages.add( + ChatMessage(role = ChatRole.Function, name = functionCall.name, content = functionResponse) + ) +} + +fun chatMessageOf(chunks: List): ChatMessage { + val funcName = StringBuilder() + val funcArgs = StringBuilder() + var role: ChatRole? 
= null + val content = StringBuilder() + + chunks.forEach { chunk -> + role = chunk.delta?.role ?: role + chunk.delta?.content?.let { content.append(it) } + chunk.delta?.functionCall?.let { call -> + call.name?.let { funcName.append(it) } + call.arguments?.let { funcArgs.append(it) } + } + } + + return chatMessage { + this.role = role + this.content = content.toString() + if (funcName.isNotEmpty() || funcArgs.isNotEmpty()) { + functionCall = FunctionCall(funcName.toString(), funcArgs.toString()) + name = funcName.toString() + } + } +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Completion.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Completion.kt new file mode 100644 index 00000000..e7ffaaa1 --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Completion.kt @@ -0,0 +1,25 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.completion.CompletionRequest +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.OpenAI +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.flow.launchIn +import kotlinx.coroutines.flow.onCompletion +import kotlinx.coroutines.flow.onEach + +suspend fun CoroutineScope.completion(openAI: OpenAI) { + println("\n>️ Creating completion...") + val completionRequest = CompletionRequest( + model = ModelId("text-ada-001"), + prompt = "Somebody once told me the world is gonna roll me" + ) + openAI.completion(completionRequest).choices.forEach(::println) + + println("\n>️ Creating completion stream...") + openAI.completions(completionRequest) + .onEach { print(it.choices[0].text) } + .onCompletion { println() } + .launchIn(this) + .join() +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Engines.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Engines.kt new file mode 100644 index 00000000..3746fd6b --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Engines.kt @@ -0,0 +1,14 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.OpenAI + +suspend fun engines(openAI: OpenAI) { + println("> Getting available engines...") + openAI.models().forEach(::println) + + println("\n> Getting ada engine...") + + val ada = openAI.model(modelId = ModelId("text-ada-001")) + println(ada) +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Files.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Files.kt new file mode 100644 index 00000000..b8db4d82 --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Files.kt @@ -0,0 +1,9 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.client.OpenAI + +suspend fun files(openAI: OpenAI) { + println("\n> Read files...") + val files = openAI.files() + println(files) +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Whisper.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Whisper.kt new file mode 100644 index 00000000..f6a600a8 --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/Whisper.kt @@ -0,0 +1,29 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.audio.TranscriptionRequest +import com.aallam.openai.api.audio.TranslationRequest +import com.aallam.openai.api.file.FileSource +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.OpenAI +import okio.FileSystem +import okio.Path.Companion.toPath + 
+@OptIn(BetaOpenAI::class) +suspend fun whisper(openAI: OpenAI) { + println("\n>️ Create transcription...") + val transcriptionRequest = TranscriptionRequest( + audio = FileSource(path = "micro-machines.wav".toPath(), fileSystem = FileSystem.RESOURCES), + model = ModelId("whisper-1"), + ) + val transcription = openAI.transcription(transcriptionRequest) + println(transcription) + + println("\n>️ Create translation...") + val translationRequest = TranslationRequest( + audio = FileSource(path = "multilingual.wav".toPath(), fileSystem = FileSystem.RESOURCES), + model = ModelId("whisper-1"), + ) + val translation = openAI.translation(translationRequest) + println(translation) +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/images.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/images.kt new file mode 100644 index 00000000..5bb4aedf --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/images.kt @@ -0,0 +1,35 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.BetaOpenAI +import com.aallam.openai.api.file.FileSource +import com.aallam.openai.api.image.ImageCreation +import com.aallam.openai.api.image.ImageEdit +import com.aallam.openai.api.image.ImageSize +import com.aallam.openai.client.OpenAI +import okio.FileSystem +import okio.Path.Companion.toPath + +@OptIn(BetaOpenAI::class) +suspend fun images(openAI: OpenAI) { + println("\n> Create images...") + val images = openAI.imageURL( + creation = ImageCreation( + prompt = "A cute baby sea otter", + n = 2, + size = ImageSize.is1024x1024 + ) + ) + println(images) + + println("\n> Edit images...") + val imageEdit = ImageEdit( + image = FileSource(path = "image.png".toPath(), fileSystem = FileSystem.RESOURCES), + mask = FileSource(path = "image.png".toPath(), fileSystem = FileSystem.RESOURCES), + prompt = "a sunlit indoor lounge area with a pool containing a flamingo", + n = 1, + size = ImageSize.is1024x1024, + ) + + val imageEdits = openAI.imageURL(imageEdit) + println(imageEdits) +} diff --git a/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/moderations.kt b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/moderations.kt new file mode 100644 index 00000000..5b16b18a --- /dev/null +++ b/sample/jvm/src/main/kotlin/com/aallam/openai/sample/jvm/moderations.kt @@ -0,0 +1,14 @@ +package com.aallam.openai.sample.jvm + +import com.aallam.openai.api.moderation.ModerationRequest +import com.aallam.openai.client.OpenAI + +suspend fun moderations(openAI: OpenAI) { + println("\n> Create moderations...") + val moderation = openAI.moderations( + request = ModerationRequest( + input = listOf("I want to kill them.") + ) + ) + println(moderation) +}
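One last supplementary sketch (an editorial aside, not part of the diff): `FunctionCall.argumentsAsJson()` decodes model-generated JSON, and as the `FunctionCall` docs above note, the model does not always produce valid JSON. Callers may therefore want to parse defensively; `safeArguments` below is a hypothetical helper, not part of the SDK:

```kotlin
import com.aallam.openai.api.chat.FunctionCall
import kotlinx.serialization.json.JsonObject

// Hypothetical helper: returns null instead of throwing when the model emits
// malformed JSON in FunctionCall.arguments, logging the failure for inspection.
fun safeArguments(call: FunctionCall): JsonObject? =
    runCatching { call.argumentsAsJson() }
        .onFailure { e -> println("Invalid function arguments: ${e.message}") }
        .getOrNull()
```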