diff --git a/README.md b/README.md index 633eadcd..8319d4f3 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,7 @@ Use your `OpenAI` instance to make API requests. [Learn more](guides/GettingStar - [Images](guides/GettingStarted.md#images) - [Embeddings](guides/GettingStarted.md#embeddings) - [Files](guides/GettingStarted.md#files) -- [Fine-tunes](guides/GettingStarted.md#fine-tunes) +- [Fine-tuning](guides/GettingStarted.md#fine-tuning) - [Moderations](guides/GettingStarted.md#moderations) - [Audio](guides/GettingStarted.md#audio) @@ -86,6 +86,7 @@ Use your `OpenAI` instance to make API requests. [Learn more](guides/GettingStar - [Completions](guides/GettingStarted.md#completions) #### Deprecated +- [Fine-tunes](guides/GettingStarted.md#fine-tunes) - [Edits](guides/GettingStarted.md#edits) ## 📚 Guides diff --git a/guides/GettingStarted.md b/guides/GettingStarted.md index fce9c07e..b9f2366b 100644 --- a/guides/GettingStarted.md +++ b/guides/GettingStarted.md @@ -21,6 +21,12 @@ Use your `OpenAI` instance to make API requests. - [Create image variation](#create-image-variation) - [Embeddings](#embeddings) - [Create embeddings](#create-embeddings) +- [Fine-tuning](#fine-tuning) + - [Create fine-tuning job](#create-fine-tuning-job) + - [List fine-tuning jobs](#list-fine-tuning-jobs) + - [Retrieve fine-tuning job](#retrieve-fine-tuning-job) + - [Cancel fine-tuning](#cancel-fine-tuning) + - [List fine-tuning events](#list-fine-tuning-events) - [Audio](#audio) - [Create transcription](#create-transcription) - [Create translation](#create-translation) @@ -30,13 +36,6 @@ Use your `OpenAI` instance to make API requests. - [Delete file](#delete-file) - [Retrieve file](#retrieve-file) - [Retrieve file content](#retrieve-file-content) -- [Fine-tunes](#fine-tunes) - - [Create fine-tune](#create-fine-tune) - - [List fine-tunes](#list-fine-tunes) - - [Retrieve fine-tune](#retrieve-fine-tune) - - [Cancel fine-tune](#cancel-fine-tune) - - [List fine-tune events](#list-fine-tune-events) - - [Delete fine-tune model](#delete-fine-tune-model) - [Moderations](#moderations) - [Create moderation](#create-moderation) @@ -45,6 +44,13 @@ Use your `OpenAI` instance to make API requests. - [Create completion](#create-completion-legacy) #### Deprecated +- [Fine-tunes](#fine-tunes) + - [Create fine-tune](#create-fine-tune) + - [List fine-tunes](#list-fine-tunes) + - [Retrieve fine-tune](#retrieve-fine-tune) + - [Cancel fine-tune](#cancel-fine-tune) + - [List fine-tune events](#list-fine-tune-events) + - [Delete fine-tune model](#delete-fine-tune-model) - [Edits](#edits) - [Create edits](#create-edits-deprecated) @@ -162,6 +168,83 @@ val embeddings = openAI.embeddings( ) ```` +## Fine-tuning + +Manage fine-tuning jobs to tailor a model to your specific training data. + +### Create fine-tuning job + +Creates a job that fine-tunes a specified model from a given dataset. + +Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete. 
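+
+The job runs asynchronously, so the usual pattern is to create it and then poll until it reaches a terminal status. The snippet below is an illustrative sketch only (the polling interval, the coroutine context, and the terminal status strings are assumptions, not part of this client's API); `request` is any of the `FineTuningRequest` values shown in the examples that follow.
+
+```kotlin
+// Illustrative sketch: poll the job until it finishes, then read the resulting model name.
+var job = client.fineTuningJob(request)
+while (job.status.value !in setOf("succeeded", "failed", "cancelled")) {
+    delay(30_000) // assumed polling interval; requires a coroutine context (kotlinx.coroutines)
+    job = client.fineTuningJob(job.id) ?: break
+}
+val fineTunedModel = job.fineTunedModel // null unless the job succeeded
+```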
+
+#### No Hyperparameters
+
+```kotlin
+val request = FineTuningRequest(
+    trainingFile = FileId("file-abc123"),
+    model = ModelId("gpt-3.5-turbo"),
+)
+val fineTuningJob = client.fineTuningJob(request)
+```
+
+#### Hyperparameters
+
+```kotlin
+val request = FineTuningRequest(
+    trainingFile = FileId("file-abc123"),
+    model = ModelId("gpt-3.5-turbo"),
+    hyperparameters = Hyperparameters(nEpochs = 2),
+)
+val fineTuningJob = client.fineTuningJob(request)
+```
+
+#### Validation File
+
+```kotlin
+val request = FineTuningRequest(
+    trainingFile = FileId("file-abc123"),
+    validationFile = FileId("file-def345"),
+    model = ModelId("gpt-3.5-turbo"),
+)
+val fineTuningJob = client.fineTuningJob(request)
+```
+
+### List fine-tuning jobs
+
+List your organization's fine-tuning jobs.
+
+```kotlin
+val fineTuningJobs = client.fineTuningJobs(limit = 2)
+```
+
+### Retrieve fine-tuning job
+
+Get info about a fine-tuning job.
+
+```kotlin
+val id = FineTuningId("ft-AF1WoRqd3aJAHsqc9NY7iL8F")
+val fineTuningJob = client.fineTuningJob(id)
+```
+
+### Cancel fine-tuning
+
+Immediately cancel a fine-tuning job.
+
+```kotlin
+val id = FineTuningId("ftjob-abc12")
+client.cancel(id)
+```
+
+### List fine-tuning events
+
+Get status updates for a fine-tuning job.
+
+```kotlin
+val id = FineTuningId("ftjob-abc12")
+val fineTuningEvents = client.fineTuningEvents(id)
+```
+
 ## Audio
 
 Learn how to turn audio into text.
@@ -240,6 +323,45 @@ Returns the contents of the specified file
 val bytes = openAI.download(fileId)
 ````
 
+## Moderations
+
+Given an input text, outputs if the model classifies it as violating OpenAI's content policy.
+
+### Create moderation
+
+Classifies if text violates OpenAI's Content Policy
+
+````kotlin
+val moderation = openAI.moderations(
+    request = ModerationRequest(
+        input = "I want to kill them."
+    )
+)
+````
+
+---
+
+## Completions
+
+Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.
+
+### Create Completion `legacy`
+
+Creates a completion for the provided prompt and parameters
+
+```kotlin
+val completionRequest = CompletionRequest(
+    model = ModelId("text-ada-001"),
+    prompt = "Somebody once told me the world is gonna roll me",
+    echo = true
+)
+val completion: TextCompletion = openAI.completion(completionRequest)
+// or, as flow
+val completions: Flow<TextCompletion> = openAI.completions(completionRequest)
+```
+
+---
+
 ## Fine-tunes
 
 Manage fine-tuning jobs to tailor a model to your specific training data.
@@ -301,45 +423,6 @@ Delete a fine-tuned model. You must have the Owner role in your organization.
 openAI.delete(fileId)
 ```
 
-## Moderations
-
-Given an input text, outputs if the model classifies it as violating OpenAI's content policy.
-
-### Create moderation
-
-Classifies if text violates OpenAI's Content Policy
-
-````kotlin
-val moderation = openAI.moderations(
-    request = ModerationRequest(
-        input = "I want to kill them."
-    )
-)
-````
-
----
-
-## Completions
-
-Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.
- -### Create Completion `legacy` - -Creates a completion for the provided prompt and parameters - -```kotlin -val completionRequest = CompletionRequest( - model = ModelId("text-ada-001"), - prompt = "Somebody once told me the world is gonna roll me", - echo = true -) -val completion: TextCompletion = openAI.completion(completionRequest) -// or, as flow -val completions: Flow = openAI.completions(completionRequest) -``` - ---- - ## Edits Given a prompt and an instruction, the model will return an edited version of the prompt. diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTunes.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTunes.kt index 5c59ef24..5e1c4b4f 100644 --- a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTunes.kt +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTunes.kt @@ -18,35 +18,42 @@ public interface FineTunes { * Response includes details of the enqueued job including job status and the name of the fine-tuned models once * complete. */ + @Deprecated("Use FineTuning instead.") public suspend fun fineTune(request: FineTuneRequest): FineTune /** * List your organization's fine-tuning jobs. */ + @Deprecated("Use FineTuning instead.") public suspend fun fineTunes(): List /** * Gets info about to fine-tune job. */ + @Deprecated("Use FineTuning instead.") public suspend fun fineTune(fineTuneId: FineTuneId): FineTune? /** * Immediately cancel a fine-tune job. */ + @Deprecated("Use FineTuning instead.") public suspend fun cancel(fineTuneId: FineTuneId): FineTune? /** * Get fine-grained status updates for fine-tune job. */ + @Deprecated("Use FineTuning instead.") public suspend fun fineTuneEvents(fineTuneId: FineTuneId): List /** * Get fine-grained status updates for fine-tune job. */ + @Deprecated("Use FineTuning instead.") public fun fineTuneEventsFlow(fineTuneId: FineTuneId): Flow /** * Delete a fine-tuned model. You must have the Owner role in your organization. */ + @Deprecated("Use FineTuning instead.") public suspend fun delete(fineTuneModel: ModelId): Boolean } diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTuning.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTuning.kt new file mode 100644 index 00000000..7854e0cb --- /dev/null +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/FineTuning.kt @@ -0,0 +1,52 @@ +package com.aallam.openai.client + +import com.aallam.openai.api.core.PaginatedList +import com.aallam.openai.api.finetuning.* + +/** + * Manage fine-tuning jobs to tailor a model to your specific training data. + */ +public interface FineTuning { + + /** + * Creates a job that fine-tunes a specified model from a given dataset. + * + * Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete. + */ + public suspend fun fineTuningJob(request: FineTuningRequest): FineTuningJob + + /** + * List your organization's fine-tuning jobs. + * + * @param after Identifier for the last job from the previous pagination request. + * @param limit Number of fine-tuning jobs to retrieve. + */ + public suspend fun fineTuningJobs(after: String? = null, limit: Int? = null): List + + /** + * Get info about a fine-tuning job. + * + * @param id The ID of the fine-tuning job. + */ + public suspend fun fineTuningJob(id: FineTuningId): FineTuningJob? + + /** + * Immediately cancel a fine-tune job. + * + * @param id The ID of the fine-tuning job to cancel. 
+ */ + public suspend fun cancel(id: FineTuningId): FineTuningJob? + + /** + * Get status updates for a fine-tuning job. + * + * @param id The ID of the fine-tuning job to get events for. + * @param after Identifier for the last event from the previous pagination request. + * @param limit Number of events to retrieve. + */ + public suspend fun fineTuningEvents( + id: FineTuningId, + after: String? = null, + limit: Int? = null + ): PaginatedList +} diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/OpenAI.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/OpenAI.kt index b7093659..ec7a8a23 100644 --- a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/OpenAI.kt +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/OpenAI.kt @@ -11,6 +11,7 @@ import kotlin.time.Duration.Companion.seconds * OpenAI API. */ public interface OpenAI : Completions, Files, Edits, Embeddings, Models, Moderations, FineTunes, Images, Chat, Audio, + FineTuning, Closeable /** diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/OpenAIApi.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/OpenAIApi.kt index 70526cff..4faa42cc 100644 --- a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/OpenAIApi.kt +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/OpenAIApi.kt @@ -22,4 +22,5 @@ internal class OpenAIApi( Images by ImagesApi(requester), Chat by ChatApi(requester), Audio by AudioApi(requester), + FineTuning by FineTuningApi(requester), Closeable by requester diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/ApiPath.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/ApiPath.kt index 9fe9302d..651cd7fa 100644 --- a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/ApiPath.kt +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/ApiPath.kt @@ -17,4 +17,5 @@ internal object ApiPath { const val ImagesVariants = "images/variations" const val Models = "models" const val Moderations = "moderations" + const val FineTuningJobs = "fine_tuning/jobs" } diff --git a/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/FineTuningApi.kt b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/FineTuningApi.kt new file mode 100644 index 00000000..7287cbb7 --- /dev/null +++ b/openai-client/src/commonMain/kotlin/com.aallam.openai.client/internal/api/FineTuningApi.kt @@ -0,0 +1,66 @@ +package com.aallam.openai.client.internal.api + +import com.aallam.openai.api.core.PaginatedList +import com.aallam.openai.api.finetuning.FineTuningId +import com.aallam.openai.api.finetuning.FineTuningJob +import com.aallam.openai.api.finetuning.FineTuningJobEvent +import com.aallam.openai.api.finetuning.FineTuningRequest +import com.aallam.openai.client.FineTuning +import com.aallam.openai.client.internal.http.HttpRequester +import com.aallam.openai.client.internal.http.perform +import io.ktor.client.call.* +import io.ktor.client.request.* +import io.ktor.client.statement.* +import io.ktor.http.* + +internal class FineTuningApi(private val requester: HttpRequester) : FineTuning { + override suspend fun fineTuningJob(request: FineTuningRequest): FineTuningJob { + return requester.perform { + it.post { + url(path = ApiPath.FineTuningJobs) + setBody(request) + contentType(ContentType.Application.Json) + } + } + } + + override suspend fun 
fineTuningJobs(after: String?, limit: Int?): PaginatedList { + return requester.perform { + it.get { + url(path = ApiPath.FineTuningJobs) { + after?.let { value -> parameter("after", value) } + limit?.let { value -> parameter("limit", value) } + } + } + } + } + + override suspend fun fineTuningJob(id: FineTuningId): FineTuningJob? { + val response = requester.perform { + it.get { url(path = "${ApiPath.FineTuningJobs}/${id.id}") } + } + return if (response.status == HttpStatusCode.NotFound) null else response.body() + } + + override suspend fun cancel(id: FineTuningId): FineTuningJob? { + val response = requester.perform { + it.post { url(path = "${ApiPath.FineTuningJobs}/${id.id}/cancel") } + } + return if (response.status == HttpStatusCode.NotFound) null else response.body() + } + + override suspend fun fineTuningEvents( + id: FineTuningId, + after: String?, + limit: Int? + ): PaginatedList { + return requester.perform { + it.get { + url(path = "${ApiPath.FineTuningJobs}/${id.id}/events") { + after?.let { value -> parameter("after", value) } + limit?.let { value -> parameter("limit", value) } + } + } + } + } +} diff --git a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestAudio.kt b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestAudio.kt index 4b4f2381..999ed311 100644 --- a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestAudio.kt +++ b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestAudio.kt @@ -51,7 +51,7 @@ class TestAudio : TestOpenAI() { val transcription = openAI.transcription(request) assertTrue { transcription.text.isNotEmpty() } assertEquals(transcription.language, "english") - assertEquals(transcription.duration, 29.88) + assertEquals(transcription.duration!!, 29.88, absoluteTolerance = 0.1) assertTrue { transcription.segments?.isNotEmpty() ?: false } } @@ -92,7 +92,7 @@ class TestAudio : TestOpenAI() { val translation = openAI.translation(request) assertTrue { translation.text.isNotEmpty() } assertEquals(translation.language, "english") - assertEquals(translation.duration, 42.06) + assertEquals(translation.duration!!, 42.06, absoluteTolerance = 0.1) assertTrue { translation.segments?.isNotEmpty() ?: false } } } diff --git a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTunes.kt b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTunes.kt deleted file mode 100644 index 286e1bc6..00000000 --- a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTunes.kt +++ /dev/null @@ -1,78 +0,0 @@ -package com.aallam.openai.client - -import com.aallam.openai.api.file.Purpose -import com.aallam.openai.api.file.fileSource -import com.aallam.openai.api.file.fileUpload -import com.aallam.openai.api.finetune.FineTuneEvent -import com.aallam.openai.api.finetune.fineTuneRequest -import com.aallam.openai.api.model.ModelId -import com.aallam.openai.client.internal.asSource -import com.aallam.openai.client.internal.waitFileProcess -import kotlinx.coroutines.flow.launchIn -import kotlinx.coroutines.flow.onEach -import ulid.ULID -import kotlin.test.Test -import kotlin.test.assertEquals -import kotlin.test.assertNotNull -import kotlin.test.assertTrue - -class TestFineTunes : TestOpenAI() { - - @Test - fun fineTunes() = test { - - val id = ULID.randomULID() - val jsonl = """ - { "prompt":"Did the U.S. 
join the League of Nations?", "completion":"No"} - { "prompt":"Where was the League of Nations created?", "completion":"Paris"} - """.trimIndent() - - val source = fileSource { - name = "$id.jsonl" - source = jsonl.asSource() - } - val request = fileUpload { - file = source - purpose = Purpose("fine-tune") - } - val fileId = openAI.file(request).id - openAI.waitFileProcess(fileId) - - // Fine-tune created using training file - val fineTune = openAI.fineTune( - request = fineTuneRequest { - trainingFile = fileId - model = ModelId("ada") - } - ) - val fineTuneModel = fineTune.fineTunedModel - assertEquals(fineTune.trainingFiles.first().filename, source.name) - - // At least one fine-tune exists - val fineTunes = openAI.fineTunes() - assertTrue(fineTunes.isNotEmpty()) - - // At least last created fine-tune exists - val createdFineTune = openAI.fineTune(fineTune.id) - assertNotNull(createdFineTune) - - // Get events - val fineTuneEvents = openAI.fineTuneEvents(fineTune.id) - assertTrue(fineTuneEvents.isNotEmpty()) - - // Cancel fine-tune - val canceled = openAI.cancel(fineTune.id) - assertNotNull(canceled) - - // Get events as stream - val events = mutableListOf() - openAI.fineTuneEventsFlow(fineTune.id) - .onEach { events += it } - .launchIn(this) - .join() - - // cleanup - openAI.delete(fileId) - fineTuneModel?.let { openAI.delete(it) } - } -} diff --git a/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTuning.kt b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTuning.kt new file mode 100644 index 00000000..68bbe07e --- /dev/null +++ b/openai-client/src/commonTest/kotlin/com/aallam/openai/client/TestFineTuning.kt @@ -0,0 +1,63 @@ +package com.aallam.openai.client + +import com.aallam.openai.api.file.Purpose +import com.aallam.openai.api.file.fileSource +import com.aallam.openai.api.file.fileUpload +import com.aallam.openai.api.finetuning.FineTuningRequest +import com.aallam.openai.api.model.ModelId +import com.aallam.openai.client.internal.asSource +import com.aallam.openai.client.internal.waitFileProcess +import ulid.ULID +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertTrue + +class TestFineTuning : TestOpenAI() { + + @Test + fun fineTuningJob() = test { + val id = ULID.randomULID() + val jsonl = """ + {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "What's the capital of France?"}, {"role": "assistant", "content": "Paris, as if everyone doesn't know that already."}]} + {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "Who wrote 'Romeo and Juliet'?"}, {"role": "assistant", "content": "Oh, just some guy named William Shakespeare. Ever heard of him?"}]} + {"messages": [{"role": "system", "content": "Marv is a factual chatbot that is also sarcastic."}, {"role": "user", "content": "How far is the Moon from Earth?"}, {"role": "assistant", "content": "Around 384,400 kilometers. 
Give or take a few, like that really matters."}]} + """.trimIndent() + + val source = fileSource { + name = "$id.jsonl" + source = jsonl.asSource() + } + val request = fileUpload { + file = source + purpose = Purpose("fine-tune") + } + val fileId = openAI.file(request).id + openAI.waitFileProcess(fileId) + + val jobRequest = FineTuningRequest(trainingFile = fileId, model = ModelId("gpt-3.5-turbo")) + val fineTuningJob = openAI.fineTuningJob(jobRequest) + + assertEquals(fineTuningJob.trainingFile.id, fileId.id) + + // At least one fine-tune exists + val fineTunes = openAI.fineTuningJobs() + assertTrue(fineTunes.isNotEmpty()) + println(fineTunes.contains(fineTuningJob)) + + // At least last created fine-tune exists + val createdFineTune = openAI.fineTuningJob(fineTuningJob.id) + assertNotNull(createdFineTune) + + // Get events + val fineTuneEvents = openAI.fineTuningEvents(fineTuningJob.id) + assertTrue(fineTuneEvents.isNotEmpty()) + + // Cancel fine-tune + val canceled = openAI.cancel(fineTuningJob.id) + assertNotNull(canceled) + + // cleanup + openAI.delete(fileId) + } +} diff --git a/openai-client/src/nativeTest/kotlin/com.aallam.openai.client/internal/Env.kt b/openai-client/src/nativeTest/kotlin/com.aallam.openai.client/internal/Env.kt index 4c2f9348..33c10a46 100644 --- a/openai-client/src/nativeTest/kotlin/com.aallam.openai.client/internal/Env.kt +++ b/openai-client/src/nativeTest/kotlin/com.aallam.openai.client/internal/Env.kt @@ -1,8 +1,10 @@ package com.aallam.openai.client.internal +import kotlinx.cinterop.ExperimentalForeignApi import kotlinx.cinterop.toKString import platform.posix.getenv +@OptIn(ExperimentalForeignApi::class) internal actual fun env(name: String): String? { return getenv(name)?.toKString() } diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/OrganizationId.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/OrganizationId.kt new file mode 100644 index 00000000..b8a7d28f --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/OrganizationId.kt @@ -0,0 +1,11 @@ +package com.aallam.openai.api.core + +import kotlinx.serialization.Serializable +import kotlin.jvm.JvmInline + +/** + * Organization identifier. 
+ */ +@Serializable +@JvmInline +public value class OrganizationId(public val id: String) diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/PaginatedList.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/PaginatedList.kt new file mode 100644 index 00000000..350bf302 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/PaginatedList.kt @@ -0,0 +1,10 @@ +package com.aallam.openai.api.core + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +@Serializable +public data class PaginatedList( + @SerialName("data") val data: List, + @SerialName("has_more") val hasMore: Boolean +) : List by data diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Status.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Status.kt index 463bd971..3ac02fdd 100644 --- a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Status.kt +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/core/Status.kt @@ -15,5 +15,8 @@ public value class Status(public val value: String) { public val Deleted: Status = Status("deleted") public val Failed: Status = Status("failed") public val Cancelled: Status = Status("cancelled") + public val ValidatingFiles: Status = Status("validating_files") + public val Queued: Status = Status("queued") + public val Running: Status = Status("running") } } diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/ErrorInfo.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/ErrorInfo.kt new file mode 100644 index 00000000..49a0f0f9 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/ErrorInfo.kt @@ -0,0 +1,25 @@ +package com.aallam.openai.api.finetuning + +import kotlinx.serialization.Serializable + +/** + * For fine-tuning jobs that have `failed`, this will contain more information on the cause of the failure. + */ +@Serializable +public data class ErrorInfo( + + /** + * A human-readable error message. + */ + val message: String, + + /** + * A machine-readable error code. + */ + val code: String, + + /** + * The parameter that was invalid (e.g., `training_file`, `validation_file`), or null if not parameter-specific. + */ + val param: String? = null, +) diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningId.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningId.kt new file mode 100644 index 00000000..2474f237 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningId.kt @@ -0,0 +1,11 @@ +package com.aallam.openai.api.finetuning + +import kotlinx.serialization.Serializable +import kotlin.jvm.JvmInline + +/** + * Fine-tuning identifier. 
+ */ +@Serializable +@JvmInline +public value class FineTuningId(public val id: String) diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJob.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJob.kt new file mode 100644 index 00000000..9997f717 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJob.kt @@ -0,0 +1,93 @@ +package com.aallam.openai.api.finetuning; + +import com.aallam.openai.api.core.OrganizationId +import com.aallam.openai.api.core.Status +import com.aallam.openai.api.file.FileId +import com.aallam.openai.api.model.ModelId +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** + * A data class representing a fine-tuning job. + */ +@Serializable +public data class FineTuningJob( + + /** + * The object identifier, which can be referenced in the API endpoints. + */ + @SerialName("id") + val id: FineTuningId, + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was created. + */ + @SerialName("created_at") + val createdAt: Int, + + /** + * The base model that is being fine-tuned. + */ + @SerialName("model") + val model: ModelId, + + /** + * The organization that owns the fine-tuning job. + */ + @SerialName("organization_id") + val organizationId: OrganizationId, + + /** + * The current status of the fine-tuning job (e.g., [Status.ValidatingFiles], [Status.Queued], etc.). + */ + @SerialName("status") + val status: Status, + + /** + * The hyperparameters used for the fine-tuning job. + */ + @SerialName("hyperparameters") + val hyperparameters: Hyperparameters, + + /** + * The file ID used for training, retrievable via the Files API. + */ + @SerialName("training_file") + val trainingFile: FileId, + + /** + * The compiled results file ID(s) for the fine-tuning job, retrievable via the Files API. + */ + @SerialName("result_files") + val resultFiles: List, + + /** + * The Unix timestamp (in seconds) for when the fine-tuning job was finished, or null if still running. + */ + @SerialName("finished_at") + val finishedAt: Int? = null, + + /** + * The name of the fine-tuned model that is being created, or null if the fine-tuning job is still running. + */ + @SerialName("fine_tuned_model") + val fineTunedModel: ModelId? = null, + + /** + * The file ID used for validation, retrievable via the Files API, or null if not available. + */ + @SerialName("validation_file") + val validationFile: FileId? = null, + + /** + * The total number of billable tokens processed by this fine-tuning job, or null if the job is still running. + */ + @SerialName("trained_tokens") + val trainedTokens: Int? = null, + + /** + * Contains more information on the cause of failure for failed fine-tuning jobs, or null if not failed. + */ + @SerialName("error") + val error: ErrorInfo? = null, +) diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJobEvent.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJobEvent.kt new file mode 100644 index 00000000..f9e1211c --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningJobEvent.kt @@ -0,0 +1,61 @@ +package com.aallam.openai.api.finetuning; + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import kotlin.jvm.JvmInline + +/** + * Data class representing a fine-tuning job event. 
+ */ +@Serializable +public data class FineTuningJobEvent( + + /** + * The identifier of the fine-tuning job event. + */ + @SerialName("id") + val id: String, + + /** + * The Unix timestamp (in seconds) for when the event was created. + */ + @SerialName("created_at") + val createdAt: Int, + + /** + * The severity level of the event, which can be either "info", "warn", or "error". + */ + @SerialName("level") + val level: Level, + + /** + * A human-readable message providing more details about the event. + */ + @SerialName("message") + val message: String +) + + +/** + * Represents the severity level of a fine-tuning job event. + */ +@Serializable +@JvmInline +public value class Level(public val value: String) { + public companion object { + /** + * An informational event. + */ + public val INFO: Level = Level("info") + + /** + * A warning event. + */ + public val WARN: Level = Level("warn") + + /** + * An error event. + */ + public val ERROR: Level = Level("error") + } +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningRequest.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningRequest.kt new file mode 100644 index 00000000..6952dde3 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/FineTuningRequest.kt @@ -0,0 +1,111 @@ +package com.aallam.openai.api.finetuning + +import com.aallam.openai.api.OpenAIDsl +import com.aallam.openai.api.file.FileId +import com.aallam.openai.api.model.ModelId +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** + * A data class representing a fine-tuning request. + */ +@Serializable +public data class FineTuningRequest( + + /** + * The ID of an uploaded file that contains training data. + * See [upload file](/docs/api-reference/files/upload) for how to upload a file. + * + * Your dataset must be formatted as a JSONL file. Additionally, you must upload your file with the purpose `fine-tune`. + * See the [fine-tuning guide](/docs/guides/fine-tuning) for more details. + */ + @SerialName("training_file") + val trainingFile: FileId, + + /** + * The name of the model to fine-tune. You can select one of the + * [supported models](/docs/guides/fine-tuning/what-models-can-be-fine-tuned). + */ + @SerialName("model") + val model: ModelId, + + /** + * The ID of an uploaded file that contains validation data. + * + * If you provide this file, the data is used to generate validation + * metrics periodically during fine-tuning. These metrics can be viewed in + * the fine-tuning results file. + * The same data should not be present in both train and validation files. + * + * Your dataset must be formatted as a JSONL file. You must upload your file with the purpose `fine-tune`. + * See the [fine-tuning guide](/docs/guides/fine-tuning) for more details. + */ + @SerialName("validation_file") + val validationFile: FileId? = null, + + /** + * The hyperparameters used for the fine-tuning job. + */ + @SerialName("hyperparameters") + val hyperparameters: Hyperparameters? = null, + + /** + * A string of up to 18 characters that will be added to your fine-tuned model name. + * For example, a `suffix` of "custom-model-name" would produce a model name like + * `ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel`. + */ + @SerialName("suffix") + val suffix: String? = null +) + +/** + * Create a Fine-Tuning request. 
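+ *
+ * A minimal usage sketch (illustrative only; the file and model identifiers below are placeholders
+ * taken from the docs, not values required by this builder):
+ *
+ * ```
+ * val request = fineTuningRequest {
+ *     trainingFile = FileId("file-abc123")
+ *     model = ModelId("gpt-3.5-turbo")
+ *     hyperparameters = Hyperparameters(nEpochs = 4) // optional
+ * }
+ * ```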
+ */ +public fun fineTuningRequest(block: FineTuningRequestBuilder.() -> Unit): FineTuningRequest = + FineTuningRequestBuilder().apply(block).build() + +/** + * Builder of [FineTuningRequest] instances. + */ +@OpenAIDsl +public class FineTuningRequestBuilder { + + /** + * The ID of an uploaded file that contains training data. + * See [upload file](/docs/api-reference/files/upload) for how to upload a file. + */ + public var trainingFile: FileId? = null + + /** + * The name of the model to fine-tune. + * See [supported models](/docs/guides/fine-tuning/what-models-can-be-fine-tuned) for more details. + */ + public var model: ModelId? = null + + /** + * The ID of an uploaded file that contains validation data (Optional). + * The same data should not be present in both train and validation files. + */ + public var validationFile: FileId? = null + + /** + * The hyperparameters used for the fine-tuning job (Optional). + */ + public var hyperparameters: Hyperparameters? = null + + /** + * A string of up to 18 characters that will be added to your fine-tuned model name (Optional). + */ + public var suffix: String? = null + + /** + * Create a new instance of [FineTuningRequest]. + */ + public fun build(): FineTuningRequest = FineTuningRequest( + trainingFile = requireNotNull(trainingFile) { "trainingFile is required" }, + model = requireNotNull(model) { "model is required" }, + validationFile = validationFile, + hyperparameters = hyperparameters, + suffix = suffix, + ) +} diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/Hyperparameters.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/Hyperparameters.kt new file mode 100644 index 00000000..a5011a9a --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/Hyperparameters.kt @@ -0,0 +1,75 @@ +package com.aallam.openai.api.finetuning + +import com.aallam.openai.api.finetuning.Hyperparameters.NEpochs +import com.aallam.openai.api.finetuning.internal.NEpochsSerializer +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import kotlin.jvm.JvmInline + +/** + * A data class representing hyperparameters used during the fine-tuning of a model. + * + * This class holds configuration options that guide the training process, + * and it supports serialization to allow for easy storage and retrieval of the settings. + */ +@Serializable +public data class Hyperparameters( + + /** + * The number of training epochs. + * + * This parameter can either be a specific number or the string "auto", + * where "auto" implies that the number of epochs will be determined automatically. + * It uses a sealed interface [NEpochs] to accept either an integer or a string value. + */ + @SerialName("n_epochs") + val nEpochs: NEpochs +) { + + public constructor(nEpochs: Int) : this(NEpochs(nEpochs)) + public constructor(nEpochs: String) : this(NEpochs(nEpochs)) + + /** + * A sealed interface representing a flexible parameter for the number of epochs. + * + * This interface allows the number of epochs to be either a specific [Int] + * or a [String] representing an automatic value selection ("auto"). + */ + @Serializable(NEpochsSerializer::class) + public sealed interface NEpochs { + + /** + * A value which can be either an [Int] or a [String] representing the number of epochs. + */ + public val value: Any + + public companion object { + /** + * Creates an [NEpochs] instance holding an [Int] value. 
+ */ + public operator fun invoke(value: Int): NEpochs = NEpochsInt(value) + + /** + * Creates an [NEpochs] instance holding a [String] value. + */ + public operator fun invoke(value: String): NEpochs = NEpochsString(value) + + /** + * A predefined [NEpochs] instance which indicates automatic determination of epochs. + */ + public val Auto: NEpochs = NEpochsString("auto") + } + } +} + +/** + * An [Hyperparameters.NEpochs] implementation that holds an integer value representing a specific number of epochs. + */ +@JvmInline +internal value class NEpochsInt(override val value: Int) : NEpochs + +/** + * An [Hyperparameters.NEpochs] implementation that holds a string value which can be used to specify automatic determination of epochs. + */ +@JvmInline +internal value class NEpochsString(override val value: String) : NEpochs diff --git a/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/internal/NEpochsSerializer.kt b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/internal/NEpochsSerializer.kt new file mode 100644 index 00000000..00166246 --- /dev/null +++ b/openai-core/src/commonMain/kotlin/com.aallam.openai.api/finetuning/internal/NEpochsSerializer.kt @@ -0,0 +1,36 @@ +package com.aallam.openai.api.finetuning.internal + +import com.aallam.openai.api.finetuning.Hyperparameters +import kotlinx.serialization.KSerializer +import kotlinx.serialization.descriptors.SerialDescriptor +import kotlinx.serialization.descriptors.buildClassSerialDescriptor +import kotlinx.serialization.encoding.Decoder +import kotlinx.serialization.encoding.Encoder +import kotlinx.serialization.json.JsonDecoder +import kotlinx.serialization.json.JsonPrimitive +import kotlinx.serialization.json.int +import kotlinx.serialization.json.intOrNull + +internal class NEpochsSerializer : KSerializer { + + override val descriptor: SerialDescriptor = buildClassSerialDescriptor("n_epochs") + + override fun deserialize(decoder: Decoder): Hyperparameters.NEpochs { + val decode = decoder as JsonDecoder + val element = decode.decodeJsonElement() as JsonPrimitive + return if (element.isString && element.content == "auto") { + Hyperparameters.NEpochs.Auto + } else if (element.intOrNull != null) { + Hyperparameters.NEpochs(element.int) + } else { + error("unsupported n_epochs format: $element") + } + } + + override fun serialize(encoder: Encoder, value: Hyperparameters.NEpochs) { + when (val nEpochs = value.value) { + is String -> encoder.encodeString(nEpochs) + is Int -> encoder.encodeInt(nEpochs) + } + } +} diff --git a/openai-core/src/commonTest/kotlin/com.aallam.openai.api/chat/TestFunctionMode.kt b/openai-core/src/commonTest/kotlin/com.aallam.openai.api/chat/TestFunctionMode.kt index ecb45239..9b16e903 100644 --- a/openai-core/src/commonTest/kotlin/com.aallam.openai.api/chat/TestFunctionMode.kt +++ b/openai-core/src/commonTest/kotlin/com.aallam.openai.api/chat/TestFunctionMode.kt @@ -1,11 +1,9 @@ package com.aallam.openai.api.chat -import com.aallam.openai.api.BetaOpenAI import kotlinx.serialization.json.Json import kotlin.test.Test import kotlin.test.assertEquals -@OptIn(BetaOpenAI::class) class TestFunctionMode { @Test diff --git a/openai-core/src/commonTest/kotlin/com/aallam/openai/api/finetuning/TestHyperparameters.kt b/openai-core/src/commonTest/kotlin/com/aallam/openai/api/finetuning/TestHyperparameters.kt new file mode 100644 index 00000000..327849f5 --- /dev/null +++ b/openai-core/src/commonTest/kotlin/com/aallam/openai/api/finetuning/TestHyperparameters.kt @@ -0,0 +1,40 @@ +package 
com.aallam.openai.api.finetuning
+
+import kotlinx.serialization.json.Json
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class TestHyperparameters {
+
+    @Test
+    fun serializeHyperparametersNEpochsAuto() {
+        val params = Hyperparameters(nEpochs = Hyperparameters.NEpochs.Auto)
+        val encodedParams = Json.encodeToString(Hyperparameters.serializer(), params)
+        val json = """{"n_epochs":"auto"}"""
+        assertEquals(encodedParams, json)
+        val decodedAuto = Json.decodeFromString(Hyperparameters.serializer(), json)
+        assertEquals(params, decodedAuto)
+    }
+
+    @Test
+    fun serializeHyperparametersNEpochsInt() {
+        val params = Hyperparameters(nEpochs = Hyperparameters.NEpochs(50))
+        val encodedParams = Json.encodeToString(Hyperparameters.serializer(), params)
+        val json = """{"n_epochs":50}"""
+        assertEquals(encodedParams, json)
+        val decoded = Json.decodeFromString(Hyperparameters.serializer(), json)
+        assertEquals(params, decoded)
+    }
+
+    @Test
+    fun serializeHyperparametersNEpochsString() {
+        val params = Hyperparameters(nEpochs = Hyperparameters.NEpochs("auto"))
+        val encodedParams = Json.encodeToString(Hyperparameters.serializer(), params)
+        val json = """{"n_epochs":"auto"}"""
+        assertEquals(encodedParams, json)
+        val decodedAuto = Json.decodeFromString(Hyperparameters.serializer(), json)
+        assertEquals(params, decodedAuto)
+    }
+}
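
Taken together, the new `FineTuning` surface introduced above can be exercised end to end: create a job, then page through its events with the new `PaginatedList`. The following is an illustrative sketch rather than code from this change; the file and model identifiers are placeholders, and the paging loop simply assumes `hasMore` and the `after` cursor behave as described in the interface KDoc.

```kotlin
import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetuning.FineTuningRequest
import com.aallam.openai.api.finetuning.Hyperparameters
import com.aallam.openai.api.model.ModelId
import com.aallam.openai.client.OpenAI

suspend fun runFineTuning(openAI: OpenAI) {
    // Create a job, letting the service choose the epoch count ("auto").
    val job = openAI.fineTuningJob(
        FineTuningRequest(
            trainingFile = FileId("file-abc123"), // placeholder: an uploaded JSONL file with purpose "fine-tune"
            model = ModelId("gpt-3.5-turbo"),
            hyperparameters = Hyperparameters(nEpochs = Hyperparameters.NEpochs.Auto),
        )
    )

    // Page through the job's status events using PaginatedList.hasMore and the `after` cursor.
    var after: String? = null
    do {
        val page = openAI.fineTuningEvents(id = job.id, after = after, limit = 20)
        page.forEach { event -> println("${event.level.value}: ${event.message}") }
        after = page.lastOrNull()?.id
    } while (page.hasMore)
}
```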