Simplify conversations and their JVM integrations #319

Merged 2 commits on Aug 14, 2023
Changes from 1 commit
5 changes: 5 additions & 0 deletions core/build.gradle.kts
@@ -87,6 +87,11 @@ kotlin {
implementation(libs.logback)
implementation(libs.skrape)
implementation(libs.rss.reader)
api(libs.jackson)
api(libs.jackson.schema)
api(libs.jackson.schema.jakarta)
api(libs.jakarta.validation)
implementation(libs.kotlinx.coroutines.reactive)
api(libs.ktor.client.cio)
}
}
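Declaring the Jackson, JSON-schema and Jakarta-validation artifacts with api rather than implementation puts them on the compile classpath of modules that depend on this one, which is what the JVM-facing integrations in this PR rely on. As a rough idea of what that enables downstream, here is a minimal sketch; the Recipe class and the JSON string are hypothetical, and it assumes libs.jackson resolves to jackson-databind:

import com.fasterxml.jackson.databind.ObjectMapper

// Plain jackson-databind: a mutable class with a no-arg constructor needs no extra modules.
class Recipe {
    var title: String = ""
    var ingredients: List<String> = emptyList()
}

fun parseRecipe(json: String): Recipe =
    ObjectMapper().readValue(json, Recipe::class.java)

fun main() {
    val recipe = parseRecipe("""{"title":"Pancakes","ingredients":["flour","milk","eggs"]}""")
    println(recipe.title) // prints: Pancakes
}
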
126 changes: 105 additions & 21 deletions core/src/commonMain/kotlin/com/xebia/functional/xef/auto/Conversation.kt
@@ -9,36 +9,44 @@ import com.xebia.functional.xef.llm.models.images.ImagesGenerationResponse
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.vectorstores.ConversationId
import com.xebia.functional.xef.vectorstores.VectorStore
import kotlin.jvm.JvmName
import kotlin.jvm.JvmOverloads
import kotlin.jvm.JvmSynthetic
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Deferred
import kotlinx.coroutines.async
import kotlinx.coroutines.flow.Flow
import kotlinx.uuid.UUID
import kotlinx.uuid.generateUUID

/**
* The [Conversation] is the context in which [AI] values are run. It encapsulates all the
* dependencies required to run [AI] values, and provides convenient syntax for writing [AI] based
* programs.
*/
class Conversation
@JvmOverloads
constructor(
val store: VectorStore,
val conversationId: ConversationId? = ConversationId(UUID.generateUUID().toString())
) : AutoCloseable, AutoClose by autoClose() {
interface Conversation : AutoClose, AutoCloseable {

val store: VectorStore

val conversationId: ConversationId?

val conversation: Conversation

@AiDsl
@JvmSynthetic
suspend fun addContext(vararg docs: String) {
store.addTexts(docs.toList())
}

fun CoroutineScope.addContextAsync(vararg docs: String): Deferred<Unit> = async {
store.addTexts(docs.toList())
}

fun CoroutineScope.addContextAsync(docs: Iterable<String>): Deferred<Unit> = async {
store.addTexts(docs.toList())
}

@AiDsl
suspend fun addContext(docs: Iterable<String>) {
@JvmSynthetic
suspend fun addContext(docs: Iterable<String>): Unit {
store.addTexts(docs.toList())
}

@AiDsl
@JvmName("promptWithSerializer")
@JvmSynthetic
suspend fun <A> ChatWithFunctions.prompt(
prompt: String,
functions: List<CFunction>,
@@ -47,36 +55,73 @@
): A {
return prompt(
prompt = Prompt(prompt),
scope = this@Conversation,
scope = conversation,
serializer = serializer,
functions = functions,
promptConfiguration = promptConfiguration,
)
}

fun <A> CoroutineScope.promptAsync(
chatWithFunctions: ChatWithFunctions,
prompt: String,
functions: List<CFunction>,
serializer: (json: String) -> A,
promptConfiguration: PromptConfiguration,
): Deferred<A> {
return async {
chatWithFunctions.prompt(
prompt = Prompt(prompt),
scope = conversation,
serializer = serializer,
functions = functions,
promptConfiguration = promptConfiguration,
)
}
}

@AiDsl
@JvmSynthetic
suspend fun Chat.promptMessage(
question: String,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): String =
promptMessages(question, this@Conversation, emptyList(), promptConfiguration).firstOrNull()
promptMessages(question, conversation, emptyList(), promptConfiguration).firstOrNull()
?: throw AIError.NoResponse()

@AiDsl
fun CoroutineScope.promptMessageAsync(
chat: Chat,
question: String,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): Deferred<String> = async {
chat.promptMessages(question, conversation, emptyList(), promptConfiguration).firstOrNull()
?: throw AIError.NoResponse()
}

@AiDsl
@JvmSynthetic
suspend fun Chat.promptMessages(
question: String,
functions: List<CFunction> = emptyList(),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): List<String> =
promptMessages(Prompt(question), this@Conversation, functions, promptConfiguration)
): List<String> = promptMessages(Prompt(question), conversation, functions, promptConfiguration)

fun CoroutineScope.promptMessagesAsync(
chat: Chat,
question: String,
functions: List<CFunction> = emptyList(),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): Deferred<List<String>> = async {
chat.promptMessages(Prompt(question), conversation, functions, promptConfiguration)
}

@AiDsl
fun Chat.promptStreaming(
question: String,
functions: List<CFunction>,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): Flow<String> =
promptStreaming(Prompt(question), this@Conversation, functions, promptConfiguration)
): Flow<String> = promptStreaming(Prompt(question), conversation, functions, promptConfiguration)

/**
 * Run a [prompt] that describes the images you want to generate within the context of [Conversation].
@@ -86,13 +131,25 @@
* @param numberImages number of images to generate.
* @param size the size of the images to generate.
*/
@AiDsl
@JvmSynthetic
suspend fun Images.images(
prompt: String,
numberImages: Int = 1,
size: String = "1024x1024",
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): ImagesGenerationResponse = this.images(Prompt(prompt), numberImages, size, promptConfiguration)

fun CoroutineScope.imagesAsync(
images: Images,
prompt: String,
numberImages: Int = 1,
size: String = "1024x1024",
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): Deferred<ImagesGenerationResponse> = async {
images.images(Prompt(prompt), numberImages, size, promptConfiguration)
}

/**
 * Run a [prompt] that describes the images you want to generate within the context of [Conversation].
* Returns a [ImagesGenerationResponse] containing time and urls with images generated.
@@ -101,10 +158,37 @@
* @param numberImages number of images to generate.
* @param size the size of the images to generate.
*/
@AiDsl
@JvmSynthetic
suspend fun Images.images(
prompt: Prompt,
numberImages: Int = 1,
size: String = "1024x1024",
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): ImagesGenerationResponse = images(prompt, store, numberImages, size, promptConfiguration)

fun CoroutineScope.imagesAsync(
images: Images,
prompt: Prompt,
numberImages: Int = 1,
size: String = "1024x1024",
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
): Deferred<ImagesGenerationResponse> = async {
images.images(prompt, store, numberImages, size, promptConfiguration)
}

companion object {

operator fun invoke(
store: VectorStore,
conversationId: ConversationId? = ConversationId(UUID.generateUUID().toString())
): PlatformConversation = PlatformConversation.create(store, conversationId)

@JvmSynthetic
suspend operator fun <A> invoke(
store: VectorStore,
conversationId: ConversationId? = ConversationId(UUID.generateUUID().toString()),
block: suspend PlatformConversation.() -> A
): A = block(invoke(store, conversationId))
}
}
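Taken together, the new interface plus the companion invoke operators give callers a small entry point: the non-suspending invoke returns a PlatformConversation directly, and the suspending overload scopes a block to a fresh conversation and returns the block's result. A minimal usage sketch from Kotlin follows; the vectorStore and chat parameters stand in for any VectorStore and Chat implementations (how they are obtained is outside this diff), and only members visible above are used:

import com.xebia.functional.xef.auto.Conversation
import com.xebia.functional.xef.llm.Chat
import com.xebia.functional.xef.vectorstores.VectorStore
import kotlinx.coroutines.runBlocking

fun demo(vectorStore: VectorStore, chat: Chat) = runBlocking {
    val answer: String =
        Conversation(vectorStore) {
            // this: the PlatformConversation created by the companion invoke
            addContext("xef is a Kotlin Multiplatform library.")
            chat.promptMessage("What kind of library is xef?")
        }
    println(answer)
}
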
@@ -0,0 +1,17 @@
package com.xebia.functional.xef.auto

import com.xebia.functional.xef.vectorstores.ConversationId
import com.xebia.functional.xef.vectorstores.VectorStore

expect abstract class PlatformConversation(
store: VectorStore,
conversationId: ConversationId?,
) : Conversation {

companion object {
fun create(
store: VectorStore,
conversationId: ConversationId?,
): PlatformConversation
}
}
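This expect declaration is the multiplatform seam: commonMain declares the abstract PlatformConversation and its create factory, and every target supplies a matching actual (the JS one appears further down in this diff; the JVM counterpart is not shown here). For readers less familiar with the mechanism, here is a self-contained sketch of the same pattern with hypothetical names unrelated to this codebase:

// commonMain: the expected declaration has no bodies.
expect abstract class PlatformGreeter(name: String) {
    companion object {
        fun create(name: String): PlatformGreeter
    }
}

// jvmMain (or any other target): the actual must mirror the expected shape exactly.
actual abstract class PlatformGreeter actual constructor(name: String) {
    actual companion object {
        actual fun create(name: String): PlatformGreeter = ConsoleGreeter(name)
    }
}

class ConsoleGreeter(private val name: String) : PlatformGreeter(name) {
    fun greet() = println("Hello, $name")
}
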
42 changes: 18 additions & 24 deletions core/src/commonMain/kotlin/com/xebia/functional/xef/llm/Chat.kt
@@ -176,16 +176,13 @@ interface Chat : LLM {
buffer: StringBuilder,
) {
val lastRequestMessage = request.messages.lastOrNull()
if (scope.conversationId != null && lastRequestMessage != null) {
val cid = scope.conversationId
if (cid != null && lastRequestMessage != null) {
val requestMemory =
Memory(
conversationId = scope.conversationId,
content = lastRequestMessage,
timestamp = getTimeMillis()
)
Memory(conversationId = cid, content = lastRequestMessage, timestamp = getTimeMillis())
val responseMemory =
Memory(
conversationId = scope.conversationId,
conversationId = cid,
content =
Message(role = Role.ASSISTANT, content = buffer.toString(), name = Role.ASSISTANT.name),
timestamp = getTimeMillis(),
@@ -200,17 +197,15 @@
): List<ChoiceWithFunctions> = also {
val firstChoice = firstOrNull()
val requestUserMessage = request.messages.lastOrNull()
if (requestUserMessage != null && firstChoice != null && scope.conversationId != null) {
val cid = scope.conversationId
if (requestUserMessage != null && firstChoice != null && cid != null) {
val role = firstChoice.message?.role?.uppercase()?.let { Role.valueOf(it) } ?: Role.USER

val requestMemory =
Memory(
conversationId = scope.conversationId,
content = requestUserMessage,
timestamp = getTimeMillis()
)
Memory(conversationId = cid, content = requestUserMessage, timestamp = getTimeMillis())
val firstChoiceMemory =
Memory(
conversationId = scope.conversationId,
conversationId = cid,
content =
Message(
role = role,
@@ -230,17 +225,14 @@
): List<Choice> = also {
val firstChoice = firstOrNull()
val requestUserMessage = request.messages.lastOrNull()
if (requestUserMessage != null && firstChoice != null && scope.conversationId != null) {
val cid = scope.conversationId
if (requestUserMessage != null && firstChoice != null && cid != null) {
val role = firstChoice.message?.role?.name?.uppercase()?.let { Role.valueOf(it) } ?: Role.USER
val requestMemory =
Memory(
conversationId = scope.conversationId,
content = requestUserMessage,
timestamp = getTimeMillis()
)
Memory(conversationId = cid, content = requestUserMessage, timestamp = getTimeMillis())
val firstChoiceMemory =
Memory(
conversationId = scope.conversationId,
conversationId = cid,
content =
Message(role = role, content = firstChoice.message?.content ?: "", name = role.name),
timestamp = getTimeMillis()
@@ -255,12 +247,14 @@
private suspend fun memories(
scope: Conversation,
promptConfiguration: PromptConfiguration
): List<Memory> =
if (scope.conversationId != null) {
scope.store.memories(scope.conversationId, promptConfiguration.memoryLimit)
): List<Memory> {
val cid = scope.conversationId
return if (cid != null) {
scope.store.memories(cid, promptConfiguration.memoryLimit)
} else {
emptyList()
}
}

private suspend fun fitMessagesByTokens(
history: List<Message>,
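Every Chat.kt hunk above applies the same small refactor: scope.conversationId is copied into a local val cid before the null check. Now that Conversation is an interface, conversationId is an abstract property, and Kotlin will not smart-cast a property with an open or custom getter because its value could differ between reads; a local val is stable, so the null check on cid narrows its type for the rest of the block. A minimal sketch of the rule with a hypothetical interface:

interface Scope {
    val conversationId: String?
}

fun describe(scope: Scope): String {
    // Would not compile: conversationId is an open property, so the compiler
    // cannot smart-cast it to String after the null check.
    // if (scope.conversationId != null) return scope.conversationId.uppercase()

    // Works: a local val is stable, so the check narrows its type.
    val cid = scope.conversationId
    return if (cid != null) cid.uppercase() else "no conversation"
}
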
@@ -0,0 +1,18 @@
package com.xebia.functional.xef.auto

import com.xebia.functional.xef.vectorstores.ConversationId
import com.xebia.functional.xef.vectorstores.VectorStore

class JSConversation(
override val store: VectorStore,
override val conversationId: ConversationId?
) : PlatformConversation(store, conversationId) {

override val conversation: Conversation = this

override fun close() {}

override fun <A : AutoCloseable> autoClose(autoCloseable: A): A {
return autoCloseable
}
}
@@ -0,0 +1,12 @@
package com.xebia.functional.xef.auto

import com.xebia.functional.xef.vectorstores.ConversationId
import com.xebia.functional.xef.vectorstores.VectorStore

actual abstract class PlatformConversation
actual constructor(store: VectorStore, conversationId: ConversationId?) : Conversation, AutoClose {
actual companion object {
actual fun create(store: VectorStore, conversationId: ConversationId?): PlatformConversation =
JSConversation(store, conversationId)
}
}
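On JS the conversation has no platform resources to manage, so close() is a no-op and autoClose hands the resource straight back. A target that does hold resources would be expected to register each closeable and release it when the conversation closes. The following stand-alone sketch of that bookkeeping is hypothetical and not code from this PR:

// Hypothetical resource tracker illustrating the autoClose/close contract.
class TrackingAutoClose : AutoCloseable {
    private val resources = ArrayDeque<AutoCloseable>()

    fun <A : AutoCloseable> autoClose(autoCloseable: A): A {
        resources.addLast(autoCloseable)
        return autoCloseable
    }

    override fun close() {
        // Release in reverse registration order; one failing close() does not stop the others.
        while (resources.isNotEmpty()) {
            runCatching { resources.removeLast().close() }
        }
    }
}
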

This file was deleted.
