
Commit 3aa3806

Fix langchain4j unwanted dependency 2

- Tested on Llama3 and gemma2

humcqc committed Jul 15, 2024
1 parent d96b632 commit 3aa3806
Showing 6 changed files with 32 additions and 11 deletions.
Changed file 1 of 6 (class AiStatsMessage):
@@ -1,4 +1,4 @@
-package io.quarkiverse.langchain4j.ollama.tool;
+package io.quarkiverse.langchain4j.data;
 
 import java.util.List;
 
@@ -7,12 +7,14 @@
 import dev.langchain4j.internal.ValidationUtils;
 import dev.langchain4j.model.output.TokenUsage;
 
-class AiStatsMessage extends AiMessage {
+public class AiStatsMessage extends AiMessage {
+    private String updatableText;
 
     final TokenUsage tokenUsage;
 
-    AiStatsMessage(String text, TokenUsage tokenUsage) {
+    public AiStatsMessage(String text, TokenUsage tokenUsage) {
         super(text);
+        this.updatableText = text;
         this.tokenUsage = ValidationUtils.ensureNotNull(tokenUsage, "tokeUsage");
     }
 
@@ -23,14 +25,24 @@ class AiStatsMessage extends AiMessage {
 
     AiStatsMessage(String text, List<ToolExecutionRequest> toolExecutionRequests, TokenUsage tokenUsage) {
         super(text, toolExecutionRequests);
+        this.updatableText = text;
         this.tokenUsage = ValidationUtils.ensureNotNull(tokenUsage, "tokenUsage");
     }
 
-    TokenUsage getTokenUsage() {
+    public void updateText(String text) {
+        this.updatableText = text;
+    }
+
+    @Override
+    public String text() {
+        return updatableText;
+    }
+
+    public TokenUsage getTokenUsage() {
         return tokenUsage;
     }
 
-    static AiStatsMessage from(AiMessage aiMessage, TokenUsage tokenUsage) {
+    public static AiStatsMessage from(AiMessage aiMessage, TokenUsage tokenUsage) {
         if (aiMessage.text() == null) {
             return new AiStatsMessage(aiMessage.toolExecutionRequests(), tokenUsage);
         } else if (aiMessage.hasToolExecutionRequests()) {
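For orientation, a minimal usage sketch of the now-public AiStatsMessage API changed above; the message text and token counts are made up for illustration, and the sketch relies only on members visible in this diff.

import dev.langchain4j.model.output.TokenUsage;
import io.quarkiverse.langchain4j.data.AiStatsMessage;

public class AiStatsMessageSketch {
    public static void main(String[] args) {
        // Token counts are illustrative only (input tokens, output tokens).
        AiStatsMessage message = new AiStatsMessage("raw model answer", new TokenUsage(120, 35));

        // The new updateText/text() pair lets callers rewrite the message body in place,
        // for instance after substituting tool results into the text.
        message.updateText("answer with tool results substituted");

        System.out.println(message.text());          // prints the updated text
        System.out.println(message.getTokenUsage()); // token stats still travel with the message
    }
}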
Changed file 2 of 6 (class ToolsResultMemory):
@@ -8,6 +8,7 @@
 import dev.langchain4j.Experimental;
 import dev.langchain4j.agent.tool.ToolExecutionRequest;
 import dev.langchain4j.data.message.AiMessage;
+import io.quarkiverse.langchain4j.data.AiStatsMessage;
 
 @Experimental
 public class ToolsResultMemory {
@@ -24,9 +25,10 @@ public AiMessage substituteAiMessage(AiMessage message) {
         if (message.text() == null) {
             return message;
         }
-        // TODO: Discuss with langchain the best approach
-        // return new AiMessage(substituteArguments(message.text(), variables), message.toolExecutionRequests());
-        message.updateText(substituteVariables(message.text(), variables));
+        if (message instanceof AiStatsMessage updatableMessage) {
+            updatableMessage.updateText(substituteVariables(message.text(), variables));
+            return updatableMessage;
+        }
         return message;
     }
 
@@ -39,7 +41,7 @@ public ToolExecutionRequest substituteArguments(ToolExecutionRequest toolExecuti
 
     private static String substituteVariables(String msg, Map<String, String> resultMap) {
         Matcher matcher = VARIABLE_PATTERN.matcher(msg);
-        StringBuffer newArguments = new StringBuffer();
+        StringBuilder newArguments = new StringBuilder();
         if (!matcher.find()) {
             return msg;
         }
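The StringBuffer to StringBuilder switch above sits inside a standard Matcher append loop. Below is a hedged, self-contained sketch of that pattern; the $name variable syntax and the VARIABLE_PATTERN definition are assumptions for illustration, not taken from this diff.

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VariableSubstitutionSketch {
    // Assumed variable syntax ($name); the real VARIABLE_PATTERN is not shown in this diff.
    private static final Pattern VARIABLE_PATTERN = Pattern.compile("\\$(\\w+)");

    static String substituteVariables(String msg, Map<String, String> resultMap) {
        Matcher matcher = VARIABLE_PATTERN.matcher(msg);
        StringBuilder newText = new StringBuilder(); // no shared mutable state, so StringBuilder is enough
        if (!matcher.find()) {
            return msg;
        }
        do {
            // Replace known variables with their tool results; leave unknown ones untouched.
            String replacement = resultMap.getOrDefault(matcher.group(1), matcher.group());
            matcher.appendReplacement(newText, Matcher.quoteReplacement(replacement));
        } while (matcher.find());
        matcher.appendTail(newText);
        return newText.toString();
    }

    public static void main(String[] args) {
        System.out.println(substituteVariables("result is $sum", Map.of("sum", "42"))); // result is 42
    }
}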
Changed file 3 of 6 (test class ToolsLlama3IT, renamed to ToolsParallelIT):
@@ -17,9 +17,9 @@
 import io.quarkus.test.junit.QuarkusTest;
 
 @Disabled("Integration tests that need an ollama server running")
-@DisplayName("LLM Tools test - Llama3")
+@DisplayName("LLM Parallel Tools test")
 @QuarkusTest
-public class ToolsLlama3IT {
+public class ToolsParallelIT {
 
     @RegisterAiService(tools = Tools.Calculator.class)
     public interface MathAssistant {
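For readers unfamiliar with the quarkus-langchain4j test style used by the renamed ToolsParallelIT, here is a hypothetical, self-contained sketch of such a tools test; the tool methods, prompt, and assertion are illustrative and not part of this commit.

import dev.langchain4j.agent.tool.Tool;
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkus.test.junit.QuarkusTest;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

// Hypothetical sketch; like the real test, it needs a local Ollama server with a tools-capable model.
@Disabled("Needs a running Ollama server")
@QuarkusTest
public class ParallelToolsSketchIT {

    @Singleton
    public static class Calculator {
        @Tool("Adds two numbers")
        public double add(double a, double b) {
            return a + b;
        }

        @Tool("Multiplies two numbers")
        public double multiply(double a, double b) {
            return a * b;
        }
    }

    @RegisterAiService(tools = Calculator.class)
    public interface MathAssistant {
        String chat(String question);
    }

    @Inject
    MathAssistant mathAssistant;

    @Test
    void shouldAnswerUsingBothTools() {
        // With a tools-capable model (llama3 or gemma2 per the commit message),
        // a prompt like this should trigger both calculator tools.
        String answer = mathAssistant.chat("What is 2 + 3, and what is 10 * 4?");
        Assertions.assertNotNull(answer);
    }
}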
Changed file 4 of 6 (Ollama chat-model configuration properties):
@@ -2,6 +2,11 @@ quarkus.langchain4j.timeout=60s
 quarkus.langchain4j.log-requests=true
 quarkus.langchain4j.log-responses=true
 quarkus.langchain4j.ollama.chat-model.model-id = llama3
+#quarkus.langchain4j.ollama.chat-model.model-id = llama3 -> ok
+#quarkus.langchain4j.ollama.chat-model.model-id = gemma2 -> ok
+#quarkus.langchain4j.ollama.chat-model.model-id = mistral -> nok, mistral uses unavailable tools
+#quarkus.langchain4j.ollama.chat-model.model-id = qwen2 -> nok, qwen2 uses unavailable tools
+#quarkus.langchain4j.ollama.chat-model.model-id = phi3 -> nok, phi3 uses unavailable tools
 quarkus.langchain4j.ollama.chat-model.temperature = 0.0
 quarkus.langchain4j.ollama.chat-model.num-ctx = 3072
 quarkus.langchain4j.ollama.chat-model.num-predict = 3072
Changed file 5 of 6:
@@ -12,6 +12,7 @@
 
 import dev.langchain4j.agent.tool.ToolExecutionRequest;
 import dev.langchain4j.data.message.*;
+import io.quarkiverse.langchain4j.data.AiStatsMessage;
 import io.quarkiverse.langchain4j.ollama.ImageUtils;
 import io.quarkiverse.langchain4j.ollama.Message;
 import io.quarkiverse.langchain4j.ollama.Role;
Changed file 6 of 6:
@@ -20,6 +20,7 @@
 import dev.langchain4j.model.output.FinishReason;
 import dev.langchain4j.model.output.Response;
 import dev.langchain4j.model.output.TokenUsage;
+import io.quarkiverse.langchain4j.data.AiStatsMessage;
 import io.quarkiverse.langchain4j.ollama.*;
 
 /**
