diff --git a/examples/applications/astradb-sink/configuration.yaml b/examples/applications/astradb-sink/configuration.yaml
index be7dd748d..6ada7c05b 100644
--- a/examples/applications/astradb-sink/configuration.yaml
+++ b/examples/applications/astradb-sink/configuration.yaml
@@ -21,8 +21,8 @@ configuration:
name: "AstraDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
- token: "{{{ secrets.astra.token }}}"
- database: "{{{ secrets.astra.database }}}"
- environment: "{{{ secrets.astra.environment }}}"
\ No newline at end of file
+ clientId: "${ secrets.astra.clientId }"
+ secret: "${ secrets.astra.secret }"
+ token: "${ secrets.astra.token }"
+ database: "${ secrets.astra.database }"
+ environment: "${ secrets.astra.environment }"
\ No newline at end of file
diff --git a/examples/applications/compute-hugging-face/configuration.yaml b/examples/applications/compute-hugging-face/configuration.yaml
index 09eddd4a6..31250d421 100644
--- a/examples/applications/compute-hugging-face/configuration.yaml
+++ b/examples/applications/compute-hugging-face/configuration.yaml
@@ -20,4 +20,4 @@ configuration:
- type: "hugging-face-configuration"
name: "Hugging Face AI configuration"
configuration:
- access-key: "{{ secrets.hugging-face.access-key }}"
+ access-key: "${ secrets.hugging-face.access-key }"
diff --git a/examples/applications/compute-openai-embeddings/configuration.yaml b/examples/applications/compute-openai-embeddings/configuration.yaml
index 502376f20..cf6793a59 100644
--- a/examples/applications/compute-openai-embeddings/configuration.yaml
+++ b/examples/applications/compute-openai-embeddings/configuration.yaml
@@ -20,6 +20,6 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
diff --git a/examples/applications/compute-openai-embeddings/pipeline.yaml b/examples/applications/compute-openai-embeddings/pipeline.yaml
index bd4f6c925..de56c51bd 100644
--- a/examples/applications/compute-openai-embeddings/pipeline.yaml
+++ b/examples/applications/compute-openai-embeddings/pipeline.yaml
@@ -31,7 +31,7 @@ pipeline:
input: "input-topic"
output: "output-topic"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.embeddings"
text: "{{% value.name }} {{% value.description }}"
batch-size: 10
diff --git a/examples/applications/compute-vertexai/configuration.yaml b/examples/applications/compute-vertexai/configuration.yaml
index 7ff1b42ae..17d1808fe 100644
--- a/examples/applications/compute-vertexai/configuration.yaml
+++ b/examples/applications/compute-vertexai/configuration.yaml
@@ -20,9 +20,8 @@ configuration:
- type: "vertex-configuration"
name: "Google Vertex AI configuration"
configuration:
- url: "{{ secrets.vertex-ai.url }}"
- # use triple quotes in order to turn off escaping
- serviceAccountJson: "{{{ secrets.vertex-ai.serviceAccountJson }}}"
- token: "{{ secrets.vertex-ai.token }}"
- region: "{{ secrets.vertex-ai.region }}"
- project: "{{ secrets.vertex-ai.project }}"
+ url: "${secrets.vertex-ai.url}"
+ serviceAccountJson: "${secrets.vertex-ai.serviceAccountJson}"
+ token: "${secrets.vertex-ai.token}"
+ region: "${secrets.vertex-ai.region}"
+ project: "${secrets.vertex-ai.project}"
diff --git a/examples/applications/docker-chatbot/chatbot.yaml b/examples/applications/docker-chatbot/chatbot.yaml
index b47023666..b20faddbf 100644
--- a/examples/applications/docker-chatbot/chatbot.yaml
+++ b/examples/applications/docker-chatbot/chatbot.yaml
@@ -32,7 +32,7 @@ pipeline:
- name: "compute-embeddings"
type: "compute-ai-embeddings"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.question_embeddings"
text: "{{% value.question }}"
flush-interval: 0
@@ -62,7 +62,7 @@ pipeline:
type: "ai-chat-completions"
configuration:
- model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+ model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
@@ -88,7 +88,7 @@ pipeline:
Do not provide information that is not related to the LangStream project.
Documents:
- {{%# value.related_documents}}
+ {{%# value.related_documents}}
{{% text}}
{{%/ value.related_documents}}
- role: user
diff --git a/examples/applications/docker-chatbot/configuration.yaml b/examples/applications/docker-chatbot/configuration.yaml
index 72b6965dc..15675fce2 100644
--- a/examples/applications/docker-chatbot/configuration.yaml
+++ b/examples/applications/docker-chatbot/configuration.yaml
@@ -22,15 +22,15 @@ configuration:
configuration:
service: "jdbc"
driverClass: "herddb.jdbc.Driver"
- url: "{{secrets.herddb.url}}"
- user: "{{secrets.herddb.user}}"
- password: "{{secrets.herddb.password}}"
+ url: "${secrets.herddb.url}"
+ user: "${secrets.herddb.user}"
+ password: "${secrets.herddb.password}"
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
dependencies:
- name: "HerdDB.org JDBC Driver"
url: "https://repo1.maven.org/maven2/org/herddb/herddb-jdbc/0.28.0/herddb-jdbc-0.28.0-thin.jar"
diff --git a/examples/applications/docker-chatbot/crawler.yaml b/examples/applications/docker-chatbot/crawler.yaml
index 743e4d3f5..85f590046 100644
--- a/examples/applications/docker-chatbot/crawler.yaml
+++ b/examples/applications/docker-chatbot/crawler.yaml
@@ -55,11 +55,11 @@ pipeline:
http-timeout: 10000
handle-cookies: true
max-unflushed-pages: 100
- bucketName: "{{{secrets.s3.bucket-name}}}"
- endpoint: "{{{secrets.s3.endpoint}}}"
- access-key: "{{{secrets.s3.access-key}}}"
- secret-key: "{{{secrets.s3.secret}}}"
- region: "{{{secrets.s3.region}}}"
+ bucketName: "${secrets.s3.bucket-name}"
+ endpoint: "${secrets.s3.endpoint}"
+ access-key: "${secrets.s3.access-key}"
+ secret-key: "${secrets.s3.secret}"
+ region: "${secrets.s3.region}"
- name: "Extract text"
type: "text-extractor"
- name: "Normalise text"
diff --git a/examples/applications/gateway-authentication/gateways.yaml b/examples/applications/gateway-authentication/gateways.yaml
index 17c02104c..8a6afd99d 100644
--- a/examples/applications/gateway-authentication/gateways.yaml
+++ b/examples/applications/gateway-authentication/gateways.yaml
@@ -52,7 +52,7 @@ gateways:
provider: google
allow-test-mode: true
configuration:
- clientId: "{{ secrets.google.client-id }}"
+ clientId: "${secrets.google.client-id}"
produce-options:
headers:
- key: langstream-client-user-id
@@ -69,7 +69,7 @@ gateways:
allow-test-mode: true
provider: google
configuration:
- clientId: "{{ secrets.google.client-id }}"
+ clientId: "${secrets.google.client-id}"
consume-options:
filters:
headers:
@@ -86,7 +86,7 @@ gateways:
authentication:
provider: github
configuration:
- clientId: "{{ secrets.github.client-id }}"
+ clientId: "${secrets.github.client-id}"
produce-options:
headers:
- key: langstream-client-user-id
@@ -102,7 +102,7 @@ gateways:
authentication:
provider: github
configuration:
- clientId: "{{ secrets.github.client-id }}"
+ clientId: "${secrets.github.client-id}"
consume-options:
filters:
headers:
diff --git a/examples/applications/kafka-connect/pipeline.yaml b/examples/applications/kafka-connect/pipeline.yaml
index eca88b86e..4b5edc57e 100644
--- a/examples/applications/kafka-connect/pipeline.yaml
+++ b/examples/applications/kafka-connect/pipeline.yaml
@@ -28,8 +28,8 @@ pipeline:
connector.class: com.datastax.oss.kafka.sink.CassandraSinkConnector
key.converter: org.apache.kafka.connect.storage.StringConverter
value.converter: org.apache.kafka.connect.storage.StringConverter
- cloud.secureConnectBundle: "{{{ secrets.cassandra.secure-connect-bundle }}}"
- auth.username: "{{{ secrets.cassandra.username }}}"
- auth.password: "{{{ secrets.cassandra.password }}}"
+ cloud.secureConnectBundle: "${ secrets.cassandra.secure-connect-bundle }"
+ auth.username: "${ secrets.cassandra.username }"
+ auth.password: "${ secrets.cassandra.password }"
topic.input-topic.vsearch.products.mapping: "id=value.id,description=value.description,name=value.name"
name: cassandra-sink
\ No newline at end of file
diff --git a/examples/applications/langchain-source/pipeline.yaml b/examples/applications/langchain-source/pipeline.yaml
index 9b750180b..a39d927c6 100644
--- a/examples/applications/langchain-source/pipeline.yaml
+++ b/examples/applications/langchain-source/pipeline.yaml
@@ -24,7 +24,7 @@ pipeline:
output: "output-topic"
configuration:
className: s3_langchain.S3LangChain
- bucketName: "{{{ secrets.s3.bucket-name }}}"
- endpoint: "{{{ secrets.s3.endpoint }}}"
- username: "{{{ secrets.s3.access-key }}}"
- password: "{{{ secrets.s3.secret }}}"
\ No newline at end of file
+ bucketName: "${ secrets.s3.bucket-name }"
+ endpoint: "${ secrets.s3.endpoint }"
+ username: "${ secrets.s3.access-key }"
+ password: "${ secrets.s3.secret }"
\ No newline at end of file
diff --git a/examples/applications/llamaindex-cassandra-sink/pipeline.yaml b/examples/applications/llamaindex-cassandra-sink/pipeline.yaml
index d57f3e31d..93905dca8 100644
--- a/examples/applications/llamaindex-cassandra-sink/pipeline.yaml
+++ b/examples/applications/llamaindex-cassandra-sink/pipeline.yaml
@@ -24,10 +24,10 @@ pipeline:
input: "input-topic"
configuration:
className: llamaindex_cassandra.LlamaIndexCassandraSink
- openaiKey: "{{ secrets.open-ai.access-key }}"
+ openaiKey: "${secrets.open-ai.access-key}"
cassandra:
- username: "{{{ secrets.astra.clientId }}}"
- password: "{{{ secrets.astra.secret }}}"
- secureBundle: "{{{ secrets.astra.secureBundle }}}"
+ username: "${secrets.astra.clientId}"
+ password: "${secrets.astra.secret}"
+ secureBundle: "${secrets.astra.secureBundle}"
keyspace: ks1
table: vs_ll_openai
diff --git a/examples/applications/openai-completions/configuration.yaml b/examples/applications/openai-completions/configuration.yaml
index 502376f20..cf6793a59 100644
--- a/examples/applications/openai-completions/configuration.yaml
+++ b/examples/applications/openai-completions/configuration.yaml
@@ -20,6 +20,6 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
diff --git a/examples/applications/openai-completions/gateways.yaml b/examples/applications/openai-completions/gateways.yaml
index 4c0b99d4a..24299f1e2 100644
--- a/examples/applications/openai-completions/gateways.yaml
+++ b/examples/applications/openai-completions/gateways.yaml
@@ -60,7 +60,7 @@ gateways:
authentication:
provider: google
configuration:
- clientId: "{{ secrets.google.client-id }}"
+ clientId: "${secrets.google.client-id}"
produce-options:
headers:
- key: langstream-client-user-id
@@ -72,7 +72,7 @@ gateways:
authentication:
provider: google
configuration:
- clientId: "{{ secrets.google.client-id }}"
+ clientId: "${secrets.google.client-id}"
consume-options:
filters:
headers:
diff --git a/examples/applications/openai-completions/pipeline.yaml b/examples/applications/openai-completions/pipeline.yaml
index e8cc896e5..cc3c0da90 100644
--- a/examples/applications/openai-completions/pipeline.yaml
+++ b/examples/applications/openai-completions/pipeline.yaml
@@ -31,7 +31,7 @@ pipeline:
type: "ai-chat-completions"
output: "history-topic"
configuration:
- model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+ model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
diff --git a/examples/applications/openai-text-completions/configuration.yaml b/examples/applications/openai-text-completions/configuration.yaml
index 3a000f1bd..e924fadc6 100644
--- a/examples/applications/openai-text-completions/configuration.yaml
+++ b/examples/applications/openai-text-completions/configuration.yaml
@@ -20,4 +20,4 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI configuration"
configuration:
- access-key: "{{ secrets.open-ai.access-key }}"
+ access-key: "${secrets.open-ai.access-key}"
diff --git a/examples/applications/openai-text-completions/pipeline.yaml b/examples/applications/openai-text-completions/pipeline.yaml
index 9f1f60926..4eaf42cd0 100644
--- a/examples/applications/openai-text-completions/pipeline.yaml
+++ b/examples/applications/openai-text-completions/pipeline.yaml
@@ -31,7 +31,7 @@ pipeline:
type: "ai-text-completions"
output: "debug"
configuration:
- model: "{{{secrets.open-ai.text-completions-model}}}"
+ model: "${secrets.open-ai.text-completions-model}"
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
diff --git a/examples/applications/python-processor-embeddings/pipeline.yaml b/examples/applications/python-processor-embeddings/pipeline.yaml
index d9f5974b2..fff78bd17 100644
--- a/examples/applications/python-processor-embeddings/pipeline.yaml
+++ b/examples/applications/python-processor-embeddings/pipeline.yaml
@@ -27,4 +27,4 @@ pipeline:
output: "output-topic"
configuration:
className: embeddings.Embedding
- openaiKey: "{{ secrets.open-ai.access-key }}"
\ No newline at end of file
+ openaiKey: "${secrets.open-ai.access-key}"
\ No newline at end of file
diff --git a/examples/applications/query-astradb/configuration.yaml b/examples/applications/query-astradb/configuration.yaml
index c59d7ef83..670568b84 100644
--- a/examples/applications/query-astradb/configuration.yaml
+++ b/examples/applications/query-astradb/configuration.yaml
@@ -21,9 +21,9 @@ configuration:
name: "AstraDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
- secureBundle: "{{{ secrets.astra.secureBundle }}}"
- database: "{{{ secrets.astra.database }}}"
- token: "{{{ secrets.astra.token }}}"
+ clientId: "${ secrets.astra.clientId }"
+ secret: "${ secrets.astra.secret }"
+ secureBundle: "${ secrets.astra.secureBundle }"
+ database: "${ secrets.astra.database }"
+ token: "${ secrets.astra.token }"
diff --git a/examples/applications/query-milvus/chatbot.yaml b/examples/applications/query-milvus/chatbot.yaml
index 609e3a203..959d2e9ae 100644
--- a/examples/applications/query-milvus/chatbot.yaml
+++ b/examples/applications/query-milvus/chatbot.yaml
@@ -32,7 +32,7 @@ pipeline:
- name: "compute-embeddings"
type: "compute-ai-embeddings"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.question_embeddings"
text: "{{% value.question }}"
flush-interval: 0
@@ -54,7 +54,7 @@ pipeline:
type: "ai-chat-completions"
configuration:
- model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+ model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
@@ -80,7 +80,7 @@ pipeline:
Do not provide information that is not related to the LangStream project.
Documents:
- {{%# value.related_documents}}
+ {{%# value.related_documents}}
{{% text}}
{{%/ value.related_documents}}
- role: user
diff --git a/examples/applications/query-milvus/configuration.yaml b/examples/applications/query-milvus/configuration.yaml
index d85b514b0..78a102de4 100644
--- a/examples/applications/query-milvus/configuration.yaml
+++ b/examples/applications/query-milvus/configuration.yaml
@@ -20,21 +20,21 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "datasource"
name: "MilvusDatasource"
configuration:
service: "milvus"
## OSS Milvus
- username: "{{{ secrets.milvus.username }}}"
- password: "{{{ secrets.milvus.password }}}"
- host: "{{{ secrets.milvus.host }}}"
- port: "{{{ secrets.milvus.port }}}"
+ username: "${secrets.milvus.username}"
+ password: "${secrets.milvus.password}"
+ host: "${secrets.milvus.host}"
+ port: "${secrets.milvus.port}"
## Set to "upsert" for OSS Milvus, on Zills use "delete-insert"
- write-mode: "{{{ secrets.milvus.write-mode }}}"
+ write-mode: "${secrets.milvus.write-mode}"
## Zillis
- url: "{{{ secrets.milvus.url }}}"
- token: "{{{ secrets.milvus.token }}}"
+ url: "${secrets.milvus.url}"
+ token: "${secrets.milvus.token}"
diff --git a/examples/applications/query-milvus/crawler.yaml b/examples/applications/query-milvus/crawler.yaml
index e54c61739..9598f9cbf 100644
--- a/examples/applications/query-milvus/crawler.yaml
+++ b/examples/applications/query-milvus/crawler.yaml
@@ -91,11 +91,11 @@ pipeline:
http-timeout: 10000
handle-cookies: true
max-unflushed-pages: 100
- bucketName: "{{{secrets.s3.bucket-name}}}"
- endpoint: "{{{secrets.s3.endpoint}}}"
- access-key: "{{{secrets.s3.access-key}}}"
- secret-key: "{{{secrets.s3.secret}}}"
- region: "{{{secrets.s3.region}}}"
+ bucketName: "${secrets.s3.bucket-name}"
+ endpoint: "${secrets.s3.endpoint}"
+ access-key: "${secrets.s3.access-key}"
+ secret-key: "${secrets.s3.secret}"
+ region: "${secrets.s3.region}"
- name: "Extract text"
type: "text-extractor"
- name: "Normalise text"
diff --git a/examples/applications/query-pinecone/configuration.yaml b/examples/applications/query-pinecone/configuration.yaml
index 74ff663c8..a909d32a0 100644
--- a/examples/applications/query-pinecone/configuration.yaml
+++ b/examples/applications/query-pinecone/configuration.yaml
@@ -20,15 +20,15 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "vector-database"
name: "PineconeDatasource"
configuration:
service: "pinecone"
- api-key: "{{{secrets.pinecone.api-key}}}"
- environment: "{{{secrets.pinecone.environment}}}"
- index-name: "{{{secrets.pinecone.index-name}}}"
- project-name: "{{{secrets.pinecone.project-name}}}"
+ api-key: "${secrets.pinecone.api-key}"
+ environment: "${secrets.pinecone.environment}"
+ index-name: "${secrets.pinecone.index-name}"
+ project-name: "${secrets.pinecone.project-name}"
server-side-timeout-sec: 10
diff --git a/examples/applications/query-pinecone/query.yaml b/examples/applications/query-pinecone/query.yaml
index dfc48ccc9..54ee343af 100644
--- a/examples/applications/query-pinecone/query.yaml
+++ b/examples/applications/query-pinecone/query.yaml
@@ -27,7 +27,7 @@ pipeline:
type: "compute-ai-embeddings"
input: "input-topic"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.embeddings"
text: "{{% value.question }}"
flush-interval: 0
diff --git a/examples/applications/query-pinecone/write.yaml b/examples/applications/query-pinecone/write.yaml
index bf72cdf67..85b7d3ed7 100644
--- a/examples/applications/query-pinecone/write.yaml
+++ b/examples/applications/query-pinecone/write.yaml
@@ -25,7 +25,7 @@ pipeline:
type: "compute-ai-embeddings"
input: "vectors-topic"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.embeddings"
text: "{{% value.document }}"
batch-size: 10
diff --git a/examples/applications/text-processing/configuration.yaml b/examples/applications/text-processing/configuration.yaml
index 5a7a936b5..cc91554dd 100644
--- a/examples/applications/text-processing/configuration.yaml
+++ b/examples/applications/text-processing/configuration.yaml
@@ -20,15 +20,15 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "datasource"
name: "AstraDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
- secureBundle: "{{{ secrets.astra.secureBundle }}}"
- database: "{{{ secrets.astra.database }}}"
- token: "{{{ secrets.astra.token }}}"
\ No newline at end of file
+ clientId: "${secrets.astra.clientId}"
+ secret: "${secrets.astra.secret}"
+ secureBundle: "${secrets.astra.secureBundle}"
+ database: "${secrets.astra.database}"
+ token: "${secrets.astra.token}"
\ No newline at end of file
diff --git a/examples/applications/text-processing/extract-text.yaml b/examples/applications/text-processing/extract-text.yaml
index 6fca92dd3..c625530e8 100644
--- a/examples/applications/text-processing/extract-text.yaml
+++ b/examples/applications/text-processing/extract-text.yaml
@@ -22,11 +22,11 @@ pipeline:
- name: "Read from S3"
type: "s3-source"
configuration:
- bucketName: "{{{secrets.s3-credentials.bucket-name}}}"
- endpoint: "{{{secrets.s3-credentials.endpoint}}}"
- access-key: "{{{secrets.s3-credentials.access-key}}}"
- secret-key: "{{{secrets.s3-credentials.secret}}}"
- region: "{{{secrets.s3-credentials.region}}}"
+ bucketName: "${secrets.s3-credentials.bucket-name}"
+ endpoint: "${secrets.s3-credentials.endpoint}"
+ access-key: "${secrets.s3-credentials.access-key}"
+ secret-key: "${secrets.s3-credentials.secret}"
+ region: "${secrets.s3-credentials.region}"
idle-time: 5
- name: "Extract text"
type: "text-extractor"
@@ -75,7 +75,7 @@ pipeline:
type: "compute-ai-embeddings"
output: "chunks-topic"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.embeddings_vector"
text: "{{% value.text }}"
batch-size: 10
diff --git a/examples/applications/vertexai-text-completions/configuration.yaml b/examples/applications/vertexai-text-completions/configuration.yaml
index 7ff1b42ae..17d1808fe 100644
--- a/examples/applications/vertexai-text-completions/configuration.yaml
+++ b/examples/applications/vertexai-text-completions/configuration.yaml
@@ -20,9 +20,8 @@ configuration:
- type: "vertex-configuration"
name: "Google Vertex AI configuration"
configuration:
- url: "{{ secrets.vertex-ai.url }}"
- # use triple quotes in order to turn off escaping
- serviceAccountJson: "{{{ secrets.vertex-ai.serviceAccountJson }}}"
- token: "{{ secrets.vertex-ai.token }}"
- region: "{{ secrets.vertex-ai.region }}"
- project: "{{ secrets.vertex-ai.project }}"
+ url: "${secrets.vertex-ai.url}"
+ serviceAccountJson: "${secrets.vertex-ai.serviceAccountJson}"
+ token: "${secrets.vertex-ai.token}"
+ region: "${secrets.vertex-ai.region}"
+ project: "${secrets.vertex-ai.project}"
diff --git a/examples/applications/vertexai-text-completions/pipeline.yaml b/examples/applications/vertexai-text-completions/pipeline.yaml
index 0bfd73bb3..4e7b06241 100644
--- a/examples/applications/vertexai-text-completions/pipeline.yaml
+++ b/examples/applications/vertexai-text-completions/pipeline.yaml
@@ -29,7 +29,7 @@ pipeline:
type: "ai-text-completions"
output: "answers"
configuration:
- model: "{{{secrets.vertex-ai.text-completions-model}}}"
+ model: "${secrets.vertex-ai.text-completions-model}"
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
diff --git a/examples/applications/webcrawler-source/chatbot.yaml b/examples/applications/webcrawler-source/chatbot.yaml
index e779d7077..c7b170b58 100644
--- a/examples/applications/webcrawler-source/chatbot.yaml
+++ b/examples/applications/webcrawler-source/chatbot.yaml
@@ -32,7 +32,7 @@ pipeline:
- name: "compute-embeddings"
type: "compute-ai-embeddings"
configuration:
- model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+ model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
embeddings-field: "value.question_embeddings"
text: "{{% value.question }}"
flush-interval: 0
@@ -61,7 +61,7 @@ pipeline:
- name: "ai-chat-completions"
type: "ai-chat-completions"
configuration:
- model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+ model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
# on the log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
@@ -87,7 +87,7 @@ pipeline:
Do not provide information that is not related to the LangStream project.
Documents:
- {{%# value.related_documents}}
+ {{%# value.related_documents}}
{{% text}}
{{%/ value.related_documents}}
- role: user
diff --git a/examples/applications/webcrawler-source/configuration.yaml b/examples/applications/webcrawler-source/configuration.yaml
index 525e68629..db28f9906 100644
--- a/examples/applications/webcrawler-source/configuration.yaml
+++ b/examples/applications/webcrawler-source/configuration.yaml
@@ -20,16 +20,16 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "datasource"
name: "AstraDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
- secureBundle: "{{{ secrets.astra.secureBundle }}}"
- database: "{{{ secrets.astra.database }}}"
- token: "{{{ secrets.astra.token }}}"
- environment: "{{{ secrets.astra.environment }}}"
\ No newline at end of file
+ clientId: "${secrets.astra.clientId}"
+ secret: "${secrets.astra.secret}"
+ secureBundle: "${secrets.astra.secureBundle}"
+ database: "${secrets.astra.database}"
+ token: "${secrets.astra.token}"
+ environment: "${secrets.astra.environment}"
\ No newline at end of file
diff --git a/examples/applications/webcrawler-source/crawler.yaml b/examples/applications/webcrawler-source/crawler.yaml
index 9bec4b98d..92a371708 100644
--- a/examples/applications/webcrawler-source/crawler.yaml
+++ b/examples/applications/webcrawler-source/crawler.yaml
@@ -38,11 +38,11 @@ pipeline:
http-timeout: 10000
handle-cookies: true
max-unflushed-pages: 100
- bucketName: "{{{secrets.s3.bucket-name}}}"
- endpoint: "{{{secrets.s3.endpoint}}}"
- access-key: "{{{secrets.s3.access-key}}}"
- secret-key: "{{{secrets.s3.secret}}}"
- region: "{{{secrets.s3.region}}}"
+ bucketName: "${secrets.s3.bucket-name}"
+ endpoint: "${secrets.s3.endpoint}"
+ access-key: "${secrets.s3.access-key}"
+ secret-key: "${secrets.s3.secret}"
+ region: "${secrets.s3.region}"
- name: "Extract text"
type: "text-extractor"
- name: "Normalise text"
diff --git a/examples/instances/astra.yaml b/examples/instances/astra.yaml
index 9101fbb8e..383543faf 100644
--- a/examples/instances/astra.yaml
+++ b/examples/instances/astra.yaml
@@ -23,8 +23,8 @@ instance:
type: "kafka"
configuration:
admin:
- bootstrap.servers: "{{{ secrets.kafka.bootstrap-servers }}}"
+ bootstrap.servers: "${ secrets.kafka.bootstrap-servers }"
security.protocol: SASL_SSL
- sasl.jaas.config: "org.apache.kafka.common.security.plain.PlainLoginModule required username='{{{ secrets.kafka.username }}}' password='{{{ secrets.kafka.password }}}';"
+ sasl.jaas.config: "org.apache.kafka.common.security.plain.PlainLoginModule required username='${ secrets.kafka.username }' password='${ secrets.kafka.password }';"
sasl.mechanism: PLAIN
session.timeout.ms: "45000"
diff --git a/langstream-cli/src/test/resources/expected-get.json b/langstream-cli/src/test/resources/expected-get.json
index 29804b343..e73a23739 100644
--- a/langstream-cli/src/test/resources/expected-get.json
+++ b/langstream-cli/src/test/resources/expected-get.json
@@ -7,9 +7,9 @@
"name" : "OpenAI Azure configuration",
"type" : "open-ai-configuration",
"configuration" : {
- "access-key" : "{{ secrets.open-ai.access-key }}",
+ "access-key" : "${secrets.open-ai.access-key}",
"provider" : "azure",
- "url" : "{{ secrets.open-ai.url }}"
+ "url" : "${secrets.open-ai.url}"
}
}
},
@@ -38,12 +38,12 @@
"enableDeadletterQueue" : false
},
"configuration" : {
- "access-key" : "{{{secrets.s3-credentials.access-key}}}",
- "bucketName" : "{{{secrets.s3-credentials.bucket-name}}}",
- "endpoint" : "{{{secrets.s3-credentials.endpoint}}}",
+ "access-key" : "${secrets.s3-credentials.access-key}",
+ "bucketName" : "${secrets.s3-credentials.bucket-name}",
+ "endpoint" : "${secrets.s3-credentials.endpoint}",
"idle-time" : 5,
- "region" : "{{{secrets.s3-credentials.region}}}",
- "secret-key" : "{{{secrets.s3-credentials.secret}}}"
+ "region" : "${secrets.s3-credentials.region}",
+ "secret-key" : "${secrets.s3-credentials.secret}"
},
"resources" : {
"parallelism" : 1,
@@ -276,9 +276,9 @@
},
"output" : null,
"configuration" : {
- "auth.password" : "{{{ secrets.cassandra.password }}}",
- "auth.username" : "{{{ secrets.cassandra.username }}}",
- "cloud.secureConnectBundle" : "{{{ secrets.cassandra.secure-connect-bundle }}}",
+ "auth.password" : "${ secrets.cassandra.password }",
+ "auth.username" : "${ secrets.cassandra.username }",
+ "cloud.secureConnectBundle" : "${ secrets.cassandra.secure-connect-bundle }",
"connector.class" : "com.datastax.oss.kafka.sink.CassandraSinkConnector",
"key.converter" : "org.apache.kafka.connect.storage.StringConverter",
"name" : "cassandra-sink",
diff --git a/langstream-core/pom.xml b/langstream-core/pom.xml
index 81215ddbb..3bec033d0 100644
--- a/langstream-core/pom.xml
+++ b/langstream-core/pom.xml
@@ -59,10 +59,6 @@
junit-jupiter
test
-
- com.samskivert
- jmustache
-
org.awaitility
awaitility
diff --git a/langstream-core/src/main/java/ai/langstream/impl/common/ApplicationPlaceholderResolver.java b/langstream-core/src/main/java/ai/langstream/impl/common/ApplicationPlaceholderResolver.java
index 2d0cf64f6..1d46f52d3 100644
--- a/langstream-core/src/main/java/ai/langstream/impl/common/ApplicationPlaceholderResolver.java
+++ b/langstream-core/src/main/java/ai/langstream/impl/common/ApplicationPlaceholderResolver.java
@@ -29,7 +29,6 @@
import ai.langstream.api.model.TopicDefinition;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
-import com.samskivert.mustache.Mustache;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@@ -47,6 +46,12 @@ public class ApplicationPlaceholderResolver {
private static final ObjectMapper mapper =
new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+ private static final ObjectMapper mapperForTemplates =
+ new ObjectMapper()
+ .configure(
+ SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS,
true); // stable key order makes rendered JSON deterministic (helps tests, and apps built on LangStream)
+
private ApplicationPlaceholderResolver() {}
@SneakyThrows
@@ -97,8 +102,9 @@ private static Map resolveModules(
(Map.Entry entry) -> {
String topicName = entry.getKey();
TopicDefinition definition = entry.getValue().copy();
- definition.setName(resolveValue(context, definition.getName()));
- newTopics.put(resolveValue(context, topicName), definition);
+ definition.setName(
+ resolveValueAsString(context, definition.getName()));
+ newTopics.put(resolveValueAsString(context, topicName), definition);
});
module.replaceTopics(newTopics);
if (module.getAssets() != null) {
@@ -185,8 +191,8 @@ private static Gateways resolveGateways(Application instance, Map context, Connection conn
}
return new Connection(
connection.connectionType(),
- resolveValue(context, connection.definition()),
+ resolveValueAsString(context, connection.definition()),
connection.enableDeadletterQueue());
}
@@ -235,36 +241,96 @@ static Object resolveValue(Map context, Object object) {
} else if (object instanceof Collection) {
return resolveCollection(context, (Collection>) object);
} else {
- return resolveValue(context, object == null ? null : object.toString());
+ return resolveSingleValue(context, object == null ? null : object.toString());
}
}
- private record Placeholder(String key, String value, String finalReplacement) {}
+ static String resolveValueAsString(Map context, String template) {
+ Object value = resolveSingleValue(context, template);
+ return value == null ? null : value.toString();
+ }
- static String resolveValue(Map context, String template) {
+ static Object resolveSingleValue(Map context, String template) {
if (template == null) {
return null;
}
- List placeholders = new ArrayList<>();
- placeholders.add(new Placeholder("{{% ", "{__MUSTACHE_ESCAPING_PREFIX ", "{{ "));
- placeholders.add(
- new Placeholder("{{%# ", "{__MUSTACHE_ESCAPING_PREFIX_LOOPSTART ", "{{# "));
- placeholders.add(new Placeholder("{{%/ ", "{__MUSTACHE_ESCAPING_PREFIX_LOOPEND ", "{{/ "));
- String escaped = template;
- for (Placeholder placeholder : placeholders) {
- escaped = escaped.replace(placeholder.key, placeholder.value);
+ String reference = template.trim();
+ if (!reference.startsWith("${") || !reference.endsWith("}")) {
+ // this is a raw value embedding placeholders, like
+ // "username=${secrets.username} password=${secrets.password}"
+ return resolvePlaceholdersInString(template, context);
+ }
+
+ // exact match ${ x.y.z } (and not ${ x.y.z }${ x.y.z })
+ if (reference.startsWith("${")
+ && reference.endsWith("}")
+ && reference.indexOf("{") == reference.lastIndexOf("{")) {
+ String placeholder = reference.substring(2, reference.length() - 1);
+ return resolveReference(placeholder, context);
+ }
+ log.warn("Unknown placeholder: {}", reference);
+ return resolvePlaceholdersInString(template, context);
+ }
+
+ static String resolvePlaceholdersInString(String template, Map context) {
+ StringBuilder result = new StringBuilder();
+ int position = 0;
+ int pos = template.indexOf("${", position);
+ if (pos < 0) {
+ return template;
}
- try {
- final String result = Mustache.compiler().compile(escaped).execute(context);
- String finalResult = result;
- for (Placeholder placeholder : placeholders) {
- finalResult = finalResult.replace(placeholder.value, placeholder.finalReplacement);
+ while (pos >= 0) {
+ result.append(template, position, pos);
+ int end = template.indexOf("}", pos);
+ if (end < 0) {
+ throw new IllegalArgumentException("Invalid placeholder: " + template);
+ }
+ String placeholder = template.substring(pos + 2, end).trim();
+ Object value = resolveReference(placeholder, context);
+ if (value == null) {
+ // use an empty string so we don't write the literal "null" into the result
+ value = "";
+ }
+ if (!(value instanceof String)) {
+ // stuff that is not a String has to be converted to something that fits in a String
+ // using JSON is the least bad option
+ try {
+ value = mapperForTemplates.writeValueAsString(value);
+ } catch (IOException impossible) {
+ throw new IllegalStateException(impossible);
+ }
}
- return finalResult;
- } catch (com.samskivert.mustache.MustacheException e) {
- log.error("Error resolving template: {}", template, e);
+ result.append(value);
+ position = end + 1;
+ pos = template.indexOf("${", position);
+ }
+ result.append(template, position, template.length());
+ return result.toString();
+ }
+
+ private static Object resolveReference(String placeholder, Object context) {
+ placeholder = placeholder.trim();
+ int dot = placeholder.indexOf('.');
+ if (dot < 0) {
+ return resolveProperty(context, placeholder);
+ } else {
+ String parent = placeholder.substring(0, dot);
+ String child = placeholder.substring(dot + 1);
+ Object parentValue = resolveProperty(context, parent);
+ return resolveReference(child, parentValue);
+ }
+ }
+
+ private static Object resolveProperty(Object context, String property) {
+ if (context == null) {
+ throw new IllegalArgumentException(
+ "Property " + property + " cannot be resolved on a empty context");
+ }
+ if (context instanceof Map) {
+ return ((Map) context).get(property);
+ } else {
throw new IllegalArgumentException(
- "Error resolving template: " + template + " " + e, e);
+ "Cannot resolve property " + property + " on " + context);
}
}
diff --git a/langstream-core/src/test/java/ai/langstream/impl/common/ApplicationPlaceholderResolverTest.java b/langstream-core/src/test/java/ai/langstream/impl/common/ApplicationPlaceholderResolverTest.java
index 4443abd38..5d2636086 100644
--- a/langstream-core/src/test/java/ai/langstream/impl/common/ApplicationPlaceholderResolverTest.java
+++ b/langstream-core/src/test/java/ai/langstream/impl/common/ApplicationPlaceholderResolverTest.java
@@ -16,12 +16,11 @@
package ai.langstream.impl.common;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import ai.langstream.api.model.Application;
import ai.langstream.api.model.Resource;
import ai.langstream.impl.parser.ModelBuilder;
-import com.samskivert.mustache.MustacheException;
+import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@@ -59,15 +58,12 @@ void testAvailablePlaceholders() throws Exception {
ApplicationPlaceholderResolver.createContext(applicationInstance);
assertEquals(
"my-access-key",
- ApplicationPlaceholderResolver.resolveValue(
- context, "{{secrets.openai-credentials.accessKey}}"));
- assertEquals(
- "http://mypulsar.localhost:8080",
- ApplicationPlaceholderResolver.resolveValue(
- context, "{{cluster.configuration.admin.serviceUrl}}"));
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${secrets.openai-credentials.accessKey}"));
assertEquals(
"http://myurl.localhost:8080/endpoint",
- ApplicationPlaceholderResolver.resolveValue(context, "{{globals.open-api-url}}"));
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${globals.open-api-url}"));
}
@Test
@@ -83,8 +79,8 @@ void testResolveSecretsInConfiguration() throws Exception {
name: "OpenAI Azure configuration"
id: "openai-azure"
configuration:
- credentials: "{{secrets.openai-credentials.accessKey}}"
- url: "{{globals.open-api-url}}"
+ credentials: "${secrets.openai-credentials.accessKey}"
+ url: "${globals.open-api-url}"
"""),
"""
@@ -119,19 +115,19 @@ void testResolveInAgentConfiguration() throws Exception {
module: "module-1"
id: "pipeline-1"
topics:
- - name: "{{{globals.input-topic}}}"
- - name: "{{{globals.output-topic}}}"
- - name: "{{{globals.stream-response-topic}}}"
+ - name: "${globals.input-topic}"
+ - name: "${globals.output-topic}"
+ - name: "${globals.stream-response-topic}"
pipeline:
- name: "agent1"
id: "agent1"
type: "ai-chat-completions"
- input: "{{{globals.input-topic}}}"
- output: "{{{globals.output-topic}}}"
+ input: "${globals.input-topic}"
+ output: "${globals.output-topic}"
configuration:
- stream-to-topic: "{{{globals.stream-response-topic}}}"
+ stream-to-topic: "${globals.stream-response-topic}"
sinkType: "some-sink-type-on-your-cluster"
- access-key: "{{ secrets.ak.value }}"
+ access-key: "${secrets.ak.value}"
"""),
"""
instance:
@@ -235,7 +231,7 @@ void testErrorOnNotFound() throws Exception {
name: "OpenAI Azure configuration"
id: "openai-azure"
configuration:
- credentials: "{{secrets.openai-credentials.invalid}}"
+ credentials: "${secrets.openai-credentials.invalid}"
"""),
null,
@@ -247,8 +243,9 @@ void testErrorOnNotFound() throws Exception {
() ->
ApplicationPlaceholderResolver.resolvePlaceholders(
applicationInstance));
-
- assertInstanceOf(MustacheException.Context.class, illegalArgumentException.getCause());
+ assertEquals(
+ "Property invalid cannot be resolved on a empty context",
+ illegalArgumentException.getMessage());
}
@Test
@@ -280,23 +277,6 @@ void testKeepStruct() throws Exception {
Assertions.assertTrue(configuration.get("myvalue") instanceof String);
}
- @Test
- void testEscapeMustache() {
- assertEquals(
- """
- {{ do not resolve }} resolved
- {{# value.related_documents}}
- {{ text}}
- {{/ value.related_documents}}""",
- ApplicationPlaceholderResolver.resolveValue(
- Map.of("test", "resolved"),
- """
- {{% do not resolve }} {{ test }}
- {{%# value.related_documents}}
- {{% text}}
- {{%/ value.related_documents}}"""));
- }
-
@Test
void testResolveTopicsInGateway() throws Exception {
Application applicationInstance =
@@ -307,28 +287,28 @@ void testResolveTopicsInGateway() throws Exception {
module: "module-1"
id: "pipeline-1"
topics:
- - name: "{{{globals.input-topic}}}"
- - name: "{{{globals.output-topic}}}"
- - name: "{{{globals.stream-response-topic}}}"
+ - name: "${globals.input-topic}"
+ - name: "${globals.output-topic}"
+ - name: "${globals.stream-response-topic}"
pipeline:
- name: "agent1"
id: "agent1"
type: "ai-chat-completions"
- input: "{{{globals.input-topic}}}"
- output: "{{{globals.output-topic}}}"
+ input: "${globals.input-topic}"
+ output: "${globals.output-topic}"
""",
"gateways.yaml",
"""
gateways:
- id: produce
type: produce
- topic: "{{{globals.input-topic}}}"
- events-topic: "{{{globals.stream-response-topic}}}"
+ topic: "${globals.input-topic}"
+ events-topic: "${globals.stream-response-topic}"
produce-options: {}
- id: consume
type: consume
- topic: "{{{globals.input-topic}}}"
- events-topic: "{{{globals.stream-response-topic}}}"
+ topic: "${globals.input-topic}"
+ events-topic: "${globals.stream-response-topic}"
consume-options: {}
"""),
"""
@@ -348,4 +328,100 @@ void testResolveTopicsInGateway() throws Exception {
assertEquals("my-input-topic", resolved.getGateways().gateways().get(1).getTopic());
assertEquals("my-stream-topic", resolved.getGateways().gateways().get(1).getEventsTopic());
}
+
+ @Test
+ void testResolveVariablesInAssets() throws Exception {
+ Application applicationInstance =
+ ModelBuilder.buildApplicationInstance(
+ Map.of(
+ "module1.yaml",
+ """
+ module: "module-1"
+ id: "pipeline-1"
+ assets:
+ - name: "by asset"
+ asset-type: "some-type"
+ config:
+ some-value: "${globals.table-name}"
+ pipeline:
+ - name: "agent1"
+ id: "agent1"
+ type: "identity"
+ """),
+ """
+ instance:
+ globals:
+ table-name: my-table
+ """,
+ null)
+ .getApplication();
+
+ final Application resolved =
+ ApplicationPlaceholderResolver.resolvePlaceholders(applicationInstance);
+ assertEquals(
+ "my-table",
+ resolved.getModule("module-1").getAssets().get(0).getConfig().get("some-value"));
+ }
+
+ @Test
+ void testResolveAsString() {
+ assertEquals("test", ApplicationPlaceholderResolver.resolveValueAsString(Map.of(), "test"));
+ assertEquals(
+ "xxx",
+ ApplicationPlaceholderResolver.resolveValueAsString(
+ Map.of("globals", Map.of("foo", Map.of("bar", "xxx"))),
+ "${globals.foo.bar}"));
+ }
+
+ @Test
+ void testResolve() {
+ Map context =
+ Map.of(
+ "globals",
+ Map.of(
+ "foo",
+ Map.of(
+ "bar",
+ "xxx",
+ "number",
+ 123,
+ "list",
+ List.of(1, 2),
+ "map",
+ Map.of("one", 1, "two", 2))));
+ assertEquals(
+ "xxx",
+ ApplicationPlaceholderResolver.resolveSingleValue(context, "${globals.foo.bar}"));
+ assertEquals(
+ 123,
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${globals.foo.number}"));
+ assertEquals(
+ List.of(1, 2),
+ ApplicationPlaceholderResolver.resolveSingleValue(context, "${globals.foo.list}"));
+
+ // some spaces
+ assertEquals(
+ 123,
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${ globals.foo.number }"));
+
+ // simple concat
+ assertEquals(
+ "123-xxx",
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${ globals.foo.number }-${ globals.foo.bar }"));
+
+ // using a list, but in a string context
+ assertEquals(
+ "123-[1,2]",
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${ globals.foo.number }-${ globals.foo.list }"));
+
+ // using a map, but in a string context
+ assertEquals(
+ "123-{\"one\":1,\"two\":2}",
+ ApplicationPlaceholderResolver.resolveSingleValue(
+ context, "${ globals.foo.number }-${ globals.foo.map }"));
+ }
}
diff --git a/langstream-core/src/test/java/ai/langstream/impl/deploy/ApplicationDeployerTest.java b/langstream-core/src/test/java/ai/langstream/impl/deploy/ApplicationDeployerTest.java
index d807f3bd8..940fe7636 100644
--- a/langstream-core/src/test/java/ai/langstream/impl/deploy/ApplicationDeployerTest.java
+++ b/langstream-core/src/test/java/ai/langstream/impl/deploy/ApplicationDeployerTest.java
@@ -107,7 +107,7 @@ void testDeploy() throws Exception {
name: "OpenAI Azure configuration"
id: "openai-azure"
configuration:
- credentials: "{{secrets.openai-credentials.accessKey}}"
+ credentials: "${secrets.openai-credentials.accessKey}"
"""),
"""
diff --git a/langstream-core/src/test/resources/application1/chatbot.yaml b/langstream-core/src/test/resources/application1/chatbot.yaml
index f076b12ae..4d356d25f 100644
--- a/langstream-core/src/test/resources/application1/chatbot.yaml
+++ b/langstream-core/src/test/resources/application1/chatbot.yaml
@@ -56,7 +56,7 @@ pipeline:
- role: system
content: |
An user is going to perform a questions, these documents may help you in answering to their questions.
- {{%# value.related_documents}}
+ {{ value.related_documents}}
{{% text}}
{{%/ value.related_documents}}
- role: user
diff --git a/langstream-core/src/test/resources/application1/configuration.yaml b/langstream-core/src/test/resources/application1/configuration.yaml
index 68120c714..b95153b30 100644
--- a/langstream-core/src/test/resources/application1/configuration.yaml
+++ b/langstream-core/src/test/resources/application1/configuration.yaml
@@ -30,16 +30,16 @@ configuration:
- type: "open-ai-configuration"
name: "OpenAI Azure configuration"
configuration:
- accessKey: "{{secrets.openai-credentials.accessKey}}"
+ accessKey: "${secrets.openai-credentials.accessKey}"
url: "https://api-server/v1/engines/"
- type: "hugging-face-configuration"
name: "Hugging Face API configuration"
configuration:
- credentials: "{{secrets.hf-credentials.accessKey}}"
+ credentials: "${secrets.hf-credentials.accessKey}"
- type: "datasource"
name: "Sample DataSource"
configuration:
type: astra
- username: "{{secrets.astra-credentials.username}}"
- password: "{{secrets.astra-credentials.password}}"
- secureBundle: "{{secrets.astra-credentials.secureBundle}}"
\ No newline at end of file
+ username: "${secrets.astra-credentials.username}"
+ password: "${secrets.astra-credentials.password}"
+ secureBundle: "${secrets.astra-credentials.secureBundle}"
\ No newline at end of file
diff --git a/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/configuration.yaml b/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/configuration.yaml
index a56141d0e..b60bc184d 100644
--- a/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/configuration.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/configuration.yaml
@@ -21,9 +21,9 @@ configuration:
name: "CassandraDatasource"
configuration:
service: "cassandra"
- contact-points: "{{{ secrets.cassandra.contact-points }}}"
- loadBalancing-localDc: "{{{ secrets.cassandra.local-dc }}}"
- port: "{{{ secrets.cassandra.port }}}"
+ contact-points: "${ secrets.cassandra.contact-points }"
+ loadBalancing-localDc: "${ secrets.cassandra.local-dc }"
+ port: "${ secrets.cassandra.port }"
dependencies:
- name: "Kafka Connect Sink for Apache Cassandra from DataStax"
url: "https://github.com/datastax/kafka-sink/releases/download/1.5.0/kafka-connect-cassandra-sink-1.5.0.jar"
diff --git a/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/pipeline.yaml b/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/pipeline.yaml
index af71b1d67..cdf2e45dd 100644
--- a/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/pipeline.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/cassandra-sink/pipeline.yaml
@@ -53,6 +53,6 @@ pipeline:
key.converter: org.apache.kafka.connect.storage.StringConverter
value.converter: org.apache.kafka.connect.storage.StringConverter
loadBalancing.localDc: "datacenter1"
- contactPoints: "{{{ secrets.cassandra.contact-points }}}"
+ contactPoints: "${ secrets.cassandra.contact-points }"
topic.ls-test-topic1.vsearch.products.mapping: "id=value.id,description=value.description,name=value.name"
name: cassandra-sink
\ No newline at end of file
diff --git a/langstream-e2e-tests/src/test/resources/apps/chat-completions/configuration.yaml b/langstream-e2e-tests/src/test/resources/apps/chat-completions/configuration.yaml
index 44cf803d9..df23fa030 100644
--- a/langstream-e2e-tests/src/test/resources/apps/chat-completions/configuration.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/chat-completions/configuration.yaml
@@ -21,14 +21,14 @@ configuration:
id: "open-ai"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "vertex-configuration"
name: "Google Vertex AI configuration"
id: "vertex"
configuration:
- url: "{{ secrets.vertex-ai.url }}"
- serviceAccountJson: "{{ secrets.vertex-ai.service-json }}"
- region: "{{ secrets.vertex-ai.region }}"
- project: "{{ secrets.vertex-ai.project }}"
+ url: "${secrets.vertex-ai.url}"
+ serviceAccountJson: "${secrets.vertex-ai.service-json}"
+ region: "${secrets.vertex-ai.region}"
+ project: "${secrets.vertex-ai.project}"
diff --git a/langstream-e2e-tests/src/test/resources/apps/chat-completions/pipeline.yaml b/langstream-e2e-tests/src/test/resources/apps/chat-completions/pipeline.yaml
index 7810c27ae..28b3ca71f 100644
--- a/langstream-e2e-tests/src/test/resources/apps/chat-completions/pipeline.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/chat-completions/pipeline.yaml
@@ -31,8 +31,8 @@ pipeline:
type: "ai-chat-completions"
output: "ls-test-history-topic"
configuration:
- ai-service: "{{{secrets.chat-completions.service}}}"
- model: "{{{secrets.chat-completions.model}}}"
+ ai-service: "${secrets.chat-completions.service}"
+ model: "${secrets.chat-completions.model}"
completion-field: "value.answer"
log-field: "value.prompt"
stream-to-topic: "ls-test-output-topic"
diff --git a/langstream-e2e-tests/src/test/resources/apps/python-processor/pipeline.yaml b/langstream-e2e-tests/src/test/resources/apps/python-processor/pipeline.yaml
index 5363fe1c4..26dc40078 100644
--- a/langstream-e2e-tests/src/test/resources/apps/python-processor/pipeline.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/python-processor/pipeline.yaml
@@ -40,5 +40,5 @@ pipeline:
input: ls-test-topic0
output: ls-test-topic1
configuration:
- secret_value: "{{ secrets.secret1.value-key }}"
+ secret_value: "${secrets.secret1.value-key}"
className: example.Exclamation
\ No newline at end of file
diff --git a/langstream-e2e-tests/src/test/resources/apps/python-sink/pipeline.yaml b/langstream-e2e-tests/src/test/resources/apps/python-sink/pipeline.yaml
index fa2d692ef..88c3c0cfd 100644
--- a/langstream-e2e-tests/src/test/resources/apps/python-sink/pipeline.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/python-sink/pipeline.yaml
@@ -35,4 +35,4 @@ pipeline:
input: ls-test-input
configuration:
className: example.TestSink
- bootstrapServers: "{{ secrets.kafka.bootstrap-servers }}"
\ No newline at end of file
+ bootstrapServers: "${secrets.kafka.bootstrap-servers}"
\ No newline at end of file
diff --git a/langstream-e2e-tests/src/test/resources/apps/text-completions/configuration.yaml b/langstream-e2e-tests/src/test/resources/apps/text-completions/configuration.yaml
index 44cf803d9..df23fa030 100644
--- a/langstream-e2e-tests/src/test/resources/apps/text-completions/configuration.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/text-completions/configuration.yaml
@@ -21,14 +21,14 @@ configuration:
id: "open-ai"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "vertex-configuration"
name: "Google Vertex AI configuration"
id: "vertex"
configuration:
- url: "{{ secrets.vertex-ai.url }}"
- serviceAccountJson: "{{ secrets.vertex-ai.service-json }}"
- region: "{{ secrets.vertex-ai.region }}"
- project: "{{ secrets.vertex-ai.project }}"
+ url: "${secrets.vertex-ai.url}"
+ serviceAccountJson: "${secrets.vertex-ai.service-json}"
+ region: "${secrets.vertex-ai.region}"
+ project: "${secrets.vertex-ai.project}"
diff --git a/langstream-e2e-tests/src/test/resources/apps/text-completions/pipeline.yaml b/langstream-e2e-tests/src/test/resources/apps/text-completions/pipeline.yaml
index 2efa6ac93..2c28b8345 100644
--- a/langstream-e2e-tests/src/test/resources/apps/text-completions/pipeline.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/text-completions/pipeline.yaml
@@ -31,8 +31,8 @@ pipeline:
type: "ai-text-completions"
output: "ls-test-history-topic"
configuration:
- ai-service: "{{{secrets.text-completions.service}}}"
- model: "{{{secrets.text-completions.model}}}"
+ ai-service: "${secrets.text-completions.service}"
+ model: "${secrets.text-completions.model}"
completion-field: "value.answer"
log-field: "value.prompt"
stream-to-topic: "ls-test-output-topic"
diff --git a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/chatbot.yaml b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/chatbot.yaml
index 1e71d7dc0..e0f147123 100644
--- a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/chatbot.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/chatbot.yaml
@@ -34,8 +34,8 @@ pipeline:
- name: "compute-embeddings"
type: "compute-ai-embeddings"
configuration:
- ai-service: "{{{secrets.embeddings.service}}}"
- model: "{{{secrets.embeddings.model}}}"
+ ai-service: "${secrets.embeddings.service}"
+ model: "${secrets.embeddings.model}"
embeddings-field: "value.question_embeddings"
text: "{{% value.question }}"
flush-interval: 0
@@ -51,8 +51,8 @@ pipeline:
type: "ai-chat-completions"
configuration:
- ai-service: "{{{secrets.chat-completions.service}}}"
- model: "{{{secrets.chat-completions.model}}}" # This needs to be set to the model deployment name, not the base name
+ ai-service: "${secrets.chat-completions.service}"
+ model: "${secrets.chat-completions.model}" # This needs to be set to the model deployment name, not the base name
# on the ls-test-log-topic we add a field with the answer
completion-field: "value.answer"
# we are also logging the prompt we sent to the LLM
@@ -78,7 +78,7 @@ pipeline:
Do not provide information that is not related to the LangStream project.
Documents:
- {{%# value.related_documents}}
+ {{ value.related_documents}}
{{% text}}
{{%/ value.related_documents}}
- role: user
diff --git a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/configuration.yaml b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/configuration.yaml
index 0a9d8beee..fc4ce5e10 100644
--- a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/configuration.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/configuration.yaml
@@ -21,23 +21,23 @@ configuration:
id: "open-ai"
name: "OpenAI Azure configuration"
configuration:
- url: "{{ secrets.open-ai.url }}"
- access-key: "{{ secrets.open-ai.access-key }}"
- provider: "{{ secrets.open-ai.provider }}"
+ url: "${secrets.open-ai.url}"
+ access-key: "${secrets.open-ai.access-key}"
+ provider: "${secrets.open-ai.provider}"
- type: "datasource"
name: "AstraDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
- database: "{{{ secrets.astra.database }}}"
- token: "{{{ secrets.astra.token }}}"
- environment: "{{{ secrets.astra.environment }}}"
+ clientId: "${secrets.astra.clientId}"
+ secret: "${secrets.astra.secret}"
+ database: "${secrets.astra.database}"
+ token: "${secrets.astra.token}"
+ environment: "${secrets.astra.environment}"
- type: "vertex-configuration"
name: "Google Vertex AI configuration"
id: "vertex"
configuration:
- url: "{{ secrets.vertex-ai.url }}"
- serviceAccountJson: "{{ secrets.vertex-ai.service-json }}"
- region: "{{ secrets.vertex-ai.region }}"
- project: "{{ secrets.vertex-ai.project }}"
\ No newline at end of file
+ url: "${secrets.vertex-ai.url}"
+ serviceAccountJson: "${secrets.vertex-ai.service-json}"
+ region: "${secrets.vertex-ai.region}"
+ project: "${secrets.vertex-ai.project}"
\ No newline at end of file
diff --git a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/crawler.yaml b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/crawler.yaml
index 648bc44b8..c1f34253a 100644
--- a/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/crawler.yaml
+++ b/langstream-e2e-tests/src/test/resources/apps/webcrawler-to-vector/crawler.yaml
@@ -39,9 +39,9 @@ pipeline:
handle-cookies: true
max-unflushed-pages: 100
bucketName: "langstream-test-crawler-to-vector"
- endpoint: "{{{secrets.s3.endpoint}}}"
- access-key: "{{{secrets.s3.access-key}}}"
- secret-key: "{{{secrets.s3.secret-key}}}"
+ endpoint: "${secrets.s3.endpoint}"
+ access-key: "${secrets.s3.access-key}"
+ secret-key: "${secrets.s3.secret-key}"
- name: "Extract text"
type: "text-extractor"
- name: "Normalise text"
@@ -89,8 +89,8 @@ pipeline:
type: "compute-ai-embeddings"
output: "ls-test-chunks-topic"
configuration:
- ai-service: "{{{secrets.embeddings.service}}}"
- model: "{{{secrets.embeddings.model}}}"
+ ai-service: "${secrets.embeddings.service}"
+ model: "${secrets.embeddings.model}"
embeddings-field: "value.embeddings_vector"
text: "{{% value.text }}"
batch-size: 10
diff --git a/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/GenAIAgentsTest.java b/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/GenAIAgentsTest.java
index 7ded63dba..cb2a9fb38 100644
--- a/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/GenAIAgentsTest.java
+++ b/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/GenAIAgentsTest.java
@@ -88,7 +88,7 @@ public void testOpenAIComputeEmbeddingFunction() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
"""),
buildInstanceYaml(),
null)
@@ -171,7 +171,7 @@ public void testMergeGenAIToolKitAgents() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
- name: "drop"
id: "step2"
type: "drop-fields"
@@ -287,7 +287,7 @@ public void testDontMergeGenAIToolKitAgentsWithExplicitLogTopic() throws Excepti
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
- name: "drop"
id: "step2"
type: "drop-fields"
@@ -414,7 +414,7 @@ public void testMapAllGenAIToolKitAgents() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
- name: "dropfields"
type: "drop-fields"
configuration:
@@ -694,7 +694,7 @@ public void testEmbeddingsThanQuery() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
- name: "query1"
id: query1
type: "query"
@@ -793,7 +793,7 @@ public void testForceAiService() throws Exception {
ai-service: "my-vertex"
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
"""),
buildInstanceYaml(),
null)
diff --git a/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/KubernetesClusterRuntimeDockerTest.java b/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/KubernetesClusterRuntimeDockerTest.java
index cefb6a027..068b4703d 100644
--- a/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/KubernetesClusterRuntimeDockerTest.java
+++ b/langstream-k8s-runtime/langstream-k8s-runtime-core/src/test/java/ai/langstream/runtime/impl/k8s/KubernetesClusterRuntimeDockerTest.java
@@ -122,7 +122,7 @@ public void testOpenAIComputeEmbeddingFunction() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
"""),
buildInstanceYaml(),
null)
@@ -299,7 +299,7 @@ public void testCodeArchiveId() throws Exception {
configuration:
model: "text-embedding-ada-002"
embeddings-field: "value.embeddings"
- text: "{{% value.name }} {{% value.description }}"
+ text: "{{ value.name }} {{ value.description }}"
"""),
buildInstanceYaml(),
null)
diff --git a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/AbstractApplicationRunner.java b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/AbstractApplicationRunner.java
index ed0264d9e..675108d40 100644
--- a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/AbstractApplicationRunner.java
+++ b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/AbstractApplicationRunner.java
@@ -265,7 +265,7 @@ protected List waitForMessages(KafkaConsumer consumer, List> e
assertArrayEquals((byte[]) expectedValue, (byte[]) actualValue);
} else {
log.info("expected: {}", expectedValue);
- log.info("got: {}", actualValue);
+ log.info("got: {}", actualValue);
assertEquals(expectedValue, actualValue);
}
}
diff --git a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/assets/DeployAssetsTest.java b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/assets/DeployAssetsTest.java
index 76eaef5e7..46e8ea833 100644
--- a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/assets/DeployAssetsTest.java
+++ b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/assets/DeployAssetsTest.java
@@ -50,7 +50,7 @@ public void testDeployAsset() throws Exception {
- type: "datasource"
name: "the-resource"
configuration:
- foo: "{{{secrets.the-secret.password}}}"
+ foo: "${secrets.the-secret.password}"
""",
"module.yaml",
"""
@@ -61,7 +61,7 @@ public void testDeployAsset() throws Exception {
creation-mode: create-if-not-exists
asset-type: "mock-database-resource"
config:
- table: "{{{globals.table-name}}}"
+ table: "${globals.table-name}"
datasource: "the-resource"
- name: "my-table2"
creation-mode: create-if-not-exists
diff --git a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/AstraDBAssetQueryWriteIT.java b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/AstraDBAssetQueryWriteIT.java
index 4bfc67f81..1d6056bf4 100644
--- a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/AstraDBAssetQueryWriteIT.java
+++ b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/AstraDBAssetQueryWriteIT.java
@@ -49,12 +49,12 @@ public void testAstra() throws Exception {
name: "AstraDBDatasource"
configuration:
service: "astra"
- clientId: "{{{ secrets.astra.clientId }}}"
- secret: "{{{ secrets.astra.secret }}}"
+ clientId: "${ secrets.astra.clientId }"
+ secret: "${ secrets.astra.secret }"
# These are optional, but if you want to use the astra-keyspace asset you need them
- token: "{{{ secrets.astra.token }}}"
- database: "{{{ secrets.astra.database }}}"
- environment: "{{{ secrets.astra.environment }}}"
+ token: "${ secrets.astra.token }"
+ database: "${ secrets.astra.database }"
+ environment: "${ secrets.astra.environment }"
""",
"pipeline.yaml",
"""
diff --git a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/ChatCompletionsIT.java b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/ChatCompletionsIT.java
index d47e411e2..952e33aef 100644
--- a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/ChatCompletionsIT.java
+++ b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/ChatCompletionsIT.java
@@ -107,25 +107,25 @@ public void testChatCompletionWithStreaming() throws Exception {
module: "module-1"
id: "pipeline-1"
topics:
- - name: "{{{globals.input-topic}}}"
+ - name: "${globals.input-topic}"
creation-mode: create-if-not-exists
- - name: "{{{globals.output-topic}}}"
+ - name: "${globals.output-topic}"
creation-mode: create-if-not-exists
- - name: "{{{globals.stream-topic}}}"
+ - name: "${globals.stream-topic}"
creation-mode: create-if-not-exists
pipeline:
- name: "convert-to-json"
id: "step1"
type: "document-to-json"
- input: "{{{globals.input-topic}}}"
+ input: "${globals.input-topic}"
configuration:
text-field: "question"
- name: "chat-completions"
type: "ai-chat-completions"
- output: "{{{globals.output-topic}}}"
+ output: "${globals.output-topic}"
configuration:
model: "%s"
- stream-to-topic: "{{{globals.stream-topic}}}"
+ stream-to-topic: "${globals.stream-topic}"
stream-response-completion-field: "value"
completion-field: "value.answer"
log-field: "value.prompt"
@@ -137,7 +137,7 @@ public void testChatCompletionWithStreaming() throws Exception {
"""
.formatted(
model,
- "What can you tell me about {{% value.question}} ?"));
+ "What can you tell me about {{{ value.question }}} ?"));
try (ApplicationRuntime applicationRuntime =
deployApplication(
tenant, appId, application, buildInstanceYaml(), expectedAgents)) {
@@ -183,7 +183,7 @@ tenant, appId, application, buildInstanceYaml(), expectedAgents)) {
consumer,
List.of(
"""
- {"question":"the car","session-id":"2139847128764192","answer":"A car is a vehicle","prompt":"{\\"options\\":{\\"type\\":\\"ai-chat-completions\\",\\"when\\":null,\\"model\\":\\"gpt-35-turbo\\",\\"messages\\":[{\\"role\\":\\"user\\",\\"content\\":\\"What can you tell me about {{ value.question}} ?\\"}],\\"stream-to-topic\\":\\"%s\\",\\"stream-response-completion-field\\":\\"value\\",\\"min-chunks-per-message\\":3,\\"completion-field\\":\\"value.answer\\",\\"stream\\":true,\\"log-field\\":\\"value.prompt\\",\\"max-tokens\\":null,\\"temperature\\":null,\\"top-p\\":null,\\"logit-bias\\":null,\\"user\\":null,\\"stop\\":null,\\"presence-penalty\\":null,\\"frequency-penalty\\":null},\\"messages\\":[{\\"role\\":\\"user\\",\\"content\\":\\"What can you tell me about the car ?\\"}],\\"model\\":\\"gpt-35-turbo\\"}"}"""
+ {"question":"the car","session-id":"2139847128764192","answer":"A car is a vehicle","prompt":"{\\"options\\":{\\"type\\":\\"ai-chat-completions\\",\\"when\\":null,\\"model\\":\\"gpt-35-turbo\\",\\"messages\\":[{\\"role\\":\\"user\\",\\"content\\":\\"What can you tell me about {{{ value.question }}} ?\\"}],\\"stream-to-topic\\":\\"%s\\",\\"stream-response-completion-field\\":\\"value\\",\\"min-chunks-per-message\\":3,\\"completion-field\\":\\"value.answer\\",\\"stream\\":true,\\"log-field\\":\\"value.prompt\\",\\"max-tokens\\":null,\\"temperature\\":null,\\"top-p\\":null,\\"logit-bias\\":null,\\"user\\":null,\\"stop\\":null,\\"presence-penalty\\":null,\\"frequency-penalty\\":null},\\"messages\\":[{\\"role\\":\\"user\\",\\"content\\":\\"What can you tell me about the car ?\\"}],\\"model\\":\\"gpt-35-turbo\\"}"}"""
.formatted(streamToTopic)));
ConsumerRecord record = mainOutputRecords.get(0);
diff --git a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/PlaceholderEndToEndTest.java b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/PlaceholderEndToEndTest.java
index 9774ef899..234d94ae0 100644
--- a/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/PlaceholderEndToEndTest.java
+++ b/langstream-runtime/langstream-runtime-impl/src/test/java/ai/langstream/kafka/PlaceholderEndToEndTest.java
@@ -76,12 +76,12 @@ public void testUseSchemaWithKafkaAndVariableTopicNames() throws Exception {
module: "module-1"
id: "pipeline-1"
topics:
- - name: "{{{globals.input-topic}}}"
+ - name: "${globals.input-topic}"
creation-mode: create-if-not-exists
schema:
type: avro
schema: '%s'
- - name: "{{{globals.output-topic}}}"
+ - name: "${globals.output-topic}"
creation-mode: create-if-not-exists
schema:
type: avro
@@ -89,8 +89,8 @@ public void testUseSchemaWithKafkaAndVariableTopicNames() throws Exception {
- name: "identity"
id: "step1"
type: "identity"
- input: "{{{globals.input-topic}}}"
- output: "{{{globals.output-topic}}}"
+ input: "${globals.input-topic}"
+ output: "${globals.output-topic}"
"""
.formatted(schemaDefinition));
try (ApplicationRuntime applicationRuntime =