This repository has been archived by the owner on Aug 25, 2024. It is now read-only.

Commit

[breaking change] Do not use Mustache but SpringBoot-like references to refer to secrets and globals (LangStream#507)
eolivelli authored Oct 2, 2023
1 parent 1f72e49 commit fc95ee1
Showing 58 changed files with 430 additions and 294 deletions.
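Every change below follows the same pattern: Mustache placeholders ("{{ secrets.x.y }}", or the triple-brace form "{{{ secrets.x.y }}}" used to disable HTML escaping) become Spring-Boot-style property references of the form "${secrets.x.y}", and globals are referenced the same way ("${globals.x}"). A minimal sketch of the new style, with an illustrative resource that is not taken from any file in this commit:

    configuration:
      resources:
        - type: "open-ai-configuration"
          name: "OpenAI configuration"
          configuration:
            # resolved at deploy time from the application's secrets file
            access-key: "${secrets.open-ai.access-key}"
            # globals declared in the instance file use the same ${...} form
            url: "${globals.open-ai-url}"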
10 changes: 5 additions & 5 deletions examples/applications/astradb-sink/configuration.yaml
@@ -21,8 +21,8 @@ configuration:
     name: "AstraDatasource"
     configuration:
       service: "astra"
-      clientId: "{{{ secrets.astra.clientId }}}"
-      secret: "{{{ secrets.astra.secret }}}"
-      token: "{{{ secrets.astra.token }}}"
-      database: "{{{ secrets.astra.database }}}"
-      environment: "{{{ secrets.astra.environment }}}"
+      clientId: "${ secrets.astra.clientId }"
+      secret: "${ secrets.astra.secret }"
+      token: "${ secrets.astra.token }"
+      database: "${ secrets.astra.database }"
+      environment: "${ secrets.astra.environment }"
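For context, the ${secrets.astra.*} references above are resolved against the application's secrets file. A sketch of the shape such a file typically takes — the layout shown here is an assumption based on the LangStream examples, not part of this commit:

    secrets:
      - id: astra              # matches the first path segment in ${secrets.astra.*}
        data:
          clientId: "..."      # values are usually filled from environment variables at deploy time
          secret: "..."
          token: "..."
          database: "..."
          environment: "..."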
@@ -20,4 +20,4 @@ configuration:
   - type: "hugging-face-configuration"
     name: "Hugging Face AI configuration"
     configuration:
-      access-key: "{{ secrets.hugging-face.access-key }}"
+      access-key: "${ secrets.hugging-face.access-key }"
@@ -20,6 +20,6 @@ configuration:
   - type: "open-ai-configuration"
     name: "OpenAI Azure configuration"
     configuration:
-      url: "{{ secrets.open-ai.url }}"
-      access-key: "{{ secrets.open-ai.access-key }}"
-      provider: "{{ secrets.open-ai.provider }}"
+      url: "${secrets.open-ai.url}"
+      access-key: "${secrets.open-ai.access-key}"
+      provider: "${secrets.open-ai.provider}"
@@ -31,7 +31,7 @@ pipeline:
     input: "input-topic"
     output: "output-topic"
     configuration:
-      model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+      model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
       embeddings-field: "value.embeddings"
       text: "{{% value.name }} {{% value.description }}"
       batch-size: 10
11 changes: 5 additions & 6 deletions examples/applications/compute-vertexai/configuration.yaml
@@ -20,9 +20,8 @@ configuration:
   - type: "vertex-configuration"
     name: "Google Vertex AI configuration"
     configuration:
-      url: "{{ secrets.vertex-ai.url }}"
-      # use triple quotes in order to turn off escaping
-      serviceAccountJson: "{{{ secrets.vertex-ai.serviceAccountJson }}}"
-      token: "{{ secrets.vertex-ai.token }}"
-      region: "{{ secrets.vertex-ai.region }}"
-      project: "{{ secrets.vertex-ai.project }}"
+      url: "${secrets.vertex-ai.url}"
+      serviceAccountJson: "${secrets.vertex-ai.serviceAccountJson}"
+      token: "${secrets.vertex-ai.token}"
+      region: "${secrets.vertex-ai.region}"
+      project: "${secrets.vertex-ai.project}"
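The deleted comment above ("use triple quotes in order to turn off escaping") hints at why this migration matters: Mustache's double braces HTML-escape their output, which corrupts values such as a JSON service-account credential, so keys like serviceAccountJson needed the triple-brace form. The new ${...} references never escape, so a single form works for every value. A one-line sketch using the same key:

    serviceAccountJson: "${secrets.vertex-ai.serviceAccountJson}"   # injected verbatim, no escaping step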
6 changes: 3 additions & 3 deletions examples/applications/docker-chatbot/chatbot.yaml
@@ -32,7 +32,7 @@ pipeline:
   - name: "compute-embeddings"
     type: "compute-ai-embeddings"
     configuration:
-      model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+      model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
       embeddings-field: "value.question_embeddings"
       text: "{{% value.question }}"
       flush-interval: 0
@@ -62,7 +62,7 @@ pipeline:
     type: "ai-chat-completions"

     configuration:
-      model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+      model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
       # on the log-topic we add a field with the answer
       completion-field: "value.answer"
       # we are also logging the prompt we sent to the LLM
@@ -88,7 +88,7 @@
           Do not provide information that is not related to the LangStream project.
           Documents:
           {{%# value.related_documents}}
-          {{ value.related_documents}}
+          {{% text}}
           {{%/ value.related_documents}}
         - role: user
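Note that only secret and global references move to ${...}; the Mustache-style templating used for record fields (for example text: "{{% value.question }}" above) is unchanged by this commit. Inside the system prompt, the loop over related documents now renders each document's text field instead of re-emitting the whole list, so the relevant fragment of the prompt now reads:

    {{%# value.related_documents}}
    {{% text}}
    {{%/ value.related_documents}}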
12 changes: 6 additions & 6 deletions examples/applications/docker-chatbot/configuration.yaml
@@ -22,15 +22,15 @@ configuration:
     configuration:
       service: "jdbc"
       driverClass: "herddb.jdbc.Driver"
-      url: "{{secrets.herddb.url}}"
-      user: "{{secrets.herddb.user}}"
-      password: "{{secrets.herddb.password}}"
+      url: "${secrets.herddb.url}"
+      user: "${secrets.herddb.user}"
+      password: "${secrets.herddb.password}"
   - type: "open-ai-configuration"
     name: "OpenAI Azure configuration"
     configuration:
-      url: "{{ secrets.open-ai.url }}"
-      access-key: "{{ secrets.open-ai.access-key }}"
-      provider: "{{ secrets.open-ai.provider }}"
+      url: "${secrets.open-ai.url}"
+      access-key: "${secrets.open-ai.access-key}"
+      provider: "${secrets.open-ai.provider}"
   dependencies:
     - name: "HerdDB.org JDBC Driver"
       url: "https://repo1.maven.org/maven2/org/herddb/herddb-jdbc/0.28.0/herddb-jdbc-0.28.0-thin.jar"
10 changes: 5 additions & 5 deletions examples/applications/docker-chatbot/crawler.yaml
@@ -55,11 +55,11 @@ pipeline:
       http-timeout: 10000
       handle-cookies: true
       max-unflushed-pages: 100
-      bucketName: "{{{secrets.s3.bucket-name}}}"
-      endpoint: "{{{secrets.s3.endpoint}}}"
-      access-key: "{{{secrets.s3.access-key}}}"
-      secret-key: "{{{secrets.s3.secret}}}"
-      region: "{{{secrets.s3.region}}}"
+      bucketName: "${secrets.s3.bucket-name}"
+      endpoint: "${secrets.s3.endpoint}"
+      access-key: "${secrets.s3.access-key}"
+      secret-key: "${secrets.s3.secret}"
+      region: "${secrets.s3.region}"
   - name: "Extract text"
     type: "text-extractor"
   - name: "Normalise text"
8 changes: 4 additions & 4 deletions examples/applications/gateway-authentication/gateways.yaml
@@ -52,7 +52,7 @@ gateways:
       provider: google
       allow-test-mode: true
       configuration:
-        clientId: "{{ secrets.google.client-id }}"
+        clientId: "${secrets.google.client-id}"
     produce-options:
       headers:
         - key: langstream-client-user-id
@@ -69,7 +69,7 @@
       allow-test-mode: true
       provider: google
       configuration:
-        clientId: "{{ secrets.google.client-id }}"
+        clientId: "${secrets.google.client-id}"
     consume-options:
       filters:
         headers:
@@ -86,7 +86,7 @@
     authentication:
       provider: github
       configuration:
-        clientId: "{{ secrets.github.client-id }}"
+        clientId: "${secrets.github.client-id}"
     produce-options:
       headers:
         - key: langstream-client-user-id
@@ -102,7 +102,7 @@
     authentication:
      provider: github
       configuration:
-        clientId: "{{ secrets.github.client-id }}"
+        clientId: "${secrets.github.client-id}"
     consume-options:
       filters:
         headers:
6 changes: 3 additions & 3 deletions examples/applications/kafka-connect/pipeline.yaml
@@ -28,8 +28,8 @@ pipeline:
       connector.class: com.datastax.oss.kafka.sink.CassandraSinkConnector
       key.converter: org.apache.kafka.connect.storage.StringConverter
       value.converter: org.apache.kafka.connect.storage.StringConverter
-      cloud.secureConnectBundle: "{{{ secrets.cassandra.secure-connect-bundle }}}"
-      auth.username: "{{{ secrets.cassandra.username }}}"
-      auth.password: "{{{ secrets.cassandra.password }}}"
+      cloud.secureConnectBundle: "${ secrets.cassandra.secure-connect-bundle }"
+      auth.username: "${ secrets.cassandra.username }"
+      auth.password: "${ secrets.cassandra.password }"
       topic.input-topic.vsearch.products.mapping: "id=value.id,description=value.description,name=value.name"
       name: cassandra-sink
8 changes: 4 additions & 4 deletions examples/applications/langchain-source/pipeline.yaml
@@ -24,7 +24,7 @@ pipeline:
     output: "output-topic"
     configuration:
       className: s3_langchain.S3LangChain
-      bucketName: "{{{ secrets.s3.bucket-name }}}"
-      endpoint: "{{{ secrets.s3.endpoint }}}"
-      username: "{{{ secrets.s3.access-key }}}"
-      password: "{{{ secrets.s3.secret }}}"
+      bucketName: "${ secrets.s3.bucket-name }"
+      endpoint: "${ secrets.s3.endpoint }"
+      username: "${ secrets.s3.access-key }"
+      password: "${ secrets.s3.secret }"
8 changes: 4 additions & 4 deletions examples/applications/llamaindex-cassandra-sink/pipeline.yaml
@@ -24,10 +24,10 @@ pipeline:
     input: "input-topic"
     configuration:
       className: llamaindex_cassandra.LlamaIndexCassandraSink
-      openaiKey: "{{ secrets.open-ai.access-key }}"
+      openaiKey: "${secrets.open-ai.access-key}"
       cassandra:
-        username: "{{{ secrets.astra.clientId }}}"
-        password: "{{{ secrets.astra.secret }}}"
-        secureBundle: "{{{ secrets.astra.secureBundle }}}"
+        username: "${secrets.astra.clientId}"
+        password: "${secrets.astra.secret}"
+        secureBundle: "${secrets.astra.secureBundle}"
         keyspace: ks1
         table: vs_ll_openai
6 changes: 3 additions & 3 deletions examples/applications/openai-completions/configuration.yaml
@@ -20,6 +20,6 @@ configuration:
   - type: "open-ai-configuration"
     name: "OpenAI Azure configuration"
     configuration:
-      url: "{{ secrets.open-ai.url }}"
-      access-key: "{{ secrets.open-ai.access-key }}"
-      provider: "{{ secrets.open-ai.provider }}"
+      url: "${secrets.open-ai.url}"
+      access-key: "${secrets.open-ai.access-key}"
+      provider: "${secrets.open-ai.provider}"
4 changes: 2 additions & 2 deletions examples/applications/openai-completions/gateways.yaml
@@ -60,7 +60,7 @@ gateways:
     authentication:
       provider: google
       configuration:
-        clientId: "{{ secrets.google.client-id }}"
+        clientId: "${secrets.google.client-id}"
     produce-options:
       headers:
         - key: langstream-client-user-id
@@ -72,7 +72,7 @@
     authentication:
       provider: google
       configuration:
-        clientId: "{{ secrets.google.client-id }}"
+        clientId: "${secrets.google.client-id}"
     consume-options:
       filters:
         headers:
2 changes: 1 addition & 1 deletion examples/applications/openai-completions/pipeline.yaml
@@ -31,7 +31,7 @@ pipeline:
     type: "ai-chat-completions"
     output: "history-topic"
     configuration:
-      model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+      model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
       # on the log-topic we add a field with the answer
       completion-field: "value.answer"
       # we are also logging the prompt we sent to the LLM
@@ -20,4 +20,4 @@ configuration:
   - type: "open-ai-configuration"
     name: "OpenAI configuration"
     configuration:
-      access-key: "{{ secrets.open-ai.access-key }}"
+      access-key: "${secrets.open-ai.access-key}"
@@ -31,7 +31,7 @@ pipeline:
     type: "ai-text-completions"
     output: "debug"
     configuration:
-      model: "{{{secrets.open-ai.text-completions-model}}}"
+      model: "${secrets.open-ai.text-completions-model}"
       # on the log-topic we add a field with the answer
       completion-field: "value.answer"
       # we are also logging the prompt we sent to the LLM
@@ -27,4 +27,4 @@ pipeline:
     output: "output-topic"
     configuration:
       className: embeddings.Embedding
-      openaiKey: "{{ secrets.open-ai.access-key }}"
+      openaiKey: "${secrets.open-ai.access-key}"
10 changes: 5 additions & 5 deletions examples/applications/query-astradb/configuration.yaml
@@ -21,9 +21,9 @@ configuration:
     name: "AstraDatasource"
     configuration:
       service: "astra"
-      clientId: "{{{ secrets.astra.clientId }}}"
-      secret: "{{{ secrets.astra.secret }}}"
-      secureBundle: "{{{ secrets.astra.secureBundle }}}"
-      database: "{{{ secrets.astra.database }}}"
-      token: "{{{ secrets.astra.token }}}"
+      clientId: "${ secrets.astra.clientId }"
+      secret: "${ secrets.astra.secret }"
+      secureBundle: "${ secrets.astra.secureBundle }"
+      database: "${ secrets.astra.database }"
+      token: "${ secrets.astra.token }"

6 changes: 3 additions & 3 deletions examples/applications/query-milvus/chatbot.yaml
@@ -32,7 +32,7 @@ pipeline:
   - name: "compute-embeddings"
     type: "compute-ai-embeddings"
     configuration:
-      model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+      model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
       embeddings-field: "value.question_embeddings"
       text: "{{% value.question }}"
       flush-interval: 0
@@ -54,7 +54,7 @@ pipeline:
     type: "ai-chat-completions"

     configuration:
-      model: "{{{secrets.open-ai.chat-completions-model}}}" # This needs to be set to the model deployment name, not the base name
+      model: "${secrets.open-ai.chat-completions-model}" # This needs to be set to the model deployment name, not the base name
       # on the log-topic we add a field with the answer
       completion-field: "value.answer"
       # we are also logging the prompt we sent to the LLM
@@ -80,7 +80,7 @@
           Do not provide information that is not related to the LangStream project.
           Documents:
           {{%# value.related_documents}}
-          {{ value.related_documents}}
+          {{% text}}
           {{%/ value.related_documents}}
         - role: user
20 changes: 10 additions & 10 deletions examples/applications/query-milvus/configuration.yaml
@@ -20,21 +20,21 @@ configuration:
   - type: "open-ai-configuration"
     name: "OpenAI Azure configuration"
     configuration:
-      url: "{{ secrets.open-ai.url }}"
-      access-key: "{{ secrets.open-ai.access-key }}"
-      provider: "{{ secrets.open-ai.provider }}"
+      url: "${secrets.open-ai.url}"
+      access-key: "${secrets.open-ai.access-key}"
+      provider: "${secrets.open-ai.provider}"
   - type: "datasource"
     name: "MilvusDatasource"
     configuration:
       service: "milvus"
       ## OSS Milvus
-      username: "{{{ secrets.milvus.username }}}"
-      password: "{{{ secrets.milvus.password }}}"
-      host: "{{{ secrets.milvus.host }}}"
-      port: "{{{ secrets.milvus.port }}}"
+      username: "${secrets.milvus.username}"
+      password: "${secrets.milvus.password}"
+      host: "${secrets.milvus.host}"
+      port: "${secrets.milvus.port}"
       ## Set to "upsert" for OSS Milvus, on Zills use "delete-insert"
-      write-mode: "{{{ secrets.milvus.write-mode }}}"
+      write-mode: "${secrets.milvus.write-mode}"
       ## Zillis
-      url: "{{{ secrets.milvus.url }}}"
-      token: "{{{ secrets.milvus.token }}}"
+      url: "${secrets.milvus.url}"
+      token: "${secrets.milvus.token}"

10 changes: 5 additions & 5 deletions examples/applications/query-milvus/crawler.yaml
@@ -91,11 +91,11 @@ pipeline:
       http-timeout: 10000
       handle-cookies: true
       max-unflushed-pages: 100
-      bucketName: "{{{secrets.s3.bucket-name}}}"
-      endpoint: "{{{secrets.s3.endpoint}}}"
-      access-key: "{{{secrets.s3.access-key}}}"
-      secret-key: "{{{secrets.s3.secret}}}"
-      region: "{{{secrets.s3.region}}}"
+      bucketName: "${secrets.s3.bucket-name}"
+      endpoint: "${secrets.s3.endpoint}"
+      access-key: "${secrets.s3.access-key}"
+      secret-key: "${secrets.s3.secret}"
+      region: "${secrets.s3.region}"
   - name: "Extract text"
     type: "text-extractor"
   - name: "Normalise text"
14 changes: 7 additions & 7 deletions examples/applications/query-pinecone/configuration.yaml
@@ -20,15 +20,15 @@ configuration:
   - type: "open-ai-configuration"
     name: "OpenAI Azure configuration"
    configuration:
-      url: "{{ secrets.open-ai.url }}"
-      access-key: "{{ secrets.open-ai.access-key }}"
-      provider: "{{ secrets.open-ai.provider }}"
+      url: "${secrets.open-ai.url}"
+      access-key: "${secrets.open-ai.access-key}"
+      provider: "${secrets.open-ai.provider}"
   - type: "vector-database"
     name: "PineconeDatasource"
     configuration:
       service: "pinecone"
-      api-key: "{{{secrets.pinecone.api-key}}}"
-      environment: "{{{secrets.pinecone.environment}}}"
-      index-name: "{{{secrets.pinecone.index-name}}}"
-      project-name: "{{{secrets.pinecone.project-name}}}"
+      api-key: "${secrets.pinecone.api-key}"
+      environment: "${secrets.pinecone.environment}"
+      index-name: "${secrets.pinecone.index-name}"
+      project-name: "${secrets.pinecone.project-name}"
       server-side-timeout-sec: 10
2 changes: 1 addition & 1 deletion examples/applications/query-pinecone/query.yaml
@@ -27,7 +27,7 @@ pipeline:
     type: "compute-ai-embeddings"
     input: "input-topic"
     configuration:
-      model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+      model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
       embeddings-field: "value.embeddings"
       text: "{{% value.question }}"
       flush-interval: 0
2 changes: 1 addition & 1 deletion examples/applications/query-pinecone/write.yaml
@@ -25,7 +25,7 @@ pipeline:
     type: "compute-ai-embeddings"
     input: "vectors-topic"
     configuration:
-      model: "{{{secrets.open-ai.embeddings-model}}}" # This needs to match the name of the model deployment, not the base model
+      model: "${secrets.open-ai.embeddings-model}" # This needs to match the name of the model deployment, not the base model
       embeddings-field: "value.embeddings"
       text: "{{% value.document }}"
       batch-size: 10
