diff --git a/helm-charts/common/data-prep/templates/configmap.yaml b/helm-charts/common/data-prep/templates/configmap.yaml
index 636227627..84b26d634 100644
--- a/helm-charts/common/data-prep/templates/configmap.yaml
+++ b/helm-charts/common/data-prep/templates/configmap.yaml
@@ -34,7 +34,4 @@ data:
   {{- else }}
   no_proxy: {{ .Values.global.no_proxy | quote }}
   {{- end }}
-  LANGCHAIN_TRACING_V2: {{ .Values.global.LANGCHAIN_TRACING_V2 | quote }}
-  LANGCHAIN_API_KEY: {{ .Values.global.LANGCHAIN_API_KEY | quote }}
-  LANGCHAIN_PROJECT: "opea-dataprep-service"
   LOGFLAG: {{ .Values.LOGFLAG | quote }}
diff --git a/helm-charts/common/data-prep/values.yaml b/helm-charts/common/data-prep/values.yaml
index 6a1fc0823..a18b68f2e 100644
--- a/helm-charts/common/data-prep/values.yaml
+++ b/helm-charts/common/data-prep/values.yaml
@@ -100,5 +100,3 @@ global:
   https_proxy: ""
   no_proxy: ""
   HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
-  LANGCHAIN_TRACING_V2: false
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
diff --git a/helm-charts/common/embedding-usvc/templates/configmap.yaml b/helm-charts/common/embedding-usvc/templates/configmap.yaml
index 28da012e3..5ec5904ad 100644
--- a/helm-charts/common/embedding-usvc/templates/configmap.yaml
+++ b/helm-charts/common/embedding-usvc/templates/configmap.yaml
@@ -20,7 +20,4 @@ data:
   {{- else }}
   no_proxy: {{ .Values.global.no_proxy | quote }}
   {{- end }}
-  LANGCHAIN_TRACING_V2: {{ .Values.global.LANGCHAIN_TRACING_V2 | quote }}
-  LANGCHAIN_API_KEY: {{ .Values.global.LANGCHAIN_API_KEY }}
-  LANGCHAIN_PROJECT: "opea-embedding-service"
   LOGFLAG: {{ .Values.LOGFLAG | quote }}
diff --git a/helm-charts/common/embedding-usvc/values.yaml b/helm-charts/common/embedding-usvc/values.yaml
index b57f08e00..dfab98260 100644
--- a/helm-charts/common/embedding-usvc/values.yaml
+++ b/helm-charts/common/embedding-usvc/values.yaml
@@ -89,9 +89,3 @@ global:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  # HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
-  LANGCHAIN_TRACING_V2: false
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  # set modelUseHostPath to host directory if you want to use hostPath volume for model storage
-  # comment out modeluseHostPath if you want to download the model from huggingface
-  modelUseHostPath: /mnt/opea-models
diff --git a/helm-charts/common/llm-uservice/README.md b/helm-charts/common/llm-uservice/README.md
index 191b1efd8..5ace49d1b 100644
--- a/helm-charts/common/llm-uservice/README.md
+++ b/helm-charts/common/llm-uservice/README.md
@@ -46,10 +46,9 @@ curl http://localhost:9000/v1/chat/completions \
 
 ## Values
 
-| Key                             | Type   | Default              | Description |
-| ------------------------------- | ------ | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| global.HUGGINGFACEHUB_API_TOKEN | string | `""`                 | Your own Hugging Face API token |
-| global.modelUseHostPath         | string | `"/mnt/opea-models"` | Cached models directory, tgi will not download if the model is cached here. The host path "modelUseHostPath" will be mounted to container as /data directory |
-| image.repository                | string | `"opea/llm-tgi"`     | |
-| service.port                    | string | `"9000"`             | |
-| TGI_LLM_ENDPOINT                | string | `""`                 | LLM endpoint |
+| Key                             | Type   | Default          | Description                     |
+| ------------------------------- | ------ | ---------------- | ------------------------------- |
+| global.HUGGINGFACEHUB_API_TOKEN | string | `""`             | Your own Hugging Face API token |
+| image.repository                | string | `"opea/llm-tgi"` |                                 |
+| service.port                    | string | `"9000"`         |                                 |
+| TGI_LLM_ENDPOINT                | string | `""`             | LLM endpoint                    |
diff --git a/helm-charts/common/llm-uservice/templates/configmap.yaml b/helm-charts/common/llm-uservice/templates/configmap.yaml
index 8e734b652..bb1c39434 100644
--- a/helm-charts/common/llm-uservice/templates/configmap.yaml
+++ b/helm-charts/common/llm-uservice/templates/configmap.yaml
@@ -25,7 +25,4 @@ data:
   {{- else }}
   no_proxy: {{ .Values.global.no_proxy | quote }}
   {{- end }}
-  LANGCHAIN_TRACING_V2: {{ .Values.global.LANGCHAIN_TRACING_V2 | quote }}
-  LANGCHAIN_API_KEY: {{ .Values.global.LANGCHAIN_API_KEY }}
-  LANGCHAIN_PROJECT: "opea-llm-uservice"
   LOGFLAG: {{ .Values.LOGFLAG | quote }}
diff --git a/helm-charts/common/llm-uservice/values.yaml b/helm-charts/common/llm-uservice/values.yaml
index 0b7b2642b..39b10894d 100644
--- a/helm-charts/common/llm-uservice/values.yaml
+++ b/helm-charts/common/llm-uservice/values.yaml
@@ -90,8 +90,3 @@ global:
   https_proxy: ""
   no_proxy: ""
   HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
-  LANGCHAIN_TRACING_V2: false
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  # set modelUseHostPath to host directory if you want to use hostPath volume for model storage
-  # comment out modeluseHostPath if you want to download the model from huggingface
-  modelUseHostPath: /mnt/opea-models
diff --git a/helm-charts/common/reranking-usvc/templates/configmap.yaml b/helm-charts/common/reranking-usvc/templates/configmap.yaml
index d63c53ee2..69b1fcd91 100644
--- a/helm-charts/common/reranking-usvc/templates/configmap.yaml
+++ b/helm-charts/common/reranking-usvc/templates/configmap.yaml
@@ -20,7 +20,4 @@ data:
   {{- else }}
   no_proxy: {{ .Values.global.no_proxy | quote }}
   {{- end }}
-  LANGCHAIN_TRACING_V2: {{ .Values.global.LANGCHAIN_TRACING_V2 | quote }}
-  LANGCHAIN_API_KEY: {{ .Values.global.LANGCHAIN_API_KEY | quote }}
-  LANGCHAIN_PROJECT: "opea-reranking-service"
   LOGFLAG: {{ .Values.LOGFLAG | quote }}
diff --git a/helm-charts/common/reranking-usvc/values.yaml b/helm-charts/common/reranking-usvc/values.yaml
index 3bac2cccf..56454d11b 100644
--- a/helm-charts/common/reranking-usvc/values.yaml
+++ b/helm-charts/common/reranking-usvc/values.yaml
@@ -89,9 +89,3 @@ global:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  # HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
-  LANGCHAIN_TRACING_V2: false
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  # set modelUseHostPath to host directory if you want to use hostPath volume for model storage
-  # comment out modeluseHostPath if you want to download the model from huggingface
-  modelUseHostPath: /mnt/opea-models
diff --git a/helm-charts/common/retriever-usvc/templates/configmap.yaml b/helm-charts/common/retriever-usvc/templates/configmap.yaml
index b2be3834f..39a4271a8 100644
--- a/helm-charts/common/retriever-usvc/templates/configmap.yaml
+++ b/helm-charts/common/retriever-usvc/templates/configmap.yaml
@@ -28,9 +28,6 @@ data:
   {{- else }}
   no_proxy: {{ .Values.global.no_proxy | quote }}
   {{- end }}
-  LANGCHAIN_TRACING_V2: {{ .Values.global.LANGCHAIN_TRACING_V2 | quote }}
-  LANGCHAIN_API_KEY: {{ .Values.global.LANGCHAIN_API_KEY | quote }}
-  LANGCHAIN_PROJECT: "opea-retriever-service"
   HF_HOME: "/tmp/.cache/huggingface"
   HUGGINGFACEHUB_API_TOKEN: {{ .Values.global.HUGGINGFACEHUB_API_TOKEN | quote}}
   LOGFLAG: {{ .Values.LOGFLAG | quote }}
diff --git a/helm-charts/common/retriever-usvc/values.yaml b/helm-charts/common/retriever-usvc/values.yaml
index fe8f36e08..19157edd2 100644
--- a/helm-charts/common/retriever-usvc/values.yaml
+++ b/helm-charts/common/retriever-usvc/values.yaml
@@ -94,6 +94,4 @@ global:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: false
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
   HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
diff --git a/helm-charts/update_genaiexamples.sh b/helm-charts/update_genaiexamples.sh
index 331819416..5c1c546c4 100755
--- a/helm-charts/update_genaiexamples.sh
+++ b/helm-charts/update_genaiexamples.sh
@@ -33,8 +33,7 @@ function generate_yaml {
     extra=$5
 
     local extraparams=""
-    extraparams="--set global.modelUseHostPath=${MODELPATH}"
-    [[ "x$extra" != "x" ]] && extraparams="${extraparams},${extra}"
+    [[ "x$extra" != "x" ]] && extraparams="--set ${extra}"
 
     helm dependency update $chart
     helm template $chart $chart --skip-tests $extraparams -f $chart/${valuefile} > $outputdir/$outputfile
diff --git a/microservices-connector/config/manifests/data-prep.yaml b/microservices-connector/config/manifests/data-prep.yaml
index a958941cc..c681e93a9 100644
--- a/microservices-connector/config/manifests/data-prep.yaml
+++ b/microservices-connector/config/manifests/data-prep.yaml
@@ -25,9 +25,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  LANGCHAIN_PROJECT: "opea-dataprep-service"
   LOGFLAG: ""
 ---
 # Source: data-prep/templates/service.yaml
diff --git a/microservices-connector/config/manifests/docsum-llm-uservice.yaml b/microservices-connector/config/manifests/docsum-llm-uservice.yaml
index 2f0adc1f8..b0a5a92d3 100644
--- a/microservices-connector/config/manifests/docsum-llm-uservice.yaml
+++ b/microservices-connector/config/manifests/docsum-llm-uservice.yaml
@@ -20,9 +20,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: insert-your-langchain-key-here
-  LANGCHAIN_PROJECT: "opea-llm-uservice"
   LOGFLAG: ""
 ---
 # Source: llm-uservice/templates/service.yaml
diff --git a/microservices-connector/config/manifests/embedding-usvc.yaml b/microservices-connector/config/manifests/embedding-usvc.yaml
index 45e9b2e85..c5eefa0c6 100644
--- a/microservices-connector/config/manifests/embedding-usvc.yaml
+++ b/microservices-connector/config/manifests/embedding-usvc.yaml
@@ -18,9 +18,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: insert-your-langchain-key-here
-  LANGCHAIN_PROJECT: "opea-embedding-service"
   LOGFLAG: ""
 ---
 # Source: embedding-usvc/templates/service.yaml
diff --git a/microservices-connector/config/manifests/llm-uservice.yaml b/microservices-connector/config/manifests/llm-uservice.yaml
index a65b03dd1..43f0c64a7 100644
--- a/microservices-connector/config/manifests/llm-uservice.yaml
+++ b/microservices-connector/config/manifests/llm-uservice.yaml
@@ -20,9 +20,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: insert-your-langchain-key-here
-  LANGCHAIN_PROJECT: "opea-llm-uservice"
   LOGFLAG: ""
 ---
 # Source: llm-uservice/templates/service.yaml
diff --git a/microservices-connector/config/manifests/reranking-usvc.yaml b/microservices-connector/config/manifests/reranking-usvc.yaml
index 954a8c19e..ba1b97bed 100644
--- a/microservices-connector/config/manifests/reranking-usvc.yaml
+++ b/microservices-connector/config/manifests/reranking-usvc.yaml
@@ -18,9 +18,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  LANGCHAIN_PROJECT: "opea-reranking-service"
   LOGFLAG: ""
 ---
 # Source: reranking-usvc/templates/service.yaml
diff --git a/microservices-connector/config/manifests/retriever-usvc.yaml b/microservices-connector/config/manifests/retriever-usvc.yaml
index 79824a967..2c5456857 100644
--- a/microservices-connector/config/manifests/retriever-usvc.yaml
+++ b/microservices-connector/config/manifests/retriever-usvc.yaml
@@ -22,9 +22,6 @@ data:
   http_proxy: ""
   https_proxy: ""
   no_proxy: ""
-  LANGCHAIN_TRACING_V2: "false"
-  LANGCHAIN_API_KEY: "insert-your-langchain-key-here"
-  LANGCHAIN_PROJECT: "opea-retriever-service"
   HF_HOME: "/tmp/.cache/huggingface"
   HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
   LOGFLAG: ""