diff --git a/ChatQnA/docker/gaudi/README.md b/ChatQnA/docker/gaudi/README.md index 2cd8fb562a..2dbc53b6c4 100644 --- a/ChatQnA/docker/gaudi/README.md +++ b/ChatQnA/docker/gaudi/README.md @@ -112,6 +112,8 @@ export RERANK_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep" +export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file" +export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete_file" ``` Note: Please replace with `host_ip` with you external IP address, do **NOT** use localhost. @@ -231,6 +233,32 @@ curl -X POST "http://${host_ip}:6007/v1/dataprep" \ This command updates a knowledge base by submitting a list of HTTP links for processing. +Also, you are able to get the file/link list that you uploaded: + +```bash +curl -X POST "http://${host_ip}:6008/v1/dataprep/get_file" \ + -H "Content-Type: application/json" +``` + +To delete the file/link you uploaded: + +```bash +# delete link +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "https://opea.dev"}' \ + -H "Content-Type: application/json" + +# delete file +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "nke-10k-2023.pdf"}' \ + -H "Content-Type: application/json" + +# delete all uploaded files and links +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "all"}' \ + -H "Content-Type: application/json" +``` + ## Enable LangSmith for Monotoring Application (Optional) LangSmith offers tools to debug, evaluate, and monitor language models and intelligent agents. It can be used to assess benchmark data for each microservice. 
Before launching your services with `docker compose -f docker_compose.yaml up -d`, you need to enable LangSmith tracing by setting the `LANGCHAIN_TRACING_V2` environment variable to true and configuring your LangChain API key. diff --git a/ChatQnA/docker/gaudi/docker_compose.yaml b/ChatQnA/docker/gaudi/docker_compose.yaml index d297d4e9fb..9f8532ba36 100644 --- a/ChatQnA/docker/gaudi/docker_compose.yaml +++ b/ChatQnA/docker/gaudi/docker_compose.yaml @@ -18,6 +18,8 @@ services: - redis-vector-db ports: - "6007:6007" + - "6008:6008" + - "6009:6009" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} @@ -192,6 +194,8 @@ services: - http_proxy=${http_proxy} - CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT} - UPLOAD_FILE_BASE_URL=${DATAPREP_SERVICE_ENDPOINT} + - GET_FILE=${DATAPREP_GET_FILE_ENDPOINT} + - DELETE_FILE=${DATAPREP_DELETE_FILE_ENDPOINT} ipc: host restart: always diff --git a/ChatQnA/docker/gpu/README.md b/ChatQnA/docker/gpu/README.md index 18b3e95721..7a069ff390 100644 --- a/ChatQnA/docker/gpu/README.md +++ b/ChatQnA/docker/gpu/README.md @@ -100,6 +100,8 @@ export RERANK_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep" +export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file" +export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete_file" ``` Note: Please replace with `host_ip` with you external IP address, do **NOT** use localhost. @@ -219,6 +221,32 @@ curl -X POST "http://${host_ip}:6007/v1/dataprep" \ This command updates a knowledge base by submitting a list of HTTP links for processing. 
+Also, you are able to get the file/link list that you uploaded: + +```bash +curl -X POST "http://${host_ip}:6008/v1/dataprep/get_file" \ + -H "Content-Type: application/json" +``` + +To delete the file/link you uploaded: + +```bash +# delete link +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "https://opea.dev"}' \ + -H "Content-Type: application/json" + +# delete file +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "nke-10k-2023.pdf"}' \ + -H "Content-Type: application/json" + +# delete all uploaded files and links +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "all"}' \ + -H "Content-Type: application/json" +``` + ## Enable LangSmith for Monotoring Application (Optional) LangSmith offers tools to debug, evaluate, and monitor language models and intelligent agents. It can be used to assess benchmark data for each microservice. Before launching your services with `docker compose -f docker_compose.yaml up -d`, you need to enable LangSmith tracing by setting the `LANGCHAIN_TRACING_V2` environment variable to true and configuring your LangChain API key. 
diff --git a/ChatQnA/docker/gpu/docker_compose.yaml b/ChatQnA/docker/gpu/docker_compose.yaml index e19fcdb86a..15c2f36797 100644 --- a/ChatQnA/docker/gpu/docker_compose.yaml +++ b/ChatQnA/docker/gpu/docker_compose.yaml @@ -18,6 +18,8 @@ services: - redis-vector-db ports: - "6007:6007" + - "6008:6008" + - "6009:6009" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} @@ -204,6 +206,8 @@ services: - http_proxy=${http_proxy} - CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT} - UPLOAD_FILE_BASE_URL=${DATAPREP_SERVICE_ENDPOINT} + - GET_FILE=${DATAPREP_GET_FILE_ENDPOINT} + - DELETE_FILE=${DATAPREP_DELETE_FILE_ENDPOINT} ipc: host restart: always diff --git a/ChatQnA/docker/xeon/README.md b/ChatQnA/docker/xeon/README.md index 16c543911c..6fdc95f21d 100644 --- a/ChatQnA/docker/xeon/README.md +++ b/ChatQnA/docker/xeon/README.md @@ -128,7 +128,8 @@ Build frontend Docker image that enables Conversational experience with ChatQnA cd GenAIExamples/ChatQnA/docker/ui/ export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep" -docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy --build-arg BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT --build-arg DATAPREP_SERVICE_ENDPOINT=$DATAPREP_SERVICE_ENDPOINT -f ./docker/Dockerfile.react . +export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file" +docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy --build-arg BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT --build-arg DATAPREP_SERVICE_ENDPOINT=$DATAPREP_SERVICE_ENDPOINT --build-arg DATAPREP_GET_FILE_ENDPOINT=$DATAPREP_GET_FILE_ENDPOINT -f ./docker/Dockerfile.react . cd ../../../.. 
``` @@ -190,6 +191,8 @@ export RERANK_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep" +export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file" +export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete_file" ``` Note: Please replace with `host_ip` with you external IP address, do not use localhost. @@ -309,6 +312,32 @@ curl -X POST "http://${host_ip}:6007/v1/dataprep" \ This command updates a knowledge base by submitting a list of HTTP links for processing. +Also, you are able to get the file/link list that you uploaded: + +```bash +curl -X POST "http://${host_ip}:6008/v1/dataprep/get_file" \ + -H "Content-Type: application/json" +``` + +To delete the file/link you uploaded: + +```bash +# delete link +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "https://opea.dev"}' \ + -H "Content-Type: application/json" + +# delete file +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "nke-10k-2023.pdf"}' \ + -H "Content-Type: application/json" + +# delete all uploaded files and links +curl -X POST "http://${host_ip}:6009/v1/dataprep/delete_file" \ + -d '{"file_path": "all"}' \ + -H "Content-Type: application/json" +``` + ## Enable LangSmith for Monotoring Application (Optional) LangSmith offers tools to debug, evaluate, and monitor language models and intelligent agents. It can be used to assess benchmark data for each microservice. Before launching your services with `docker compose -f docker_compose.yaml up -d`, you need to enable LangSmith tracing by setting the `LANGCHAIN_TRACING_V2` environment variable to true and configuring your LangChain API key. 
diff --git a/ChatQnA/docker/xeon/docker_compose.yaml b/ChatQnA/docker/xeon/docker_compose.yaml index 55ce080ed9..10c7b5d652 100644 --- a/ChatQnA/docker/xeon/docker_compose.yaml +++ b/ChatQnA/docker/xeon/docker_compose.yaml @@ -18,6 +18,8 @@ services: - redis-vector-db ports: - "6007:6007" + - "6008:6008" + - "6009:6009" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} @@ -183,6 +185,8 @@ services: - http_proxy=${http_proxy} - CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT} - UPLOAD_FILE_BASE_URL=${DATAPREP_SERVICE_ENDPOINT} + - GET_FILE=${DATAPREP_GET_FILE_ENDPOINT} + - DELETE_FILE=${DATAPREP_DELETE_FILE_ENDPOINT} ipc: host restart: always chaqna-xeon-conversation-ui-server: