From 2e63fe03e164511045491bd2697f69270478fb32 Mon Sep 17 00:00:00 2001 From: olederle Date: Fri, 15 Mar 2024 15:06:37 +0100 Subject: [PATCH] refactor: extend documentation * mention azopenai as available option and extras * add recommended section * include settings-azopenai.yaml configuration file --- fern/docs/pages/installation/installation.mdx | 29 +++++++++++++++++-- settings-azopenai.yaml | 17 +++++++++++ settings.yaml | 1 - 3 files changed, 44 insertions(+), 3 deletions(-) create mode 100644 settings-azopenai.yaml diff --git a/fern/docs/pages/installation/installation.mdx b/fern/docs/pages/installation/installation.mdx index 67431ebdb..057043c72 100644 --- a/fern/docs/pages/installation/installation.mdx +++ b/fern/docs/pages/installation/installation.mdx @@ -30,8 +30,8 @@ pyenv local 3.11 PrivateGPT allows to customize the setup -from fully local to cloud based- by deciding the modules to use. Here are the different options available: -- LLM: "llama-cpp", "ollama", "sagemaker", "openai", "openailike" -- Embeddings: "huggingface", "openai", "sagemaker" +- LLM: "llama-cpp", "ollama", "sagemaker", "openai", "openailike", "azopenai" +- Embeddings: "huggingface", "openai", "sagemaker", "azopenai" - Vector stores: "qdrant", "chroma", "postgres" - UI: whether or not to enable UI (Gradio) or just go with the API @@ -49,10 +49,12 @@ Where `` can be any of the following: - llms-sagemaker: adds support for Amazon Sagemaker LLM, requires Sagemaker inference endpoints - llms-openai: adds support for OpenAI LLM, requires OpenAI API key - llms-openai-like: adds support for 3rd party LLM providers that are compatible with OpenAI's API +- llms-azopenai: adds support for Azure OpenAI LLM, requires Azure OpenAI inference endpoints - embeddings-ollama: adds support for Ollama Embeddings, requires Ollama running locally - embeddings-huggingface: adds support for local Embeddings using HuggingFace - embeddings-sagemaker: adds support for Amazon Sagemaker Embeddings, requires 
Sagemaker inference endpoints - embeddings-openai = adds support for OpenAI Embeddings, requires OpenAI API key +- embeddings-azopenai: adds support for Azure OpenAI Embeddings, requires Azure OpenAI inference endpoints - vector-stores-qdrant: adds support for Qdrant vector store - vector-stores-chroma: adds support for Chroma DB vector store - vector-stores-postgres: adds support for Postgres vector store @@ -160,6 +162,29 @@ PrivateGPT will use the already existing `settings-openai.yaml` settings file, w The UI will be available at http://localhost:8001 +### Non-Private, Azure OpenAI-powered test setup + +If you want to test PrivateGPT with Azure OpenAI's LLM and Embeddings -taking into account your data is going to Azure OpenAI!- you can follow the steps below: + +You need to have access to Azure OpenAI inference endpoints for the LLM and / or the embeddings, and have Azure OpenAI credentials properly configured. + +Edit the `settings-azopenai.yaml` file to include the correct Azure OpenAI endpoints. + +Then, install PrivateGPT with the following command: ```bash poetry install --extras "ui llms-azopenai embeddings-azopenai vector-stores-qdrant" ``` + +Once installed, you can run PrivateGPT. + +```bash PGPT_PROFILES=azopenai make run ``` + +PrivateGPT will use the already existing `settings-azopenai.yaml` settings file, which is already configured to use Azure OpenAI LLM and Embeddings endpoints, and Qdrant. 
+ +The UI will be available at http://localhost:8001 + ### Local, Llama-CPP powered setup If you want to run PrivateGPT fully locally without relying on Ollama, you can run the following command: diff --git a/settings-azopenai.yaml b/settings-azopenai.yaml new file mode 100644 index 000000000..7e4b47c1c --- /dev/null +++ b/settings-azopenai.yaml @@ -0,0 +1,17 @@ +server: + env_name: ${APP_ENV:azopenai} + +llm: + mode: azopenai + +embedding: + mode: azopenai + +azopenai: + api_key: ${AZ_OPENAI_API_KEY:} + azure_endpoint: ${AZ_OPENAI_ENDPOINT:} + embedding_deployment_name: ${AZ_OPENAI_EMBEDDING_DEPLOYMENT_NAME:} + llm_deployment_name: ${AZ_OPENAI_LLM_DEPLOYMENT_NAME:} + api_version: "2023-05-15" + embedding_model: text-embedding-ada-002 + llm_model: gpt-35-turbo \ No newline at end of file diff --git a/settings.yaml b/settings.yaml index dab3f5cb9..f78685cbc 100644 --- a/settings.yaml +++ b/settings.yaml @@ -82,7 +82,6 @@ ollama: embedding_model: nomic-embed-text api_base: http://localhost:11434 - azopenai: api_key: ${AZ_OPENAI_API_KEY:} azure_endpoint: ${AZ_OPENAI_ENDPOINT:}