From f3ceae881757e64415281cdcaf54b91bc314f935 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fynn=20Fl=C3=BCgge?= Date: Fri, 29 Mar 2024 10:25:24 +0100 Subject: [PATCH] feat: add Anthropic Claude 3 Opus chat model (#40) --- README.md | 8 +++++++- codeqai/app.py | 4 ++++ codeqai/config.py | 15 +++++++++++++++ codeqai/constants.py | 1 + codeqai/embeddings.py | 2 +- codeqai/llm.py | 6 +++++- pyproject.toml | 2 +- 7 files changed, 34 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 959997b..96c5bc7 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ Built with [langchain](https://github.com/langchain-ai/langchain), [treesitter]( - ⚙️  Synchronize vector store and latest code changes with ease - 💻  100% local embeddings and llms - sentence-transformers, instructor-embeddings, llama.cpp, Ollama -- 🌐  OpenAI and Azure OpenAI support +- 🌐  OpenAI, Azure OpenAI and Anthropic - 🌳  Treesitter integration > [!NOTE] @@ -143,6 +143,12 @@ export OPENAI_API_KEY = "your Azure OpenAI api key" export OPENAI_API_VERSION = "2023-05-15" ``` +### Anthropic + +```bash +export ANTHROPIC_API_KEY="your Anthropic api key" +``` + > [!NOTE] > To change the environment variables later, update the `~/.config/codeqai/.env` manually. 
diff --git a/codeqai/app.py b/codeqai/app.py index c7e133d..ad577c1 100644 --- a/codeqai/app.py +++ b/codeqai/app.py @@ -110,6 +110,10 @@ def run(): "OPENAI_API_VERSION", ] ) + + if config["llm-host"] == LlmHost.ANTHROPIC.value: + required_keys.append("ANTHROPIC_API_KEY") + env_path = get_config_path().replace("config.yaml", ".env") env_loader(env_path, required_keys) diff --git a/codeqai/config.py b/codeqai/config.py index 50c6ec4..d52c6f0 100644 --- a/codeqai/config.py +++ b/codeqai/config.py @@ -104,6 +104,7 @@ def create_config(): choices=[ LlmHost.OPENAI.value, LlmHost.AZURE_OPENAI.value, + LlmHost.ANTHROPIC.value, ], default=LlmHost.OPENAI.value, ), @@ -180,6 +181,20 @@ def create_config(): ), ] + elif config["llm-host"] == "Anthropic": + questions = [ + inquirer.List( + "chat-model", + message="Which Anthropic chat model do you want to use?", + choices=[ + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + ], + default="claude-3-opus-20240229", + ), + ] + # Check if "chat-model" is already present in the case of Azure_OpenAI if "chat-model" not in config: answersChatmodel = inquirer.prompt(questions) diff --git a/codeqai/constants.py b/codeqai/constants.py index 9ab7e23..f233cdb 100644 --- a/codeqai/constants.py +++ b/codeqai/constants.py @@ -33,3 +33,4 @@ class LlmHost(Enum): OLLAMA = "Ollama" OPENAI = "OpenAI" AZURE_OPENAI = "Azure-OpenAI" + ANTHROPIC = "Anthropic" diff --git a/codeqai/embeddings.py b/codeqai/embeddings.py index 4846d3b..23434d9 100644 --- a/codeqai/embeddings.py +++ b/codeqai/embeddings.py @@ -1,6 +1,6 @@ import inquirer from langchain_community.embeddings import HuggingFaceEmbeddings -from langchain_openai import OpenAIEmbeddings, AzureOpenAIEmbeddings +from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings from codeqai import utils from codeqai.constants import EmbeddingsModel diff --git a/codeqai/llm.py b/codeqai/llm.py index dd1e2e5..75b5d44 100644 --- a/codeqai/llm.py +++ 
b/codeqai/llm.py @@ -5,7 +5,7 @@ import inquirer from langchain.callbacks.manager import CallbackManager from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler -from langchain_community.chat_models import AzureChatOpenAI +from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic from langchain_community.llms import LlamaCpp, Ollama from langchain_openai import ChatOpenAI @@ -33,6 +33,10 @@ def __init__(self, llm_host: LlmHost, chat_model: str, deployment=None): raise ValueError( "Azure OpenAI requires environment variable AZURE_OPENAI_ENDPOINT to be set." ) + elif llm_host == LlmHost.ANTHROPIC: + self.chat_model = ChatAnthropic( + temperature=0.9, max_tokens=2048, model_name=chat_model + ) elif llm_host == LlmHost.LLAMACPP: self.install_llama_cpp() self.chat_model = LlamaCpp( diff --git a/pyproject.toml b/pyproject.toml index 0873da7..a6b5bf8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "codeqai" -version = "0.0.16" +version = "0.0.17" description = "" authors = ["fynnfluegge "] readme = "README.md"