From 8ddd110168eb537a796984b8e53197f7328265ac Mon Sep 17 00:00:00 2001
From: Corey Jones
Date: Fri, 25 Oct 2024 20:17:57 -0400
Subject: [PATCH 1/2] Added OpenRouter support and model testing

---
 docs/api-reference/Model Testing/model_testing.py | 33 +++++++++++++++++
 docs/integrations/openrouter.py                   | 37 +++++++++++++++++++
 setup.sh                                          | 21 +++++++++++++
 3 files changed, 91 insertions(+)
 create mode 100644 docs/api-reference/Model Testing/model_testing.py
 create mode 100644 docs/integrations/openrouter.py

diff --git a/docs/api-reference/Model Testing/model_testing.py b/docs/api-reference/Model Testing/model_testing.py
new file mode 100644
index 000000000..53b149153
--- /dev/null
+++ b/docs/api-reference/Model Testing/model_testing.py
@@ -0,0 +1,33 @@
+import os
+
+import requests
+
+
+# model testing
+def test_openrouter_model():
+    url = "https://openrouter.ai/api/v1/chat/completions"
+    api_key = os.getenv("OPENROUTER_API_KEY")
+
+    # Set headers and payload for test
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+    payload = {
+        "model": "openai/gpt-3.5-turbo",
+        "messages": [{"role": "user", "content": "This is a test message for model verification."}],
+    }
+
+    # Send test request (timeout so a hung connection cannot block forever)
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+
+    if response.ok:
+        # Display a sample response if the model responds successfully
+        response_data = response.json()
+        print("Model test successful!")
+        print("Response:", response_data)
+    else:
+        # If there’s an error, print the error message
+        print(f"Model test failed with status code {response.status_code}.")
+        print("Error details:", response.text)
+
+
+# Call the testing function
+test_openrouter_model()
diff --git a/docs/integrations/openrouter.py b/docs/integrations/openrouter.py
new file mode 100644
index 000000000..6eb5989a6
--- /dev/null
+++ b/docs/integrations/openrouter.py
@@ -0,0 +1,37 @@
+import os
+
+import requests
+
+
+def get_openrouter_response() -> None:
+    url = "https://openrouter.ai/api/v1/chat/completions"
+
+    # Create headers and payload (env var names match what setup.sh writes)
+    headers = {
+        "Authorization": f"Bearer {os.getenv('OPENROUTER_API_KEY')}",
+        "HTTP-Referer": os.getenv("YOUR_SITE_URL"),
+        "X-Title": os.getenv("YOUR_SITE_NAME"),
+        "Content-Type": "application/json",
+    }
+
+    payload = {"model": "OPENROUTER_MODEL", "messages": [{"role": "user", "content": "YOUR QUESTION HERE"}]}
+
+    print("Sending request...")
+
+    # Send request and capture response (timeout so a hung connection cannot block forever)
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+
+    # Print status code and response content
+    print("Response Content:", response.text)
+
+    # Handle the response
+    if response.ok:
+        try:
+            print("Response JSON:", response.json())
+        except ValueError:
+            print("Response is not in JSON format.")
+    else:
+        print(f"Error: {response.status_code}\n{response.text}")
+
+
+get_openrouter_response()
diff --git a/setup.sh b/setup.sh
index 8b7a145ec..238ce8264 100755
--- a/setup.sh
+++ b/setup.sh
@@ -98,6 +98,23 @@ setup_llm_providers() {
         update_or_add_env_var "ENABLE_AZURE" "false"
     fi
 
+    # Openrouter Configuration
+    echo "To enable Openrouter, you must have an Openrouter API key."
+    read -p "Do you want to enable Openrouter (y/n)? " enable_openrouter
+    if [[ "$enable_openrouter" == "y" ]]; then
+        read -p "Enter your Openrouter API key: " openrouter_api_key
+        if [ -z "$openrouter_api_key" ]; then
+            echo "Error: Openrouter API key is required."
+            echo "Openrouter will not be enabled."
+        else
+            update_or_add_env_var "OPENROUTER_API_KEY" "$openrouter_api_key"
+            update_or_add_env_var "ENABLE_OPENROUTER" "true"
+            model_options+=("anthropic/claude-3.5-sonnet" "meta-llama/llama-3.2-90b-vision-instruct" "google/gemini-flash-1.5-8b")
+        fi
+    else
+        update_or_add_env_var "ENABLE_OPENROUTER" "false"
+    fi
+
     # Model Selection
     if [ ${#model_options[@]} -eq 0 ]; then
         echo "No LLM providers enabled. You won't be able to run Skyvern unless you enable at least one provider. You can re-run this script to enable providers or manually update the .env file."
@@ -308,3 +325,7 @@ main() {
 
 # Execute main function
 main
+
+# Test the OpenRouter model connection
+echo "Testing OpenRouter model connection..."
+python3 "docs/api-reference/Model Testing/model_testing.py"
\ No newline at end of file
From 782b135bc6a105e2ff189ba3ce7e2f9f97bb3860 Mon Sep 17 00:00:00 2001
From: CoreyJness
Date: Fri, 25 Oct 2024 22:45:57 -0400
Subject: [PATCH 2/2] Update docs/api-reference/Model Testing/model_testing.py

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
---
 docs/api-reference/Model Testing/model_testing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/api-reference/Model Testing/model_testing.py b/docs/api-reference/Model Testing/model_testing.py
index 53b149153..8fc3063ab 100644
--- a/docs/api-reference/Model Testing/model_testing.py
+++ b/docs/api-reference/Model Testing/model_testing.py
@@ -6,7 +6,7 @@
 # model testing
 def test_openrouter_model():
     url = "https://openrouter.ai/api/v1/chat/completions"
-    api_key = os.getenv("OPENROUTER_API_KEY")
+    api_key = os.getenv("OPENROUTER_API_KEY") or ''
 
     # Set headers and payload for test
     headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}