Skip to content

Commit

Permalink
Feat/llm/openai/proxy (#182)
Browse files Browse the repository at this point in the history
* add proxy url example in env

* add proxy option

* add httpx[socks] for SOCKS proxy support

* add httpx with socks

* Update .env.example

* Update backend/app/llms.py

---------

Co-authored-by: Harrison Chase <hw.chase.17@gmail.com>
  • Loading branch information
shamspias and hwchase17 authored Feb 14, 2024
1 parent 5adef76 commit d3425b1
Show file tree
Hide file tree
Showing 4 changed files with 12 additions and 3 deletions.
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,4 @@ ROBOCORP_ACTION_SERVER_URL=https://dummy-action-server.robocorp.link
ROBOCORP_ACTION_SERVER_KEY=dummy-api-key
CONNERY_RUNNER_URL=https://your-personal-connery-runner-url
CONNERY_RUNNER_API_KEY=placeholder
PROXY_URL=your_proxy_url
12 changes: 9 additions & 3 deletions backend/app/llms.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import os
from functools import lru_cache

import httpx
import boto3
from langchain_community.chat_models import BedrockChat, ChatAnthropic, ChatFireworks
from langchain_google_vertexai import ChatVertexAI
Expand All @@ -9,13 +9,19 @@

@lru_cache(maxsize=4)
def get_openai_llm(gpt_4: bool = False, azure: bool = False):
proxy_url = os.environ["PROXY_URL"]
if proxy_url is not None or proxy_url != "":
http_client = httpx.AsyncClient(proxies=proxy_url)
else:
http_client = None
if not azure:
if gpt_4:
llm = ChatOpenAI(model="gpt-4-1106-preview", temperature=0, streaming=True)
llm = ChatOpenAI(http_client=http_client, model="gpt-4-1106-preview", temperature=0, streaming=True)
else:
llm = ChatOpenAI(model="gpt-3.5-turbo-1106", temperature=0, streaming=True)
llm = ChatOpenAI(http_client=http_client, model="gpt-3.5-turbo-1106", temperature=0, streaming=True)
else:
llm = AzureChatOpenAI(
http_client=http_client,
temperature=0,
deployment_name=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],
openai_api_base=os.environ["AZURE_OPENAI_API_BASE"],
Expand Down
1 change: 1 addition & 0 deletions backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ pdfminer-six = "^20231228"
langchain-robocorp = "^0.0.3"
fireworks-ai = "^0.11.2"
anthropic = "^0.13.0"
httpx = { version = "0.25.2", extras = ["socks"] }

[tool.poetry.group.dev.dependencies]
uvicorn = "^0.23.2"
Expand Down
1 change: 1 addition & 0 deletions backend/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ langchain-cli
-e packages/agent-executor
langchain>=0.1.0
langserve>=0.0.23
httpx[socks]
langgraph
python-multipart
langchain-robocorp>=0.0.1
Expand Down

0 comments on commit d3425b1

Please sign in to comment.