Commit 6085c4a

Merge branch 'main' into feat/llm/gigachat
shamspias authored Feb 29, 2024
2 parents c26a5fa + 208362e commit 6085c4a
Showing 1 changed file with 12 additions and 4 deletions.
backend/app/llms.py (12 additions, 4 deletions)
@@ -1,19 +1,27 @@
+import logging
 import os
 from functools import lru_cache
+from urllib.parse import urlparse
 import httpx
 import boto3
 from langchain_community.chat_models import BedrockChat, ChatAnthropic, ChatFireworks, GigaChat
 from langchain_google_vertexai import ChatVertexAI
 from langchain_openai import AzureChatOpenAI, ChatOpenAI

+logger = logging.getLogger(__name__)


 @lru_cache(maxsize=4)
 def get_openai_llm(gpt_4: bool = False, azure: bool = False):
     proxy_url = os.getenv("PROXY_URL")
-    if proxy_url is not None and proxy_url != "":
-        http_client = httpx.AsyncClient(proxies=proxy_url)
-    else:
-        http_client = None
+    http_client = None
+    if proxy_url:
+        parsed_url = urlparse(proxy_url)
+        if parsed_url.scheme and parsed_url.netloc:
+            http_client = httpx.AsyncClient(proxies=proxy_url)
+        else:
+            logger.warn("Invalid proxy URL provided. Proceeding without proxy.")

     if not azure:
         if gpt_4:
             llm = ChatOpenAI(
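
For context on the change merged in from main: instead of building an httpx client from PROXY_URL unconditionally, the new code only does so when urlparse finds both a scheme and a netloc, and otherwise logs a warning and falls back to no proxy. Below is a minimal, self-contained sketch of that behaviour; build_proxy_client is a hypothetical helper written for illustration, not a function in backend/app/llms.py.

# Illustrative sketch only: mirrors the proxy validation added in the diff above.
import logging
from typing import Optional
from urllib.parse import urlparse

import httpx

logger = logging.getLogger(__name__)


def build_proxy_client(proxy_url: Optional[str]) -> Optional[httpx.AsyncClient]:
    """Return an AsyncClient routed through proxy_url, or None if it is missing or malformed."""
    if not proxy_url:
        return None
    parsed = urlparse(proxy_url)
    # urlparse("http://proxy.example:3128") -> scheme="http", netloc="proxy.example:3128"  (accepted)
    # urlparse("proxy.example")             -> scheme="",     netloc=""                    (rejected)
    if parsed.scheme and parsed.netloc:
        return httpx.AsyncClient(proxies=proxy_url)
    logger.warning("Invalid proxy URL provided. Proceeding without proxy.")
    return None

One small note: the committed code calls logger.warn, which still works but is a deprecated alias for logger.warning in the standard library.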

0 comments on commit 6085c4a
