From 0cae0c865bcee2cc1d9df1445dae95c0d555062b Mon Sep 17 00:00:00 2001 From: wxg0103 <727495428@qq.com> Date: Tue, 24 Dec 2024 18:40:59 +0800 Subject: [PATCH] feat: support aws proxy_url --- .../credential/llm.py | 1 + .../aws_bedrock_model_provider/model/llm.py | 23 ++++++++++++++++--- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py b/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py index 8c4d6834db6..df18fc6ac0f 100644 --- a/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py +++ b/apps/setting/models_provider/impl/aws_bedrock_model_provider/credential/llm.py @@ -79,6 +79,7 @@ def encryption_dict(self, model: Dict[str, object]): region_name = forms.TextInputField('Region Name', required=True) access_key_id = forms.TextInputField('Access Key ID', required=True) secret_access_key = forms.PasswordInputField('Secret Access Key', required=True) + base_url = forms.TextInputField('Proxy URL', required=False) def get_model_params_setting_form(self, model_name): return BedrockLLMModelParams() diff --git a/apps/setting/models_provider/impl/aws_bedrock_model_provider/model/llm.py b/apps/setting/models_provider/impl/aws_bedrock_model_provider/model/llm.py index 950cd2b3f3c..dda406963db 100644 --- a/apps/setting/models_provider/impl/aws_bedrock_model_provider/model/llm.py +++ b/apps/setting/models_provider/impl/aws_bedrock_model_provider/model/llm.py @@ -1,4 +1,6 @@ from typing import List, Dict + +from botocore.config import Config from langchain_community.chat_models import BedrockChat from setting.models_provider.base_model_provider import MaxKBBaseModel @@ -33,19 +35,34 @@ def is_cache_model(): return False def __init__(self, model_id: str, region_name: str, credentials_profile_name: str, - streaming: bool = False, **kwargs): + streaming: bool = False, config: Config = None, **kwargs): super().__init__(model_id=model_id, 
region_name=region_name, - credentials_profile_name=credentials_profile_name, streaming=streaming, **kwargs) + credentials_profile_name=credentials_profile_name, streaming=streaming, config=config, + **kwargs) @classmethod def new_instance(cls, model_type: str, model_name: str, model_credential: Dict[str, str], **model_kwargs) -> 'BedrockModel': optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs) + config = None + # Build a proxy-enabled Config only when model_credential contains a non-empty base_url + if 'base_url' in model_credential and model_credential['base_url']: + proxy_url = model_credential['base_url'] + config = Config( + proxies={ + 'http': proxy_url, + 'https': proxy_url + }, + connect_timeout=60, + read_timeout=60 + ) + return cls( model_id=model_name, region_name=model_credential['region_name'], credentials_profile_name=model_credential['credentials_profile_name'], streaming=model_kwargs.pop('streaming', True), - model_kwargs=optional_params + model_kwargs=optional_params, + config=config )