
Commit

Merge pull request #1546 from seehi/feat-set-context-length
Make context length configurable
geekan authored Oct 28, 2024
2 parents 8b209d4 + 7b81c8a commit 7146a7b
Showing 2 changed files with 4 additions and 1 deletion.
metagpt/configs/llm_config.py (1 addition, 0 deletions)
@@ -83,6 +83,7 @@ class LLMConfig(YamlModel):
     logprobs: Optional[bool] = None
     top_logprobs: Optional[int] = None
     timeout: int = 600
+    context_length: Optional[int] = None  # Max input tokens

     # For Amazon Bedrock
     region_name: str = None
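The new field is optional and defaults to None, so existing configs keep working unchanged. For illustration, a minimal sketch of the intended usage with the settings expressed as a plain dict (the real project reads them from config.yaml into the pydantic-based LLMConfig; key names other than context_length mirror the fields visible in the diff, and the values are hypothetical):

llm_settings = {
    "model": "gpt-4o",        # hypothetical model name
    "max_token": 4096,
    "timeout": 600,
    "context_length": 32768,  # new in this commit: max input tokens
}

# When context_length is omitted (i.e. None), downstream code falls back to a
# per-model lookup, as shown in the second file below.
print(llm_settings.get("context_length"))  # 32768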
metagpt/rag/factories/llm.py (3 additions, 1 deletion)
@@ -23,10 +23,12 @@ class RAGLLM(CustomLLM):
"""LlamaIndex's LLM is different from MetaGPT's LLM.
Inherit CustomLLM from llamaindex, making MetaGPT's LLM can be used by LlamaIndex.
Set context_length or max_token of LLM in config.yaml if you encounter "Calculated available context size -xxx was not non-negative" error.
"""

model_infer: BaseLLM = Field(..., description="The MetaGPT's LLM.")
context_window: int = TOKEN_MAX.get(config.llm.model, DEFAULT_CONTEXT_WINDOW)
context_window: int = config.llm.context_length or TOKEN_MAX.get(config.llm.model, DEFAULT_CONTEXT_WINDOW)
num_output: int = config.llm.max_token
model_name: str = config.llm.model

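The changed line gives context_window a three-step fallback: an explicit context_length from the config wins; otherwise the window comes from the per-model TOKEN_MAX table; and if the model is unknown there, DEFAULT_CONTEXT_WINDOW is used. A self-contained sketch of that resolution order (the table contents and the default value below are hypothetical stand-ins, not the project's real constants):

from typing import Optional

# Hypothetical stand-ins for the TOKEN_MAX table and DEFAULT_CONTEXT_WINDOW
# used by metagpt/rag/factories/llm.py; the real values live in the project.
TOKEN_MAX = {"gpt-4o": 128000, "gpt-3.5-turbo": 16385}
DEFAULT_CONTEXT_WINDOW = 3900

def resolve_context_window(model: str, context_length: Optional[int]) -> int:
    # Mirrors the changed line:
    # config.llm.context_length or TOKEN_MAX.get(config.llm.model, DEFAULT_CONTEXT_WINDOW)
    return context_length or TOKEN_MAX.get(model, DEFAULT_CONTEXT_WINDOW)

print(resolve_context_window("gpt-4o", None))           # 128000: known model, no override
print(resolve_context_window("my-local-model", None))   # 3900: unknown model, default window
print(resolve_context_window("my-local-model", 32768))  # 32768: explicit context_length wins

Because the fallback uses Python's or, a context_length of 0 would also fall through to the lookup; in practice the field is either left as None or set to a positive token count.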
