
Commit

Merge pull request #26 from FacerAin/feat/agent
[#23] Add LLM Caching
FacerAin authored Dec 6, 2023
2 parents b756e0f + 3cffa55 commit c8b81d4
Showing 2 changed files with 5 additions and 0 deletions.
4 changes: 4 additions & 0 deletions app/agent/agent.py
@@ -1,14 +1,18 @@
 import datetime
 
 from langchain.agents import AgentExecutor, AgentType, LLMSingleActionAgent, Tool, initialize_agent
+from langchain.cache import InMemoryCache
 from langchain.chains import LLMChain
 from langchain.chat_models import ChatOpenAI
+from langchain.globals import set_llm_cache
 
 from app.agent.parser import CustomAgentOutputParser
 from app.agent.prompts import AgentPromptTemplate, agent_prompt_template, retriever_prompt_template
 from app.agent.retriever import PineconeRetriever
 from app.core.config import settings
 
+set_llm_cache(InMemoryCache())
+
 
 class ExecutorAgent:
     def __init__(self):
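The change above enables a process-wide, in-memory cache for LLM calls: a repeated request with the same prompt and model parameters is answered from memory instead of triggering another API call. Below is a minimal sketch of the effect, not part of this commit; the model name and prompts are illustrative.

from langchain.cache import InMemoryCache
from langchain.chat_models import ChatOpenAI
from langchain.globals import set_llm_cache

# Process-wide cache, configured at import time as in app/agent/agent.py.
set_llm_cache(InMemoryCache())

llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)  # illustrative model

# First call goes to the OpenAI API; the result is stored in the cache.
first = llm.predict("Summarize what LLM caching does.")

# An identical call (same prompt, same model parameters) is served from the
# cache, so no second API request is made.
second = llm.predict("Summarize what LLM caching does.")
assert first == second

The cache is keyed on the prompt plus the model parameters and lives only for the lifetime of the process, which matches the in-memory choice made here.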
1 change: 1 addition & 0 deletions requirements.txt
@@ -8,3 +8,4 @@ uvicorn==0.23.2
 pydantic-settings==2.0.3
 pydantic==2.3.0
 tiktoken==0.3.3
+SQLAlchemy >= 2.0
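The new SQLAlchemy requirement presumably supports langchain's SQL-backed caches (an assumption; the commit does not say so). If cached responses needed to survive process restarts, the in-memory cache above could be swapped for langchain's SQLAlchemy-backed SQLiteCache; a hypothetical sketch:

from langchain.cache import SQLiteCache
from langchain.globals import set_llm_cache

# Hypothetical alternative to InMemoryCache: persist cached LLM responses in a
# local SQLite database (the path is arbitrary) via the SQLAlchemy-backed cache.
set_llm_cache(SQLiteCache(database_path=".langchain_cache.db"))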
