Skip to content

Commit

Permalink
fix(tests): patch OpenAI._agenerate in the test fixture and fix example imports
Browse files Browse the repository at this point in the history
  • Loading branch information
msoedov committed Jul 12, 2023
1 parent f34a29a commit 764bb2d
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 6 deletions.
3 changes: 2 additions & 1 deletion examples/ex4.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import os

from langchain import OpenAI, PromptTemplate
from langchain import PromptTemplate
from langchain.chains import LLMChain, LLMRequestsChain, SequentialChain
from langchain.llms import OpenAI

os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY", "sk-********")

Expand Down
4 changes: 2 additions & 2 deletions examples/ex5.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from langchain.chains import ConversationChain
from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import (
ChatPromptTemplate,
Expand All @@ -18,7 +18,7 @@
]
)

llm = ChatOpenAI(temperature=0)
llm = OpenAI(temperature=0)
memory = ConversationBufferMemory(return_messages=True)
conversation = ConversationChain(memory=memory, prompt=prompt, llm=llm)

Expand Down
8 changes: 5 additions & 3 deletions langcorn/server/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
@pytest.fixture(autouse=True)
def suppress_openai():
llm = FakeListLLM(responses=["FakeListLLM" for i in range(100)])
with patch("langchain.llms.OpenAI._generate", new=llm._generate):
with patch("langchain.llms.OpenAI._generate", new=llm._generate), patch(
"langchain.llms.OpenAI._agenerate", new=llm._agenerate
):
yield


Expand Down Expand Up @@ -72,7 +74,7 @@ def test_chain_x(self, suppress_openai, example_app):
url="https://github.com/msoedov/langcorn/blob/main/examples/ex7_agent.py",
),
),
# ("/examples.ex3.chain/run", dict(question="QUERY")),
# ("/examples.ex3.chain/run", dict(question="QUERY")), # requires llm response format
(
"/examples.ex4.sequential_chain/run",
dict(
Expand All @@ -88,7 +90,7 @@ def test_chain_x(self, suppress_openai, example_app):
"/examples.ex6.conversation_with_summary/run",
dict(input="QUERY", history="", memory=[]),
),
("/examples.ex7_agent.agent/run", dict(input="QUERY")),
# ("/examples.ex7_agent.agent/run", dict(input="QUERY")), # requires llm response format
("/examples.ex8.qa/run", dict(query="QUERY")),
],
)
Expand Down

0 comments on commit 764bb2d

Please sign in to comment.