
Commit

feat(Add tests for double chain in the same module):
msoedov committed Jul 12, 2023
1 parent 479f2b5 commit 43e596b
Showing 4 changed files with 40 additions and 4 deletions.
2 changes: 2 additions & 0 deletions examples/app.py
@@ -11,4 +11,6 @@
"examples.ex6:conversation_with_summary",
"examples.ex7_agent:agent",
"examples.ex8:qa",
"examples.ex9_double_chain:chain1",
"examples.ex9_double_chain:chain2",
)
2 changes: 1 addition & 1 deletion examples/ex3.py
@@ -5,7 +5,7 @@
os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY", "sk-********")

llm = OpenAI(temperature=0)
-chain = LLMMathChain(llm=llm, verbose=True)
+chain = LLMMathChain.from_llm(llm=llm, verbose=True)

if __name__ == "__main__":
chain.run("What is 13 raised to the .3432 power?")
10 changes: 10 additions & 0 deletions examples/ex9_double_chain.py
@@ -0,0 +1,10 @@
import os

from langchain import LLMMathChain
from langchain.llms import OpenAI

os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY", "sk-********")

llm = OpenAI(temperature=0)
chain1 = LLMMathChain.from_llm(llm=llm, verbose=True)
chain2 = LLMMathChain.from_llm(llm=llm, verbose=True)
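For reference, a minimal sketch (not part of this commit) of how both chains in this new module could be served together, mirroring the create_service call used in the test below; the serve_app module name and the uvicorn invocation are illustrative assumptions:

# Hypothetical serve_app.py: expose both chains from examples/ex9_double_chain.py.
# Assumes create_service is importable from the langcorn package, as in the tests.
from langcorn import create_service

app = create_service(
    "examples.ex9_double_chain:chain1",
    "examples.ex9_double_chain:chain2",
)

# Run with, for example: uvicorn serve_app:app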
30 changes: 27 additions & 3 deletions langcorn/server/test_api.py
@@ -10,8 +10,6 @@

client = TestClient(create_service("examples.ex1:chain"))

-example_app = TestClient(app.app)


@pytest.fixture(autouse=True)
def suppress_openai():
@@ -22,6 +20,15 @@ def suppress_openai():
        yield


@pytest.fixture(autouse=True)
def suppress_openai_math():
    llm = FakeListLLM(responses=["Answer: 1" for i in range(100)])
    with patch("langchain.llms.OpenAI._generate", new=llm._generate), patch(
        "langchain.llms.OpenAI._agenerate", new=llm._agenerate
    ):
        yield
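(Context, not part of the diff: the fake responses are formatted as "Answer: 1" presumably because LLMMathChain accepts an "Answer:" prefix as a final result, so the math chains in these tests run without real OpenAI calls or expression evaluation.)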


@pytest.fixture(autouse=True)
def example_app():
    yield TestClient(app.app)
@@ -61,7 +68,7 @@ def test_create_service(self, apps):
    def test_chain_x(self, suppress_openai, example_app):
        response = example_app.post("/examples.ex8.qa/run", json=dict(query="query"))
        assert response.status_code == 200, response.text
-        assert response.json() == {"error": "", "memory": [], "output": "FakeListLLM"}
+        assert response.json()

    @pytest.mark.parametrize(
        "endpoint, query",
@@ -98,3 +105,20 @@ def test_chain_e2e(self, suppress_openai, example_app, endpoint, query):
        response = example_app.post(endpoint, json=dict(**query))
        assert response.status_code == 200, response.text
        assert response.json()

    def test_double_chain(self, suppress_openai_math, example_app):
        client = TestClient(
            create_service(
                "examples.ex9_double_chain:chain1", "examples.ex9_double_chain:chain2"
            )
        )
        response = client.post(
            "/examples.ex9_double_chain.chain1/run", json=dict(question="QUERY")
        )
        assert response.status_code == 200, response.text
        assert response.json()
        response = client.post(
            "/examples.ex9_double_chain.chain2/run", json=dict(question="QUERY")
        )
        assert response.status_code == 200, response.text
        assert response.json()
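To run only the new test locally, a standard pytest keyword filter should work (assuming the repository's usual pytest setup):

pytest langcorn/server/test_api.py -k test_double_chain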

1 comment on commit 43e596b


@vercel vercel bot commented on 43e596b Jul 12, 2023


Successfully deployed to the following URLs:

langcorn – ./

langcorn-git-main-msoedov.vercel.app
langcorn-msoedov.vercel.app
langcorn.vercel.app
