Skip to content

Commit

Permalink
ci: add GHA to execute how-to notebooks (#1694)
Browse files Browse the repository at this point in the history
  • Loading branch information
vbarda authored Sep 12, 2024
1 parent 73d1051 commit 889cc9b
Show file tree
Hide file tree
Showing 11 changed files with 404 additions and 65 deletions.
53 changes: 53 additions & 0 deletions .github/workflows/run_notebooks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Nightly CI job that executes every how-to notebook against both the
# in-repo ("development") and released ("latest") versions of langgraph.
name: Run notebooks

on:
  workflow_call:
  schedule:
    # Daily at 13:00 UTC.
    - cron: '0 13 * * *'

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        lib-version:
          - "development"
          - "latest"

    name: "test (langgraph: ${{ matrix.lib-version }})"
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python + Poetry
        uses: "./.github/actions/poetry_setup"
        with:
          # Quote version-like values: unquoted they are parsed as YAML
          # floats (e.g. 3.10 would become 3.1).
          python-version: "3.11"
          poetry-version: "1.7.1"
          cache-key: test-langgraph-notebooks

      - name: Install dependencies
        run: |
          poetry install --with test
          poetry run pip install jupyter
      - name: Start services
        # Brings up the mongo/redis/postgres stack from docs/test-compose.yml.
        run: make start-services

      - name: Prepare notebooks
        # Only the development run strips pip-install cells so notebooks use
        # the checked-out source instead of installing released packages.
        if: ${{ matrix.lib-version == 'development' }}
        run: poetry run python docs/_scripts/prepare_notebooks_for_ci.py

      - name: Run notebooks
        env:
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }}
          LANGSMITH_API_KEY: ${{ secrets.LANGSMITH_API_KEY }}
        run: |
          for file in $(find docs/docs/how-tos -name "*.ipynb")
          do
            echo "Executing $file"
            PIP_PRE=1 poetry run jupyter execute "$file"
          done
      - name: Stop services
        # always() ensures the compose stack is torn down even when a
        # notebook fails above; otherwise this step would be skipped.
        if: always()
        run: make stop-services
8 changes: 7 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,10 @@ clean-docs:
rm -rf docs/site

codespell:
./docs/codespell_notebooks.sh .
./docs/codespell_notebooks.sh .

start-services:
docker compose -f docs/test-compose.yml up -V --force-recreate --wait --remove-orphans

stop-services:
docker compose -f docs/test-compose.yml down
44 changes: 44 additions & 0 deletions docs/_scripts/prepare_notebooks_for_ci.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""Preprocess notebooks for CI. Currently removes pip install cells."""

import os
import json
import logging

logger = logging.getLogger(__name__)
NOTEBOOK_DIRS = ("docs/docs/how-tos",)

def remove_install_cells(notebook_path: str) -> None:
    """Rewrite a notebook in place, dropping code cells that run ``pip install``.

    Args:
        notebook_path: Path to the ``.ipynb`` file to rewrite.
    """
    # Notebooks are JSON and always UTF-8; pass the encoding explicitly so
    # behavior does not depend on the platform's default (e.g. on Windows).
    with open(notebook_path, "r", encoding="utf-8") as file:
        notebook = json.load(file)

    # Keep every cell except code cells whose source mentions "pip install" —
    # filtering in one pass replaces the original collect-indices-then-pop
    # pattern and cannot get index bookkeeping wrong.
    notebook["cells"] = [
        cell
        for cell in notebook["cells"]
        if not (
            cell["cell_type"] == "code"
            and any("pip install" in line for line in cell["source"])
        )
    ]

    with open(notebook_path, "w", encoding="utf-8") as file:
        json.dump(notebook, file, indent=2)


def process_notebooks() -> None:
    """Strip pip-install cells from every notebook under NOTEBOOK_DIRS.

    Each ``.ipynb`` file is rewritten in place via ``remove_install_cells``.
    Per-file failures are logged and do not abort the run, so one malformed
    notebook cannot block CI preprocessing of the rest.
    """
    for directory in NOTEBOOK_DIRS:
        for root, _, files in os.walk(directory):
            for file in files:
                if not file.endswith(".ipynb"):
                    continue

                notebook_path = os.path.join(root, file)
                try:
                    remove_install_cells(notebook_path)
                    # %-style args defer formatting until the record is emitted.
                    logger.info("Processed: %s", notebook_path)
                except Exception as e:
                    logger.error("Error processing %s: %s", notebook_path, e)


if __name__ == "__main__":
    # Without explicit configuration the root logger drops INFO records
    # (the last-resort handler only emits WARNING and above), so the
    # progress/success messages would never appear in CI output.
    logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
    process_notebooks()
    logger.info("All notebooks processed successfully.")
16 changes: 12 additions & 4 deletions docs/docs/how-tos/human_in_the_loop/wait-user-input.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -374,14 +374,22 @@
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"model = ChatOpenAI(model=\"gpt-4o\")\n",
"\n",
"# NOTE:\n",
"# - if you're using langchain-core >= 0.3, you need to use pydantic v2\n",
"# - if you're using langchain-core >= 0.2,<0.3, you need to use pydantic v1\n",
"from langchain_core import __version__ as core_version\n",
"from packaging import version\n",
"\n",
"core_version = version.parse(core_version)\n",
"if (core_version.major, core_version.minor) < (0, 3):\n",
" from pydantic.v1 import BaseModel\n",
"else:\n",
" from pydantic import BaseModel\n",
"\n",
"# We are going \"bind\" all tools to the model\n",
"# We have the ACTUAL tools from above, but we also need a mock tool to ask a human\n",
"# Since `bind_tools` takes in tools but also just tool definitions,\n",
"# We can define a tool definition for `ask_human`\n",
"\n",
"from pydantic import BaseModel\n",
"\n",
"\n",
"class AskHuman(BaseModel):\n",
" \"\"\"Ask the human a question\"\"\"\n",
"\n",
Expand Down
18 changes: 15 additions & 3 deletions docs/docs/how-tos/many-tools.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"outputs": [],
"source": [
"%%capture --no-stderr\n",
"%pip install --quiet -U langgraph langchain_openai"
"%pip install --quiet -U langgraph langchain_openai numpy"
]
},
{
Expand Down Expand Up @@ -351,7 +351,19 @@
"outputs": [],
"source": [
"from langchain_core.messages import HumanMessage, SystemMessage, ToolMessage\n",
"from pydantic import BaseModel, Field\n",
"from langgraph.pregel.retry import RetryPolicy\n",
"\n",
"# NOTE:\n",
"# - if you're using langchain-core >= 0.3, you need to use pydantic v2\n",
"# - if you're using langchain-core >= 0.2,<0.3, you need to use pydantic v1\n",
"from langchain_core import __version__ as core_version\n",
"from packaging import version\n",
"\n",
"core_version = version.parse(core_version)\n",
"if (core_version.major, core_version.minor) < (0, 3):\n",
" from pydantic.v1 import BaseModel, Field\n",
"else:\n",
" from pydantic import BaseModel, Field\n",
"\n",
"\n",
"class QueryForTools(BaseModel):\n",
Expand Down Expand Up @@ -394,7 +406,7 @@
"\n",
"graph_builder = StateGraph(State)\n",
"graph_builder.add_node(\"agent\", agent)\n",
"graph_builder.add_node(\"select_tools\", select_tools)\n",
"graph_builder.add_node(\"select_tools\", select_tools, retry=RetryPolicy(max_attempts=3))\n",
"\n",
"tool_node = ToolNode(tools=tools)\n",
"graph_builder.add_node(\"tools\", tool_node)\n",
Expand Down
15 changes: 13 additions & 2 deletions docs/docs/how-tos/map-reduce.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -94,12 +94,23 @@
"import operator\n",
"from typing import Annotated, TypedDict\n",
"\n",
"from pydantic import BaseModel, Field\n",
"from langchain_anthropic import ChatAnthropic\n",
"\n",
"from langgraph.constants import Send\n",
"from langgraph.graph import END, StateGraph, START\n",
"\n",
"# NOTE:\n",
"# - if you're using langchain-core >= 0.3, you need to use pydantic v2\n",
"# - if you're using langchain-core >= 0.2,<0.3, you need to use pydantic v1\n",
"from langchain_core import __version__ as core_version\n",
"from packaging import version\n",
"\n",
"core_version = version.parse(core_version)\n",
"if (core_version.major, core_version.minor) < (0, 3):\n",
" from pydantic.v1 import BaseModel, Field\n",
"else:\n",
" from pydantic import BaseModel, Field\n",
"\n",
"# Model and prompts\n",
"# Define model and prompts we will use\n",
"subjects_prompt = \"\"\"Generate a comma separated list of between 2 and 5 examples related to: {topic}.\"\"\"\n",
Expand All @@ -118,7 +129,7 @@
"\n",
"\n",
"class BestJoke(BaseModel):\n",
" id: int = Field(description=\"Index of the best joke, starting with 0\")\n",
" id: int = Field(description=\"Index of the best joke, starting with 0\", ge=0)\n",
"\n",
"\n",
"model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
Expand Down
4 changes: 1 addition & 3 deletions docs/docs/how-tos/memory/manage-conversation-history.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -238,9 +238,7 @@
" \"\"\"Call to surf the web.\"\"\"\n",
" # This is a placeholder for the actual implementation\n",
" # Don't let the LLM know this though 😊\n",
" return [\n",
" \"It's sunny in San Francisco, but you better look out if you're a Gemini 😈.\"\n",
" ]\n",
" return \"It's sunny in San Francisco, but you better look out if you're a Gemini 😈.\"\n",
"\n",
"\n",
"tools = [search]\n",
Expand Down
14 changes: 12 additions & 2 deletions docs/docs/how-tos/pass-run-time-values-to-tools.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -98,11 +98,21 @@
"from typing_extensions import Annotated\n",
"\n",
"from langchain_core.documents import Document\n",
"from pydantic import BaseModel\n",
"from langchain_core.tools import tool\n",
"\n",
"from langgraph.prebuilt import InjectedState\n",
"\n",
"# NOTE:\n",
"# - if you're using langchain-core >= 0.3, you need to use pydantic v2\n",
"# - if you're using langchain-core >= 0.2,<0.3, you need to use pydantic v1\n",
"from langchain_core import __version__ as core_version\n",
"from packaging import version\n",
"\n",
"core_version = version.parse(core_version)\n",
"if (core_version.major, core_version.minor) < (0, 3):\n",
" from pydantic.v1 import BaseModel\n",
"else:\n",
" from pydantic import BaseModel\n",
"\n",
"\n",
"@tool(parse_docstring=True, response_format=\"content_and_artifact\")\n",
"def get_context(question: List[str]) -> Tuple[str, List[Document]]:\n",
Expand Down
25 changes: 25 additions & 0 deletions docs/test-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Backing services for the notebook CI tests; started/stopped by the
# `make start-services` / `make stop-services` targets.
name: notebook-tests
services:
  # MongoDB on its default port.
  mongo:
    image: mongo:latest
    ports:
      - "27017:27017"
  # Redis on its default port.
  redis:
    image: redis:latest
    ports:
      - "6379:6379"
  postgres:
    image: postgres:16
    ports:
      # Host port 5442 (not 5432) avoids clashing with any Postgres
      # already running locally.
      - "5442:5432"
    environment:
      POSTGRES_DB: postgres
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    # Health check lets `docker compose up --wait` block until Postgres
    # accepts connections: probe every 1s for the first 10s, then every 60s.
    healthcheck:
      test: pg_isready -U postgres
      start_period: 10s
      timeout: 1s
      retries: 5
      interval: 60s
      start_interval: 1s
Loading

0 comments on commit 889cc9b

Please sign in to comment.