Linting #2

Merged (4 commits) on Jan 4, 2025
42 changes: 42 additions & 0 deletions .github/workflows/linting.yml
@@ -0,0 +1,42 @@
name: Lint code and check formatting with black and isort

on:
  pull_request:

jobs:
  pre-commit:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.11

      - name: Cache pip dependencies
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Cache pre-commit dependencies
        uses: actions/cache@v4
        with:
          path: ~/.cache/pre-commit
          key: ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pre-commit
      - name: Run pre-commit
        run: |
          pre-commit run --all-files
18 changes: 18 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,18 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer

  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v5.10.1
    hooks:
      - id: isort
        args: ["--profile=black"]

  - repo: https://github.com/psf/black
    rev: 24.10.0
    hooks:
      - id: black
        args: ["--config=pyproject.toml"]
2 changes: 1 addition & 1 deletion main.py
@@ -1,4 +1,4 @@
from src.app import BitsGPT

bitsgpt = BitsGPT()
app = bitsgpt.app
app = bitsgpt.app
119 changes: 118 additions & 1 deletion poetry.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -13,6 +13,7 @@ langgraph = "^0.2.60"
langsmith = "^0.2.7"
langchain-groq = "^0.2.2"
langgraph-cli = {extras = ["inmem"], version = "^0.1.65"}
pre-commit = "^4.0.1"


[build-system]
35 changes: 21 additions & 14 deletions src/agents.py
@@ -1,13 +1,13 @@
import os
import textwrap
from dotenv import load_dotenv


from langchain_groq import ChatGroq
from dotenv import load_dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq

load_dotenv()


class Agents:
    def __init__(self):

@@ -25,8 +25,9 @@ def __init__(self):
                agent_name = agent_name.split(".")[0]
                self.prompts[agent_name] = f.read()


    def get_prompt(self, agent_name: str, query: str, chat_history: str, agent_scratchpad=False) -> ChatPromptTemplate:
    def get_prompt(
        self, agent_name: str, query: str, chat_history: str, agent_scratchpad=False
    ) -> ChatPromptTemplate:

        prompt = [
            (
@@ -35,21 +36,25 @@ def get_prompt(self, agent_name: str, query: str, chat_history: str, agent_scrat
            ),
            (
                "user",
                textwrap.dedent(f"<query>{query}</query>\n\n<history>{chat_history}</history>"),
                textwrap.dedent(
                    f"<query>{query}</query>\n\n<history>{chat_history}</history>"
                ),
            ),
        ]
        if agent_scratchpad:
            prompt.append(("placeholder", "{agent_scratchpad}"))
        return ChatPromptTemplate.from_messages(prompt)

    def intent_classifier(self, query: str, chat_history: str) -> str:
        prompt = self.get_prompt("INTENT_CLASSIFIER_AGENT", query, chat_history)

        chain = prompt | self.llm

        result = chain.invoke({
            "input": query,
        })
        result = chain.invoke(
            {
                "input": query,
            }
        )

        return result

@@ -58,11 +63,13 @@ def general_campus_query(self, query: str, chat_history: str) -> str:

        chain = prompt | self.llm

        result = chain.invoke({
            "input": query,
        })
        result = chain.invoke(
            {
                "input": query,
            }
        )

        return result

    def course_query(self, query: str, chat_history: str) -> str:
        raise NotImplementedError("Course query not implemented yet")
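
As a side note on the reformatted Agents class, a minimal local smoke test might look like the sketch below (not part of this diff; it assumes GROQ_API_KEY is supplied through the .env file that load_dotenv() reads, and the sample query is purely illustrative):

from src.agents import Agents

agents = Agents()
# intent_classifier builds the prompt and pipes it into ChatGroq, so the
# returned value is the result of chain.invoke (an AIMessage for ChatGroq)
reply = agents.intent_classifier("Where is the main auditorium?", chat_history="")
print(reply.content)
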
15 changes: 10 additions & 5 deletions src/app.py
@@ -1,6 +1,12 @@
from langgraph.graph import StateGraph, END
from langgraph.graph import END, StateGraph

from .nodes import (
    course_query,
    general_campus_query,
    intent_classifier,
    not_related_query,
)
from .state import State
from .nodes import intent_classifier, course_query, general_campus_query, not_related_query


class BitsGPT:
@@ -25,12 +31,11 @@ def intent_router(state):
return "general_campus_query"
else:
return "not_related_query"



graph.add_conditional_edges("intent_classifer", intent_router)

graph.add_edge("course_query", END)
graph.add_edge("general_campus_query", END)
graph.add_edge("not_related_query", END)

return graph
return graph
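
For completeness, one way the assembled graph could be driven end to end is sketched below (not part of this diff). It assumes bitsgpt.app exposes the StateGraph built here and that it still needs .compile() before it can be invoked; the question string is invented for illustration:

from langchain_core.messages import HumanMessage
from langgraph.graph import StateGraph

from src.app import BitsGPT

app = BitsGPT().app
# A raw StateGraph must be compiled before .invoke(); skip this if app is already compiled
runnable = app.compile() if isinstance(app, StateGraph) else app
result = runnable.invoke({"messages": [HumanMessage("When does the library open?")]})
print(result["messages"][-1].content)
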
13 changes: 10 additions & 3 deletions src/nodes.py
@@ -1,26 +1,33 @@
from langchain_core.messages import AIMessage

from .agents import Agents
from .state import State

agents = Agents()


def intent_classifier(state: State):
    query = state["messages"][0].content
    result = agents.intent_classifier(query, state.get("chat_history", ""))

    return {"messages": [result]}


def course_query(state: State):
    query = state["messages"][0].content
    result = AIMessage("Course query not implemented yet")
    return {"messages": [result]}


def general_campus_query(state: State):
    query = state["messages"][0].content
    result = agents.general_campus_query(query, state.get("chat_history", ""))
    return {"messages": [result]}


def not_related_query(state: State):
    query = state["messages"][0].content
    result = AIMessage("I'm sorry, I don't understand the question, if it relates to campus please rephrase.")
    return {"messages": [result]}
    result = AIMessage(
        "I'm sorry, I don't understand the question, if it relates to campus please rephrase."
    )
    return {"messages": [result]}