diff --git a/examples/async/expert_answer.py b/examples/async/expert_answer.py
index 66ee319..09a9d93 100644
--- a/examples/async/expert_answer.py
+++ b/examples/async/expert_answer.py
@@ -1,8 +1,11 @@
-from asyncio import gather, run as _await
-from funcchain import achain, settings
-from pydantic import BaseModel
+from asyncio import gather
+from asyncio import run as _await
 from random import shuffle
 
+from pydantic import BaseModel
+
+from funcchain import achain, settings
+
 settings.temperature = 1
 settings.llm = "openai/gpt-3.5-turbo-1106"
diff --git a/examples/experiments/dynamic_model_generation.py b/examples/experiments/dynamic_model_generation.py
index a65558b..4b0ce4b 100644
--- a/examples/experiments/dynamic_model_generation.py
+++ b/examples/experiments/dynamic_model_generation.py
@@ -1,9 +1,10 @@
 from langchain.document_loaders import WebBaseLoader
-from funcchain import chain, settings
-from funcchain.parser import CodeBlock
 from pydantic import BaseModel
 from rich import print
 
+from funcchain import chain, settings
+from funcchain.parser import CodeBlock
+
 settings.llm = "gpt-4-1106-preview"
 settings.context_lenght = 4096 * 8
diff --git a/src/funcchain/_llms.py b/src/funcchain/_llms.py
index 5a37e3f..407bc1b 100644
--- a/src/funcchain/_llms.py
+++ b/src/funcchain/_llms.py
@@ -7,14 +7,7 @@
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chat_models.base import BaseChatModel
 from langchain.schema import ChatResult
-from langchain.schema.messages import (
-    AIMessage,
-    AIMessageChunk,
-    BaseMessage,
-    ChatMessage,
-    HumanMessage,
-    SystemMessage,
-)
+from langchain.schema.messages import AIMessage, AIMessageChunk, BaseMessage, ChatMessage, HumanMessage, SystemMessage
 from langchain.schema.output import ChatGeneration, ChatGenerationChunk
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.pydantic_v1 import Field, root_validator
diff --git a/src/funcchain/chain/creation.py b/src/funcchain/chain/creation.py
index 8107299..57005fd 100644
--- a/src/funcchain/chain/creation.py
+++ b/src/funcchain/chain/creation.py
@@ -1,19 +1,16 @@
 from types import UnionType
 from typing import TypeVar, Union
 
-from funcchain._llms import ChatLlamaCpp
 from langchain.chat_models.base import BaseChatModel
 from langchain.prompts import ChatPromptTemplate
 from langchain.schema import AIMessage, BaseMessage, BaseOutputParser, HumanMessage
 from langchain.schema.chat_history import BaseChatMessageHistory
-from langchain.schema.runnable import (
-    RunnableSequence,
-    RunnableSerializable,
-    RunnableWithFallbacks,
-)
+from langchain.schema.runnable import RunnableSequence, RunnableSerializable, RunnableWithFallbacks
 from PIL import Image
 from pydantic import BaseModel
 
+from funcchain._llms import ChatLlamaCpp
+
 from ..parser import MultiToolParser, ParserBaseModel, PydanticFuncParser
 from ..settings import FuncchainSettings
 from ..streaming import stream_handler
@@ -24,17 +21,13 @@
     is_function_model,
     is_vision_model,
     kwargs_from_parent,
-    univeral_model_selector,
     multi_pydantic_to_functions,
     parser_for,
     pydantic_to_functions,
     pydantic_to_grammar,
+    univeral_model_selector,
 )
-from .prompt import (
-    HumanImageMessagePromptTemplate,
-    create_chat_prompt,
-    create_instruction_prompt,
-)
+from .prompt import HumanImageMessagePromptTemplate, create_chat_prompt, create_instruction_prompt
 
 ChainOutput = TypeVar("ChainOutput")
@@ -261,7 +254,7 @@ def _gather_llm(
         llm = settings.llm
     else:
         llm = univeral_model_selector(settings)
-    
+
     if not llm:
         raise RuntimeError(
             "No language model provided. Either set the llm environment variable or "
diff --git a/src/funcchain/chain/interface.py b/src/funcchain/chain/interface.py
index 691ef40..8c3dfbe 100644
--- a/src/funcchain/chain/interface.py
+++ b/src/funcchain/chain/interface.py
@@ -4,8 +4,8 @@
 from langchain.schema import BaseMessage, BaseOutputParser
 from langchain.schema.chat_history import BaseChatMessageHistory
 
-from .invoke import ainvoke, invoke
 from ..settings import SettingsOverride, get_settings
+from .invoke import ainvoke, invoke
 
 ChainOutput = TypeVar("ChainOutput")
diff --git a/src/funcchain/chain/invoke.py b/src/funcchain/chain/invoke.py
index 06687d3..fd85510 100644
--- a/src/funcchain/chain/invoke.py
+++ b/src/funcchain/chain/invoke.py
@@ -4,8 +4,8 @@
 from langchain.schema import BaseMessage, BaseOutputParser
 from langchain.schema.chat_history import BaseChatMessageHistory
 
-from ..utils.decorators import get_parent_frame, log_openai_callback, retry_parse
 from ..settings import FuncchainSettings
+from ..utils.decorators import get_parent_frame, log_openai_callback, retry_parse
 from .creation import create_chain
 
 T = TypeVar("T")
diff --git a/src/funcchain/chain/prompt.py b/src/funcchain/chain/prompt.py
index 318ede0..63c9c9a 100644
--- a/src/funcchain/chain/prompt.py
+++ b/src/funcchain/chain/prompt.py
@@ -1,16 +1,13 @@
 from string import Formatter
-from typing import Any, Type, Optional
+from typing import Any, Optional, Type
 
-from PIL import Image
-from pydantic import BaseModel
 from langchain.prompts import ChatPromptTemplate
-from langchain.prompts.chat import (
-    BaseStringMessagePromptTemplate,
-    MessagePromptTemplateT,
-)
+from langchain.prompts.chat import BaseStringMessagePromptTemplate, MessagePromptTemplateT
 from langchain.prompts.prompt import PromptTemplate
 from langchain.schema import BaseMessage, HumanMessage, SystemMessage
 from langchain.schema.chat_history import BaseChatMessageHistory
+from PIL import Image
+from pydantic import BaseModel
 
 from ..utils import image_to_base64_url
diff --git a/src/funcchain/parser.py b/src/funcchain/parser.py
index 545ddf2..6ad55b6 100644
--- a/src/funcchain/parser.py
+++ b/src/funcchain/parser.py
@@ -3,12 +3,7 @@
 import re
 from typing import Callable, Optional, Type, TypeVar
 
-from langchain.schema import (
-    AIMessage,
-    ChatGeneration,
-    Generation,
-    OutputParserException,
-)
+from langchain.schema import AIMessage, ChatGeneration, Generation, OutputParserException
 from langchain.schema.output_parser import BaseGenerationOutputParser, BaseOutputParser
 from pydantic import BaseModel, ValidationError
diff --git a/src/funcchain/settings.py b/src/funcchain/settings.py
index 165c86d..881903a 100644
--- a/src/funcchain/settings.py
+++ b/src/funcchain/settings.py
@@ -8,8 +8,8 @@
 from langchain.chat_models.base import BaseChatModel
 from langchain.globals import set_llm_cache
 from langchain.schema.runnable import RunnableWithFallbacks
-from pydantic_settings import BaseSettings
 from pydantic import Field
+from pydantic_settings import BaseSettings
 
 set_llm_cache(InMemoryCache())
diff --git a/src/funcchain/streaming.py b/src/funcchain/streaming.py
index 0aa4730..5e6e368 100644
--- a/src/funcchain/streaming.py
+++ b/src/funcchain/streaming.py
@@ -86,6 +86,7 @@ def stream_to(
         ...     # your chain calls here
     """
     import builtins
+
     import rich
 
     if (fn is builtins.print or fn is rich.print) and kwargs == {}:
diff --git a/src/funcchain/utils/__init__.py b/src/funcchain/utils/__init__.py
index dd639e8..20c7876 100644
--- a/src/funcchain/utils/__init__.py
+++ b/src/funcchain/utils/__init__.py
@@ -1,9 +1,7 @@
 from .decorators import *  # noqa: F401, F403
 from .function_frame import *  # noqa: F401, F403
+from .grammars import pydantic_to_grammar  # noqa: F401, F403
+from .grammars import schema_to_grammar  # noqa: F401, F403
 from .helpers import *  # noqa: F401, F403
 from .image import *  # noqa: F401, F403
 from .model_defaults import *  # noqa: F401, F403
-from .grammars import (
-    schema_to_grammar,  # noqa: F401, F403
-    pydantic_to_grammar,  # noqa: F401, F403
-)
diff --git a/src/funcchain/utils/decorators.py b/src/funcchain/utils/decorators.py
index 00c708d..b510106 100644
--- a/src/funcchain/utils/decorators.py
+++ b/src/funcchain/utils/decorators.py
@@ -11,8 +11,8 @@
 from langchain.schema.output_parser import OutputParserException
 from rich import print
 
-from ..settings import FuncchainSettings
 from ..exceptions import ParsingRetryException
+from ..settings import FuncchainSettings
 from .function_frame import get_parent_frame
diff --git a/src/funcchain/utils/function_frame.py b/src/funcchain/utils/function_frame.py
index 25af9e6..624d140 100644
--- a/src/funcchain/utils/function_frame.py
+++ b/src/funcchain/utils/function_frame.py
@@ -6,7 +6,6 @@
 from ..parser import BoolOutputParser, ParserBaseModel, PydanticOutputParser
 
-
 FUNC_DEPTH = 8
diff --git a/src/funcchain/utils/grammars.py b/src/funcchain/utils/grammars.py
index 06c7a48..bd41804 100644
--- a/src/funcchain/utils/grammars.py
+++ b/src/funcchain/utils/grammars.py
@@ -1,6 +1,7 @@
 import json
 import re
 from typing import Type
+
 from pydantic import BaseModel
 
 SPACE_RULE = '" "?'
diff --git a/src/funcchain/utils/model_defaults.py b/src/funcchain/utils/model_defaults.py
index 9c9e133..8ef0692 100644
--- a/src/funcchain/utils/model_defaults.py
+++ b/src/funcchain/utils/model_defaults.py
@@ -1,16 +1,11 @@
+from pathlib import Path
 from typing import Any
-from pathlib import Path
 
-from langchain.chat_models import (
-    AzureChatOpenAI,
-    ChatAnthropic,
-    ChatGooglePalm,
-    ChatOpenAI,
-)
+from langchain.chat_models import AzureChatOpenAI, ChatAnthropic, ChatGooglePalm, ChatOpenAI
 from langchain.chat_models.base import BaseChatModel
 
-from ..settings import FuncchainSettings
 from .._llms import ChatLlamaCpp
+from ..settings import FuncchainSettings
 
 
 def get_gguf_model(
diff --git a/tests/async_test.py b/tests/async_test.py
index 1c95199..e4cb787 100644
--- a/tests/async_test.py
+++ b/tests/async_test.py
@@ -1,8 +1,11 @@
-from asyncio import gather, run as _await
-from funcchain import achain, settings
-from pydantic import BaseModel
+from asyncio import gather
+from asyncio import run as _await
 from random import shuffle
 
+from pydantic import BaseModel
+
+from funcchain import achain, settings
+
 settings.temperature = 1
 settings.llm = "openai/gpt-3.5-turbo-1106"
diff --git a/tests/llamacpp_test.py b/tests/llamacpp_test.py
index d2fbb2b..adf8c01 100644
--- a/tests/llamacpp_test.py
+++ b/tests/llamacpp_test.py
@@ -1,7 +1,8 @@
 import pytest
-from funcchain import chain, settings
 from pydantic import BaseModel
 
+from funcchain import chain, settings
+
 
 class Task(BaseModel):
     description: str
diff --git a/tests/openai_test.py b/tests/openai_test.py
index 47b57b3..2927800 100644
--- a/tests/openai_test.py
+++ b/tests/openai_test.py
@@ -1,6 +1,7 @@
-from funcchain import chain, settings
 from pydantic import BaseModel, Field
 
+from funcchain import chain, settings
+
 
 class Task(BaseModel):
     name: str