Skip to content

Commit

Permalink
🔧 fix llm argument in chain/achain
Browse files Browse the repository at this point in the history
  • Loading branch information
shroominic committed Jun 28, 2024
1 parent d27dbf7 commit c45b505
Showing 1 changed file with 4 additions and 6 deletions.
10 changes: 4 additions & 6 deletions src/funcchain/syntax/executable.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ def chain(
"""
Generate response of llm for provided instructions.
"""
if llm:
settings_override["llm"] = llm
settings = create_local_settings(settings_override)
callbacks: Callbacks = None
output_types = get_output_types()
Expand All @@ -41,9 +43,6 @@ def chain(
memory = memory or ChatMessageHistory()
input_kwargs.update(kwargs_from_parent())

if llm:
settings_override["llm"] = llm

# todo maybe this should be done in the prompt processor?
system = system or settings.system_prompt
if system:
Expand Down Expand Up @@ -87,6 +86,8 @@ async def achain(
"""
Asynchronously generate response of llm for provided instructions.
"""
if llm:
settings_override["llm"] = llm
settings = create_local_settings(settings_override)
callbacks: Callbacks = None
output_types = get_output_types()
Expand All @@ -95,9 +96,6 @@ async def achain(
memory = memory or ChatMessageHistory()
input_kwargs.update(kwargs_from_parent())

if llm:
settings_override["llm"] = llm

# todo maybe this should be done in the prompt processor?
system = system or settings.system_prompt
if system:
Expand Down

0 comments on commit c45b505

Please sign in to comment.