Merge pull request #1818 from langchain-ai/nc/23sep/stream-messages-fix
Fix edge cases with stream_mode=messages
nfcampos committed Sep 24, 2024
2 parents e1a3336 + 58aaffb commit beea3be
Showing 1 changed file with 3 additions and 4 deletions.
libs/langgraph/langgraph/pregel/messages.py (3 additions, 4 deletions)
@@ -27,21 +27,20 @@ class StreamMessagesHandler(BaseCallbackHandler, _StreamingCallbackHandler):
     """A callback handler that implements stream_mode=messages.
     Collects messages from (1) chat model stream events and (2) node outputs."""

     run_inline = True
     """We want this callback to run in the main thread, to avoid order/locking issues."""

     def __init__(self, stream: Callable[[StreamChunk], None]):
         self.stream = stream
         self.metadata: dict[UUID, Meta] = {}
         self.seen: set[Union[int, str]] = set()

     def _emit(self, meta: Meta, message: BaseMessage, *, dedupe: bool = False) -> None:
-        ident = id(message)
         if dedupe and message.id in self.seen:
             return
-        elif ident in self.seen:
-            return
         else:
+            if message.id is None:
+                message.id = str(uuid4())
-            self.seen.add(ident)
+            self.seen.add(message.id)
             self.stream((meta[0], "messages", (message, meta[1])))

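For context, the change keys deduplication on the stable message.id (assigning a uuid4 when the id is missing) instead of the transient id() of the message object. A minimal consumer of stream_mode=messages looks roughly like the sketch below; the graph, model, and node name are illustrative assumptions, not part of this commit.

# Illustrative sketch only: the model, node name, and prompt are assumptions,
# not taken from this commit.
from langchain_openai import ChatOpenAI
from langgraph.graph import START, MessagesState, StateGraph

model = ChatOpenAI(model="gpt-4o-mini")

def call_model(state: MessagesState):
    # Append the model's reply to the conversation state.
    return {"messages": [model.invoke(state["messages"])]}

builder = StateGraph(MessagesState)
builder.add_node("call_model", call_model)
builder.add_edge(START, "call_model")
graph = builder.compile()

# stream_mode="messages" yields (message, metadata) pairs, i.e. the
# (message, meta[1]) tuples that _emit pushes onto the stream above.
for message, metadata in graph.stream(
    {"messages": [{"role": "user", "content": "hi"}]},
    stream_mode="messages",
):
    print(message.content, metadata.get("langgraph_node"))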
