From b01d9389971b7a32bd49e826b204bf4aadee478d Mon Sep 17 00:00:00 2001
From: cffranco94
Date: Tue, 30 Jul 2024 18:01:06 +0100
Subject: [PATCH] experimental: Add config to convert_to_graph_documents
 (#24012)

PR title: Experimental: Add config to convert_to_graph_documents

Description: In order to use Langfuse, I need to pass the Langfuse
configuration when invoking the chain. langchain_experimental does not allow
passing any parameters (besides the documents) to the
convert_to_graph_documents method, so I cannot monitor the chain in Langfuse.

If no one reviews your PR within a few days, please @-mention one of
baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17.

---------

Co-authored-by: Catarina Franco
Co-authored-by: Chester Curme
---
 .../graph_transformers/llm.py | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/libs/experimental/langchain_experimental/graph_transformers/llm.py b/libs/experimental/langchain_experimental/graph_transformers/llm.py
index 5b432a94bfc2f..f9f6fc8792628 100644
--- a/libs/experimental/langchain_experimental/graph_transformers/llm.py
+++ b/libs/experimental/langchain_experimental/graph_transformers/llm.py
@@ -13,6 +13,7 @@
     PromptTemplate,
 )
 from langchain_core.pydantic_v1 import BaseModel, Field, create_model
+from langchain_core.runnables import RunnableConfig
 
 examples = [
     {
@@ -710,13 +711,15 @@ def __init__(
         prompt = prompt or default_prompt
         self.chain = prompt | structured_llm
 
-    def process_response(self, document: Document) -> GraphDocument:
+    def process_response(
+        self, document: Document, config: Optional[RunnableConfig] = None
+    ) -> GraphDocument:
         """
         Processes a single document, transforming it into a graph document using
         an LLM based on the model's schema and constraints.
         """
         text = document.page_content
-        raw_schema = self.chain.invoke({"input": text})
+        raw_schema = self.chain.invoke({"input": text}, config=config)
         if self._function_call:
             raw_schema = cast(Dict[Any, Any], raw_schema)
             nodes, relationships = _convert_to_graph_document(raw_schema)
@@ -765,7 +768,7 @@ def process_response(self, document: Document) -> GraphDocument:
         return GraphDocument(nodes=nodes, relationships=relationships, source=document)
 
     def convert_to_graph_documents(
-        self, documents: Sequence[Document]
+        self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
     ) -> List[GraphDocument]:
         """Convert a sequence of documents into graph documents.
 
@@ -776,15 +779,17 @@ def convert_to_graph_documents(
         Returns:
             Sequence[GraphDocument]: The transformed documents as graphs.
         """
-        return [self.process_response(document) for document in documents]
+        return [self.process_response(document, config) for document in documents]
 
-    async def aprocess_response(self, document: Document) -> GraphDocument:
+    async def aprocess_response(
+        self, document: Document, config: Optional[RunnableConfig] = None
+    ) -> GraphDocument:
         """
         Asynchronously processes a single document, transforming it into a graph
         document.
         """
         text = document.page_content
-        raw_schema = await self.chain.ainvoke({"input": text})
+        raw_schema = await self.chain.ainvoke({"input": text}, config=config)
         raw_schema = cast(Dict[Any, Any], raw_schema)
         nodes, relationships = _convert_to_graph_document(raw_schema)
 
@@ -811,13 +816,13 @@ async def aprocess_response(self, document: Document) -> GraphDocument:
         return GraphDocument(nodes=nodes, relationships=relationships, source=document)
 
     async def aconvert_to_graph_documents(
-        self, documents: Sequence[Document]
+        self, documents: Sequence[Document], config: Optional[RunnableConfig] = None
     ) -> List[GraphDocument]:
         """
         Asynchronously convert a sequence of documents into graph documents.
         """
        tasks = [
-            asyncio.create_task(self.aprocess_response(document))
+            asyncio.create_task(self.aprocess_response(document, config))
             for document in documents
         ]
         results = await asyncio.gather(*tasks)