Skip to content

Commit

Permalink
fix: Fix execution error when using AI chain nodes with non-chat model (n8n-io#8724)
Browse files Browse the repository at this point in the history

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
  • Loading branch information
OlegIvaniv authored Feb 23, 2024
1 parent d03d927 commit 0882dc0
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 60 deletions.
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { VectorStore } from 'langchain/vectorstores/base';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
INodeCredentialDescription,
INodeProperties,
Expand All @@ -18,7 +18,7 @@ import type { Document } from 'langchain/document';
import { logWrapper } from '../../../utils/logWrapper';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
import { getMetadataFiltersValues } from '../../../utils/helpers';
import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { processDocument } from './processDocuments';

Expand Down Expand Up @@ -237,7 +237,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
});

resultData.push(...serializedDocs);
void this.logAiEvent('n8n.ai.vector.store.searched', jsonStringify({ query: prompt }));
void logAiEvent(this, 'n8n.ai.vector.store.searched', { query: prompt });
}

return await this.prepareOutputData(resultData);
Expand All @@ -264,7 +264,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);

void this.logAiEvent('n8n.ai.vector.store.populated');
void logAiEvent(this, 'n8n.ai.vector.store.populated');
} catch (error) {
throw error;
}
Expand Down
15 changes: 14 additions & 1 deletion packages/@n8n/nodes-langchain/utils/helpers.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { NodeConnectionType, type IExecuteFunctions, NodeOperationError } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type { EventNamesAiNodesType, IDataObject, IExecuteFunctions } from 'n8n-workflow';
import { BaseChatModel } from 'langchain/chat_models/base';
import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
Expand Down Expand Up @@ -64,3 +65,15 @@ export function getPromptInputByType(options: {

return input;
}

/**
 * Best-effort AI telemetry logger.
 *
 * Forwards the event to `executeFunctions.logAiEvent`, serializing the optional
 * payload first. Any failure is swallowed and only reported at debug level, so
 * a telemetry problem can never abort the node's actual execution.
 *
 * @param executeFunctions - Execution context supplying `logAiEvent` and `logger`.
 * @param event - Name of the AI event to record.
 * @param data - Optional event payload; serialized with `jsonStringify` when present.
 */
export async function logAiEvent(
	executeFunctions: IExecuteFunctions,
	event: EventNamesAiNodesType,
	data?: IDataObject,
) {
	// Serialize up front; `undefined` is passed through untouched.
	const serializedData = data ? jsonStringify(data) : undefined;
	try {
		await executeFunctions.logAiEvent(event, serializedData);
	} catch (error) {
		// Deliberately non-fatal: logging must never break workflow execution.
		executeFunctions.logger.debug(`Error logging AI event: ${event}`);
	}
}
88 changes: 33 additions & 55 deletions packages/@n8n/nodes-langchain/utils/logWrapper.ts
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
import {
NodeOperationError,
type ConnectionTypes,
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
jsonStringify,
} from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';

import { Tool } from 'langchain/tools';
import type { BaseMessage, ChatResult, InputValues } from 'langchain/schema';
import type { ChatResult, InputValues, BaseMessage } from 'langchain/schema';
import { BaseChatMessageHistory } from 'langchain/schema';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from 'langchain/chat_models/base';
import type { CallbackManagerForLLMRun } from 'langchain/callbacks';

import { Embeddings } from 'langchain/embeddings/base';
Expand All @@ -28,7 +22,7 @@ import { BaseOutputParser } from 'langchain/schema/output_parser';
import { isObject } from 'lodash';
import { N8nJsonLoader } from './N8nJsonLoader';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { isChatInstance } from './helpers';
import { isChatInstance, logAiEvent } from './helpers';

const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
Expand Down Expand Up @@ -202,10 +196,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);

void executeFunctions.logAiEvent(
'n8n.ai.memory.get.messages',
jsonStringify({ response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.memory.get.messages', { response });
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
Expand All @@ -222,10 +213,7 @@ export function logWrapper(
arguments: [message],
});

void executeFunctions.logAiEvent(
'n8n.ai.memory.added.message',
jsonStringify({ message }),
);
void logAiEvent(executeFunctions, 'n8n.ai.memory.added.message', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
Expand Down Expand Up @@ -255,18 +243,21 @@ export function logWrapper(
runManager,
],
})) as ChatResult;

void executeFunctions.logAiEvent(
'n8n.ai.llm.generated',
jsonStringify({
messages:
typeof messages === 'string'
? messages
: messages.map((message) => message.toJSON()),
options,
response,
}),
);
const parsedMessages =
typeof messages === 'string'
? messages
: messages.map((message) => {
if (typeof message === 'string') return message;
if (typeof message?.toJSON === 'function') return message.toJSON();

return message;
});

void logAiEvent(executeFunctions, 'n8n.ai.llm.generated', {
messages: parsedMessages,
options,
response,
});
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
} catch (error) {
Expand Down Expand Up @@ -299,10 +290,9 @@ export function logWrapper(
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'getFormatInstructions', response } }],
]);
void executeFunctions.logAiEvent(
'n8n.ai.output.parser.get.instructions',
jsonStringify({ response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.get.instructions', {
response,
});
return response;
};
} else if (prop === 'parse' && 'parse' in target) {
Expand All @@ -321,10 +311,7 @@ export function logWrapper(
arguments: [stringifiedText],
})) as object;

void executeFunctions.logAiEvent(
'n8n.ai.output.parser.parsed',
jsonStringify({ text, response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'parse', response } }],
]);
Expand Down Expand Up @@ -353,10 +340,7 @@ export function logWrapper(
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;

void executeFunctions.logAiEvent(
'n8n.ai.retriever.get.relevant.documents',
jsonStringify({ query }),
);
void logAiEvent(executeFunctions, 'n8n.ai.retriever.get.relevant.documents', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
Expand All @@ -381,7 +365,7 @@ export function logWrapper(
arguments: [documents],
})) as number[][];

void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.document');
void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.document');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
Expand All @@ -401,7 +385,7 @@ export function logWrapper(
method: target[prop],
arguments: [query],
})) as number[];
void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.query');
void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.query');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
Expand Down Expand Up @@ -446,7 +430,7 @@ export function logWrapper(
arguments: [item, itemIndex],
})) as number[];

void executeFunctions.logAiEvent('n8n.ai.document.processed');
void logAiEvent(executeFunctions, 'n8n.ai.document.processed');
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
Expand All @@ -472,7 +456,7 @@ export function logWrapper(
arguments: [text],
})) as string[];

void executeFunctions.logAiEvent('n8n.ai.text.splitter.split');
void logAiEvent(executeFunctions, 'n8n.ai.text.splitter.split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
Expand All @@ -496,10 +480,7 @@ export function logWrapper(
arguments: [query],
})) as string;

void executeFunctions.logAiEvent(
'n8n.ai.tool.called',
jsonStringify({ query, response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.tool.called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
Expand Down Expand Up @@ -529,10 +510,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;

void executeFunctions.logAiEvent(
'n8n.ai.vector.store.searched',
jsonStringify({ query }),
);
void logAiEvent(executeFunctions, 'n8n.ai.vector.store.searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

return response;
Expand Down

0 comments on commit 0882dc0

Please sign in to comment.