Chore/upgrade llamaindex version (FlowiseAI#2440)
* updates to loader to support file upload
* adding a todo
* upgrade llamaindex
* update groq icon
* update azure models
* update llamaindex version

---------

Co-authored-by: Henry <hzj94@hotmail.com>
vinodkiran and HenryHengZJ authored May 22, 2024
1 parent e83dcb0 commit ff23817
Showing 22 changed files with 1,340 additions and 297 deletions.
packages/components/models.json: 37 changes (33 additions & 4 deletions)
@@ -244,21 +244,29 @@
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-turbo",
"name": "gpt-4-turbo"
},
{
"label": "gpt-4-32k",
"name": "gpt-4-32k"
},
{
-"label": "gpt-35-turbo",
-"name": "gpt-35-turbo"
+"label": "gpt-3.5-turbo",
+"name": "gpt-3.5-turbo"
},
{
-"label": "gpt-35-turbo-16k",
-"name": "gpt-35-turbo-16k"
+"label": "gpt-3.5-turbo-16k",
+"name": "gpt-3.5-turbo-16k"
},
+{
+"label": "gpt-4-vision-preview",
+"name": "gpt-4-vision-preview"
+},
+{
+"label": "gpt-4-1106-preview",
+"name": "gpt-4-1106-preview"
+}
]
},
@@ -504,6 +512,10 @@
{
"name": "chatOpenAI_LlamaIndex",
"models": [
+{
+"label": "gpt-4o",
+"name": "gpt-4o"
+},
{
"label": "gpt-4",
"name": "gpt-4"
@@ -622,6 +634,23 @@
"name": "mistral-large-2402"
}
]
+},
+{
+"name": "chatMistral_LlamaIndex",
+"models": [
+{
+"label": "mistral-tiny",
+"name": "mistral-tiny"
+},
+{
+"label": "mistral-small",
+"name": "mistral-small"
+},
+{
+"label": "mistral-medium",
+"name": "mistral-medium"
+}
+]
}
],
"llm": [
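For context, each entry in the "chat" section of models.json maps a node name to the chat models it exposes in the UI; the actual consumer in Flowise is the getModels helper from src/modelLoader. A minimal TypeScript sketch of a lookup over the shape shown above (illustrative only; chatModelsFor and the direct JSON import are assumptions, not Flowise code):

// Illustrative sketch, not part of this commit. Assumes the models.json
// shape visible in the diff: { "chat": [{ "name": ..., "models": [...] }], ... }.
import modelsConfig from './models.json'

interface ModelOption {
    label: string
    name: string
}

// Hypothetical helper: list the model options registered for a node name,
// e.g. chatModelsFor('chatMistral_LlamaIndex') -> mistral-tiny/small/medium.
function chatModelsFor(nodeName: string): ModelOption[] {
    const providers = modelsConfig.chat as Array<{ name: string; models: ModelOption[] }>
    return providers.find((p) => p.name === nodeName)?.models ?? []
}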
@@ -1,9 +1,9 @@
import { flatten } from 'lodash'
-import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex'
-import { getBaseClasses } from '../../../src/utils'
-import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
+import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex'
+import { getBaseClasses } from '../../../../src/utils'
+import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'

-class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
+class AnthropicAgent_LlamaIndex_Agents implements INode {
label: string
name: string
version: number
@@ -18,16 +18,15 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
badge?: string

constructor(fields?: { sessionId?: string }) {
-this.label = 'OpenAI Tool Agent'
-this.name = 'openAIToolAgentLlamaIndex'
+this.label = 'Anthropic Agent'
+this.name = 'anthropicAgentLlamaIndex'
this.version = 1.0
-this.type = 'OpenAIToolAgent'
+this.type = 'AnthropicAgent'
this.category = 'Agents'
-this.icon = 'function.svg'
-this.description = `Agent that uses OpenAI Function Calling to pick the tools and args to call using LlamaIndex`
-this.baseClasses = [this.type, ...getBaseClasses(OpenAIAgent)]
+this.icon = 'Anthropic.svg'
+this.description = `Agent that uses Anthropic Claude Function Calling to pick the tools and args to call using LlamaIndex`
+this.baseClasses = [this.type, ...getBaseClasses(AnthropicAgent)]
this.tags = ['LlamaIndex']
-this.badge = 'NEW'
this.inputs = [
{
label: 'Tools',
@@ -41,7 +40,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
type: 'BaseChatMemory'
},
{
-label: 'OpenAI/Azure Chat Model',
+label: 'Anthropic Claude Model',
name: 'model',
type: 'BaseChatModel_LlamaIndex'
},
@@ -63,7 +62,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {

async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
const memory = nodeData.inputs?.memory as FlowiseMemory
-const model = nodeData.inputs?.model as OpenAI
+const model = nodeData.inputs?.model as Anthropic
const systemMessage = nodeData.inputs?.systemMessage as string
const prependMessages = options?.prependMessages

@@ -94,31 +93,33 @@
}
}

-const agent = new OpenAIAgent({
+const agent = new AnthropicAgent({
tools,
llm: model,
-prefixMessages: chatHistory,
+chatHistory: chatHistory,
verbose: process.env.DEBUG === 'true' ? true : false
})

let text = ''
const usedTools: IUsedTool[] = []

-const response = await agent.chat({
-message: input
-})
+const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })

if (response.sources.length) {
for (const sourceTool of response.sources) {
usedTools.push({
-tool: sourceTool.toolName,
-toolInput: sourceTool.rawInput,
-toolOutput: sourceTool.rawOutput
+tool: sourceTool.tool?.metadata.name ?? '',
+toolInput: sourceTool.input,
+toolOutput: sourceTool.output as any
})
}
}

-text = String(response)
+if (Array.isArray(response.response.message.content) && response.response.message.content.length > 0) {
+text = (response.response.message.content[0] as MessageContentTextDetail).text
+} else {
+text = response.response.message.content as string
+}

await memory.addChatMessages(
[
@@ -138,4 +139,4 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
}
}

-module.exports = { nodeClass: OpenAIFunctionAgent_LlamaIndex_Agents }
+module.exports = { nodeClass: AnthropicAgent_LlamaIndex_Agents }
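The content-extraction change at the end of run() handles the fact that in newer llamaindex versions a chat response's message.content can be either a plain string or an array of content blocks. A standalone sketch of that pattern (the parameter type is simplified for illustration; real content arrays can also carry non-text blocks):

// Illustrative sketch of the extraction logic used in the agent above.
import { MessageContentTextDetail } from 'llamaindex'

function extractText(content: string | MessageContentTextDetail[]): string {
    if (Array.isArray(content) && content.length > 0) {
        // Take the first text block, mirroring the node's behaviour.
        return (content[0] as MessageContentTextDetail).text
    }
    return content as string
}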
@@ -0,0 +1,167 @@
import { flatten } from 'lodash'
import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex'
import { getBaseClasses } from '../../../../src/utils'
import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface'

class OpenAIFunctionAgent_LlamaIndex_Agents implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
baseClasses: string[]
tags: string[]
inputs: INodeParams[]
sessionId?: string
badge?: string

constructor(fields?: { sessionId?: string }) {
this.label = 'OpenAI Tool Agent'
this.name = 'openAIToolAgentLlamaIndex'
this.version = 2.0
this.type = 'OpenAIToolAgent'
this.category = 'Agents'
this.icon = 'function.svg'
this.description = `Agent that uses OpenAI Function Calling to pick the tools and args to call using LlamaIndex`
this.baseClasses = [this.type, ...getBaseClasses(OpenAIAgent)]
this.tags = ['LlamaIndex']
this.inputs = [
{
label: 'Tools',
name: 'tools',
type: 'Tool_LlamaIndex',
list: true
},
{
label: 'Memory',
name: 'memory',
type: 'BaseChatMemory'
},
{
label: 'OpenAI/Azure Chat Model',
name: 'model',
type: 'BaseChatModel_LlamaIndex'
},
{
label: 'System Message',
name: 'systemMessage',
type: 'string',
rows: 4,
optional: true,
additionalParams: true
}
]
this.sessionId = fields?.sessionId
}

async init(): Promise<any> {
return null
}

async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
const memory = nodeData.inputs?.memory as FlowiseMemory
const model = nodeData.inputs?.model as OpenAI
const systemMessage = nodeData.inputs?.systemMessage as string
let tools = nodeData.inputs?.tools
tools = flatten(tools)

const isStreamingEnabled = options.socketIO && options.socketIOClientId

const chatHistory = [] as ChatMessage[]

if (systemMessage) {
chatHistory.push({
content: systemMessage,
role: 'system'
})
}

const msgs = (await memory.getChatMessages(this.sessionId, false)) as IMessage[]
for (const message of msgs) {
if (message.type === 'apiMessage') {
chatHistory.push({
content: message.message,
role: 'assistant'
})
} else if (message.type === 'userMessage') {
chatHistory.push({
content: message.message,
role: 'user'
})
}
}

const agent = new OpenAIAgent({
tools,
llm: model,
chatHistory: chatHistory,
verbose: process.env.DEBUG === 'true' ? true : false
})

let text = ''
let isStreamingStarted = false
const usedTools: IUsedTool[] = []

if (isStreamingEnabled) {
const stream = await agent.chat({
message: input,
chatHistory,
stream: true,
verbose: process.env.DEBUG === 'true' ? true : false
})
for await (const chunk of stream) {
//console.log('chunk', chunk)
text += chunk.response.delta
if (!isStreamingStarted) {
isStreamingStarted = true
options.socketIO.to(options.socketIOClientId).emit('start', chunk.response.delta)
if (chunk.sources.length) {
for (const sourceTool of chunk.sources) {
usedTools.push({
tool: sourceTool.tool?.metadata.name ?? '',
toolInput: sourceTool.input,
toolOutput: sourceTool.output as any
})
}
options.socketIO.to(options.socketIOClientId).emit('usedTools', usedTools)
}
}

options.socketIO.to(options.socketIOClientId).emit('token', chunk.response.delta)
}
} else {
const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false })
if (response.sources.length) {
for (const sourceTool of response.sources) {
usedTools.push({
tool: sourceTool.tool?.metadata.name ?? '',
toolInput: sourceTool.input,
toolOutput: sourceTool.output as any
})
}
}

text = response.response.message.content as string
}

await memory.addChatMessages(
[
{
text: input,
type: 'userMessage'
},
{
text: text,
type: 'apiMessage'
}
],
this.sessionId
)

return usedTools.length ? { text: text, usedTools } : text
}
}

module.exports = { nodeClass: OpenAIFunctionAgent_LlamaIndex_Agents }
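The streaming branch above relies on agent.chat({ ..., stream: true }) returning an async iterable whose chunks expose incremental text on response.delta, while the non-streaming branch gets a single response object. Stripped of the socket.io wiring, the core loop looks like this (illustrative sketch; streamReply is an assumed name, not a Flowise function):

// Illustrative sketch of the streaming pattern used in the node above,
// without the socket.io 'start'/'token'/'usedTools' event emission.
import { OpenAIAgent } from 'llamaindex'

async function streamReply(agent: OpenAIAgent, message: string): Promise<string> {
    let text = ''
    const stream = await agent.chat({ message, stream: true })
    for await (const chunk of stream) {
        // Each chunk carries an incremental token in response.delta.
        text += chunk.response.delta
    }
    return text
}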
@@ -1,6 +1,6 @@
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { OpenAI, ALL_AVAILABLE_OPENAI_MODELS } from 'llamaindex'
+import { OpenAI } from 'llamaindex'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'

interface AzureOpenAIConfig {
@@ -10,6 +10,28 @@ interface AzureOpenAIConfig {
deploymentName?: string
}

+const ALL_AZURE_OPENAI_CHAT_MODELS = {
+'gpt-35-turbo': { contextWindow: 4096, openAIModel: 'gpt-3.5-turbo' },
+'gpt-35-turbo-16k': {
+contextWindow: 16384,
+openAIModel: 'gpt-3.5-turbo-16k'
+},
+'gpt-4': { contextWindow: 8192, openAIModel: 'gpt-4' },
+'gpt-4-32k': { contextWindow: 32768, openAIModel: 'gpt-4-32k' },
+'gpt-4-turbo': {
+contextWindow: 128000,
+openAIModel: 'gpt-4-turbo'
+},
+'gpt-4-vision-preview': {
+contextWindow: 128000,
+openAIModel: 'gpt-4-vision-preview'
+},
+'gpt-4-1106-preview': {
+contextWindow: 128000,
+openAIModel: 'gpt-4-1106-preview'
+}
+}

class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
label: string
name: string
@@ -90,7 +112,7 @@
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
-const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
+const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AZURE_OPENAI_CHAT_MODELS
const temperature = nodeData.inputs?.temperature as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
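The inlined ALL_AZURE_OPENAI_CHAT_MODELS map stands in for the ALL_AVAILABLE_OPENAI_MODELS import removed above: Azure deployment names (e.g. gpt-35-turbo, without the dot) differ from the OpenAI model ids (gpt-3.5-turbo) that llamaindex expects, so each entry carries the matching OpenAI id plus the context window. A small illustrative lookup over that shape (openAIModelFor is an assumed name, not Flowise code):

// Illustrative only: resolving deployment metadata from a map shaped like the one above.
type AzureModelEntry = { contextWindow: number; openAIModel: string }

const AZURE_MODELS: Record<string, AzureModelEntry> = {
    'gpt-35-turbo': { contextWindow: 4096, openAIModel: 'gpt-3.5-turbo' }
    // ...remaining entries as in the diff above
}

// Map an Azure deployment name to the OpenAI model id llamaindex expects.
function openAIModelFor(deployment: string): string | undefined {
    return AZURE_MODELS[deployment]?.openAIModel
}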
