
Merge pull request #15117 from Budibase/openai-fix
rely on llm status rather than initialised variable
shogunpurple authored Dec 4, 2024
2 parents 82a1165 + 3d8e15a commit 963d162
Showing 7 changed files with 17 additions and 13 deletions.
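
The gist of the change: instead of keeping a separate initialised boolean on the tenant LLM wrapper in sync with its state, callers now treat the presence of the wrapper's llm client as the readiness signal, so the status can never drift from the thing it describes. A minimal TypeScript sketch of the idea; the LLMWrapper shape is an assumption for illustration, since the real class lives in the closed-source @budibase/pro package:

// Assumed shape of the object returned by
// pro.ai.LargeLanguageModel.forCurrentTenant(); illustrative only.
interface LLMWrapper {
  llm?: object // underlying client, present only once configuration resolves
  run(prompt: string): Promise<string>
}

// Before: a boolean flag that had to be updated alongside llm.
// After: llm itself is the source of truth for readiness.
function isReady(wrapper?: LLMWrapper): boolean {
  return Boolean(wrapper?.llm)
}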

packages/pro (2 changes: 1 addition & 1 deletion)
Submodule pro updated from e60f4b to 7b8789

packages/server/src/api/routes/tests/row.spec.ts (2 changes: 1 addition & 1 deletion)
@@ -48,7 +48,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),

packages/server/src/api/routes/tests/search.spec.ts (2 changes: 1 addition & 1 deletion)
@@ -52,7 +52,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),

packages/server/src/automations/steps/openai.ts (10 changes: 6 additions & 4 deletions)
@@ -106,13 +106,15 @@ export async function run({
     (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
     (await pro.features.isBudibaseAIEnabled())

-  let llm
+  let llmWrapper
   if (budibaseAIEnabled || customConfigsEnabled) {
-    llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
+    llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+      inputs.model
+    )
   }

-  response = llm?.initialised
-    ? await llm.run(inputs.prompt)
+  response = llmWrapper?.llm
+    ? await llmWrapper.run(inputs.prompt)
     : await legacyOpenAIPrompt(inputs)

   return {
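
A detail worth noting in this hunk: the optional chaining in llmWrapper?.llm lets one expression cover every way the tenant LLM can be unavailable. Sketched below with the branch from the diff; the case labels are inferred from the surrounding flag checks, not spelled out in the source:

// 1. Feature flags off -> llmWrapper is undefined     -> legacy prompt
// 2. No usable config  -> llmWrapper.llm is undefined -> legacy prompt
// 3. Configured tenant -> llmWrapper.llm is truthy    -> llmWrapper.run()
response = llmWrapper?.llm
  ? await llmWrapper.run(inputs.prompt)
  : await legacyOpenAIPrompt(inputs)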

packages/server/src/automations/tests/openai.spec.ts (2 changes: 1 addition & 1 deletion)
@@ -27,7 +27,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: jest.fn().mockImplementation(() => ({
-        initialised: true,
+        llm: {},
         init: jest.fn(),
         run: jest.fn(),
       })),

@@ -18,7 +18,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => "response from LLM"),
         buildPromptFromAIOperation: buildPromptMock,
       }),
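
The four spec files above all make the same mock change: the stub returned by forCurrentTenant now carries llm: {} instead of initialised: true. An empty object is truthy, so guarded code takes the LLM branch and hits the mocked run without a real provider. A condensed, hypothetical test fragment showing how the stub is consumed (the forCurrentTenant call mirrors production code; the surrounding test body is illustrative only):

// With the mock in place, the guard sees a truthy llm and calls the
// stubbed run(), which resolves per the row/search spec mocks above.
const wrapper = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
if (wrapper.llm) {
  const out = await wrapper.run("summarise this row") // "Mock LLM Response"
}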

packages/server/src/utilities/rowProcessor/utils.ts (10 changes: 6 additions & 4 deletions)
@@ -126,8 +126,10 @@ export async function processAIColumns<T extends Row | Row[]>(
   const numRows = Array.isArray(inputRows) ? inputRows.length : 1
   span?.addTags({ table_id: table._id, numRows })
   const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
-  const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
-  if (rows && llm.initialised) {
+  const llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+    "gpt-4o-mini"
+  )
+  if (rows && llmWrapper.llm) {
     // Ensure we have snippet context
     await context.ensureSnippetContext()

@@ -151,14 +153,14 @@
           }
         }

-        const prompt = llm.buildPromptFromAIOperation({
+        const prompt = llmWrapper.buildPromptFromAIOperation({
           schema: aiSchema,
           row,
         })

         return tracer.trace("processAIColumn", {}, async span => {
           span?.addTags({ table_id: table._id, column })
-          const llmResponse = await llm.run(prompt!)
+          const llmResponse = await llmWrapper.run(prompt!)
           return {
             ...row,
             [column]: llmResponse,
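
Note the asymmetry with the automation step: here the wrapper is always awaited and the guard reads llmWrapper.llm without optional chaining, so only the inner client is expected to be absent, never the wrapper itself. A condensed sketch of the resulting behaviour, with names taken from the hunk and the per-row work abbreviated:

// When the tenant has no usable LLM (llmWrapper.llm is undefined),
// AI columns are skipped entirely: no snippet context is loaded and
// no prompts are built or run.
if (rows && llmWrapper.llm) {
  await context.ensureSnippetContext()
  // ...per row: build a prompt via llmWrapper.buildPromptFromAIOperation()
  // and write the result of llmWrapper.run(prompt) into the AI column
}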
