Commit

pro ref
shogunpurple committed Dec 4, 2024
1 parent 7fcff8a commit e119e31
Showing 3 changed files with 10 additions and 8 deletions.
2 changes: 1 addition & 1 deletion packages/pro
Submodule pro updated from 0d7fa3 to 7b8789
8 changes: 4 additions & 4 deletions packages/server/src/automations/steps/openai.ts
@@ -106,13 +106,13 @@ export async function run({
     (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
     (await pro.features.isBudibaseAIEnabled())
 
-  let llm
+  let llmWrapper
   if (budibaseAIEnabled || customConfigsEnabled) {
-    llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
+    llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
   }
 
-  response = llm?.initialised
-    ? await llm.run(inputs.prompt)
+  response = llmWrapper?.llm
+    ? await llmWrapper.run(inputs.prompt)
     : await legacyOpenAIPrompt(inputs)
 
   return {
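The behavioural change in this step is the guard before running the prompt: instead of checking a boolean llm?.initialised flag, the code now checks llmWrapper?.llm, i.e. whether the wrapper actually resolved an underlying client, and falls back to legacyOpenAIPrompt(inputs) otherwise. A minimal TypeScript sketch of that gating pattern, assuming a wrapper shape with a nullable llm field and a run method (the real pro.ai.LargeLanguageModel class lives in the private packages/pro submodule, so the interface and helper below are illustrative only):

// Illustrative only: the real wrapper is defined in the private packages/pro submodule.
interface LLMWrapper {
  // Underlying client; stays null when no AI config or licence is available.
  llm: object | null
  run(prompt: string): Promise<string>
}

// Hypothetical helper showing the guard: use the wrapper only when it holds a client,
// otherwise fall back to the legacy OpenAI code path.
async function runPrompt(
  wrapper: LLMWrapper | undefined,
  prompt: string,
  legacyPrompt: (prompt: string) => Promise<string>
): Promise<string> {
  return wrapper?.llm ? wrapper.run(prompt) : legacyPrompt(prompt)
}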
8 changes: 5 additions & 3 deletions packages/server/src/utilities/rowProcessor/utils.ts
@@ -126,7 +126,9 @@ export async function processAIColumns<T extends Row | Row[]>(
     const numRows = Array.isArray(inputRows) ? inputRows.length : 1
     span?.addTags({ table_id: table._id, numRows })
     const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
-    const llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
+    const llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+      "gpt-4o-mini"
+    )
     if (rows && llmWrapper.llm) {
       // Ensure we have snippet context
       await context.ensureSnippetContext()
@@ -151,14 +153,14 @@ export async function processAIColumns<T extends Row | Row[]>(
           }
         }
 
-        const prompt = llm.buildPromptFromAIOperation({
+        const prompt = llmWrapper.buildPromptFromAIOperation({
           schema: aiSchema,
           row,
         })
 
        return tracer.trace("processAIColumn", {}, async span => {
          span?.addTags({ table_id: table._id, column })
-          const llmResponse = await llm.run(prompt!)
+          const llmResponse = await llmWrapper.run(prompt!)
          return {
            ...row,
            [column]: llmResponse,
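The row-processor change follows the same pattern: the per-row prompt is built with llmWrapper.buildPromptFromAIOperation and only sent when llmWrapper.llm is set. A self-contained sketch of how a single row might flow through that path, with the wrapper type and the buildPromptFromAIOperation signature assumed for illustration (the real class lives in packages/pro):

// Assumed wrapper shape; the real pro.ai.LargeLanguageModel API may differ.
interface AIColumnLLM {
  llm: object | null
  buildPromptFromAIOperation(args: {
    schema: Record<string, unknown>
    row: Record<string, unknown>
  }): string
  run(prompt: string): Promise<string>
}

// Hypothetical helper mirroring the per-row logic in processAIColumns.
async function fillAIColumn(
  llmWrapper: AIColumnLLM,
  row: Record<string, unknown>,
  column: string,
  aiSchema: Record<string, unknown>
): Promise<Record<string, unknown>> {
  // No usable model for this tenant: leave the row untouched.
  if (!llmWrapper.llm) {
    return row
  }
  const prompt = llmWrapper.buildPromptFromAIOperation({ schema: aiSchema, row })
  const llmResponse = await llmWrapper.run(prompt)
  return { ...row, [column]: llmResponse }
}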
