From 6836a7e2a1c91dea9b842e0a9e6c0a851e0d2070 Mon Sep 17 00:00:00 2001
From: Pavel Jbanov
Date: Mon, 11 Nov 2024 18:02:14 -0500
Subject: [PATCH] chore: removed generate/generateStream from executable
 prompts (#1239)

---
 js/ai/src/prompt.ts                     |  50 +++-------
 js/genkit/src/genkit.ts                 |  34 ++-----
 js/genkit/tests/prompts_test.ts         | 123 ------------------------
 js/testapps/flow-simple-ai/src/index.ts |  10 +-
 js/testapps/menu/src/02/flows.ts        |  10 +-
 js/testapps/menu/src/04/flows.ts        |   8 +-
 js/testapps/menu/src/05/flows.ts        |  14 +--
 js/testapps/prompt-file/src/index.ts    |  13 ++-
 js/testapps/rag/src/pdf_rag.ts          |  16 +--
 js/testapps/rag/src/simple_rag.ts       |  24 ++---
 10 files changed, 60 insertions(+), 242 deletions(-)

diff --git a/js/ai/src/prompt.ts b/js/ai/src/prompt.ts
index c5a93f139..c3a1bb674 100644
--- a/js/ai/src/prompt.ts
+++ b/js/ai/src/prompt.ts
@@ -64,15 +64,9 @@ export function isPrompt(arg: any): boolean {
 }
 
 export type PromptGenerateOptions<
-  I = undefined,
+  O extends z.ZodTypeAny = z.ZodTypeAny,
   CustomOptions extends z.ZodTypeAny = z.ZodTypeAny,
-> = Omit<
-  GenerateOptions,
-  'prompt' | 'input' | 'model'
-> & {
-  model?: ModelArgument<CustomOptions>;
-  input?: I;
-};
+> = Omit<GenerateOptions<O, CustomOptions>, 'prompt'>;
 
 /**
  * A prompt that can be executed as a function.
@@ -89,10 +83,10 @@ export interface ExecutablePrompt<
    * @param opt Options for the prompt template, including user input variables and custom model configuration options.
    * @returns the model response as a promise of `GenerateStreamResponse`.
    */
-  <Out extends O>(
+  (
     input?: I,
-    opts?: PromptGenerateOptions<I, CustomOptions>
-  ): Promise<GenerateResponse<z.infer<Out>>>;
+    opts?: PromptGenerateOptions<O, CustomOptions>
+  ): Promise<GenerateResponse<z.infer<O>>>;
 
   /**
    * Generates a response by rendering the prompt template with given user input and then calling the model.
@@ -100,30 +94,10 @@ export interface ExecutablePrompt<
    * @param opt Options for the prompt template, including user input variables and custom model configuration options.
    * @returns the model response as a promise of `GenerateStreamResponse`.
    */
-  stream<Out extends O>(
+  stream(
     input?: I,
-    opts?: PromptGenerateOptions<I, CustomOptions>
-  ): Promise<GenerateStreamResponse<z.infer<Out>>>;
-
-  /**
-   * Generates a response by rendering the prompt template with given user input and additional generate options and then calling the model.
-   *
-   * @param opt Options for the prompt template, including user input variables and custom model configuration options.
-   * @returns the model response as a promise of `GenerateResponse`.
-   */
-  generate<Out extends O>(
-    opt: PromptGenerateOptions<I, CustomOptions>
-  ): Promise<GenerateResponse<z.infer<Out>>>;
-
-  /**
-   * Generates a streaming response by rendering the prompt template with given user input and additional generate options and then calling the model.
-   *
-   * @param opt Options for the prompt template, including user input variables and custom model configuration options.
-   * @returns the model response as a promise of `GenerateStreamResponse`.
-   */
-  generateStream<Out extends O>(
-    opt: PromptGenerateOptions<I, CustomOptions>
-  ): Promise<GenerateStreamResponse<z.infer<Out>>>;
+    opts?: PromptGenerateOptions<O, CustomOptions>
+  ): Promise<GenerateStreamResponse<z.infer<O>>>;
 
   /**
    * Renders the prompt template based on user input.
@@ -131,9 +105,11 @@ export interface ExecutablePrompt<
    * @param opt Options for the prompt template, including user input variables and custom model configuration options.
    * @returns a `GenerateOptions` object to be used with the `generate()` function from @genkit-ai/ai.
    */
-  render<Out extends O>(
-    opt: PromptGenerateOptions<I, CustomOptions>
-  ): Promise<GenerateOptions<CustomOptions>>;
+  render(
+    opt: PromptGenerateOptions<O, CustomOptions> & {
+      input?: I;
+    }
+  ): Promise<GenerateOptions<O, CustomOptions>>;
 
   /**
    * Returns the prompt usable as a tool.
diff --git a/js/genkit/src/genkit.ts b/js/genkit/src/genkit.ts
index eef0320ba..ef4ea92d6 100644
--- a/js/genkit/src/genkit.ts
+++ b/js/genkit/src/genkit.ts
@@ -438,7 +438,7 @@ export class Genkit {
   ): ExecutablePrompt<z.infer<I>, O, CustomOptions> {
     const executablePrompt = async (
       input?: z.infer<I>,
-      opts?: PromptGenerateOptions<I, CustomOptions>
+      opts?: PromptGenerateOptions<O, CustomOptions>
     ): Promise<GenerateResponse> => {
       const renderedOpts = await (
         executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>
@@ -460,29 +460,11 @@ export class Genkit {
       });
       return this.generateStream(renderedOpts);
     };
-    (executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>).generate =
-      async <Out extends O>(
-        opt: PromptGenerateOptions<I, CustomOptions>
-      ): Promise<GenerateResponse<z.infer<Out>>> => {
-        const renderedOpts = await (
-          executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>
-        ).render(opt);
-        return this.generate(renderedOpts);
-      };
-    (executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>).generateStream =
-      async <Out extends O>(
-        opt: PromptGenerateOptions<I, CustomOptions>
-      ): Promise<GenerateStreamResponse<z.infer<Out>>> => {
-        const renderedOpts = await (
-          executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>
-        ).render(opt);
-        return this.generateStream(renderedOpts);
-      };
-    (executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>).render = async <
-      Out extends O,
-    >(
-      opt: PromptGenerateOptions<I, CustomOptions>
-    ): Promise<GenerateOptions<CustomOptions>> => {
+    (executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>).render = async (
+      opt: PromptGenerateOptions<O, CustomOptions> & {
+        input?: I;
+      }
+    ): Promise<GenerateOptions<O, CustomOptions>> => {
       let model: ModelAction | undefined;
       options = await options;
       try {
@@ -509,8 +491,8 @@ export class Genkit {
           ...opt.config,
         },
         model,
-      } as GenerateOptions;
-      delete (resultOptions as PromptGenerateOptions).input;
+      } as GenerateOptions<O, CustomOptions>;
+      delete (resultOptions as any).input;
       return resultOptions;
     };
     (executablePrompt as ExecutablePrompt<z.infer<I>, O, CustomOptions>).asTool =
diff --git a/js/genkit/tests/prompts_test.ts b/js/genkit/tests/prompts_test.ts
index 5a36291fa..7996f85f8 100644
--- a/js/genkit/tests/prompts_test.ts
+++ b/js/genkit/tests/prompts_test.ts
@@ -77,32 +77,6 @@ describe('definePrompt - dotprompt', () => {
     );
   });
 
-  it('calls dotprompt with .generate', async () => {
-    const hi = ai.definePrompt(
-      {
-        name: 'hi',
-        input: {
-          schema: z.object({
-            name: z.string(),
-          }),
-        },
-        config: {
-          temperature: 11,
-        },
-      },
-      'hi {{ name }}'
-    );
-
-    const response = await hi.generate({
-      input: { name: 'Genkit' },
-      config: { version: 'abc' },
-    });
-    assert.strictEqual(
-      response.text,
-      'Echo: hi Genkit; config: {"version":"abc","temperature":11}'
-    );
-  });
-
   it('calls dotprompt with default model via retrieved prompt', async () => {
     ai.definePrompt(
       {
@@ -215,39 +189,6 @@ describe('definePrompt - dotprompt', () => {
     assert.deepStrictEqual(chunks, ['3', '2', '1']);
   });
 
-  it('streams dotprompt .generateStream', async () => {
-    const hi = ai.definePrompt(
-      {
-        name: 'hi',
-        input: {
-          schema: z.object({
-            name: z.string(),
-          }),
-        },
-        config: {
-          temperature: 11,
-        },
-      },
-      'hi {{ name }}'
-    );
-
-    const { response, stream } = await hi.generateStream({
-      input: { name: 'Genkit' },
-      config: { version: 'abc' },
-    });
-    const chunks: string[] = [];
-    for await (const chunk of stream) {
-      chunks.push(chunk.text);
-    }
-    const responseText = (await response).text;
-
-    assert.strictEqual(
-      responseText,
-      'Echo: hi Genkit; config: {"version":"abc","temperature":11}'
-    );
-    assert.deepStrictEqual(chunks, ['3', '2', '1']);
-  });
-
   it('calls dotprompt with default model via retrieved prompt', async () => {
     ai.definePrompt(
       {
@@ -621,70 +562,6 @@ describe('definePrompt', () => {
       'Echo: hi Genkit; config: {"version":"abc","temperature":11}'
     );
   });
-
-  it('works with .generate', async () => {
-    const hi = ai.definePrompt(
-      {
-        name: 'hi',
-        model: 'echoModel',
-        input: {
-          schema: z.object({
-            name: z.string(),
-          }),
-        },
-      },
-      async (input) => {
-        return {
-          messages: [
-            { role: 'user', content: [{ text: `hi ${input.name}` }] },
-          ],
-        };
-      }
-    );
-
-    const response = await hi.generate({ input: { name: 'Genkit' } });
-    assert.strictEqual(response.text, 'Echo: hi Genkit; config: {}');
-  });
-
-  it('streams dotprompt with .generateStream', async () => {
-    const hi = ai.definePrompt(
-      {
-        name: 'hi',
-        input: {
-          schema: z.object({
-            name: z.string(),
-          }),
-        },
-        config: {
-          temperature: 11,
-        },
-      },
-      async (input) => {
-        return {
-          messages: [
-            { role: 'user', content: [{ text: `hi ${input.name}` }] },
-          ],
-        };
-      }
-    );
-
-    const { response, stream } = await hi.generateStream({
-      model: 'echoModel',
-      input: { name: 'Genkit' },
-      config: { version: 'abc' },
-    });
-    const chunks: string[] = [];
-    for await (const chunk of stream) {
-      chunks.push(chunk.text);
-    }
-    const responseText = (await response).text;
-
-    assert.strictEqual(
-      responseText,
-      'Echo: hi Genkit; config: {"version":"abc","temperature":11}'
-    );
-    assert.deepStrictEqual(chunks, ['3', '2', '1']);
-  });
 });
 
 describe('render', () => {
diff --git a/js/testapps/flow-simple-ai/src/index.ts b/js/testapps/flow-simple-ai/src/index.ts
index 29304c045..57ad3b404 100644
--- a/js/testapps/flow-simple-ai/src/index.ts
+++ b/js/testapps/flow-simple-ai/src/index.ts
@@ -338,10 +338,12 @@ export const dotpromptContext = ai.defineFlow(
       },
     ];
 
-    const result = await ai.prompt('dotpromptContext').generate({
-      input: { question: question },
-      docs,
-    });
+    const result = await ai.prompt('dotpromptContext')(
+      { question: question },
+      {
+        docs,
+      }
+    );
     return result.output as any;
   }
 );
diff --git a/js/testapps/menu/src/02/flows.ts b/js/testapps/menu/src/02/flows.ts
index d3376b05e..4815334ef 100644
--- a/js/testapps/menu/src/02/flows.ts
+++ b/js/testapps/menu/src/02/flows.ts
@@ -27,12 +27,8 @@ export const s02_menuQuestionFlow = ai.defineFlow(
     outputSchema: AnswerOutputSchema,
   },
   async (input) => {
-    return s02_dataMenuPrompt
-      .generate({
-        input: { question: input.question },
-      })
-      .then((response) => {
-        return { answer: response.text };
-      });
+    return s02_dataMenuPrompt({ question: input.question }).then((response) => {
+      return { answer: response.text };
+    });
   }
 );
diff --git a/js/testapps/menu/src/04/flows.ts b/js/testapps/menu/src/04/flows.ts
index 23e7590ee..b171caf4a 100644
--- a/js/testapps/menu/src/04/flows.ts
+++ b/js/testapps/menu/src/04/flows.ts
@@ -74,11 +74,9 @@ export const s04_ragMenuQuestionFlow = ai.defineFlow(
     );
 
     // Generate the response
-    const response = await s04_ragDataMenuPrompt.generate({
-      input: {
-        menuData: menuData,
-        question: input.question,
-      },
+    const response = await s04_ragDataMenuPrompt({
+      menuData: menuData,
+      question: input.question,
     });
     return { answer: response.text };
   }
diff --git a/js/testapps/menu/src/05/flows.ts b/js/testapps/menu/src/05/flows.ts
index 213e06812..30f0ac509 100644
--- a/js/testapps/menu/src/05/flows.ts
+++ b/js/testapps/menu/src/05/flows.ts
@@ -38,10 +38,8 @@ export const s05_readMenuFlow = ai.defineFlow(
   },
   async (unused) => {
     const imageDataUrl = await inlineDataUrl('menu.jpeg', 'image/jpeg');
-    const response = await s05_readMenuPrompt.generate({
-      input: {
-        imageUrl: imageDataUrl,
-      },
+    const response = await s05_readMenuPrompt({
+      imageUrl: imageDataUrl,
     });
     return { menuText: response.text };
   }
@@ -57,11 +55,9 @@ export const s05_textMenuQuestionFlow = ai.defineFlow(
     outputSchema: AnswerOutputSchema,
   },
   async (input) => {
-    const response = await s05_textMenuPrompt.generate({
-      input: {
-        menuText: input.menuText,
-        question: input.question,
-      },
+    const response = await s05_textMenuPrompt({
+      menuText: input.menuText,
+      question: input.question,
     });
     return { answer: response.text };
   }
diff --git a/js/testapps/prompt-file/src/index.ts b/js/testapps/prompt-file/src/index.ts
index 4d1003727..c94e8d3f2 100644
--- a/js/testapps/prompt-file/src/index.ts
+++ b/js/testapps/prompt-file/src/index.ts
@@ -60,8 +60,7 @@ ai.defineFlow(
     outputSchema: RecipeSchema,
   },
   async (input) =>
-    (await ai.prompt('recipe').generate({ input: input }))
-      .output!
+    (await ai.prompt('recipe')(input)).output!
 );
 
 ai.defineFlow(
@@ -73,8 +72,7 @@ ai.defineFlow(
     outputSchema: z.any(),
   },
   async (input) =>
-    (await ai.prompt('recipe', { variant: 'robot' }).generate({ input: input }))
-      .output
+    (await ai.prompt('recipe', { variant: 'robot' })(input)).output
 );
 
 // A variation that supports streaming, optionally
@@ -92,15 +90,16 @@ ai.defineStreamingFlow(
   async ({ subject, personality }, streamingCallback) => {
     const storyPrompt = ai.prompt('story');
     if (streamingCallback) {
-      const { response, stream } = await storyPrompt.generateStream({
-        input: { subject, personality },
+      const { response, stream } = await storyPrompt.stream({
+        subject,
+        personality,
       });
       for await (const chunk of stream) {
         streamingCallback(chunk.content[0]?.text!);
       }
       return (await response).text;
     } else {
-      const response = await storyPrompt.generate({ input: { subject } });
+      const response = await storyPrompt({ subject });
       return response.text;
     }
   }
diff --git a/js/testapps/rag/src/pdf_rag.ts b/js/testapps/rag/src/pdf_rag.ts
index 2c79c162a..7cb509d5e 100644
--- a/js/testapps/rag/src/pdf_rag.ts
+++ b/js/testapps/rag/src/pdf_rag.ts
@@ -45,15 +45,15 @@ export const pdfQA = ai.defineFlow(
       options: { k: 3 },
     });
 
-    return augmentedPrompt
-      .generate({
-        input: {
-          question: query,
-          context: docs.map((d) => d.text),
-        },
+    return augmentedPrompt(
+      {
+        question: query,
+        context: docs.map((d) => d.text),
+      },
+      {
         streamingCallback,
-      })
-      .then((r) => r.text);
+      }
+    ).then((r) => r.text);
   }
 );
diff --git a/js/testapps/rag/src/simple_rag.ts b/js/testapps/rag/src/simple_rag.ts
index 7c62cc72c..34e694a48 100644
--- a/js/testapps/rag/src/simple_rag.ts
+++ b/js/testapps/rag/src/simple_rag.ts
@@ -62,14 +62,10 @@ export const askQuestionsAboutCatsFlow = ai.defineFlow(
       query,
       options: { k: 3 },
     });
-    return augmentedPrompt
-      .generate({
-        input: {
-          question: query,
-          context: docs.map((d) => d.text),
-        },
-      })
-      .then((r) => r.text);
+    return augmentedPrompt({
+      question: query,
+      context: docs.map((d) => d.text),
+    }).then((r) => r.text);
   }
 );
 
@@ -87,14 +83,10 @@ export const askQuestionsAboutDogsFlow = ai.defineFlow(
       query,
       options: { k: 3 },
     });
-    return augmentedPrompt
-      .generate({
-        input: {
-          question: query,
-          context: docs.map((d) => d.text),
-        },
-      })
-      .then((r) => r.text);
+    return augmentedPrompt({
+      question: query,
+      context: docs.map((d) => d.text),
+    }).then((r) => r.text);
   }
 );
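
---
Migration note for reviewers: a minimal sketch of the call patterns after this change (illustrative only; it assumes a Genkit instance with a defined prompt `hi` that takes a `name` input variable, as exercised in the tests above):

  // Before: await hi.generate({ input: { name: 'Genkit' }, config: { version: 'abc' } })
  // After: invoke the executable prompt directly, input first, options second.
  const response = await hi({ name: 'Genkit' }, { config: { version: 'abc' } });
  console.log(response.text);

  // Before: await hi.generateStream({ input: { name: 'Genkit' } })
  // After: hi.stream() takes the same (input, options) pair.
  const { response: streamed, stream } = await hi.stream({ name: 'Genkit' });
  for await (const chunk of stream) {
    console.log(chunk.text);
  }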