Introduce transcript newlines for function calls/responses (#499)
The TTS buffer is only flushed when it sees content after a sentence.
To ensure that it is also flushed after any filler text that precedes a
function call, this change:
- Adds the `batchFrames` render option to ai-jsx to coalesce updates
from different parts of the render tree. (This delays surfacing frames
until further rendering is blocked on I/O; see the usage sketch after
this list.)
- Adds message-level state tracking (`"in-progress" | "done"`) to text
messages in `fixie-sdk`
- Changes the Fixie backend in the voice demo to emit newlines after
`"done"` text messages and to stop appending any text that follows an
`"in-progress"` message
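
For reference, a minimal sketch of the new option in use (mirroring the
fixie-serve-bin change and the render tests below; `MyAgent` is a
hypothetical component):

    import * as AI from 'ai-jsx';

    // A trivial hypothetical component that yields synchronously (no I/O).
    async function* MyAgent() {
      yield 'thinking...';
      return 'done thinking';
    }

    const renderResult = AI.createRenderContext().render(<MyAgent />, { batchFrames: true });
    for await (const frame of renderResult) {
      // Because MyAgent never blocks on I/O, its intermediate frames are
      // coalesced away and this loop body is never reached.
      console.log(frame);
    }
    console.log(await renderResult); // 'done thinking'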

---------

Co-authored-by: Justin Uberti <justin@fixie.ai>
petersalas and juberti authored Nov 20, 2023
1 parent 574f2f8 commit 83627e8
Showing 10 changed files with 120 additions and 13 deletions.
2 changes: 1 addition & 1 deletion packages/ai-jsx/package.json
@@ -4,7 +4,7 @@
"repository": "fixie-ai/ai-jsx",
"bugs": "https://github.com/fixie-ai/ai-jsx/issues",
"homepage": "https://ai-jsx.com",
"version": "0.26.1",
"version": "0.27.0",
"volta": {
"extends": "../../package.json"
},
39 changes: 38 additions & 1 deletion packages/ai-jsx/src/core/render.ts
@@ -105,6 +105,12 @@ interface RenderOpts<TIntermediate = string, TFinal = string> {
* Indicates that the stream should be append-only.
*/
appendOnly?: boolean;

/**
* Indicates that intermediate frames should be skipped if the next
* frame is available without performing I/O.
*/
batchFrames?: boolean;
}

/**
@@ -424,8 +430,35 @@ function createRenderContextInternal(
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
const shouldStop = (opts?.stop || (() => false)) as ElementPredicate;
const generatorToWrap = renderStream(context, renderable, shouldStop, Boolean(opts?.appendOnly));

let nextPromise = generatorToWrap.next();
while (true) {
- const next = await generatorToWrap.next();
+ let next = await nextPromise;

if (!next.done && opts?.batchFrames) {
// We use `setImmediate` or `setTimeout` to ensure that all (recursively) queued microtasks
// are completed. (Promise.then handlers are queued as microtasks.)
// See https://developer.mozilla.org/en-US/docs/Web/API/HTML_DOM_API/Microtask_guide
const nullPromise = new Promise<null>((resolve) => {
if ('setImmediate' in globalThis) {
setImmediate(() => resolve(null));
} else {
setTimeout(() => resolve(null), 0);
}
});

while (!next.done) {
nextPromise = generatorToWrap.next();

// Consume from the generator until the null promise resolves.
const nextOrNull = await Promise.race([nextPromise, nullPromise]);
if (nextOrNull === null) {
break;
}
next = nextOrNull;
}
}

const value = opts?.stop ? (next.value as TFinal) : (next.value.join('') as TFinal);
if (next.done) {
if (promiseResult === null) {
@@ -444,6 +477,10 @@
// Otherwise yield the (string) value as-is.
yield value;
}

if (!opts?.batchFrames) {
nextPromise = generatorToWrap.next();
}
}
})() as AsyncGenerator<TIntermediate, TFinal>;

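The batching loop above follows a general pattern worth noting: hold on to
the generator's pending next() promise and race it against a macrotask, so
that every frame reachable without performing I/O collapses into the single
most recent one. A standalone sketch of that pattern (the coalesceFrames
wrapper is illustrative, not an ai-jsx API):

    async function* coalesceFrames<T, R>(gen: AsyncGenerator<T, R>): AsyncGenerator<T, R> {
      let pending = gen.next();
      while (true) {
        let next = await pending;

        if (!next.done) {
          // Resolves only after all currently-queued microtasks have run:
          // setImmediate on Node.js, setTimeout(0) elsewhere.
          const macrotask = new Promise<null>((resolve) => {
            if ('setImmediate' in globalThis) {
              setImmediate(() => resolve(null));
            } else {
              setTimeout(() => resolve(null), 0);
            }
          });

          // Keep consuming until the next value would block past the macrotask.
          while (!next.done) {
            pending = gen.next();
            const result = await Promise.race([pending, macrotask]);
            if (result === null) {
              break; // The next value requires I/O; surface what we have.
            }
            next = result;
          }
        }

        if (next.done) {
          return next.value;
        }
        yield next.value;
      }
    }

Because `pending` is held across iterations, the next() promise that loses
the race is not dropped; it is awaited again after the coalesced frame is
yielded, which is the same trick the render loop plays with nextPromise.
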
34 changes: 34 additions & 0 deletions packages/examples/test/core/render.tsx
@@ -0,0 +1,34 @@
import * as AI from 'ai-jsx';

it('ensures that synchronous updates are not batched when batchFrames is false', async () => {
async function* MyComponent() {
yield '1';
yield '2';
return '3';
}

const ctx = AI.createRenderContext();
const renderResult = ctx.render(<MyComponent />, { batchFrames: false });
const frames: string[] = [];
for await (const frame of renderResult) {
frames.push(frame);
}
expect(frames).toEqual(['1', '2']);
expect(await renderResult).toBe('3');
});

it('ensures that synchronous updates are batched when batchFrames is true', async () => {
async function* MyComponent() {
yield '1';
yield '2';
return '3';
}

const ctx = AI.createRenderContext();
const renderResult = ctx.render(<MyComponent />, { batchFrames: true });
// eslint-disable-next-line @typescript-eslint/no-unused-vars
for await (const _ of renderResult) {
throw new Error('Render updates should be batched');
}
expect(await renderResult).toBe('3');
});
5 changes: 4 additions & 1 deletion packages/fixie-sdk/.eslintrc.cjs
@@ -15,7 +15,10 @@ module.exports = {

rules: {
'no-unused-vars': 'off',
- '@typescript-eslint/no-unused-vars': ['warn', { ignoreRestSiblings: true, argsIgnorePattern: '^_' }],
+ '@typescript-eslint/no-unused-vars': [
+   'warn',
+   { ignoreRestSiblings: true, argsIgnorePattern: '^_', varsIgnorePattern: '^_' },
+ ],

'no-undef': 'off',
'no-magic-numbers': 'off',
4 changes: 2 additions & 2 deletions packages/fixie-sdk/package.json
@@ -1,6 +1,6 @@
{
"name": "@fixieai/sdk",
"version": "1.6.0",
"version": "1.7.0",
"license": "MIT",
"repository": "fixie-ai/ai-jsx",
"bugs": "https://github.com/fixie-ai/ai-jsx/issues",
@@ -21,7 +21,7 @@
"fixie-serve-bin": "dist/fixie-serve-bin.js"
},
"peerDependencies": {
"ai-jsx": ">=0.17.3 <1.0.0"
"ai-jsx": ">=0.27.0 <1.0.0"
},
"dependencies": {
"@opentelemetry/api": "^1.6.0",
5 changes: 4 additions & 1 deletion packages/fixie-sdk/src/fixie-serve-bin.tsx
@@ -71,7 +71,10 @@ async function serve({
<Handler {...(invokeAgentRequest.parameters ?? {})} />
</FixieRequestWrapper>
);
- const generator = createRenderContext({ enableOpenTelemetry: true }).render(renderable)[Symbol.asyncIterator]();
+
+ // Enable frame batching to run ahead as aggressively as possible and ensure we don't have frame tearing. (See MessageState.)
+ const renderResult = createRenderContext({ enableOpenTelemetry: true }).render(renderable, { batchFrames: true });
+ const generator = renderResult[Symbol.asyncIterator]();
return res
.status(200)
.type('application/jsonl')
23 changes: 23 additions & 0 deletions packages/fixie-sdk/src/request-wrapper.tsx
@@ -13,6 +13,28 @@ export const RequestContext = AI.createContext<{
agentId: string;
} | null>(null);

/**
* Renders to "in-progress" while `children` is still being rendered, and "done" when it's done.
*
* `children` should already be memoized to ensure that it's only rendered once.
*
* To ensure that this component renders consistently with `children`, a render containing both
* nodes MUST use frame batching. Without it, there will be frames where the result of this component
* will be inconsistent with the component whose rendering it's tracking.
*/
async function* MessageState({ children }: { children: AI.Node }, { render }: AI.ComponentContext) {
const renderResult = render(children);
let didYield = false;
for await (const _ of renderResult) {
if (!didYield) {
didYield = true;
yield 'in-progress';
}
}

return 'done';
}

/**
* Wraps a conversational AI.JSX component to be used as a Fixie request handler.
*
@@ -49,6 +71,7 @@ export function FixieRequestWrapper({
<Json>
{{
kind: 'text',
state: <MessageState>{message.element}</MessageState>,
content: message.element,
metadata: message.element.props.metadata,
}}
1 change: 1 addition & 0 deletions packages/fixie-sdk/src/types.ts
@@ -23,6 +23,7 @@ export interface FunctionResponseMessage extends MessageBase {
export interface TextMessage extends MessageBase {
kind: 'text';
content: string;
state?: 'in-progress' | 'done';
}

export type Message = FunctionCallMessage | FunctionResponseMessage | TextMessage;
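
For illustration, the new field on the wire might look like this (values are
hypothetical, and fields inherited from MessageBase are omitted):

    // While the LLM is still generating the message:
    const inProgress: Partial<TextMessage> = {
      kind: 'text',
      content: 'Let me look that up',
      state: 'in-progress',
    };

    // Once the message has finished rendering. The voice demo appends "\n\n"
    // after messages in this state to flush the TTS buffer.
    const done: Partial<TextMessage> = {
      kind: 'text',
      content: 'Let me look that up for you.',
      state: 'done',
    };
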
18 changes: 12 additions & 6 deletions packages/voice/src/app/agent/chat.tsx
@@ -183,13 +183,19 @@ export class ChatRequest {

if (!this.done) {
const currentTurn = isStartConversationRequest ? value.turns.at(-1) : value;
- const currentMessage = currentTurn.messages
-   .filter((m: any) => m.kind === 'text')
-   .map((m: any) => m.content)
-   .join(' ');
-
- if (currentMessage === this.outMessage) {
-   continue;
+ const textMessages = currentTurn.messages.filter((m: any) => m.kind === 'text');
+ let currentMessage = '';
+ for (const textMessage of textMessages) {
+   currentMessage += textMessage.content;
+   const messageState = textMessage.state;
+   if (messageState === 'in-progress') {
+     // This message is still being generated, so don't include any text after it.
+     break;
+   } else if (messageState === 'done') {
+     // Append two newlines to end the paragraph (i.e. make clear to the TTS pipeline that the text is complete).
+     currentMessage += '\n\n';
+   }
+ }

// Find the longest matching prefix.
2 changes: 1 addition & 1 deletion yarn.lock
@@ -3102,7 +3102,7 @@ __metadata:
typescript: ^5.1.3
yargs: ^17.7.2
peerDependencies:
ai-jsx: ">=0.17.3 <1.0.0"
ai-jsx: ">=0.27.0 <1.0.0"
bin:
fixie-serve-bin: dist/fixie-serve-bin.js
languageName: unknown

4 comments on commit 83627e8


@vercel vercel bot commented on 83627e8 Nov 20, 2023


Successfully deployed to the following URLs:

ai-jsx-voice – ./packages/voice

ai-jsx-voice-fixie-ai.vercel.app
voice.fixie.ai
ai-jsx-voice.vercel.app
ai-jsx-voice-git-main-fixie-ai.vercel.app


@vercel vercel bot commented on 83627e8 Nov 20, 2023


Successfully deployed to the following URLs:

ai-jsx-docs – ./packages/docs

ai-jsx-docs.vercel.app
ai-jsx-docs-git-main-fixie-ai.vercel.app
ai-jsx-docs-fixie-ai.vercel.app
docs.ai-jsx.com


@vercel vercel bot commented on 83627e8 Nov 20, 2023


Successfully deployed to the following URLs:

ai-jsx-nextjs-demo – ./packages/nextjs-demo

ai-jsx-nextjs-demo.vercel.app
ai-jsx-nextjs-demo-git-main-fixie-ai.vercel.app
ai-jsx-nextjs-demo-fixie-ai.vercel.app


@vercel vercel bot commented on 83627e8 Nov 20, 2023


Successfully deployed to the following URLs:

ai-jsx-tutorial-nextjs – ./packages/tutorial-nextjs

ai-jsx-tutorial-nextjs.vercel.app
ai-jsx-tutorial-nextjs-git-main-fixie-ai.vercel.app
ai-jsx-tutorial-nextjs-fixie-ai.vercel.app
