From 78a72fc3ff20bbc0ec48aa329e82513af2a71a6a Mon Sep 17 00:00:00 2001 From: Josh Story Date: Fri, 16 Aug 2024 18:32:57 -0700 Subject: [PATCH] [Flight] model halted references explicitly using infinitely suspending promises isn't right because this will parse as a promise which is only appropriate if the value we're halting at is a promise. Instead we need to have a special marker type that says this reference will never resolve. Additionally flight client needs to not error any halted references when the stream closes because they will otherwise appear as an error --- .../react-client/src/ReactFlightClient.js | 32 +++++++ .../src/__tests__/ReactFlightDOM-test.js | 88 +++++++++++++++++++ .../react-server/src/ReactFlightServer.js | 40 ++++++--- 3 files changed, 150 insertions(+), 10 deletions(-) diff --git a/packages/react-client/src/ReactFlightClient.js b/packages/react-client/src/ReactFlightClient.js index 564f4e859871f..0c118b9b3a18f 100644 --- a/packages/react-client/src/ReactFlightClient.js +++ b/packages/react-client/src/ReactFlightClient.js @@ -45,6 +45,7 @@ import { enableRefAsProp, enableFlightReadableStream, enableOwnerStacks, + enableHalt, } from 'shared/ReactFeatureFlags'; import { @@ -860,6 +861,25 @@ function getChunk(response: Response, id: number): SomeChunk { return chunk; } +/** + * Fork of waitForReference that doesn't ever reslve + */ +function waitForever() { + if (initializingHandler) { + initializingHandler.deps++; + } else { + initializingHandler = { + parent: null, + chunk: null, + value: null, + deps: 1, + errored: false, + }; + } + + return null; +} + function waitForReference( referencedChunk: SomeChunk, parentObject: Object, @@ -1227,6 +1247,18 @@ function parseModelString( } return readTemporaryReference(temporaryReferences, reference); } + case '&': { + if (enableHalt) { + if (value === '$&L') { + // This is a lazy wrapper for a halted reference + return createLazyChunkWrapper(createBlockedChunk()); + } + return waitForever(); + } + // 
It'd be an error if this ever happened when this flag is off + // but that should be impossible
+ + + + + + + + + +
+ ); + } + + const errors = []; + const {pendingResult} = await serverAct(() => { + return { + pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream( + , + {}, + { + onError(x) { + errors.push(x); + }, + signal: controller.signal, + }, + ), + }; + }); + + controller.abort(); + + const {prelude} = await pendingResult; + expect(errors).toEqual([]); + const response = ReactServerDOMClient.createFromReadableStream( + Readable.toWeb(prelude), + ); + + const {writable: fizzWritable, readable: fizzReadable} = getTestStream(); + + function ClientApp() { + return use(response); + } + let abortFizz; + await serverAct(async () => { + const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream( + React.createElement(ClientApp), + { + onError(error, errorInfo) { + errors.push(error); + }, + }, + ); + pipe(fizzWritable); + abortFizz = abort; + }); + + await serverAct(() => { + abortFizz('boom'); + }); + + // one error per boundary + expect(errors).toEqual(['boom', 'boom', 'boom']); + + const container = document.createElement('div'); + await readInto(container, fizzReadable); + expect(getMeaningfulChildren(container)).toEqual( +
+ {'loading...'} + {'loading too...'} + {'loading three...'} +
, + ); + }); }); diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js index db0ee7b3cebad..bd2b1f27b052c 100644 --- a/packages/react-server/src/ReactFlightServer.js +++ b/packages/react-server/src/ReactFlightServer.js @@ -615,7 +615,7 @@ function serializeThenable( request.abortableTasks.delete(newTask); newTask.status = ABORTED; if (enableHalt && request.fatalError === haltSymbol) { - emitModelChunk(request, newTask.id, reusableInfinitePromiseModel); + emitModelChunk(request, newTask.id, reusableHaltedReferenceModel); } else { const errorId: number = (request.fatalError: any); const model = stringify(serializeByValueID(errorId)); @@ -1818,7 +1818,6 @@ function serializeLazyID(id: number): string { function serializeInfinitePromise(): string { return '$@'; } -const reusableInfinitePromiseModel = stringify(serializeInfinitePromise()); function serializePromiseID(id: number): string { return '$@' + id.toString(16); @@ -1836,6 +1835,15 @@ function serializeLimitedObject(): string { return '$Y'; } +function serializeHaltedReference(): string { + return '$&'; +} +const reusableHaltedReferenceModel = '"$&"'; + +function serializeLazyHaltedReference(): string { + return '$&L'; +} + function serializeNumber(number: number): string | number { if (Number.isFinite(number)) { if (number === 0 && 1 / number === -Infinity) { @@ -2177,7 +2185,10 @@ function renderModel( if (request.status === ABORTING) { task.status = ABORTED; if (enableHalt && request.fatalError === haltSymbol) { - return serializeInfinitePromise(); + if (wasReactNode) { + return serializeLazyHaltedReference(); + } + return serializeHaltedReference(); } const errorId: number = (request.fatalError: any); if (wasReactNode) { @@ -2233,7 +2244,10 @@ function renderModel( if (request.status === ABORTING) { task.status = ABORTED; if (enableHalt && request.fatalError === haltSymbol) { - return serializeInfinitePromise(); + if (wasReactNode) { + return 
serializeLazyHaltedReference(); + } + return serializeHaltedReference(); } const errorId: number = (request.fatalError: any); if (wasReactNode) { @@ -3725,7 +3739,7 @@ function retryTask(request: Request, task: Task): void { request.abortableTasks.delete(task); task.status = ABORTED; if (enableHalt && request.fatalError === haltSymbol) { - emitModelChunk(request, task.id, reusableInfinitePromiseModel); + emitModelChunk(request, task.id, reusableHaltedReferenceModel); } else { const errorId: number = (request.fatalError: any); const model = stringify(serializeByValueID(errorId)); @@ -3753,7 +3767,7 @@ function retryTask(request: Request, task: Task): void { request.abortableTasks.delete(task); task.status = ABORTED; if (enableHalt && request.fatalError === haltSymbol) { - emitModelChunk(request, task.id, reusableInfinitePromiseModel); + emitModelChunk(request, task.id, reusableHaltedReferenceModel); } else { const errorId: number = (request.fatalError: any); const model = stringify(serializeByValueID(errorId)); @@ -3810,8 +3824,7 @@ function performWork(request: Request): void { } if (request.abortableTasks.size === 0) { // we're done rendering - const onAllReady = request.onAllReady; - onAllReady(); + allReady(request); } } catch (error) { logRecoverableError(request, error, null); @@ -3842,7 +3855,7 @@ function haltTask(task: Task, request: Request): void { return; } task.status = ABORTED; - emitModelChunk(request, task.id, reusableInfinitePromiseModel); + emitModelChunk(request, task.id, reusableHaltedReferenceModel); } function flushCompletedChunks( @@ -4057,6 +4070,7 @@ export function abort(request: Request, reason: mixed): void { if (request.destination !== null) { flushCompletedChunks(request, request.destination); } + allReady(request); } catch (error) { logRecoverableError(request, error, null); fatalError(request, error); @@ -4077,7 +4091,6 @@ export function halt(request: Request, reason: mixed): void { // We have tasks to abort. 
We'll emit one error row and then emit a reference // to that row from every row that's still remaining. if (abortableTasks.size > 0) { - request.pendingChunks++; abortableTasks.forEach(task => haltTask(task, request)); abortableTasks.clear(); } @@ -4089,8 +4102,15 @@ export function halt(request: Request, reason: mixed): void { if (request.destination !== null) { flushCompletedChunks(request, request.destination); } + allReady(request); } catch (error) { logRecoverableError(request, error, null); fatalError(request, error); } } + +function allReady(request: Request) { + const onAllReady = request.onAllReady; + onAllReady(); + request.onAllReady = noop; +}