Skip to content

Commit

Permalink
[Flight] model halted references explicitly
Browse files Browse the repository at this point in the history
Using infinitely suspending promises isn't right, because this will parse as a promise, which is only appropriate if the value we're halting at is a promise. Instead we need a special marker type that says this reference will never resolve. Additionally, the Flight client needs to not error any halted references when the stream closes, because they would otherwise appear as errors.
  • Loading branch information
gnoff committed Aug 17, 2024
1 parent 7954db9 commit 78a72fc
Show file tree
Hide file tree
Showing 3 changed files with 150 additions and 10 deletions.
32 changes: 32 additions & 0 deletions packages/react-client/src/ReactFlightClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ import {
enableRefAsProp,
enableFlightReadableStream,
enableOwnerStacks,
enableHalt,
} from 'shared/ReactFeatureFlags';

import {
Expand Down Expand Up @@ -860,6 +861,25 @@ function getChunk(response: Response, id: number): SomeChunk<any> {
return chunk;
}

/**
 * Fork of waitForReference for a reference that will never resolve.
 * Registers one extra dependency on the current initializing handler
 * (creating a fresh handler if none is active) and never fulfills it,
 * so the chunk stays blocked forever instead of erroring.
 */
function waitForever() {
  if (!initializingHandler) {
    initializingHandler = {
      parent: null,
      chunk: null,
      value: null,
      deps: 1,
      errored: false,
    };
  } else {
    initializingHandler.deps++;
  }

  return null;
}

function waitForReference<T>(
referencedChunk: SomeChunk<T>,
parentObject: Object,
Expand Down Expand Up @@ -1227,6 +1247,18 @@ function parseModelString(
}
return readTemporaryReference(temporaryReferences, reference);
}
case '&': {
if (enableHalt) {
if (value === '$&L') {
// This is a lazy wrapper for a halted reference
return createLazyChunkWrapper(createBlockedChunk());
}
return waitForever();
}
// It'd be an error if this ever happened when this flag is off
// but that should be impossible
// fallthrough
}
case 'Q': {
// Map
const ref = value.slice(2);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2856,4 +2856,92 @@ describe('ReactFlightDOM', () => {
jest.advanceTimersByTime('100');
expect(await race).toBe('timeout');
});

it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
const controller = new AbortController();
// Aborts the prerender from inside the render itself, while the sibling
// Suspense boundaries still have pending async work.
function ComponentThatAborts() {
controller.abort();
return null;
}

// Async component: its chunk is still unfinished when the abort fires.
async function Component() {
return 'hello world';
}

// Three boundaries: one aborting, two with pending async children.
function App() {
return (
<div>
<Suspense fallback="loading...">
<Component />
</Suspense>
<Suspense fallback="loading too...">
<ComponentThatAborts />
</Suspense>
<Suspense fallback="loading three...">
<Component />
</Suspense>
</div>
);
}

const errors = [];
const {pendingResult} = await serverAct(() => {
return {
pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream(
<App />,
{},
{
onError(x) {
errors.push(x);
},
signal: controller.signal,
},
),
};
});

controller.abort();

const {prelude} = await pendingResult;
// Halting must not report any errors on the server side.
expect(errors).toEqual([]);
const response = ReactServerDOMClient.createFromReadableStream(
Readable.toWeb(prelude),
);

const {writable: fizzWritable, readable: fizzReadable} = getTestStream();

function ClientApp() {
return use(response);
}
let abortFizz;
await serverAct(async () => {
const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
React.createElement(ClientApp),
{
onError(error, errorInfo) {
errors.push(error);
},
},
);
pipe(fizzWritable);
abortFizz = abort;
});

// The halted references never resolve, so the Fizz render has to be
// aborted explicitly; this should surface the abort reason per boundary.
await serverAct(() => {
abortFizz('boom');
});

// one error per boundary
expect(errors).toEqual(['boom', 'boom', 'boom']);

const container = document.createElement('div');
await readInto(container, fizzReadable);
// Every boundary client-renders its fallback since no chunk completed.
expect(getMeaningfulChildren(container)).toEqual(
<div>
{'loading...'}
{'loading too...'}
{'loading three...'}
</div>,
);
});
});
40 changes: 30 additions & 10 deletions packages/react-server/src/ReactFlightServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -615,7 +615,7 @@ function serializeThenable(
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
emitModelChunk(request, newTask.id, reusableHaltedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -1818,7 +1818,6 @@ function serializeLazyID(id: number): string {
function serializeInfinitePromise(): string {
return '$@';
}
const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());

function serializePromiseID(id: number): string {
return '$@' + id.toString(16);
Expand All @@ -1836,6 +1835,15 @@ function serializeLimitedObject(): string {
return '$Y';
}

function serializeHaltedReference(): string {
return '$&';
}
const reusableHaltedReferenceModel = '"$&"';
function serializeLazyHaltedReference(): string {
return '$&L';
}
function serializeNumber(number: number): string | number {
if (Number.isFinite(number)) {
if (number === 0 && 1 / number === -Infinity) {
Expand Down Expand Up @@ -2177,7 +2185,10 @@ function renderModel(
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
if (wasReactNode) {
return serializeLazyHaltedReference();
}
return serializeHaltedReference();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
Expand Down Expand Up @@ -2233,7 +2244,10 @@ function renderModel(
if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
return serializeInfinitePromise();
if (wasReactNode) {
return serializeLazyHaltedReference();
}
return serializeHaltedReference();
}
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
Expand Down Expand Up @@ -3725,7 +3739,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableHaltedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3753,7 +3767,7 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.fatalError === haltSymbol) {
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableHaltedReferenceModel);
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
Expand Down Expand Up @@ -3810,8 +3824,7 @@ function performWork(request: Request): void {
}
if (request.abortableTasks.size === 0) {
// we're done rendering
const onAllReady = request.onAllReady;
onAllReady();
allReady(request);
}
} catch (error) {
logRecoverableError(request, error, null);
Expand Down Expand Up @@ -3842,7 +3855,7 @@ function haltTask(task: Task, request: Request): void {
return;
}
task.status = ABORTED;
emitModelChunk(request, task.id, reusableInfinitePromiseModel);
emitModelChunk(request, task.id, reusableHaltedReferenceModel);
}

function flushCompletedChunks(
Expand Down Expand Up @@ -4057,6 +4070,7 @@ export function abort(request: Request, reason: mixed): void {
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
allReady(request);
} catch (error) {
logRecoverableError(request, error, null);
fatalError(request, error);
Expand All @@ -4077,7 +4091,6 @@ export function halt(request: Request, reason: mixed): void {
// We have tasks to abort. We'll emit one error row and then emit a reference
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
request.pendingChunks++;
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
}
Expand All @@ -4089,8 +4102,15 @@ export function halt(request: Request, reason: mixed): void {
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
allReady(request);
} catch (error) {
logRecoverableError(request, error, null);
fatalError(request, error);
}
}

function allReady(request: Request) {
const onAllReady = request.onAllReady;
onAllReady();
request.onAllReady = noop;
}

0 comments on commit 78a72fc

Please sign in to comment.