[flight] When halting call onError/onPostpone
Halt was originally implemented as an alternative to error handling, so halted reasons were not exposed through any observability event like onError or onPostpone. We could add something like onAbort or onHalt in its place, but it's not clear that would be particularly well motivated. Instead, this change updates halt semantics to still call onError and onPostpone with the abort reason. A halt therefore doesn't change what you can observe, only the serialization model: you will still see errors through onError, but they won't propagate to the consumer as errors.
gnoff committed Aug 17, 2024
1 parent ea1bd79 commit 73954db
Showing 2 changed files with 129 additions and 95 deletions.
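
To make the new observability contract concrete, here is a minimal sketch of the behavior the updated tests below exercise. The prerender entry point name (prerenderToNodeStream from react-server-dom-webpack/static) and the App / webpackMap inputs are assumptions for illustration; the point is only that the abort reason now reaches onError on the server even though no error chunk is emitted for the halted rows.

const React = require('react');
// Assumed entry point name; use whichever Flight prerender API your bundler setup exposes.
const {prerenderToNodeStream} = require('react-server-dom-webpack/static');

async function demo(App, webpackMap) {
  const controller = new AbortController();
  const errors = [];

  const pendingResult = prerenderToNodeStream(
    React.createElement(App),
    webpackMap,
    {
      signal: controller.signal,
      onError(err) {
        // With this change the abort reason is still reported here.
        errors.push(err);
      },
    },
  );

  // Abort while work is still pending.
  controller.abort('boom');

  const {prelude} = await pendingResult;
  // errors now equals ['boom'], yet the prelude carries no error chunk for the
  // halted rows; on the client those rows simply never resolve.
  return {errors, prelude};
}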
@@ -2746,6 +2746,7 @@ describe('ReactFlightDOM', () => {
}

const controller = new AbortController();
const errors = [];
const {pendingResult} = await serverAct(async () => {
// destructure trick to keep the act scope from awaiting the returned value
return {
@@ -2754,15 +2755,20 @@
webpackMap,
{
signal: controller.signal,
onError(err) {
errors.push(err);
},
},
),
};
});

controller.abort();
controller.abort('boom');
resolveGreeting();
const {prelude} = await pendingResult;

expect(errors).toEqual(['boom']);

const preludeWeb = Readable.toWeb(prelude);
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);

@@ -2772,7 +2778,7 @@ describe('ReactFlightDOM', () => {
return use(response);
}

const errors = [];
errors.length = 0;
let abortFizz;
await serverAct(async () => {
const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
@@ -2788,10 +2794,10 @@
});

await serverAct(() => {
abortFizz('boom');
abortFizz('bam');
});

expect(errors).toEqual(['boom']);
expect(errors).toEqual(['bam']);

const container = document.createElement('div');
await readInto(container, fizzReadable);
@@ -2861,7 +2867,7 @@ describe('ReactFlightDOM', () => {
it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
const controller = new AbortController();
function ComponentThatAborts() {
controller.abort();
controller.abort('boom');
return null;
}

@@ -2901,10 +2907,8 @@
};
});

controller.abort();

const {prelude} = await pendingResult;
expect(errors).toEqual([]);
expect(errors).toEqual(['boom']);
const response = ReactServerDOMClient.createFromReadableStream(
Readable.toWeb(prelude),
);
@@ -2914,6 +2918,7 @@
function ClientApp() {
return use(response);
}
errors.length = 0;
let abortFizz;
await serverAct(async () => {
const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
203 changes: 116 additions & 87 deletions packages/react-server/src/ReactFlightServer.js
@@ -753,30 +753,32 @@ function serializeReadableStream(
}
aborted = true;
request.abortListeners.delete(error);

let cancelWith: mixed;
if (enableHalt && request.fatalError === haltSymbol) {
cancelWith = reason;
} else if (
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
cancelWith = reason;
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, streamTask);
emitPostponeChunk(request, streamTask.id, postponeInstance);
enqueueFlush(request);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitPostponeChunk(request, streamTask.id, postponeInstance);
enqueueFlush(request);
}
} else {
cancelWith = reason;
const digest = logRecoverableError(request, reason, streamTask);
emitErrorChunk(request, streamTask.id, digest, reason);
enqueueFlush(request);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitErrorChunk(request, streamTask.id, digest, reason);
enqueueFlush(request);
}
}

// $FlowFixMe should be able to pass mixed
reader.cancel(cancelWith).then(error, error);
reader.cancel(reason).then(error, error);
}

request.abortListeners.add(error);
@@ -880,30 +882,33 @@ function serializeAsyncIterable(
}
aborted = true;
request.abortListeners.delete(error);
let throwWith: mixed;
if (enableHalt && request.fatalError === haltSymbol) {
throwWith = reason;
} else if (
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
throwWith = reason;
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, streamTask);
emitPostponeChunk(request, streamTask.id, postponeInstance);
enqueueFlush(request);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitPostponeChunk(request, streamTask.id, postponeInstance);
enqueueFlush(request);
}
} else {
throwWith = reason;
const digest = logRecoverableError(request, reason, streamTask);
emitErrorChunk(request, streamTask.id, digest, reason);
enqueueFlush(request);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitErrorChunk(request, streamTask.id, digest, reason);
enqueueFlush(request);
}
}
if (typeof (iterator: any).throw === 'function') {
// The iterator protocol doesn't necessarily include this but generators do.
// $FlowFixMe should be able to pass mixed
iterator.throw(throwWith).then(error, error);
iterator.throw(reason).then(error, error);
}
}
request.abortListeners.add(error);
@@ -2095,18 +2100,31 @@ function serializeBlob(request: Request, blob: Blob): string {
}
aborted = true;
request.abortListeners.delete(error);
let cancelWith: mixed;
if (enableHalt && request.fatalError === haltSymbol) {
cancelWith = reason;
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, newTask);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitPostponeChunk(request, newTask.id, postponeInstance);
enqueueFlush(request);
}
} else {
cancelWith = reason;
const digest = logRecoverableError(request, reason, newTask);
emitErrorChunk(request, newTask.id, digest, reason);
request.abortableTasks.delete(newTask);
enqueueFlush(request);
if (enableHalt && request.fatalError === haltSymbol) {
request.pendingChunks--;
} else {
emitErrorChunk(request, newTask.id, digest, reason);
enqueueFlush(request);
}
}
// $FlowFixMe should be able to pass mixed
reader.cancel(cancelWith).then(error, error);
reader.cancel(reason).then(error, error);
}
request.abortListeners.add(error);
@@ -3998,14 +4016,15 @@ export function stopFlowing(request: Request): void {

// This is called to terminate a request early. It creates an error for all pending tasks.
export function abort(request: Request, reason: mixed): void {
if (request.status === OPEN) {
request.status = ABORTING;
}
try {
if (request.status === OPEN) {
request.status = ABORTING;
}
const abortableTasks = request.abortableTasks;
// We have tasks to abort. We'll emit one error row and then emit a reference
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
request.status = ABORTING;
request.pendingChunks++;
const errorId = request.nextChunkId++;
request.fatalError = errorId;
@@ -4019,54 +4038,14 @@
logPostpone(request, postponeInstance.message, null);
emitPostponeChunk(request, errorId, postponeInstance);
} else {
const error =
reason === undefined
? new Error(
'The render was aborted by the server without a reason.',
)
: typeof reason === 'object' &&
reason !== null &&
typeof reason.then === 'function'
? new Error(
'The render was aborted by the server with a promise.',
)
: reason;
const error = resolveAbortError(reason);
const digest = logRecoverableError(request, error, null);
emitErrorChunk(request, errorId, digest, error);
}
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
let error;
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
// We aborted with a Postpone but since we're passing this to an
// external handler, passing this object would leak it outside React.
// We create an alternative reason for it instead.
error = new Error('The render was aborted due to being postponed.');
} else {
error =
reason === undefined
? new Error(
'The render was aborted by the server without a reason.',
)
: typeof reason === 'object' &&
reason !== null &&
typeof reason.then === 'function'
? new Error(
'The render was aborted by the server with a promise.',
)
: reason;
}
abortListeners.forEach(callback => callback(error));
abortListeners.clear();
}
abortAnyListeners(reason, request.abortListeners);
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
@@ -4082,23 +4061,32 @@ const haltSymbol = Symbol('halt');
// This is called to stop rendering without erroring. All unfinished work is represented as Promises
// that never resolve.
export function halt(request: Request, reason: mixed): void {
if (request.status === OPEN) {
request.status = ABORTING;
}
request.fatalError = haltSymbol;
try {
if (request.status === OPEN) {
request.status = ABORTING;
}
request.fatalError = haltSymbol;
const abortableTasks = request.abortableTasks;
// We have tasks to abort. We'll emit one error row and then emit a reference
// to that row from every row that's still remaining.
if (abortableTasks.size > 0) {
// We have tasks to halt. We will log the error or postpone but we don't
// emit an error or postpone chunk. Instead we will emit a reference that
// never resolves on the client.
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, null);
} else {
const error = resolveAbortError(reason);
logRecoverableError(request, error, null);
}
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
}
const abortListeners = request.abortListeners;
if (abortListeners.size > 0) {
abortListeners.forEach(callback => callback(reason));
abortListeners.clear();
}
abortAnyListeners(reason, request.abortListeners);
if (request.destination !== null) {
flushCompletedChunks(request, request.destination);
}
@@ -4109,6 +4097,47 @@ export function halt(request: Request, reason: mixed): void {
}
}

function resolveAbortError(reason: mixed): mixed {
return reason === undefined
? new Error('The render was aborted by the server without a reason.')
: typeof reason === 'object' &&
reason !== null &&
typeof reason.then === 'function'
? new Error('The render was aborted by the server with a promise.')
: reason;
}

function abortAnyListeners(
reason: mixed,
listeners: Set<(reason: mixed) => void>,
) {
if (listeners.size > 0) {
let error;
if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
// We aborted with a Postpone but since we're passing this to an
// external handler, passing this object would leak it outside React.
// We create an alternative reason for it instead.
error = new Error('The render was aborted due to being postponed.');
} else {
error =
reason === undefined
? new Error('The render was aborted by the server without a reason.')
: typeof reason === 'object' &&
reason !== null &&
typeof reason.then === 'function'
? new Error('The render was aborted by the server with a promise.')
: reason;
}
listeners.forEach(callback => callback(error));
listeners.clear();
}
}

function allReady(request: Request) {
const onAllReady = request.onAllReady;
onAllReady();
