diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
index aae9cf48c4285..41fc0bfd41088 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
abortFizz('bam');
});
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info so the missing chunk is never instantiated
+ // because nothing triggers model evaluation before the stream completes
+ expect(errors).toEqual(['bam']);
+ }
const container = document.createElement('div');
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
});
const {prelude} = await pendingResult;
+
expect(errors).toEqual(['boom']);
- const response = ReactServerDOMClient.createFromReadableStream(
- Readable.toWeb(prelude),
- );
+
+ const preludeWeb = Readable.toWeb(prelude);
+ const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
@@ -2949,7 +2959,17 @@ describe('ReactFlightDOM', () => {
});
// one error per boundary
- expect(errors).toEqual(['boom', 'boom', 'boom']);
+ if (__DEV__) {
+ const err = new Error('Connection closed.');
+ expect(errors).toEqual([err, err, err]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info so the missing chunk is never instantiated
+ // because nothing triggers model evaluation before the stream completes
+ expect(errors).toEqual(['boom', 'boom', 'boom']);
+ }
const container = document.createElement('div');
await readInto(container, fizzReadable);
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
index a4c5df377be57..fa1e65862564e 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMBrowser-test.js
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
passThrough(prelude),
);
const container = document.createElement('div');
- const root = ReactDOMClient.createRoot(container);
+ errors.length = 0;
+ const root = ReactDOMClient.createRoot(container, {
+ onUncaughtError(err) {
+ errors.push(err);
+ },
+ });
await act(() => {
root.render();
});
- expect(container.innerHTML).toBe('
loading...
');
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ expect(container.innerHTML).toBe('');
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info so the missing chunk is never instantiated
+ // because nothing triggers model evaluation before the stream completes
+ expect(errors).toEqual([]);
+ expect(container.innerHTML).toBe('loading...
');
+ }
});
});
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
index 1c146014dcefa..0cb3897aea443 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMEdge-test.js
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
),
);
fizzController.abort('bam');
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info so the missing chunk is never instantiated
+ // because nothing triggers model evaluation before the stream completes
+ expect(errors).toEqual(['bam']);
+ }
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
diff --git a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
index 620da74ff1db4..f2dca4a45c7fa 100644
--- a/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
+++ b/packages/react-server-dom-webpack/src/__tests__/ReactFlightDOMNode-test.js
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
),
);
ssrStream.abort('bam');
- expect(errors).toEqual(['bam']);
+ if (__DEV__) {
+ expect(errors).toEqual([new Error('Connection closed.')]);
+ } else {
+ // This is likely a bug. In Dev we get a connection closed error
+ // because the debug info creates a chunk that has a pending status
+ // and when the stream finishes we error if any chunks are still pending.
+ // In production there is no debug info so the missing chunk is never instantiated
+ // because nothing triggers model evaluation before the stream completes
+ expect(errors).toEqual(['bam']);
+ }
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
diff --git a/packages/react-server/src/ReactFlightServer.js b/packages/react-server/src/ReactFlightServer.js
index 9616d4b972911..c440c2bbf4fe5 100644
--- a/packages/react-server/src/ReactFlightServer.js
+++ b/packages/react-server/src/ReactFlightServer.js
@@ -649,9 +649,13 @@ function serializeThenable(
// We can no longer accept any resolved values
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, newTask.id, model);
+ if (enableHalt && request.type === PRERENDER) {
+ request.pendingChunks--;
+ } else {
+ const errorId: number = (request.fatalError: any);
+ const model = stringify(serializeByValueID(errorId));
+ emitModelChunk(request, newTask.id, model);
+ }
return newTask.id;
}
if (typeof thenable.status === 'string') {
@@ -2293,7 +2297,17 @@ function renderModel(
if (typeof x.then === 'function') {
if (request.status === ABORTING) {
task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
+ let errorId: number;
+ if (enableHalt && request.type === PRERENDER) {
+ // It is unfortunate that we consume an id here. It suggests something
+ // isn't quite right with our model. When halting we don't emit any chunks
+ // but we're not in a position where we are avoiding emitting an entire
+ // chunk so we have to return something in this slot within the model so we
+ // consume an id and return it here knowing it will never resolve.
+ errorId = request.nextChunkId++;
+ } else {
+ errorId = (request.fatalError: any);
+ }
if (wasReactNode) {
return serializeLazyID(errorId);
}
@@ -2346,7 +2360,17 @@ function renderModel(
if (request.status === ABORTING) {
task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
+ let errorId: number;
+ if (enableHalt && request.type === PRERENDER) {
+ // It is unfortunate that we consume an id here. It suggests something
+ // isn't quite right with our model. When halting we don't emit any chunks
+ // but we're not in a position where we are avoiding emitting an entire
+ // chunk so we have to return something in this slot within the model so we
+ // consume an id and return it here knowing it will never resolve.
+ errorId = request.nextChunkId++;
+ } else {
+ errorId = (request.fatalError: any);
+ }
if (wasReactNode) {
return serializeLazyID(errorId);
}
@@ -3820,6 +3844,22 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = COMPLETED;
} catch (thrownValue) {
+ if (request.status === ABORTING) {
+ request.abortableTasks.delete(task);
+ task.status = ABORTED;
+ if (enableHalt && request.type === PRERENDER) {
+ // When aborting a prerender with halt semantics we don't emit
+ // anything into the slot for a task that aborts, it remains unresolved
+ request.pendingChunks--;
+ } else {
+ // Otherwise we emit an error chunk into the task slot.
+ const errorId: number = (request.fatalError: any);
+ const model = stringify(serializeByValueID(errorId));
+ emitModelChunk(request, task.id, model);
+ }
+ return;
+ }
+
const x =
thrownValue === SuspenseException
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3872,6 @@ function retryTask(request: Request, task: Task): void {
if (typeof x === 'object' && x !== null) {
// $FlowFixMe[method-unbinding]
if (typeof x.then === 'function') {
- if (request.status === ABORTING) {
- request.abortableTasks.delete(task);
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, task.id, model);
- return;
- }
// Something suspended again, let's pick it back up later.
task.status = PENDING;
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3888,6 @@ function retryTask(request: Request, task: Task): void {
}
}
- if (request.status === ABORTING) {
- request.abortableTasks.delete(task);
- task.status = ABORTED;
- const errorId: number = (request.fatalError: any);
- const model = stringify(serializeByValueID(errorId));
- emitModelChunk(request, task.id, model);
- return;
- }
-
request.abortableTasks.delete(task);
task.status = ERRORED;
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3965,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
request.completedErrorChunks.push(processedChunk);
}
+function haltTask(task: Task, request: Request): void {
+ if (task.status === RENDERING) {
+ // this task will be halted by the render
+ return;
+ }
+ task.status = ABORTED;
+ // We don't actually emit anything for this task id because we are intentionally
+ // leaving the reference unfulfilled.
+ request.pendingChunks--;
+}
+
function flushCompletedChunks(
request: Request,
destination: Destination,
@@ -4087,12 +4121,6 @@ export function abort(request: Request, reason: mixed): void {
}
const abortableTasks = request.abortableTasks;
if (abortableTasks.size > 0) {
- // We have tasks to abort. We'll emit one error row and then emit a reference
- // to that row from every row that's still remaining if we are rendering. If we
- // are prerendering (and halt semantics are enabled) we will refer to an error row
- // but not actually emit it so the reciever can at that point rather than error.
- const errorId = request.nextChunkId++;
- request.fatalError = errorId;
if (
enablePostpone &&
typeof reason === 'object' &&
@@ -4101,10 +4129,20 @@ export function abort(request: Request, reason: mixed): void {
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, null);
- if (!enableHalt || request.type === PRERENDER) {
- // When prerendering with halt semantics we omit the referred to postpone.
+ if (enableHalt && request.type === PRERENDER) {
+ // When prerendering with halt semantics we simply halt the task
+ // and leave the reference unfulfilled.
+ abortableTasks.forEach(task => haltTask(task, request));
+ abortableTasks.clear();
+ } else {
+ // When rendering we produce a shared postpone chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
request.pendingChunks++;
emitPostponeChunk(request, errorId, postponeInstance);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
}
} else {
const error =
@@ -4120,14 +4158,22 @@ export function abort(request: Request, reason: mixed): void {
)
: reason;
const digest = logRecoverableError(request, error, null);
- if (!enableHalt || request.type === RENDER) {
- // When prerendering with halt semantics we omit the referred to error.
+ if (enableHalt && request.type === PRERENDER) {
+ // When prerendering with halt semantics we simply halt the task
+ // and leave the reference unfulfilled.
+ abortableTasks.forEach(task => haltTask(task, request));
+ abortableTasks.clear();
+ } else {
+ // When rendering we produce a shared error chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
request.pendingChunks++;
emitErrorChunk(request, errorId, digest, error);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
}
}
- abortableTasks.forEach(task => abortTask(task, request, errorId));
- abortableTasks.clear();
const onAllReady = request.onAllReady;
onAllReady();
}