[Flight] When halting omit any reference rather than refer to a shared missing chunk (#30750)

When aborting a prerender we should leave references unfulfilled rather than point them all at a shared unfulfilled reference. Functionally this doesn't matter today because we don't have resuming, but the semantics are that the row was not available when the abort happened, and on a resume the row should fill in. By pointing each task at a common unfulfilled chunk we lose the ability for these references to resolve to distinct values on resume.
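
For context, a rough sketch of the wire-format difference (row ids and payloads here are illustrative, not taken from a real stream): an aborted render emits one shared error row and fulfills every outstanding slot with a reference to it, while a halted prerender claims the outstanding ids but never writes their rows.

// Aborted render: one shared error chunk; every pending slot points at it.
3:E{"digest":"aborted"}
1:"$3"
2:"$3"

// Halted prerender: rows 1 and 2 are claimed but never emitted, leaving
// each reference free to resolve to its own distinct value on resume.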
gnoff authored Aug 20, 2024
1 parent 2505bf9 commit 92d26c8
Showing 5 changed files with 153 additions and 58 deletions.
@@ -2797,7 +2797,16 @@ describe('ReactFlightDOM', () => {
abortFizz('bam');
});

expect(errors).toEqual(['bam']);
if (__DEV__) {
expect(errors).toEqual([new Error('Connection closed.')]);
} else {
// This is likely a bug. In Dev we get a connection closed error
// because the debug info creates a chunk that has a pending status
// and when the stream finishes we error if any chunks are still pending.
// In production there is no debug info so the missing chunk is never instantiated
// because nothing triggers model evaluation before the stream completes
expect(errors).toEqual(['bam']);
}

const container = document.createElement('div');
await readInto(container, fizzReadable);
@@ -2919,10 +2928,11 @@ describe('ReactFlightDOM', () => {
});

const {prelude} = await pendingResult;

expect(errors).toEqual(['boom']);
const response = ReactServerDOMClient.createFromReadableStream(
Readable.toWeb(prelude),
);

const preludeWeb = Readable.toWeb(prelude);
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);

const {writable: fizzWritable, readable: fizzReadable} = getTestStream();

@@ -2949,7 +2959,17 @@
});

// one error per boundary
expect(errors).toEqual(['boom', 'boom', 'boom']);
if (__DEV__) {
const err = new Error('Connection closed.');
expect(errors).toEqual([err, err, err]);
} else {
// This is likely a bug. In Dev we get a connection closed error
// because the debug info creates a chunk that has a pending status
// and when the stream finishes we error if any chunks are still pending.
// In production there is no debug info so the missing chunk is never instantiated
// because nothing triggers model evaluation before the stream completes
expect(errors).toEqual(['boom', 'boom', 'boom']);
}

const container = document.createElement('div');
await readInto(container, fizzReadable);
@@ -2454,12 +2454,28 @@ describe('ReactFlightDOMBrowser', () => {
passThrough(prelude),
);
const container = document.createElement('div');
const root = ReactDOMClient.createRoot(container);
errors.length = 0;
const root = ReactDOMClient.createRoot(container, {
onUncaughtError(err) {
errors.push(err);
},
});

await act(() => {
root.render(<ClientRoot response={response} />);
});

expect(container.innerHTML).toBe('<div>loading...</div>');
if (__DEV__) {
expect(errors).toEqual([new Error('Connection closed.')]);
expect(container.innerHTML).toBe('');
} else {
// This is likely a bug. In Dev we get a connection closed error
// because the debug info creates a chunk that has a pending status
// and when the stream finishes we error if any chunks are still pending.
// In production there is no debug info so the missing chunk is never instantiated
// because nothing triggers model evaluation before the stream completes
expect(errors).toEqual([]);
expect(container.innerHTML).toBe('<div>loading...</div>');
}
});
});
@@ -1172,7 +1172,16 @@ describe('ReactFlightDOMEdge', () => {
),
);
fizzController.abort('bam');
expect(errors).toEqual(['bam']);
if (__DEV__) {
expect(errors).toEqual([new Error('Connection closed.')]);
} else {
// This is likely a bug. In Dev we get a connection closed error
// because the debug info creates a chunk that has a pending status
// and when the stream finishes we error if any chunks are still pending.
// In production there is no debug info so the missing chunk is never instantiated
// because nothing triggers model evaluation before the stream completes
expect(errors).toEqual(['bam']);
}
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
@@ -509,7 +509,16 @@ describe('ReactFlightDOMNode', () => {
),
);
ssrStream.abort('bam');
expect(errors).toEqual(['bam']);
if (__DEV__) {
expect(errors).toEqual([new Error('Connection closed.')]);
} else {
// This is likely a bug. In Dev we get a connection closed error
// because the debug info creates a chunk that has a pending status
// and when the stream finishes we error if any chunks are still pending.
// In production there is no debug info so the missing chunk is never instantiated
// because nothing triggers model evaluation before the stream completes
expect(errors).toEqual(['bam']);
}
// Should still match the result when parsed
const result = await readResult(ssrStream);
const div = document.createElement('div');
139 changes: 90 additions & 49 deletions packages/react-server/src/ReactFlightServer.js
@@ -649,9 +649,13 @@ function serializeThenable(
// We can no longer accept any resolved values
request.abortableTasks.delete(newTask);
newTask.status = ABORTED;
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
emitModelChunk(request, newTask.id, model);
if (enableHalt && request.type === PRERENDER) {
request.pendingChunks--;
} else {
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
emitModelChunk(request, newTask.id, model);
}
return newTask.id;
}
if (typeof thenable.status === 'string') {
@@ -1633,6 +1637,24 @@ function outlineTask(request: Request, task: Task): ReactJSONValue {
return serializeLazyID(newTask.id);
}

function outlineHaltedTask(
request: Request,
task: Task,
allowLazy: boolean,
): ReactJSONValue {
// In the future, if we track task state for resuming, we may need to
// construct an actual task here, but since we're never going to retry it
// we just claim the id and serialize it according to the proper convention
const taskId = request.nextChunkId++;
if (allowLazy) {
// We're halting in a position that can handle a lazy reference
return serializeLazyID(taskId);
} else {
// We're halting in a position that needs a value reference
return serializeByValueID(taskId);
}
}
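
For reference, a simplified sketch of what the two existing helpers used above put on the wire (the "$" and "$L" prefixes are part of the Flight protocol; real ids are encoded in hex):

// serializeByValueID(5) -> "$5"   the slot needs row 5 before it can resolve
// serializeLazyID(5)    -> "$L5"  the slot is a lazy reference, so the receiver
//                                 can suspend on it and fill in when row 5 arrives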

function renderElement(
request: Request,
task: Task,
@@ -2278,6 +2300,20 @@ function renderModel(
((model: any).$$typeof === REACT_ELEMENT_TYPE ||
(model: any).$$typeof === REACT_LAZY_TYPE);

if (request.status === ABORTING) {
task.status = ABORTED;
if (enableHalt && request.type === PRERENDER) {
// This claims a new chunk id and refers to it from this slot;
// the halted slot won't be retried because we are aborting
return outlineHaltedTask(request, task, wasReactNode);
}
const errorId = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
}
return serializeByValueID(errorId);
}

const x =
thrownValue === SuspenseException
? // This is a special type of exception used for Suspense. For historical
@@ -2291,14 +2327,6 @@
if (typeof x === 'object' && x !== null) {
// $FlowFixMe[method-unbinding]
if (typeof x.then === 'function') {
if (request.status === ABORTING) {
task.status = ABORTED;
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
}
return serializeByValueID(errorId);
}
// Something suspended, we'll need to create a new task and resolve it later.
const newTask = createTask(
request,
@@ -2344,15 +2372,6 @@
}
}

if (request.status === ABORTING) {
task.status = ABORTED;
const errorId: number = (request.fatalError: any);
if (wasReactNode) {
return serializeLazyID(errorId);
}
return serializeByValueID(errorId);
}

// Restore the context. We assume that this will be restored by the inner
// functions in case nothing throws so we don't use "finally" here.
task.keyPath = prevKeyPath;
@@ -3820,6 +3839,22 @@ function retryTask(request: Request, task: Task): void {
request.abortableTasks.delete(task);
task.status = COMPLETED;
} catch (thrownValue) {
if (request.status === ABORTING) {
request.abortableTasks.delete(task);
task.status = ABORTED;
if (enableHalt && request.type === PRERENDER) {
// When aborting a prerender with halt semantics we don't emit
// anything into the slot for a task that aborts; it remains unresolved
request.pendingChunks--;
} else {
// Otherwise we emit an error chunk into the task slot.
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
emitModelChunk(request, task.id, model);
}
return;
}

const x =
thrownValue === SuspenseException
? // This is a special type of exception used for Suspense. For historical
@@ -3832,14 +3867,6 @@
if (typeof x === 'object' && x !== null) {
// $FlowFixMe[method-unbinding]
if (typeof x.then === 'function') {
if (request.status === ABORTING) {
request.abortableTasks.delete(task);
task.status = ABORTED;
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
emitModelChunk(request, task.id, model);
return;
}
// Something suspended again, let's pick it back up later.
task.status = PENDING;
task.thenableState = getThenableStateAfterSuspending();
@@ -3856,15 +3883,6 @@
}
}

if (request.status === ABORTING) {
request.abortableTasks.delete(task);
task.status = ABORTED;
const errorId: number = (request.fatalError: any);
const model = stringify(serializeByValueID(errorId));
emitModelChunk(request, task.id, model);
return;
}

request.abortableTasks.delete(task);
task.status = ERRORED;
const digest = logRecoverableError(request, x, task);
@@ -3942,6 +3960,17 @@ function abortTask(task: Task, request: Request, errorId: number): void {
request.completedErrorChunks.push(processedChunk);
}

function haltTask(task: Task, request: Request): void {
if (task.status === RENDERING) {
// this task is currently rendering; the render itself will halt it
return;
}
task.status = ABORTED;
// We don't actually emit anything for this task id because we are intentionally
// leaving the reference unfulfilled.
request.pendingChunks--;
}
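
A note on the bookkeeping here (a simplified sketch of the invariant; createTask elsewhere in this file does the matching increment): request.pendingChunks counts rows that have been promised but not yet flushed, and the stream is only closed once that counter drains, so a halted task has to hand back the row it will never write.

// createTask: request.pendingChunks++;  // row promised
// haltTask:   request.pendingChunks--;  // promise withdrawn, nothing emitted
// flushCompletedChunks closes the destination once pendingChunks hits zero.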

function flushCompletedChunks(
request: Request,
destination: Destination,
@@ -4087,12 +4116,6 @@ export function abort(request: Request, reason: mixed): void {
}
const abortableTasks = request.abortableTasks;
if (abortableTasks.size > 0) {
// We have tasks to abort. We'll emit one error row and then emit a reference
// to that row from every row that's still remaining if we are rendering. If we
// are prerendering (and halt semantics are enabled) we will refer to an error row
// but not actually emit it so the receiver can halt at that point rather than error.
const errorId = request.nextChunkId++;
request.fatalError = errorId;
if (
enablePostpone &&
typeof reason === 'object' &&
@@ -4101,10 +4124,20 @@
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, null);
if (!enableHalt || request.type === PRERENDER) {
// When prerendering with halt semantics we omit the referred to postpone.
if (enableHalt && request.type === PRERENDER) {
// When prerendering with halt semantics we simply halt the task
// and leave the reference unfulfilled.
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
} else {
// When rendering we produce a shared postpone chunk and then
// fulfill each task with a reference to that chunk.
const errorId = request.nextChunkId++;
request.fatalError = errorId;
request.pendingChunks++;
emitPostponeChunk(request, errorId, postponeInstance);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
}
} else {
const error =
@@ -4120,14 +4153,22 @@
)
: reason;
const digest = logRecoverableError(request, error, null);
if (!enableHalt || request.type === RENDER) {
// When prerendering with halt semantics we omit the referred to error.
if (enableHalt && request.type === PRERENDER) {
// When prerendering with halt semantics we simply halt the task
// and leave the reference unfulfilled.
abortableTasks.forEach(task => haltTask(task, request));
abortableTasks.clear();
} else {
// When rendering we produce a shared error chunk and then
// fulfill each task with a reference to that chunk.
const errorId = request.nextChunkId++;
request.fatalError = errorId;
request.pendingChunks++;
emitErrorChunk(request, errorId, digest, error);
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
}
}
abortableTasks.forEach(task => abortTask(task, request, errorId));
abortableTasks.clear();
const onAllReady = request.onAllReady;
onAllReady();
}
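Taken together, the two abort paths now diverge as follows (a condensed summary assuming enableHalt is on, not the literal control flow):

// prerender: haltTask each abortable task; no error or postpone row is
//            emitted and every halted reference stays unfulfilled.
// render:    emit a single shared error (or postpone) row and fulfill each
//            abortable task's slot with a reference to that row.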
