
Commit 9d082b5

[Flight] model halted references explicitly (facebook#30731)
Using infinitely suspending promises isn't right, because such a row parses as a promise, which is only appropriate when the value we're halting at actually is a promise. Instead we need a special marker type that says this reference will never resolve. Additionally, the Flight client must not error halted references when the stream closes, because they would otherwise surface as errors.

Addresses facebook#30705 (comment)
1 parent d2413bf commit 9d082b5
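For orientation, here is a minimal editor's sketch of the mechanism this commit introduces, written against the diff below rather than the actual React source; the emitHaltedRowSketch / resolveBlockedSketch / onStreamCloseSketch names and the exact "id in hex, colon, tag" row framing are illustrative assumptions. The server writes a row tagged "#" for a reference that will never resolve, and the client parks the referenced chunk in a blocked state rather than pending, so closing the stream leaves it suspended instead of rejecting it.

// Editor's sketch (not part of the commit): row framing and chunk bookkeeping
// below are assumptions used to illustrate the "#" halted-reference marker.

// Server side: a halted reference is emitted as a row tagged "#" with no
// payload, meaning "this id will never resolve".
function emitHaltedRowSketch(id) {
  return id.toString(16) + ':#\n'; // e.g. "a:#\n" for id 10
}

// Client side: the referenced chunk is parked in a "blocked" state instead of
// "pending", so it neither resolves nor rejects.
const chunks = new Map(); // id -> {status, value}

function resolveBlockedSketch(id) {
  const chunk = chunks.get(id);
  if (!chunk) {
    chunks.set(id, {status: 'blocked', value: null});
  } else if (chunk.status === 'pending') {
    chunk.status = 'blocked';
  }
}

// When the stream closes, only still-pending chunks are errored; blocked
// (halted) chunks are left alone, so they don't surface as errors.
function onStreamCloseSketch() {
  for (const chunk of chunks.values()) {
    if (chunk.status === 'pending') {
      chunk.status = 'errored';
      chunk.value = new Error('Connection closed.');
    }
  }
}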

3 files changed: +149, -24 lines changed

packages/react-client/src/ReactFlightClient.js

Lines changed: 23 additions & 0 deletions
@@ -46,6 +46,7 @@ import {
   enableRefAsProp,
   enableFlightReadableStream,
   enableOwnerStacks,
+  enableHalt,
 } from 'shared/ReactFeatureFlags';
 
 import {
@@ -1986,6 +1987,20 @@ function resolvePostponeDev(
   }
 }
 
+function resolveBlocked(response: Response, id: number): void {
+  const chunks = response._chunks;
+  const chunk = chunks.get(id);
+  if (!chunk) {
+    chunks.set(id, createBlockedChunk(response));
+  } else if (chunk.status === PENDING) {
+    // This chunk was constructed via other means but it is actually a blocked chunk
+    // so we update it here. We check the status because it might have been aborted
+    // before we attempted to resolve it.
+    const blockedChunk: BlockedChunk<mixed> = (chunk: any);
+    blockedChunk.status = BLOCKED;
+  }
+}
+
 function resolveHint<Code: HintCode>(
   response: Response,
   code: Code,
@@ -2612,6 +2627,13 @@ function processFullStringRow(
       }
     }
     // Fallthrough
+    case 35 /* "#" */: {
+      if (enableHalt) {
+        resolveBlocked(response, id);
+        return;
+      }
+    }
+    // Fallthrough
     default: /* """ "{" "[" "t" "f" "n" "0" - "9" */ {
       // We assume anything else is JSON.
       resolveModel(response, id, row);
@@ -2668,6 +2690,7 @@ export function processBinaryChunk(
       i++;
     } else if (
       (resolvedRowTag > 64 && resolvedRowTag < 91) /* "A"-"Z" */ ||
+      resolvedRowTag === 35 /* "#" */ ||
       resolvedRowTag === 114 /* "r" */ ||
       resolvedRowTag === 120 /* "x" */
     ) {

packages/react-server-dom-webpack/src/__tests__/ReactFlightDOM-test.js

Lines changed: 101 additions & 0 deletions
@@ -2856,4 +2856,105 @@ describe('ReactFlightDOM', () => {
     jest.advanceTimersByTime('100');
     expect(await race).toBe('timeout');
   });
+
+  // @gate enableHalt
+  it('will halt unfinished chunks inside Suspense when aborting a prerender', async () => {
+    const controller = new AbortController();
+    function ComponentThatAborts() {
+      controller.abort();
+      return null;
+    }
+
+    async function Greeting() {
+      await 1;
+      return 'hello world';
+    }
+
+    async function Farewell() {
+      return 'goodbye world';
+    }
+
+    async function Wrapper() {
+      return (
+        <Suspense fallback="loading too...">
+          <ComponentThatAborts />
+        </Suspense>
+      );
+    }
+
+    function App() {
+      return (
+        <div>
+          <Suspense fallback="loading...">
+            <Greeting />
+          </Suspense>
+          <Wrapper />
+          <Suspense fallback="loading three...">
+            <Farewell />
+          </Suspense>
+        </div>
+      );
+    }
+
+    const errors = [];
+    const {pendingResult} = await serverAct(() => {
+      return {
+        pendingResult: ReactServerDOMStaticServer.prerenderToNodeStream(
+          <App />,
+          {},
+          {
+            onError(x) {
+              errors.push(x);
+            },
+            signal: controller.signal,
+          },
+        ),
+      };
+    });
+
+    controller.abort();
+
+    const {prelude} = await pendingResult;
+    expect(errors).toEqual([]);
+
+    const response = ReactServerDOMClient.createFromReadableStream(
+      Readable.toWeb(prelude),
+    );
+
+    const {writable: fizzWritable, readable: fizzReadable} = getTestStream();
+
+    function ClientApp() {
+      return use(response);
+    }
+    let abortFizz;
+    await serverAct(async () => {
+      const {pipe, abort} = ReactDOMFizzServer.renderToPipeableStream(
+        React.createElement(ClientApp),
+        {
+          onError(error, errorInfo) {
+            errors.push(error);
+          },
+        },
+      );
+      pipe(fizzWritable);
+      abortFizz = abort;
+    });
+
+    await serverAct(() => {
+      abortFizz('boom');
+    });
+
+    // one error per boundary
+    expect(errors).toEqual(['boom', 'boom', 'boom']);
+
+    const container = document.createElement('div');
+    await readInto(container, fizzReadable);
+    expect(getMeaningfulChildren(container)).toEqual(
+      <div>
+        {'loading...'}
+        {'loading too...'}
+        {'loading three...'}
+      </div>,
+    );
+  });
 });

packages/react-server/src/ReactFlightServer.js

Lines changed: 25 additions & 24 deletions
@@ -617,7 +617,7 @@ function serializeThenable(
       request.abortableTasks.delete(newTask);
       newTask.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, newTask.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -1820,7 +1820,6 @@ function serializeLazyID(id: number): string {
 function serializeInfinitePromise(): string {
   return '$@';
 }
-const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());
 
 function serializePromiseID(id: number): string {
   return '$@' + id.toString(16);
@@ -2208,9 +2207,6 @@ function renderModel(
     if (typeof x.then === 'function') {
       if (request.status === ABORTING) {
         task.status = ABORTED;
-        if (enableHalt && request.fatalError === haltSymbol) {
-          return serializeInfinitePromise();
-        }
         const errorId: number = (request.fatalError: any);
         if (wasReactNode) {
           return serializeLazyID(errorId);
@@ -2264,9 +2260,6 @@ function renderModel(
 
     if (request.status === ABORTING) {
       task.status = ABORTED;
-      if (enableHalt && request.fatalError === haltSymbol) {
-        return serializeInfinitePromise();
-      }
       const errorId: number = (request.fatalError: any);
       if (wasReactNode) {
         return serializeLazyID(errorId);
@@ -3008,6 +3001,12 @@ function emitPostponeChunk(
   request.completedErrorChunks.push(processedChunk);
 }
 
+function emitBlockedChunk(request: Request, id: number): void {
+  const row = serializeRowHeader('#', id) + '\n';
+  const processedChunk = stringToChunk(row);
+  request.completedErrorChunks.push(processedChunk);
+}
+
 function emitErrorChunk(
   request: Request,
   id: number,
@@ -3757,7 +3756,7 @@ function retryTask(request: Request, task: Task): void {
       request.abortableTasks.delete(task);
       task.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, task.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -3785,7 +3784,7 @@ function retryTask(request: Request, task: Task): void {
       request.abortableTasks.delete(task);
      task.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, task.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -3830,6 +3829,7 @@ function performWork(request: Request): void {
   currentRequest = request;
   prepareToUseHooksForRequest(request);
 
+  const hadAbortableTasks = request.abortableTasks.size > 0;
   try {
     const pingedTasks = request.pingedTasks;
     request.pingedTasks = [];
@@ -3840,10 +3840,11 @@ function performWork(request: Request): void {
     if (request.destination !== null) {
       flushCompletedChunks(request, request.destination);
     }
-    if (request.abortableTasks.size === 0) {
-      // we're done rendering
-      const onAllReady = request.onAllReady;
-      onAllReady();
+    if (hadAbortableTasks && request.abortableTasks.size === 0) {
+      // We can get pinged after completing, but by then there already
+      // wouldn't be any abortable tasks. So we only call allReady after
+      // the work that actually completed the last pending task.
+      allReady(request);
     }
   } catch (error) {
     logRecoverableError(request, error, null);
@@ -3868,15 +3869,6 @@ function abortTask(task: Task, request: Request, errorId: number): void {
   request.completedErrorChunks.push(processedChunk);
 }
 
-function haltTask(task: Task, request: Request): void {
-  if (task.status === RENDERING) {
-    // This task will be aborted by the render
-    return;
-  }
-  task.status = ABORTED;
-  emitModelChunk(request, task.id, reusableInfinitePromiseModel);
-}
-
 function flushCompletedChunks(
   request: Request,
   destination: Destination,
@@ -4055,6 +4047,7 @@ export function abort(request: Request, reason: mixed): void {
     }
     abortableTasks.forEach(task => abortTask(task, request, errorId));
     abortableTasks.clear();
+    allReady(request);
   }
   const abortListeners = request.abortListeners;
   if (abortListeners.size > 0) {
@@ -4110,8 +4103,11 @@ export function halt(request: Request, reason: mixed): void {
   // to that row from every row that's still remaining.
   if (abortableTasks.size > 0) {
     request.pendingChunks++;
-    abortableTasks.forEach(task => haltTask(task, request));
+    const errorId = request.nextChunkId++;
+    emitBlockedChunk(request, errorId);
+    abortableTasks.forEach(task => abortTask(task, request, errorId));
     abortableTasks.clear();
+    allReady(request);
   }
   const abortListeners = request.abortListeners;
   if (abortListeners.size > 0) {
@@ -4126,3 +4122,8 @@ export function halt(request: Request, reason: mixed): void {
     fatalError(request, error);
   }
 }
+
+function allReady(request: Request) {
+  const onAllReady = request.onAllReady;
+  onAllReady();
+}
