@@ -615,7 +615,7 @@ function serializeThenable(
         request.abortableTasks.delete(newTask);
         newTask.status = ABORTED;
         if (enableHalt && request.fatalError === haltSymbol) {
-          emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
+          emitBlockedChunk(request, newTask.id);
         } else {
           const errorId: number = (request.fatalError: any);
           const model = stringify(serializeByValueID(errorId));
@@ -1818,7 +1818,6 @@ function serializeLazyID(id: number): string {
 function serializeInfinitePromise(): string {
   return '$@';
 }
-const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());
 
 function serializePromiseID(id: number): string {
   return '$@' + id.toString(16);
@@ -2176,9 +2175,6 @@ function renderModel(
       if (typeof x.then === 'function') {
         if (request.status === ABORTING) {
           task.status = ABORTED;
-          if (enableHalt && request.fatalError === haltSymbol) {
-            return serializeInfinitePromise();
-          }
           const errorId: number = (request.fatalError: any);
           if (wasReactNode) {
             return serializeLazyID(errorId);
@@ -2232,9 +2228,6 @@ function renderModel(
 
     if (request.status === ABORTING) {
       task.status = ABORTED;
-      if (enableHalt && request.fatalError === haltSymbol) {
-        return serializeInfinitePromise();
-      }
       const errorId: number = (request.fatalError: any);
       if (wasReactNode) {
         return serializeLazyID(errorId);
@@ -2976,6 +2969,12 @@ function emitPostponeChunk(
   request.completedErrorChunks.push(processedChunk);
 }
 
+function emitBlockedChunk(request: Request, id: number): void {
+  const row = serializeRowHeader('#', id) + '\n';
+  const processedChunk = stringToChunk(row);
+  request.completedErrorChunks.push(processedChunk);
+}
+
 function emitErrorChunk(
   request: Request,
   id: number,
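For orientation: a "blocked" row is emitted under an id that is never fulfilled, so anything that references it stays suspended instead of receiving an error. Below is a minimal standalone sketch of that idea in TypeScript; blockedRows and receiveBlockedRow are hypothetical names for illustration only, not React's client API or wire format.

// Toy model of a blocked reference: the producer emits a row for `id` with a
// blocked marker and no payload, and the consumer maps it to a promise that
// never settles.
const blockedRows = new Map<number, Promise<never>>();

function receiveBlockedRow(id: number): void {
  // Intentionally never resolve or reject: readers of this id stay suspended
  // forever rather than throwing, which is the "halt" semantic.
  blockedRows.set(id, new Promise<never>(() => {}));
}

// Usage: after receiveBlockedRow(5), `await blockedRows.get(5)` suspends indefinitely.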
@@ -3725,7 +3724,7 @@ function retryTask(request: Request, task: Task): void {
       request.abortableTasks.delete(task);
       task.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, task.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -3753,7 +3752,7 @@ function retryTask(request: Request, task: Task): void {
       request.abortableTasks.delete(task);
       task.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, task.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -3798,6 +3797,7 @@ function performWork(request: Request): void {
   currentRequest = request;
   prepareToUseHooksForRequest(request);
 
+  const hadAbortableTasks = request.abortableTasks.size > 0;
   try {
     const pingedTasks = request.pingedTasks;
     request.pingedTasks = [];
@@ -3808,10 +3808,11 @@ function performWork(request: Request): void {
     if (request.destination !== null) {
       flushCompletedChunks(request, request.destination);
     }
-    if (request.abortableTasks.size === 0) {
-      // we're done rendering
-      const onAllReady = request.onAllReady;
-      onAllReady();
+    if (hadAbortableTasks && request.abortableTasks.size === 0) {
+      // We can ping after completing but if this happens there already
+      // wouldn't be any abortable tasks. So we only call allReady after
+      // the work which actually completed the last pending task
+      allReady(request);
     }
   } catch (error) {
     logRecoverableError(request, error, null);
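The hadAbortableTasks guard makes the all-ready notification edge-triggered: it fires only on the work pass that actually drained the last abortable task, so a ping that arrives after completion does not invoke it a second time. A standalone sketch of that pattern follows; the Scheduler class and its method names are hypothetical and unrelated to React's internals.

// Edge-triggered completion callback, mirroring the hadAbortableTasks guard.
class Scheduler {
  private pending = new Set<() => void>();
  constructor(private onAllReady: () => void) {}

  add(task: () => void): void {
    this.pending.add(task);
  }

  performWork(): void {
    const hadPending = this.pending.size > 0;
    for (const task of Array.from(this.pending)) {
      task();
      this.pending.delete(task);
    }
    if (hadPending && this.pending.size === 0) {
      // Only the pass that finished the last pending task notifies; a later
      // pass that starts with nothing pending stays silent.
      this.onAllReady();
    }
  }
}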
@@ -3836,15 +3837,6 @@ function abortTask(task: Task, request: Request, errorId: number): void {
   request.completedErrorChunks.push(processedChunk);
 }
 
-function haltTask(task: Task, request: Request): void {
-  if (task.status === RENDERING) {
-    // This task will be aborted by the render
-    return;
-  }
-  task.status = ABORTED;
-  emitModelChunk(request, task.id, reusableInfinitePromiseModel);
-}
-
 function flushCompletedChunks(
   request: Request,
   destination: Destination,
@@ -4023,6 +4015,7 @@ export function abort(request: Request, reason: mixed): void {
     }
     abortableTasks.forEach(task => abortTask(task, request, errorId));
     abortableTasks.clear();
+    allReady(request);
   }
   const abortListeners = request.abortListeners;
   if (abortListeners.size > 0) {
@@ -4078,8 +4071,11 @@ export function halt(request: Request, reason: mixed): void {
   // to that row from every row that's still remaining.
   if (abortableTasks.size > 0) {
     request.pendingChunks++;
-    abortableTasks.forEach(task => haltTask(task, request));
+    const errorId = request.nextChunkId++;
+    emitBlockedChunk(request, errorId);
+    abortableTasks.forEach(task => abortTask(task, request, errorId));
     abortableTasks.clear();
+    allReady(request);
   }
   const abortListeners = request.abortListeners;
   if (abortListeners.size > 0) {
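As this hunk suggests, halt now allocates a single chunk id, emits one blocked row for it, and aborts every remaining task against that shared id, which is why pendingChunks is incremented exactly once before the loop. Below is a rough standalone sketch of that fan-out; haltRemainingTasks, emitBlockedRow, and emitReferenceRow are illustrative stand-ins, not React's actual functions.

// One shared blocked row, referenced by every remaining task's row.
interface Task { id: number }

function haltRemainingTasks(
  tasks: Set<Task>,
  nextChunkId: { current: number },
  emitBlockedRow: (id: number) => void,
  emitReferenceRow: (taskId: number, refId: number) => void,
): void {
  if (tasks.size === 0) return;
  const blockedId = nextChunkId.current++;   // the one id shared by all remaining tasks
  emitBlockedRow(blockedId);                 // the single extra row the counter accounts for
  tasks.forEach(task => emitReferenceRow(task.id, blockedId));
  tasks.clear();
}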
@@ -4094,3 +4090,8 @@ export function halt(request: Request, reason: mixed): void {
     fatalError(request, error);
   }
 }
+
+function allReady(request: Request) {
+  const onAllReady = request.onAllReady;
+  onAllReady();
+}