@@ -617,7 +617,7 @@ function serializeThenable(
         request.abortableTasks.delete(newTask);
         newTask.status = ABORTED;
         if (enableHalt && request.fatalError === haltSymbol) {
-          emitModelChunk(request, newTask.id, reusableInfinitePromiseModel);
+          emitBlockedChunk(request, newTask.id);
         } else {
           const errorId: number = (request.fatalError: any);
           const model = stringify(serializeByValueID(errorId));
@@ -1820,7 +1820,6 @@ function serializeLazyID(id: number): string {
 function serializeInfinitePromise(): string {
   return '$@';
 }
-const reusableInfinitePromiseModel = stringify(serializeInfinitePromise());
 
 function serializePromiseID(id: number): string {
   return '$@' + id.toString(16);
@@ -2208,9 +2207,6 @@ function renderModel(
       if (typeof x.then === 'function') {
         if (request.status === ABORTING) {
           task.status = ABORTED;
-          if (enableHalt && request.fatalError === haltSymbol) {
-            return serializeInfinitePromise();
-          }
           const errorId: number = (request.fatalError: any);
           if (wasReactNode) {
             return serializeLazyID(errorId);
@@ -2264,9 +2260,6 @@ function renderModel(
 
     if (request.status === ABORTING) {
       task.status = ABORTED;
-      if (enableHalt && request.fatalError === haltSymbol) {
-        return serializeInfinitePromise();
-      }
       const errorId: number = (request.fatalError: any);
       if (wasReactNode) {
         return serializeLazyID(errorId);
@@ -3008,6 +3001,12 @@ function emitPostponeChunk(
   request.completedErrorChunks.push(processedChunk);
 }
 
+function emitBlockedChunk(request: Request, id: number): void {
+  const row = serializeRowHeader('#', id) + '\n';
+  const processedChunk = stringToChunk(row);
+  request.completedErrorChunks.push(processedChunk);
+}
+
 function emitErrorChunk(
   request: Request,
   id: number,
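
For reference, the blocked row added here carries no payload: it is just a row header tagged '#'. Below is a minimal sketch of the bytes emitBlockedChunk pushes, assuming serializeRowHeader(tag, id) produces id.toString(16) + ':' + tag as for the other row types in this module; blockedRow is a hypothetical stand-in, not code from this file.

// Hypothetical illustration of the row emitted by emitBlockedChunk above,
// assuming serializeRowHeader(tag, id) === id.toString(16) + ':' + tag.
function blockedRow(id) {
  return id.toString(16) + ':#\n';
}

console.log(JSON.stringify(blockedRow(10))); // prints "a:#\n" (chunk 0xa marked as blocked)
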
@@ -3757,7 +3756,7 @@ function retryTask(request: Request, task: Task): void {
         request.abortableTasks.delete(task);
         task.status = ABORTED;
         if (enableHalt && request.fatalError === haltSymbol) {
-          emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+          emitBlockedChunk(request, task.id);
         } else {
           const errorId: number = (request.fatalError: any);
           const model = stringify(serializeByValueID(errorId));
@@ -3785,7 +3784,7 @@ function retryTask(request: Request, task: Task): void {
       request.abortableTasks.delete(task);
       task.status = ABORTED;
       if (enableHalt && request.fatalError === haltSymbol) {
-        emitModelChunk(request, task.id, reusableInfinitePromiseModel);
+        emitBlockedChunk(request, task.id);
       } else {
         const errorId: number = (request.fatalError: any);
         const model = stringify(serializeByValueID(errorId));
@@ -3830,6 +3829,7 @@ function performWork(request: Request): void {
   currentRequest = request;
   prepareToUseHooksForRequest(request);
 
+  const hadAbortableTasks = request.abortableTasks.size > 0;
   try {
     const pingedTasks = request.pingedTasks;
     request.pingedTasks = [];
@@ -3840,10 +3840,11 @@ function performWork(request: Request): void {
     if (request.destination !== null) {
       flushCompletedChunks(request, request.destination);
     }
-    if (request.abortableTasks.size === 0) {
-      // we're done rendering
-      const onAllReady = request.onAllReady;
-      onAllReady();
+    if (hadAbortableTasks && request.abortableTasks.size === 0) {
+      // We can ping after completing but if this happens there already
+      // wouldn't be any abortable tasks. So we only call allReady after
+      // the work which actually completed the last pending task
+      allReady(request);
     }
   } catch (error) {
     logRecoverableError(request, error, null);
@@ -3868,15 +3869,6 @@ function abortTask(task: Task, request: Request, errorId: number): void {
   request.completedErrorChunks.push(processedChunk);
 }
 
-function haltTask(task: Task, request: Request): void {
-  if (task.status === RENDERING) {
-    // This task will be aborted by the render
-    return;
-  }
-  task.status = ABORTED;
-  emitModelChunk(request, task.id, reusableInfinitePromiseModel);
-}
-
 function flushCompletedChunks(
   request: Request,
   destination: Destination,
@@ -4055,6 +4047,7 @@ export function abort(request: Request, reason: mixed): void {
       }
       abortableTasks.forEach(task => abortTask(task, request, errorId));
       abortableTasks.clear();
+      allReady(request);
     }
     const abortListeners = request.abortListeners;
     if (abortListeners.size > 0) {
@@ -4110,8 +4103,11 @@ export function halt(request: Request, reason: mixed): void {
     // to that row from every row that's still remaining.
     if (abortableTasks.size > 0) {
       request.pendingChunks++;
-      abortableTasks.forEach(task => haltTask(task, request));
+      const errorId = request.nextChunkId++;
+      emitBlockedChunk(request, errorId);
+      abortableTasks.forEach(task => abortTask(task, request, errorId));
       abortableTasks.clear();
+      allReady(request);
     }
     const abortListeners = request.abortListeners;
     if (abortListeners.size > 0) {
@@ -4126,3 +4122,8 @@ export function halt(request: Request, reason: mixed): void {
     fatalError(request, error);
   }
 }
+
+function allReady(request: Request) {
+  const onAllReady = request.onAllReady;
+  onAllReady();
+}
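
Taken together, halt() now mirrors abort(): it emits one shared blocked row, resolves every outstanding task as a reference to that row via abortTask, and then reports completion through the new allReady helper, so onAllReady fires for halted requests as well. Below is a rough model of that flow, under the assumption that abortTask encodes each remaining task as a '$'-reference to errorId; the function and ids are illustrative, not the module's real code.

// Hypothetical model of the new halt() behavior: one shared blocked row,
// every outstanding task pointed at it, then the all-ready callback.
function modelHalt(outstandingTaskIds, nextChunkId, onAllReady) {
  const rows = [];
  const errorId = nextChunkId++;              // shared id, as in halt()
  rows.push(errorId.toString(16) + ':#\n');   // blocked row, as in emitBlockedChunk()
  for (const id of outstandingTaskIds) {
    // each halted task resolves to a reference to the blocked row,
    // assuming abortTask emits a '$' + errorId reference chunk
    rows.push(id.toString(16) + ':"$' + errorId.toString(16) + '"\n');
  }
  onAllReady();                               // halted requests now report all-ready too
  return rows;
}

console.log(modelHalt([2, 3], 4, () => {}));
// [ '4:#\n', '2:"$4"\n', '3:"$4"\n' ]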