@@ -629,21 +629,7 @@ function serializeThenable(
    }
    case 'rejected': {
      const x = thenable.reason;
-      if (
-        enablePostpone &&
-        typeof x === 'object' &&
-        x !== null &&
-        (x: any).$$typeof === REACT_POSTPONE_TYPE
-      ) {
-        const postponeInstance: Postpone = (x: any);
-        logPostpone(request, postponeInstance.message, newTask);
-        emitPostponeChunk(request, newTask.id, postponeInstance);
-      } else {
-        const digest = logRecoverableError(request, x, null);
-        emitErrorChunk(request, newTask.id, digest, x);
-      }
-      newTask.status = ERRORED;
-      request.abortableTasks.delete(newTask);
+      erroredTask(request, newTask, x);
      return newTask.id;
    }
    default: {
@@ -698,21 +684,7 @@ function serializeThenable(
        // We expect that the only status it might be otherwise is ABORTED.
        // When we abort we emit chunks in each pending task slot and don't need
        // to do so again here.
-        if (
-          enablePostpone &&
-          typeof reason === 'object' &&
-          reason !== null &&
-          (reason: any).$$typeof === REACT_POSTPONE_TYPE
-        ) {
-          const postponeInstance: Postpone = (reason: any);
-          logPostpone(request, postponeInstance.message, newTask);
-          emitPostponeChunk(request, newTask.id, postponeInstance);
-        } else {
-          const digest = logRecoverableError(request, reason, newTask);
-          emitErrorChunk(request, newTask.id, digest, reason);
-        }
-        newTask.status = ERRORED;
-        request.abortableTasks.delete(newTask);
+        erroredTask(request, newTask, reason);
        enqueueFlush(request);
      }
    },
@@ -795,8 +767,7 @@ function serializeReadableStream(
    }
    aborted = true;
    request.abortListeners.delete(abortStream);
-    const digest = logRecoverableError(request, reason, streamTask);
-    emitErrorChunk(request, streamTask.id, digest, reason);
+    erroredTask(request, streamTask, reason);
    enqueueFlush(request);

    // $FlowFixMe should be able to pass mixed
@@ -808,30 +779,12 @@ function serializeReadableStream(
    }
    aborted = true;
    request.abortListeners.delete(abortStream);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, streamTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
    } else {
-      const digest = logRecoverableError(request, reason, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, streamTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, streamTask, reason);
+      enqueueFlush(request);
    }
-
    // $FlowFixMe should be able to pass mixed
    reader.cancel(reason).then(error, error);
  }
@@ -937,8 +890,7 @@ function serializeAsyncIterable(
    }
    aborted = true;
    request.abortListeners.delete(abortIterable);
-    const digest = logRecoverableError(request, reason, streamTask);
-    emitErrorChunk(request, streamTask.id, digest, reason);
+    erroredTask(request, streamTask, reason);
    enqueueFlush(request);
    if (typeof (iterator: any).throw === 'function') {
      // The iterator protocol doesn't necessarily include this but a generator do.
@@ -952,28 +904,11 @@ function serializeAsyncIterable(
    }
    aborted = true;
    request.abortListeners.delete(abortIterable);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, streamTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
    } else {
-      const digest = logRecoverableError(request, reason, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, streamTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, streamTask, reason);
+      enqueueFlush(request);
    }
    if (typeof (iterator: any).throw === 'function') {
      // The iterator protocol doesn't necessarily include this but a generator do.
@@ -2281,8 +2216,7 @@ function serializeBlob(request: Request, blob: Blob): string {
    }
    aborted = true;
    request.abortListeners.delete(abortBlob);
-    const digest = logRecoverableError(request, reason, newTask);
-    emitErrorChunk(request, newTask.id, digest, reason);
+    erroredTask(request, newTask, reason);
    enqueueFlush(request);
    // $FlowFixMe should be able to pass mixed
    reader.cancel(reason).then(error, error);
@@ -2293,28 +2227,11 @@ function serializeBlob(request: Request, blob: Blob): string {
    }
    aborted = true;
    request.abortListeners.delete(abortBlob);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, newTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, newTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
    } else {
-      const digest = logRecoverableError(request, reason, newTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, newTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, newTask, reason);
+      enqueueFlush(request);
    }
    // $FlowFixMe should be able to pass mixed
    reader.cancel(reason).then(error, error);
@@ -2414,24 +2331,6 @@ function renderModel(
        return serializeLazyID(newTask.id);
      }
      return serializeByValueID(newTask.id);
-    } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
-      // Something postponed. We'll still send everything we have up until this point.
-      // We'll replace this element with a lazy reference that postpones on the client.
-      const postponeInstance: Postpone = (x: any);
-      request.pendingChunks++;
-      const postponeId = request.nextChunkId++;
-      logPostpone(request, postponeInstance.message, task);
-      emitPostponeChunk(request, postponeId, postponeInstance);
-
-      // Restore the context. We assume that this will be restored by the inner
-      // functions in case nothing throws so we don't use "finally" here.
-      task.keyPath = prevKeyPath;
-      task.implicitSlot = prevImplicitSlot;
-
-      if (wasReactNode) {
-        return serializeLazyID(postponeId);
-      }
-      return serializeByValueID(postponeId);
    }
  }

@@ -2443,8 +2342,21 @@ function renderModel(
    // Something errored. We'll still send everything we have up until this point.
    request.pendingChunks++;
    const errorId = request.nextChunkId++;
-    const digest = logRecoverableError(request, x, task);
-    emitErrorChunk(request, errorId, digest, x);
+    if (
+      enablePostpone &&
+      typeof x === 'object' &&
+      x !== null &&
+      x.$$typeof === REACT_POSTPONE_TYPE
+    ) {
+      // Something postponed. We'll still send everything we have up until this point.
+      // We'll replace this element with a lazy reference that postpones on the client.
+      const postponeInstance: Postpone = (x: any);
+      logPostpone(request, postponeInstance.message, task);
+      emitPostponeChunk(request, errorId, postponeInstance);
+    } else {
+      const digest = logRecoverableError(request, x, task);
+      emitErrorChunk(request, errorId, digest, x);
+    }
    if (wasReactNode) {
      // We'll replace this element with a lazy reference that throws on the client
      // once it gets rendered.
@@ -3964,6 +3876,24 @@ function emitChunk(
  emitModelChunk(request, task.id, json);
}

+function erroredTask(request: Request, task: Task, error: mixed): void {
+  request.abortableTasks.delete(task);
+  task.status = ERRORED;
+  if (
+    enablePostpone &&
+    typeof error === 'object' &&
+    error !== null &&
+    error.$$typeof === REACT_POSTPONE_TYPE
+  ) {
+    const postponeInstance: Postpone = (error: any);
+    logPostpone(request, postponeInstance.message, task);
+    emitPostponeChunk(request, task.id, postponeInstance);
+  } else {
+    const digest = logRecoverableError(request, error, task);
+    emitErrorChunk(request, task.id, digest, error);
+  }
+}
+
const emptyRoot = {};

function retryTask(request: Request, task: Task): void {
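
Note: each call site that previously inlined the postpone-vs-error branching now delegates to this helper. As a rough sketch of the resulting call-site shape (not verbatim source; it condenses the stream abort-listener pattern from the hunks above, where `aborted`, `streamTask`, and `reader` come from the enclosing serializer):

    const abortStream = (reason: mixed) => {
      if (aborted) {
        return;
      }
      aborted = true;
      request.abortListeners.delete(abortStream);
      if (enableHalt && request.type === PRERENDER) {
        // A halted prerender leaves the row unfulfilled, so release the reserved chunk.
        request.pendingChunks--;
      } else {
        // erroredTask marks the task ERRORED and emits either a postpone chunk
        // or an error chunk, depending on the rejection reason.
        erroredTask(request, streamTask, reason);
        enqueueFlush(request);
      }
      // $FlowFixMe should be able to pass mixed
      reader.cancel(reason).then(error, error);
    };
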
@@ -4083,20 +4013,9 @@ function retryTask(request: Request, task: Task): void {
        const ping = task.ping;
        x.then(ping, ping);
        return;
-      } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
-        request.abortableTasks.delete(task);
-        task.status = ERRORED;
-        const postponeInstance: Postpone = (x: any);
-        logPostpone(request, postponeInstance.message, task);
-        emitPostponeChunk(request, task.id, postponeInstance);
-        return;
      }
    }
-
-    request.abortableTasks.delete(task);
-    task.status = ERRORED;
-    const digest = logRecoverableError(request, x, task);
-    emitErrorChunk(request, task.id, digest, x);
+    erroredTask(request, task, x);
  } finally {
    if (__DEV__) {
      debugID = prevDebugID;
@@ -4336,29 +4255,27 @@ export function abort(request: Request, reason: mixed): void {
    }
    const abortableTasks = request.abortableTasks;
    if (abortableTasks.size > 0) {
-      if (
+      if (enableHalt && request.type === PRERENDER) {
+        // When prerendering with halt semantics we simply halt the task
+        // and leave the reference unfulfilled.
+        abortableTasks.forEach(task => haltTask(task, request));
+        abortableTasks.clear();
+      } else if (
        enablePostpone &&
        typeof reason === 'object' &&
        reason !== null &&
        (reason: any).$$typeof === REACT_POSTPONE_TYPE
      ) {
        const postponeInstance: Postpone = (reason: any);
        logPostpone(request, postponeInstance.message, null);
-        if (enableHalt && request.type === PRERENDER) {
-          // When prerendering with halt semantics we simply halt the task
-          // and leave the reference unfulfilled.
-          abortableTasks.forEach(task => haltTask(task, request));
-          abortableTasks.clear();
-        } else {
-          // When rendering we produce a shared postpone chunk and then
-          // fulfill each task with a reference to that chunk.
-          const errorId = request.nextChunkId++;
-          request.fatalError = errorId;
-          request.pendingChunks++;
-          emitPostponeChunk(request, errorId, postponeInstance);
-          abortableTasks.forEach(task => abortTask(task, request, errorId));
-          abortableTasks.clear();
-        }
+        // When rendering we produce a shared postpone chunk and then
+        // fulfill each task with a reference to that chunk.
+        const errorId = request.nextChunkId++;
+        request.fatalError = errorId;
+        request.pendingChunks++;
+        emitPostponeChunk(request, errorId, postponeInstance);
+        abortableTasks.forEach(task => abortTask(task, request, errorId));
+        abortableTasks.clear();
      } else {
        const error =
          reason === undefined
@@ -4373,21 +4290,14 @@ export function abort(request: Request, reason: mixed): void {
            )
          : reason;
        const digest = logRecoverableError(request, error, null);
-        if (enableHalt && request.type === PRERENDER) {
-          // When prerendering with halt semantics we simply halt the task
-          // and leave the reference unfulfilled.
-          abortableTasks.forEach(task => haltTask(task, request));
-          abortableTasks.clear();
-        } else {
-          // When rendering we produce a shared error chunk and then
-          // fulfill each task with a reference to that chunk.
-          const errorId = request.nextChunkId++;
-          request.fatalError = errorId;
-          request.pendingChunks++;
-          emitErrorChunk(request, errorId, digest, error);
-          abortableTasks.forEach(task => abortTask(task, request, errorId));
-          abortableTasks.clear();
-        }
+        // When rendering we produce a shared error chunk and then
+        // fulfill each task with a reference to that chunk.
+        const errorId = request.nextChunkId++;
+        request.fatalError = errorId;
+        request.pendingChunks++;
+        emitErrorChunk(request, errorId, digest, error);
+        abortableTasks.forEach(task => abortTask(task, request, errorId));
+        abortableTasks.clear();
      }
      const onAllReady = request.onAllReady;
      onAllReady();
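
For reference, the reworked abort() checks the prerender halt case before inspecting the reason, so the postpone and error branches only ever run for renders. A condensed outline of the resulting control flow (names as in the diff; the postpone-vs-error distinction is collapsed into a comment here, this is a sketch rather than the full implementation):

    if (abortableTasks.size > 0) {
      if (enableHalt && request.type === PRERENDER) {
        // Prerender halt: leave every pending reference unfulfilled.
        abortableTasks.forEach(task => haltTask(task, request));
        abortableTasks.clear();
      } else {
        // Render: emit one shared chunk (a postpone chunk if the reason is a
        // Postpone, otherwise an error chunk) and point every aborted task at it.
        const errorId = request.nextChunkId++;
        request.fatalError = errorId;
        request.pendingChunks++;
        abortableTasks.forEach(task => abortTask(task, request, errorId));
        abortableTasks.clear();
      }
    }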