@@ -629,21 +629,7 @@ function serializeThenable(
     }
     case 'rejected': {
       const x = thenable.reason;
-      if (
-        enablePostpone &&
-        typeof x === 'object' &&
-        x !== null &&
-        (x: any).$$typeof === REACT_POSTPONE_TYPE
-      ) {
-        const postponeInstance: Postpone = (x: any);
-        logPostpone(request, postponeInstance.message, newTask);
-        emitPostponeChunk(request, newTask.id, postponeInstance);
-      } else {
-        const digest = logRecoverableError(request, x, null);
-        emitErrorChunk(request, newTask.id, digest, x);
-      }
-      newTask.status = ERRORED;
-      request.abortableTasks.delete(newTask);
+      erroredTask(request, newTask, x);
       return newTask.id;
     }
     default: {
@@ -698,21 +684,7 @@ function serializeThenable(
         // We expect that the only status it might be otherwise is ABORTED.
         // When we abort we emit chunks in each pending task slot and don't need
         // to do so again here.
-        if (
-          enablePostpone &&
-          typeof reason === 'object' &&
-          reason !== null &&
-          (reason: any).$$typeof === REACT_POSTPONE_TYPE
-        ) {
-          const postponeInstance: Postpone = (reason: any);
-          logPostpone(request, postponeInstance.message, newTask);
-          emitPostponeChunk(request, newTask.id, postponeInstance);
-        } else {
-          const digest = logRecoverableError(request, reason, newTask);
-          emitErrorChunk(request, newTask.id, digest, reason);
-        }
-        newTask.status = ERRORED;
-        request.abortableTasks.delete(newTask);
+        erroredTask(request, newTask, reason);
         enqueueFlush(request);
       }
     },
@@ -795,8 +767,7 @@ function serializeReadableStream(
     }
     aborted = true;
     request.abortListeners.delete(abortStream);
-    const digest = logRecoverableError(request, reason, streamTask);
-    emitErrorChunk(request, streamTask.id, digest, reason);
+    erroredTask(request, streamTask, reason);
     enqueueFlush(request);

     // $FlowFixMe should be able to pass mixed
@@ -808,30 +779,12 @@ function serializeReadableStream(
     }
     aborted = true;
     request.abortListeners.delete(abortStream);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, streamTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
     } else {
-      const digest = logRecoverableError(request, reason, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, streamTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, streamTask, reason);
+      enqueueFlush(request);
     }
-
     // $FlowFixMe should be able to pass mixed
     reader.cancel(reason).then(error, error);
   }
@@ -937,8 +890,7 @@ function serializeAsyncIterable(
     }
     aborted = true;
     request.abortListeners.delete(abortIterable);
-    const digest = logRecoverableError(request, reason, streamTask);
-    emitErrorChunk(request, streamTask.id, digest, reason);
+    erroredTask(request, streamTask, reason);
     enqueueFlush(request);
     if (typeof (iterator: any).throw === 'function') {
       // The iterator protocol doesn't necessarily include this but a generator do.
@@ -952,28 +904,11 @@ function serializeAsyncIterable(
     }
     aborted = true;
     request.abortListeners.delete(abortIterable);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, streamTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
     } else {
-      const digest = logRecoverableError(request, reason, streamTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, streamTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, streamTask, reason);
+      enqueueFlush(request);
     }
     if (typeof (iterator: any).throw === 'function') {
       // The iterator protocol doesn't necessarily include this but a generator do.
@@ -2281,8 +2216,7 @@ function serializeBlob(request: Request, blob: Blob): string {
     }
     aborted = true;
     request.abortListeners.delete(abortBlob);
-    const digest = logRecoverableError(request, reason, newTask);
-    emitErrorChunk(request, newTask.id, digest, reason);
+    erroredTask(request, newTask, reason);
     enqueueFlush(request);
     // $FlowFixMe should be able to pass mixed
     reader.cancel(reason).then(error, error);
@@ -2293,28 +2227,11 @@ function serializeBlob(request: Request, blob: Blob): string {
     }
     aborted = true;
     request.abortListeners.delete(abortBlob);
-    if (
-      enablePostpone &&
-      typeof reason === 'object' &&
-      reason !== null &&
-      (reason: any).$$typeof === REACT_POSTPONE_TYPE
-    ) {
-      const postponeInstance: Postpone = (reason: any);
-      logPostpone(request, postponeInstance.message, newTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitPostponeChunk(request, newTask.id, postponeInstance);
-        enqueueFlush(request);
-      }
+    if (enableHalt && request.type === PRERENDER) {
+      request.pendingChunks--;
     } else {
-      const digest = logRecoverableError(request, reason, newTask);
-      if (enableHalt && request.type === PRERENDER) {
-        request.pendingChunks--;
-      } else {
-        emitErrorChunk(request, newTask.id, digest, reason);
-        enqueueFlush(request);
-      }
+      erroredTask(request, newTask, reason);
+      enqueueFlush(request);
     }
     // $FlowFixMe should be able to pass mixed
     reader.cancel(reason).then(error, error);
@@ -2414,24 +2331,6 @@ function renderModel(
           return serializeLazyID(newTask.id);
         }
         return serializeByValueID(newTask.id);
-      } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
-        // Something postponed. We'll still send everything we have up until this point.
-        // We'll replace this element with a lazy reference that postpones on the client.
-        const postponeInstance: Postpone = (x: any);
-        request.pendingChunks++;
-        const postponeId = request.nextChunkId++;
-        logPostpone(request, postponeInstance.message, task);
-        emitPostponeChunk(request, postponeId, postponeInstance);
-
-        // Restore the context. We assume that this will be restored by the inner
-        // functions in case nothing throws so we don't use "finally" here.
-        task.keyPath = prevKeyPath;
-        task.implicitSlot = prevImplicitSlot;
-
-        if (wasReactNode) {
-          return serializeLazyID(postponeId);
-        }
-        return serializeByValueID(postponeId);
       }
     }

@@ -2443,8 +2342,21 @@ function renderModel(
     // Something errored. We'll still send everything we have up until this point.
     request.pendingChunks++;
     const errorId = request.nextChunkId++;
-    const digest = logRecoverableError(request, x, task);
-    emitErrorChunk(request, errorId, digest, x);
+    if (
+      enablePostpone &&
+      typeof x === 'object' &&
+      x !== null &&
+      x.$$typeof === REACT_POSTPONE_TYPE
+    ) {
+      // Something postponed. We'll still send everything we have up until this point.
+      // We'll replace this element with a lazy reference that postpones on the client.
+      const postponeInstance: Postpone = (x: any);
+      logPostpone(request, postponeInstance.message, task);
+      emitPostponeChunk(request, errorId, postponeInstance);
+    } else {
+      const digest = logRecoverableError(request, x, task);
+      emitErrorChunk(request, errorId, digest, x);
+    }
     if (wasReactNode) {
       // We'll replace this element with a lazy reference that throws on the client
       // once it gets rendered.
@@ -3964,6 +3876,24 @@ function emitChunk(
   emitModelChunk(request, task.id, json);
 }

+function erroredTask(request: Request, task: Task, error: mixed): void {
+  request.abortableTasks.delete(task);
+  task.status = ERRORED;
+  if (
+    enablePostpone &&
+    typeof error === 'object' &&
+    error !== null &&
+    error.$$typeof === REACT_POSTPONE_TYPE
+  ) {
+    const postponeInstance: Postpone = (error: any);
+    logPostpone(request, postponeInstance.message, task);
+    emitPostponeChunk(request, task.id, postponeInstance);
+  } else {
+    const digest = logRecoverableError(request, error, task);
+    emitErrorChunk(request, task.id, digest, error);
+  }
+}
+
 const emptyRoot = {};

 function retryTask(request: Request, task: Task): void {
@@ -4083,20 +4013,9 @@ function retryTask(request: Request, task: Task): void {
         const ping = task.ping;
         x.then(ping, ping);
         return;
-      } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
-        request.abortableTasks.delete(task);
-        task.status = ERRORED;
-        const postponeInstance: Postpone = (x: any);
-        logPostpone(request, postponeInstance.message, task);
-        emitPostponeChunk(request, task.id, postponeInstance);
-        return;
       }
     }
-
-    request.abortableTasks.delete(task);
-    task.status = ERRORED;
-    const digest = logRecoverableError(request, x, task);
-    emitErrorChunk(request, task.id, digest, x);
+    erroredTask(request, task, x);
   } finally {
     if (__DEV__) {
       debugID = prevDebugID;
@@ -4336,29 +4255,27 @@ export function abort(request: Request, reason: mixed): void {
   }
   const abortableTasks = request.abortableTasks;
   if (abortableTasks.size > 0) {
-    if (
+    if (enableHalt && request.type === PRERENDER) {
+      // When prerendering with halt semantics we simply halt the task
+      // and leave the reference unfulfilled.
+      abortableTasks.forEach(task => haltTask(task, request));
+      abortableTasks.clear();
+    } else if (
       enablePostpone &&
       typeof reason === 'object' &&
       reason !== null &&
       (reason: any).$$typeof === REACT_POSTPONE_TYPE
     ) {
       const postponeInstance: Postpone = (reason: any);
       logPostpone(request, postponeInstance.message, null);
-      if (enableHalt && request.type === PRERENDER) {
-        // When prerendering with halt semantics we simply halt the task
-        // and leave the reference unfulfilled.
-        abortableTasks.forEach(task => haltTask(task, request));
-        abortableTasks.clear();
-      } else {
-        // When rendering we produce a shared postpone chunk and then
-        // fulfill each task with a reference to that chunk.
-        const errorId = request.nextChunkId++;
-        request.fatalError = errorId;
-        request.pendingChunks++;
-        emitPostponeChunk(request, errorId, postponeInstance);
-        abortableTasks.forEach(task => abortTask(task, request, errorId));
-        abortableTasks.clear();
-      }
+      // When rendering we produce a shared postpone chunk and then
+      // fulfill each task with a reference to that chunk.
+      const errorId = request.nextChunkId++;
+      request.fatalError = errorId;
+      request.pendingChunks++;
+      emitPostponeChunk(request, errorId, postponeInstance);
+      abortableTasks.forEach(task => abortTask(task, request, errorId));
+      abortableTasks.clear();
     } else {
       const error =
         reason === undefined
@@ -4373,21 +4290,14 @@ export function abort(request: Request, reason: mixed): void {
             )
           : reason;
       const digest = logRecoverableError(request, error, null);
-      if (enableHalt && request.type === PRERENDER) {
-        // When prerendering with halt semantics we simply halt the task
-        // and leave the reference unfulfilled.
-        abortableTasks.forEach(task => haltTask(task, request));
-        abortableTasks.clear();
-      } else {
-        // When rendering we produce a shared error chunk and then
-        // fulfill each task with a reference to that chunk.
-        const errorId = request.nextChunkId++;
-        request.fatalError = errorId;
-        request.pendingChunks++;
-        emitErrorChunk(request, errorId, digest, error);
-        abortableTasks.forEach(task => abortTask(task, request, errorId));
-        abortableTasks.clear();
-      }
+      // When rendering we produce a shared error chunk and then
+      // fulfill each task with a reference to that chunk.
+      const errorId = request.nextChunkId++;
+      request.fatalError = errorId;
+      request.pendingChunks++;
+      emitErrorChunk(request, errorId, digest, error);
+      abortableTasks.forEach(task => abortTask(task, request, errorId));
+      abortableTasks.clear();
     }
     const onAllReady = request.onAllReady;
     onAllReady();
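Note: the classification that the new erroredTask helper centralizes (postpone vs. recoverable error) can be sketched in isolation. The snippet below is a simplified, standalone sketch, not the flight-server implementation: classifyReason and the inline postpone object are hypothetical, and only the $$typeof check mirrors the branches consolidated above. It assumes REACT_POSTPONE_TYPE is the Symbol.for('react.postpone') tag carried by React's postpone objects.

    const REACT_POSTPONE_TYPE = Symbol.for('react.postpone');

    // Hypothetical helper (not part of React): decide, like erroredTask does,
    // whether a rejection reason should be treated as a postpone or an error.
    function classifyReason(reason) {
      if (
        typeof reason === 'object' &&
        reason !== null &&
        reason.$$typeof === REACT_POSTPONE_TYPE
      ) {
        // Would be logged via logPostpone and emitted as a postpone chunk.
        return {kind: 'postpone', message: reason.message};
      }
      // Would be logged as a recoverable error and emitted as an error chunk.
      return {kind: 'error', reason};
    }

    console.log(classifyReason({$$typeof: REACT_POSTPONE_TYPE, message: 'later'}));
    // -> { kind: 'postpone', message: 'later' }
    console.log(classifyReason(new Error('boom')));
    // -> { kind: 'error', reason: Error: boom }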