Commit 20a8358

Only emit timing chunks that advance time
1 parent 9c43f63 commit 20a8358

File tree

1 file changed: 26 additions, 25 deletions

packages/react-server/src/ReactFlightServer.js

@@ -1341,12 +1341,7 @@ function renderFunctionComponent<Props>(
 
   // Track when we started rendering this component.
   if (enableProfilerTimer && enableComponentPerformanceTrack) {
-    task.timed = true;
-    emitTimingChunk(
-      request,
-      componentDebugID,
-      (task.time = performance.now()),
-    );
+    advanceTaskTime(request, task, performance.now());
   }
 
   emitDebugChunk(request, componentDebugID, componentDebugInfo);
@@ -2008,7 +2003,7 @@ function visitAsyncNode(
       // We log the environment at the time when the last promise pigned ping which may
       // be later than what the environment was when we actually started awaiting.
       const env = (0, request.environmentName)();
-      emitTimingChunk(request, task.id, startTime);
+      advanceTaskTime(request, task, startTime);
       // Then emit a reference to us awaiting it in the current task.
       request.pendingChunks++;
       emitDebugChunk(request, task.id, {
@@ -2017,7 +2012,7 @@ function visitAsyncNode(
         owner: node.owner,
         stack: stack,
       });
-      emitTimingChunk(request, task.id, (task.time = endTime));
+      advanceTaskTime(request, task, endTime);
     }
   }
 }
@@ -2070,17 +2065,11 @@ function emitAsyncSequence(
     const env = (0, request.environmentName)();
     // If we don't have any thing awaited, the time we started awaiting was internal
     // when we yielded after rendering. The current task time is basically that.
-    const awaitStartTime = task.time;
-    // If the end time finished before we started, it could've been a cached thing so
-    // we clamp it to the task time. Effectively leading to a zero-time await.
-    const awaitEndTime =
-      awaitedNode.end < task.time ? task.time : awaitedNode.end;
-    emitTimingChunk(request, task.id, awaitStartTime);
     emitDebugChunk(request, task.id, {
       awaited: ((awaitedNode: any): ReactIOInfo), // This is deduped by this reference.
       env: env,
     });
-    emitTimingChunk(request, task.id, (task.time = awaitEndTime));
+    advanceTaskTime(request, task, awaitedNode.end);
   }
 }
 
@@ -4294,19 +4283,13 @@ function forwardDebugInfo(
   debugInfo: ReactDebugInfo,
 ) {
   const id = task.id;
-  const minimumTime =
-    enableProfilerTimer && enableComponentPerformanceTrack ? task.time : 0;
   for (let i = 0; i < debugInfo.length; i++) {
     const info = debugInfo[i];
     if (typeof info.time === 'number') {
       // When forwarding time we need to ensure to convert it to the time space of the payload.
       // We clamp the time to the starting render of the current component. It's as if it took
       // no time to render and await if we reuse cached content.
-      emitTimingChunk(
-        request,
-        id,
-        info.time < minimumTime ? minimumTime : info.time,
-      );
+      advanceTaskTime(request, task, info.time);
     } else {
       if (typeof info.name === 'string') {
         // We outline this model eagerly so that we can refer to by reference as an owner.
@@ -4383,6 +4366,24 @@ function emitTimingChunk(
   request.completedRegularChunks.push(processedChunk);
 }
 
+function advanceTaskTime(
+  request: Request,
+  task: Task,
+  timestamp: number,
+): void {
+  if (!enableProfilerTimer || !enableComponentPerformanceTrack) {
+    return;
+  }
+  // Emits a timing chunk, if the new timestamp is higher than the previous timestamp of this task.
+  // If it wasn't timed before, e.g. an outlined object, we need to emit the first timestamp and
+  // it is now timed.
+  if (!task.timed || timestamp > task.time) {
+    emitTimingChunk(request, task.id, timestamp);
+    task.time = timestamp;
+  }
+  task.timed = true;
+}
+
 function emitChunk(
   request: Request,
   task: Task,
@@ -4474,7 +4475,7 @@ function emitChunk(
 function erroredTask(request: Request, task: Task, error: mixed): void {
   if (enableProfilerTimer && enableComponentPerformanceTrack) {
     if (task.timed) {
-      emitTimingChunk(request, task.id, (task.time = performance.now()));
+      advanceTaskTime(request, task, performance.now());
     }
   }
   task.status = ERRORED;
@@ -4557,7 +4558,7 @@ function retryTask(request: Request, task: Task): void {
   // We've finished rendering. Log the end time.
   if (enableProfilerTimer && enableComponentPerformanceTrack) {
     if (task.timed) {
-      emitTimingChunk(request, task.id, (task.time = performance.now()));
+      advanceTaskTime(request, task, performance.now());
     }
   }
 
@@ -4684,7 +4685,7 @@ function abortTask(task: Task, request: Request, errorId: number): void {
   // Track when we aborted this task as its end time.
   if (enableProfilerTimer && enableComponentPerformanceTrack) {
     if (task.timed) {
-      emitTimingChunk(request, task.id, (task.time = performance.now()));
+      advanceTaskTime(request, task, performance.now());
     }
   }
   // Instead of emitting an error per task.id, we emit a model that only
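Illustrative note (not part of the commit): the heart of the change is the advanceTaskTime guard, which only emits a timing chunk when the task has never been timed or when the timestamp actually moves forward. Below is a minimal standalone JavaScript sketch of that guard; emitTimingChunk is replaced with a stand-in that records calls instead of serializing Flight chunks, and the enableProfilerTimer / enableComponentPerformanceTrack flag check from the real helper is omitted.

// Illustrative sketch only: models the monotonic-timestamp guard from this commit.
// `emitTimingChunk` here just records emissions instead of writing Flight chunks.

const emitted = [];

function emitTimingChunk(request, id, timestamp) {
  emitted.push({id, timestamp});
}

function advanceTaskTime(request, task, timestamp) {
  // Emit only if the task has never been timed, or if time actually advanced.
  // Older timestamps (e.g. from cached, already-settled work) are effectively
  // clamped to the task's current time by not emitting anything at all.
  if (!task.timed || timestamp > task.time) {
    emitTimingChunk(request, task.id, timestamp);
    task.time = timestamp;
  }
  task.timed = true;
}

const request = {};
const task = {id: 1, timed: false, time: 0};

advanceTaskTime(request, task, 10); // first timestamp: emitted
advanceTaskTime(request, task, 10); // no advance: suppressed
advanceTaskTime(request, task, 7);  // went backwards (cached result): suppressed
advanceTaskTime(request, task, 12); // advanced: emitted

console.log(emitted); // [{id: 1, timestamp: 10}, {id: 1, timestamp: 12}]

Previously each call site emitted unconditionally and clamped timestamps locally (see the removed awaitEndTime and minimumTime logic above), so cached or already-settled work could produce timing chunks that did not advance time; with the guard centralized, those emissions are simply skipped.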
