Revert "[Scheduler] Profiling features (#16145)" (#16392)

This reverts commit a34ca7bce6.
Dan Abramov 2019-08-14 20:02:41 +01:00 committed by GitHub
parent b1a03dfdc8
commit 4ba1412305
18 changed files with 35 additions and 919 deletions

@@ -142,8 +142,6 @@ module.exports = {
],
globals: {
SharedArrayBuffer: true,
spyOnDev: true,
spyOnDevAndProd: true,
spyOnProd: true,

@@ -136,7 +136,6 @@ describe('ReactDebugFiberPerf', () => {
require('shared/ReactFeatureFlags').enableProfilerTimer = false;
require('shared/ReactFeatureFlags').replayFailedUnitOfWorkWithInvokeGuardedCallback = false;
require('shared/ReactFeatureFlags').debugRenderPhaseSideEffectsForStrictMode = false;
require('scheduler/src/SchedulerFeatureFlags').enableProfiling = false;
// Import after the polyfill is set up:
React = require('react');

@@ -110,20 +110,6 @@
);
}
function unstable_startLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_startLoggingProfilingEvents.apply(
this,
arguments
);
}
function unstable_stopLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_stopLoggingProfilingEvents.apply(
this,
arguments
);
}
return Object.freeze({
unstable_now: unstable_now,
unstable_scheduleCallback: unstable_scheduleCallback,
@@ -138,8 +124,6 @@
unstable_pauseExecution: unstable_pauseExecution,
unstable_getFirstCallbackNode: unstable_getFirstCallbackNode,
unstable_forceFrameRate: unstable_forceFrameRate,
unstable_startLoggingProfilingEvents: unstable_startLoggingProfilingEvents,
unstable_stopLoggingProfilingEvents: unstable_stopLoggingProfilingEvents,
get unstable_IdlePriority() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_IdlePriority;
@@ -160,9 +144,5 @@
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_UserBlockingPriority;
},
get unstable_sharedProfilingBuffer() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_getFirstCallbackNode;
},
});
});

@@ -104,20 +104,6 @@
);
}
function unstable_startLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_startLoggingProfilingEvents.apply(
this,
arguments
);
}
function unstable_stopLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_stopLoggingProfilingEvents.apply(
this,
arguments
);
}
return Object.freeze({
unstable_now: unstable_now,
unstable_scheduleCallback: unstable_scheduleCallback,
@@ -132,8 +118,6 @@
unstable_pauseExecution: unstable_pauseExecution,
unstable_getFirstCallbackNode: unstable_getFirstCallbackNode,
unstable_forceFrameRate: unstable_forceFrameRate,
unstable_startLoggingProfilingEvents: unstable_startLoggingProfilingEvents,
unstable_stopLoggingProfilingEvents: unstable_stopLoggingProfilingEvents,
get unstable_IdlePriority() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_IdlePriority;
@@ -154,9 +138,5 @@
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_UserBlockingPriority;
},
get unstable_sharedProfilingBuffer() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_getFirstCallbackNode;
},
});
});

@@ -104,20 +104,6 @@
);
}
function unstable_startLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_startLoggingProfilingEvents.apply(
this,
arguments
);
}
function unstable_stopLoggingProfilingEvents() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.Scheduler.unstable_stopLoggingProfilingEvents.apply(
this,
arguments
);
}
return Object.freeze({
unstable_now: unstable_now,
unstable_scheduleCallback: unstable_scheduleCallback,
@@ -132,8 +118,6 @@
unstable_pauseExecution: unstable_pauseExecution,
unstable_getFirstCallbackNode: unstable_getFirstCallbackNode,
unstable_forceFrameRate: unstable_forceFrameRate,
unstable_startLoggingProfilingEvents: unstable_startLoggingProfilingEvents,
unstable_stopLoggingProfilingEvents: unstable_stopLoggingProfilingEvents,
get unstable_IdlePriority() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_IdlePriority;
@@ -154,9 +138,5 @@
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_UserBlockingPriority;
},
get unstable_sharedProfilingBuffer() {
return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
.Scheduler.unstable_getFirstCallbackNode;
},
});
});

@@ -8,14 +8,10 @@
/* eslint-disable no-var */
import {enableSchedulerDebugging} from './SchedulerFeatureFlags';
import {
enableSchedulerDebugging,
enableProfiling,
} from './SchedulerFeatureFlags';
import {
requestHostCallback as requestHostCallbackWithoutProfiling,
requestHostCallback,
requestHostTimeout,
cancelHostCallback,
cancelHostTimeout,
shouldYieldToHost,
getCurrentTime,
@@ -25,26 +21,11 @@ import {
import {push, pop, peek} from './SchedulerMinHeap';
// TODO: Use symbols?
import {
ImmediatePriority,
UserBlockingPriority,
NormalPriority,
LowPriority,
IdlePriority,
} from './SchedulerPriorities';
import {
sharedProfilingBuffer,
markTaskRun,
markTaskYield,
markTaskCompleted,
markTaskCanceled,
markTaskErrored,
markSchedulerSuspended,
markSchedulerUnsuspended,
markTaskStart,
stopLoggingProfilingEvents,
startLoggingProfilingEvents,
} from './SchedulerProfiling';
var ImmediatePriority = 1;
var UserBlockingPriority = 2;
var NormalPriority = 3;
var LowPriority = 4;
var IdlePriority = 5;
// Max 31 bit integer. The max integer size in V8 for 32-bit systems.
// Math.pow(2, 30) - 1
@@ -79,17 +60,15 @@ var isPerformingWork = false;
var isHostCallbackScheduled = false;
var isHostTimeoutScheduled = false;
function requestHostCallbackWithProfiling(cb, time) {
if (enableProfiling) {
markSchedulerSuspended(time);
requestHostCallbackWithoutProfiling(cb);
}
function flushTask(task, callback, currentTime) {
currentPriorityLevel = task.priorityLevel;
var didUserCallbackTimeout = task.expirationTime <= currentTime;
var continuationCallback = callback(didUserCallbackTimeout);
return typeof continuationCallback === 'function'
? continuationCallback
: null;
}
const requestHostCallback = enableProfiling
? requestHostCallbackWithProfiling
: requestHostCallbackWithoutProfiling;
function advanceTimers(currentTime) {
// Check for tasks that are no longer delayed and add them to the queue.
let timer = peek(timerQueue);
@@ -102,10 +81,6 @@ function advanceTimers(currentTime) {
pop(timerQueue);
timer.sortIndex = timer.expirationTime;
push(taskQueue, timer);
if (enableProfiling) {
markTaskStart(timer);
timer.isQueued = true;
}
} else {
// Remaining timers are pending.
return;
@@ -121,7 +96,7 @@ function handleTimeout(currentTime) {
if (!isHostCallbackScheduled) {
if (peek(taskQueue) !== null) {
isHostCallbackScheduled = true;
requestHostCallback(flushWork, currentTime);
requestHostCallback(flushWork);
} else {
const firstTimer = peek(timerQueue);
if (firstTimer !== null) {
@@ -132,10 +107,6 @@ function handleTimeout(currentTime) {
}
function flushWork(hasTimeRemaining, initialTime) {
if (isHostCallbackScheduled) {
markSchedulerUnsuspended(initialTime);
}
// We'll need a host callback the next time work is scheduled.
isHostCallbackScheduled = false;
if (isHostTimeoutScheduled) {
@@ -164,24 +135,15 @@ function flushWork(hasTimeRemaining, initialTime) {
const callback = currentTask.callback;
if (callback !== null) {
currentTask.callback = null;
currentPriorityLevel = currentTask.priorityLevel;
const didUserCallbackTimeout =
currentTask.expirationTime <= currentTime;
markTaskRun(currentTask, currentTime);
const continuationCallback = callback(didUserCallbackTimeout);
currentTime = getCurrentTime();
if (typeof continuationCallback === 'function') {
currentTask.callback = continuationCallback;
markTaskYield(currentTask, currentTime);
const continuation = flushTask(currentTask, callback, currentTime);
if (continuation !== null) {
currentTask.callback = continuation;
} else {
if (enableProfiling) {
markTaskCompleted(currentTask, currentTime);
currentTask.isQueued = false;
}
if (currentTask === peek(taskQueue)) {
pop(taskQueue);
}
}
currentTime = getCurrentTime();
advanceTimers(currentTime);
} else {
pop(taskQueue);
@@ -190,8 +152,6 @@ function flushWork(hasTimeRemaining, initialTime) {
}
// Return whether there's additional work
if (currentTask !== null) {
markSchedulerSuspended(currentTime);
isHostCallbackScheduled = true;
return true;
} else {
let firstTimer = peek(timerQueue);
@@ -200,18 +160,6 @@ function flushWork(hasTimeRemaining, initialTime) {
}
return false;
}
} catch (error) {
if (currentTask !== null) {
if (enableProfiling) {
const currentTime = getCurrentTime();
markTaskErrored(currentTask, currentTime);
currentTask.isQueued = false;
}
if (currentTask === peek(taskQueue)) {
pop(taskQueue);
}
}
throw error;
} finally {
currentTask = null;
currentPriorityLevel = previousPriorityLevel;
@@ -302,8 +250,6 @@ function unstable_scheduleCallback(priorityLevel, callback, options) {
var startTime;
var timeout;
// TODO: Expose the current label when profiling, somehow
// var label;
if (typeof options === 'object' && options !== null) {
var delay = options.delay;
if (typeof delay === 'number' && delay > 0) {
@@ -315,12 +261,6 @@ function unstable_scheduleCallback(priorityLevel, callback, options) {
typeof options.timeout === 'number'
? options.timeout
: timeoutForPriorityLevel(priorityLevel);
// if (enableProfiling) {
// var _label = options.label;
// if (typeof _label === 'string') {
// label = _label;
// }
// }
} else {
timeout = timeoutForPriorityLevel(priorityLevel);
startTime = currentTime;
@@ -329,7 +269,7 @@ function unstable_scheduleCallback(priorityLevel, callback, options) {
var expirationTime = startTime + timeout;
var newTask = {
id: ++taskIdCounter,
id: taskIdCounter++,
callback,
priorityLevel,
startTime,
@@ -337,13 +277,6 @@ function unstable_scheduleCallback(priorityLevel, callback, options) {
sortIndex: -1,
};
if (enableProfiling) {
newTask.isQueued = false;
// if (typeof options === 'object' && options !== null) {
// newTask.label = label;
// }
}
if (startTime > currentTime) {
// This is a delayed task.
newTask.sortIndex = startTime;
@@ -362,15 +295,11 @@ function unstable_scheduleCallback(priorityLevel, callback, options) {
} else {
newTask.sortIndex = expirationTime;
push(taskQueue, newTask);
if (enableProfiling) {
markTaskStart(newTask, currentTime);
newTask.isQueued = true;
}
// Schedule a host callback, if needed. If we're already performing work,
// wait until the next time we yield.
if (!isHostCallbackScheduled && !isPerformingWork) {
isHostCallbackScheduled = true;
requestHostCallback(flushWork, currentTime);
requestHostCallback(flushWork);
}
}
@@ -385,12 +314,7 @@ function unstable_continueExecution() {
isSchedulerPaused = false;
if (!isHostCallbackScheduled && !isPerformingWork) {
isHostCallbackScheduled = true;
if (enableProfiling) {
const currentTime = getCurrentTime();
requestHostCallbackWithProfiling(flushWork, currentTime);
} else {
requestHostCallback(flushWork);
}
requestHostCallback(flushWork);
}
}
@@ -399,26 +323,10 @@ function unstable_getFirstCallbackNode() {
}
function unstable_cancelCallback(task) {
if (enableProfiling && task.isQueued) {
const currentTime = getCurrentTime();
markTaskCanceled(task, currentTime);
task.isQueued = false;
}
if (task !== null && task === peek(taskQueue)) {
pop(taskQueue);
if (enableProfiling && !isPerformingWork && taskQueue.length === 0) {
// The queue is now empty.
const currentTime = getCurrentTime();
markSchedulerUnsuspended(currentTime);
isHostCallbackScheduled = false;
cancelHostCallback();
}
} else {
// Null out the callback to indicate the task has been canceled. (Can't
// remove from the queue because you can't remove arbitrary nodes from an
// array based heap, only the first one.)
task.callback = null;
}
// Null out the callback to indicate the task has been canceled. (Can't remove
// from the queue because you can't remove arbitrary nodes from an array based
// heap, only the first one.)
task.callback = null;
}
function unstable_getCurrentPriorityLevel() {
@@ -462,16 +370,3 @@ export {
getCurrentTime as unstable_now,
forceFrameRate as unstable_forceFrameRate,
};
export const unstable_startLoggingProfilingEvents = enableProfiling
? startLoggingProfilingEvents
: null;
export const unstable_stopLoggingProfilingEvents = enableProfiling
? stopLoggingProfilingEvents
: null;
// Expose a shared array buffer that contains profiling information.
export const unstable_sharedProfilingBuffer = enableProfiling
? sharedProfilingBuffer
: null;
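
The exports removed above were the public surface of the reverted profiling feature. A minimal usage sketch, assembled only from names that appear elsewhere in this diff; how queued work actually gets flushed is left vague, since that depends on the host config (message loop in the DOM build, explicit flushing in the mock build used by the deleted test):

// Hedged sketch: not an official example; assumes a profiling-enabled build
// where enableProfiling is true, so the profiling exports below are non-null.
const Scheduler = require('scheduler');

Scheduler.unstable_startLoggingProfilingEvents();

const task = Scheduler.unstable_scheduleCallback(
  Scheduler.unstable_NormalPriority,
  didTimeout => {
    // ...one unit of work; return a function here to schedule a continuation...
  },
  {delay: 100, timeout: 5000}, // start no sooner than 100ms; expire 5000ms after start
);

// ...let the scheduler flush work (or cancel it with Scheduler.unstable_cancelCallback(task))...

// Live scheduler state: four Int32 slots (priority, current task id, current run id, queue size).
const state = new Int32Array(Scheduler.unstable_sharedProfilingBuffer);

// Stopping returns the raw ArrayBuffer backing the accumulated event log.
const eventLog = new Int32Array(Scheduler.unstable_stopLoggingProfilingEvents());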

@@ -11,4 +11,3 @@ export const enableIsInputPending = false;
export const requestIdleCallbackBeforeFirstFrame = false;
export const requestTimerEventBeforeFirstFrame = false;
export const enableMessageLoopImplementation = false;
export const enableProfiling = __PROFILE__;

@@ -1,18 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
export type PriorityLevel = 0 | 1 | 2 | 3 | 4 | 5;
// TODO: Use symbols?
export const NoPriority = 0;
export const ImmediatePriority = 1;
export const UserBlockingPriority = 2;
export const NormalPriority = 3;
export const LowPriority = 4;
export const IdlePriority = 5;

@@ -1,210 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {PriorityLevel} from './SchedulerPriorities';
import {enableProfiling} from './SchedulerFeatureFlags';
import {NoPriority} from './SchedulerPriorities';
let runIdCounter: number = 0;
let mainThreadIdCounter: number = 0;
const profilingStateSize = 4;
export const sharedProfilingBuffer =
// $FlowFixMe Flow doesn't know about SharedArrayBuffer
typeof SharedArrayBuffer === 'function'
? new SharedArrayBuffer(profilingStateSize * Int32Array.BYTES_PER_ELEMENT)
: // $FlowFixMe Flow doesn't know about ArrayBuffer
new ArrayBuffer(profilingStateSize * Int32Array.BYTES_PER_ELEMENT);
const profilingState = enableProfiling
? new Int32Array(sharedProfilingBuffer)
: null;
const PRIORITY = 0;
const CURRENT_TASK_ID = 1;
const CURRENT_RUN_ID = 2;
const QUEUE_SIZE = 3;
if (enableProfiling && profilingState !== null) {
profilingState[PRIORITY] = NoPriority;
// This is maintained with a counter, because the size of the priority queue
// array might include canceled tasks.
profilingState[QUEUE_SIZE] = 0;
profilingState[CURRENT_TASK_ID] = 0;
}
const INITIAL_EVENT_LOG_SIZE = 1000;
let eventLogSize = 0;
let eventLogBuffer = null;
let eventLog = null;
let eventLogIndex = 0;
const TaskStartEvent = 1;
const TaskCompleteEvent = 2;
const TaskErrorEvent = 3;
const TaskCancelEvent = 4;
const TaskRunEvent = 5;
const TaskYieldEvent = 6;
const SchedulerSuspendEvent = 7;
const SchedulerResumeEvent = 8;
function logEvent(entries) {
if (eventLog !== null) {
const offset = eventLogIndex;
eventLogIndex += entries.length;
if (eventLogIndex + 1 > eventLogSize) {
eventLogSize = eventLogIndex + 1;
const newEventLog = new Int32Array(
eventLogSize * Int32Array.BYTES_PER_ELEMENT,
);
newEventLog.set(eventLog);
eventLogBuffer = newEventLog.buffer;
eventLog = newEventLog;
}
eventLog.set(entries, offset);
}
}
export function startLoggingProfilingEvents(): void {
eventLogSize = INITIAL_EVENT_LOG_SIZE;
eventLogBuffer = new ArrayBuffer(eventLogSize * Int32Array.BYTES_PER_ELEMENT);
eventLog = new Int32Array(eventLogBuffer);
eventLogIndex = 0;
}
export function stopLoggingProfilingEvents(): ArrayBuffer | null {
const buffer = eventLogBuffer;
eventLogBuffer = eventLog = null;
return buffer;
}
export function markTaskStart(
task: {id: number, priorityLevel: PriorityLevel},
time: number,
) {
if (enableProfiling) {
if (profilingState !== null) {
profilingState[QUEUE_SIZE]++;
}
if (eventLog !== null) {
logEvent([TaskStartEvent, time, task.id, task.priorityLevel]);
}
}
}
export function markTaskCompleted(
task: {
id: number,
priorityLevel: PriorityLevel,
},
time: number,
) {
if (enableProfiling) {
if (profilingState !== null) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[QUEUE_SIZE]--;
}
if (eventLog !== null) {
logEvent([TaskCompleteEvent, time, task.id]);
}
}
}
export function markTaskCanceled(
task: {
id: number,
priorityLevel: PriorityLevel,
},
time: number,
) {
if (enableProfiling) {
if (profilingState !== null) {
profilingState[QUEUE_SIZE]--;
}
if (eventLog !== null) {
logEvent([TaskCancelEvent, time, task.id]);
}
}
}
export function markTaskErrored(
task: {
id: number,
priorityLevel: PriorityLevel,
},
time: number,
) {
if (enableProfiling) {
if (profilingState !== null) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[QUEUE_SIZE]--;
}
if (eventLog !== null) {
logEvent([TaskErrorEvent, time, task.id]);
}
}
}
export function markTaskRun(
task: {id: number, priorityLevel: PriorityLevel},
time: number,
) {
if (enableProfiling) {
runIdCounter++;
if (profilingState !== null) {
profilingState[PRIORITY] = task.priorityLevel;
profilingState[CURRENT_TASK_ID] = task.id;
profilingState[CURRENT_RUN_ID] = runIdCounter;
}
if (eventLog !== null) {
logEvent([TaskRunEvent, time, task.id, runIdCounter]);
}
}
}
export function markTaskYield(task: {id: number}, time: number) {
if (enableProfiling) {
if (profilingState !== null) {
profilingState[PRIORITY] = NoPriority;
profilingState[CURRENT_TASK_ID] = 0;
profilingState[CURRENT_RUN_ID] = 0;
}
if (eventLog !== null) {
logEvent([TaskYieldEvent, time, task.id, runIdCounter]);
}
}
}
export function markSchedulerSuspended(time: number) {
if (enableProfiling) {
mainThreadIdCounter++;
if (eventLog !== null) {
logEvent([SchedulerSuspendEvent, time, mainThreadIdCounter]);
}
}
}
export function markSchedulerUnsuspended(time: number) {
if (enableProfiling) {
if (eventLog !== null) {
logEvent([SchedulerResumeEvent, time, mainThreadIdCounter]);
}
}
}
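
The deleted module above keeps two pieces of state: a four-slot Int32 shared buffer (priority, current task id, current run id, queue size) and a growable Int32Array event log. A sketch of polling that shared buffer from outside the scheduler, patterned on the getProfilingInfo helper in the deleted test file below; Scheduler here is assumed to be a profiling-enabled build:

// Slot indices as declared in the deleted SchedulerProfiling.js above.
const PRIORITY = 0;
const CURRENT_TASK_ID = 1;
const CURRENT_RUN_ID = 2;
const QUEUE_SIZE = 3;

const state = new Int32Array(Scheduler.unstable_sharedProfilingBuffer);
if (state[QUEUE_SIZE] === 0) {
  console.log('Empty queue');
} else if (state[PRIORITY] === 0) {
  // NoPriority: the scheduler has yielded back to the main thread.
  console.log('Suspended, queue size:', state[QUEUE_SIZE]);
} else {
  console.log(
    'Task', state[CURRENT_TASK_ID],
    'run', state[CURRENT_RUN_ID],
    'priority', state[PRIORITY],
    'queue size', state[QUEUE_SIZE],
  );
}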

@@ -59,15 +59,11 @@ describe('SchedulerDOM', () => {
runPostMessageCallbacks(config);
}
let frameSize;
let startOfLatestFrame;
let currentTime;
let frameSize = 33;
let startOfLatestFrame = 0;
let currentTime = 0;
beforeEach(() => {
frameSize = 33;
startOfLatestFrame = 0;
currentTime = 0;
delete global.performance;
global.requestAnimationFrame = function(cb) {
return rAFCallbacks.push(() => {

@@ -1,449 +0,0 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @emails react-core
* @jest-environment node
*/
/* eslint-disable no-for-of-loops/no-for-of-loops */
'use strict';
let Scheduler;
let sharedProfilingArray;
// let runWithPriority;
let ImmediatePriority;
let UserBlockingPriority;
let NormalPriority;
let LowPriority;
let IdlePriority;
let scheduleCallback;
let cancelCallback;
// let wrapCallback;
// let getCurrentPriorityLevel;
// let shouldYield;
function priorityLevelToString(priorityLevel) {
switch (priorityLevel) {
case ImmediatePriority:
return 'Immediate';
case UserBlockingPriority:
return 'User-blocking';
case NormalPriority:
return 'Normal';
case LowPriority:
return 'Low';
case IdlePriority:
return 'Idle';
default:
return null;
}
}
describe('Scheduler', () => {
if (!__PROFILE__) {
// The tests in this suite only apply when profiling is on
it('profiling APIs are not available', () => {
Scheduler = require('scheduler');
expect(Scheduler.unstable_stopLoggingProfilingEvents).toBe(null);
expect(Scheduler.unstable_sharedProfilingBuffer).toBe(null);
});
return;
}
beforeEach(() => {
jest.resetModules();
jest.mock('scheduler', () => require('scheduler/unstable_mock'));
Scheduler = require('scheduler');
sharedProfilingArray = new Int32Array(
Scheduler.unstable_sharedProfilingBuffer,
);
// runWithPriority = Scheduler.unstable_runWithPriority;
ImmediatePriority = Scheduler.unstable_ImmediatePriority;
UserBlockingPriority = Scheduler.unstable_UserBlockingPriority;
NormalPriority = Scheduler.unstable_NormalPriority;
LowPriority = Scheduler.unstable_LowPriority;
IdlePriority = Scheduler.unstable_IdlePriority;
scheduleCallback = Scheduler.unstable_scheduleCallback;
cancelCallback = Scheduler.unstable_cancelCallback;
// wrapCallback = Scheduler.unstable_wrapCallback;
// getCurrentPriorityLevel = Scheduler.unstable_getCurrentPriorityLevel;
// shouldYield = Scheduler.unstable_shouldYield;
});
const PRIORITY = 0;
const CURRENT_TASK_ID = 1;
const CURRENT_RUN_ID = 2;
const QUEUE_SIZE = 3;
afterEach(() => {
if (sharedProfilingArray[QUEUE_SIZE] !== 0) {
throw Error(
'Test exited, but the shared profiling buffer indicates that a task ' +
'is still running',
);
}
});
const TaskStartEvent = 1;
const TaskCompleteEvent = 2;
const TaskErrorEvent = 3;
const TaskCancelEvent = 4;
const TaskRunEvent = 5;
const TaskYieldEvent = 6;
const SchedulerSuspendEvent = 7;
const SchedulerResumeEvent = 8;
function stopProfilingAndPrintFlamegraph() {
const eventLog = new Int32Array(
Scheduler.unstable_stopLoggingProfilingEvents(),
);
const tasks = new Map();
const mainThreadRuns = [];
let i = 0;
processLog: while (i < eventLog.length) {
const instruction = eventLog[i];
const time = eventLog[i + 1];
switch (instruction) {
case 0: {
break processLog;
}
case TaskStartEvent: {
const taskId = eventLog[i + 2];
const priorityLevel = eventLog[i + 3];
const task = {
id: taskId,
priorityLevel,
label: null,
start: time,
end: -1,
exitStatus: null,
runs: [],
};
tasks.set(taskId, task);
i += 4;
break;
}
case TaskCompleteEvent: {
const taskId = eventLog[i + 2];
const task = tasks.get(taskId);
if (task === undefined) {
throw Error('Task does not exist.');
}
task.end = time;
task.exitStatus = 'completed';
i += 3;
break;
}
case TaskErrorEvent: {
const taskId = eventLog[i + 2];
const task = tasks.get(taskId);
if (task === undefined) {
throw Error('Task does not exist.');
}
task.end = time;
task.exitStatus = 'errored';
i += 3;
break;
}
case TaskCancelEvent: {
const taskId = eventLog[i + 2];
const task = tasks.get(taskId);
if (task === undefined) {
throw Error('Task does not exist.');
}
task.end = time;
task.exitStatus = 'canceled';
i += 3;
break;
}
case TaskRunEvent:
case TaskYieldEvent: {
const taskId = eventLog[i + 2];
const task = tasks.get(taskId);
if (task === undefined) {
throw Error('Task does not exist.');
}
task.runs.push(time);
i += 4;
break;
}
case SchedulerSuspendEvent:
case SchedulerResumeEvent: {
mainThreadRuns.push(time);
i += 3;
break;
}
default: {
throw Error('Unknown instruction type: ' + instruction);
}
}
}
// Now we can render the tasks as a flamegraph.
const labelColumnWidth = 30;
const msPerChar = 50;
let result = '';
const mainThreadLabelColumn = '!!! Main thread ';
let mainThreadTimelineColumn = '';
let isMainThreadBusy = false;
for (const time of mainThreadRuns) {
const index = time / msPerChar;
mainThreadTimelineColumn += (isMainThreadBusy ? '█' : ' ').repeat(
index - mainThreadTimelineColumn.length,
);
isMainThreadBusy = !isMainThreadBusy;
}
result += `${mainThreadLabelColumn}${mainThreadTimelineColumn}\n`;
const tasksByPriority = Array.from(tasks.values()).sort(
(t1, t2) => t1.priorityLevel - t2.priorityLevel,
);
for (const task of tasksByPriority) {
let label = task.label;
if (label === undefined) {
label = 'Task';
}
let labelColumn = `Task ${task.id} [${priorityLevelToString(
task.priorityLevel,
)}]`;
labelColumn += ' '.repeat(labelColumnWidth - labelColumn.length - 1);
// Add empty space up until the start mark
let timelineColumn = ' '.repeat(task.start / msPerChar);
let isRunning = false;
for (const time of task.runs) {
const index = time / msPerChar;
timelineColumn += (isRunning ? '█' : '░').repeat(
index - timelineColumn.length,
);
isRunning = !isRunning;
}
const endIndex = task.end / msPerChar;
timelineColumn += (isRunning ? '█' : '░').repeat(
endIndex - timelineColumn.length,
);
if (task.exitStatus !== 'completed') {
timelineColumn += `🡐 ${task.exitStatus}`;
}
result += `${labelColumn}${timelineColumn}\n`;
}
return '\n' + result;
}
function getProfilingInfo() {
const queueSize = sharedProfilingArray[QUEUE_SIZE];
if (queueSize === 0) {
return 'Empty Queue';
}
const priorityLevel = sharedProfilingArray[PRIORITY];
if (priorityLevel === 0) {
return 'Suspended, Queue Size: ' + queueSize;
}
return (
`Task: ${sharedProfilingArray[CURRENT_TASK_ID]}, ` +
`Run: ${sharedProfilingArray[CURRENT_RUN_ID]}, ` +
`Priority: ${priorityLevelToString(priorityLevel)}, ` +
`Queue Size: ${sharedProfilingArray[QUEUE_SIZE]}`
);
}
it('creates a basic flamegraph', () => {
Scheduler.unstable_startLoggingProfilingEvents();
Scheduler.unstable_advanceTime(100);
scheduleCallback(
NormalPriority,
() => {
Scheduler.unstable_advanceTime(300);
Scheduler.unstable_yieldValue(getProfilingInfo());
scheduleCallback(
UserBlockingPriority,
() => {
Scheduler.unstable_yieldValue(getProfilingInfo());
Scheduler.unstable_advanceTime(300);
},
{label: 'Bar'},
);
Scheduler.unstable_advanceTime(100);
Scheduler.unstable_yieldValue('Yield');
return () => {
Scheduler.unstable_yieldValue(getProfilingInfo());
Scheduler.unstable_advanceTime(300);
};
},
{label: 'Foo'},
);
expect(Scheduler).toFlushAndYieldThrough([
'Task: 1, Run: 1, Priority: Normal, Queue Size: 1',
'Yield',
]);
Scheduler.unstable_advanceTime(100);
expect(Scheduler).toFlushAndYield([
'Task: 2, Run: 2, Priority: User-blocking, Queue Size: 2',
'Task: 1, Run: 3, Priority: Normal, Queue Size: 1',
]);
expect(getProfilingInfo()).toEqual('Empty Queue');
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
Task 2 [User-blocking]
Task 1 [Normal]
`,
);
});
it('marks when a task is canceled', () => {
Scheduler.unstable_startLoggingProfilingEvents();
const task = scheduleCallback(NormalPriority, () => {
Scheduler.unstable_yieldValue(getProfilingInfo());
Scheduler.unstable_advanceTime(300);
Scheduler.unstable_yieldValue('Yield');
return () => {
Scheduler.unstable_yieldValue('Continuation');
Scheduler.unstable_advanceTime(200);
};
});
expect(Scheduler).toFlushAndYieldThrough([
'Task: 1, Run: 1, Priority: Normal, Queue Size: 1',
'Yield',
]);
Scheduler.unstable_advanceTime(100);
cancelCallback(task);
// Advance more time. This should not affect the size of the main
// thread row, since the Scheduler queue is empty.
Scheduler.unstable_advanceTime(1000);
expect(Scheduler).toFlushWithoutYielding();
// The main thread row should end when the callback is cancelled.
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
Task 1 [Normal] 🡐 canceled
`,
);
});
it('marks when a task errors', () => {
Scheduler.unstable_startLoggingProfilingEvents();
scheduleCallback(NormalPriority, () => {
Scheduler.unstable_advanceTime(300);
throw Error('Oops');
});
expect(Scheduler).toFlushAndThrow('Oops');
Scheduler.unstable_advanceTime(100);
// Advance more time. This should not affect the size of the main
// thread row, since the Scheduler queue is empty.
Scheduler.unstable_advanceTime(1000);
expect(Scheduler).toFlushWithoutYielding();
// The main thread row should end when the callback is cancelled.
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
Task 1 [Normal] 🡐 errored
`,
);
});
it('handles cancelling a task that already finished', () => {
Scheduler.unstable_startLoggingProfilingEvents();
const task = scheduleCallback(NormalPriority, () => {
Scheduler.unstable_yieldValue('A');
Scheduler.unstable_advanceTime(1000);
});
expect(Scheduler).toFlushAndYield(['A']);
cancelCallback(task);
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
Task 1 [Normal]
`,
);
});
it('handles cancelling a task multiple times', () => {
Scheduler.unstable_startLoggingProfilingEvents();
scheduleCallback(
NormalPriority,
() => {
Scheduler.unstable_yieldValue('A');
Scheduler.unstable_advanceTime(1000);
},
{label: 'A'},
);
Scheduler.unstable_advanceTime(200);
const task = scheduleCallback(
NormalPriority,
() => {
Scheduler.unstable_yieldValue('B');
Scheduler.unstable_advanceTime(1000);
},
{label: 'B'},
);
Scheduler.unstable_advanceTime(400);
cancelCallback(task);
cancelCallback(task);
cancelCallback(task);
expect(Scheduler).toFlushAndYield(['A']);
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
Task 1 [Normal]
Task 2 [Normal] 🡐 canceled
`,
);
});
it('handles cancelling a delayed task', () => {
Scheduler.unstable_startLoggingProfilingEvents();
const task = scheduleCallback(
NormalPriority,
() => Scheduler.unstable_yieldValue('A'),
{delay: 1000},
);
cancelCallback(task);
expect(Scheduler).toFlushWithoutYielding();
expect(stopProfilingAndPrintFlamegraph()).toEqual(
`
!!! Main thread
`,
);
});
it('resizes event log buffer if there are many events', () => {
const tasks = [];
for (let i = 0; i < 5000; i++) {
tasks.push(scheduleCallback(NormalPriority, () => {}));
}
expect(getProfilingInfo()).toEqual('Suspended, Queue Size: 5000');
tasks.forEach(task => cancelCallback(task));
expect(getProfilingInfo()).toEqual('Empty Queue');
});
});
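
For reference, these are the record shapes the decoder in stopProfilingAndPrintFlamegraph above assumes, matching the logEvent calls in the deleted SchedulerProfiling.js (every field is one Int32; a leading 0 type marks the end of the written portion of the log):

// TaskStartEvent (1):        [type, time, taskId, priorityLevel]  (4 ints)
// TaskCompleteEvent (2):     [type, time, taskId]                 (3 ints)
// TaskErrorEvent (3):        [type, time, taskId]                 (3 ints)
// TaskCancelEvent (4):       [type, time, taskId]                 (3 ints)
// TaskRunEvent (5):          [type, time, taskId, runId]          (4 ints)
// TaskYieldEvent (6):        [type, time, taskId, runId]          (4 ints)
// SchedulerSuspendEvent (7): [type, time, mainThreadId]           (3 ints)
// SchedulerResumeEvent (8):  [type, time, mainThreadId]           (3 ints)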

@@ -13,5 +13,3 @@ export const {
requestTimerEventBeforeFirstFrame,
enableMessageLoopImplementation,
} = require('SchedulerFeatureFlags');
export const enableProfiling = __PROFILE__;

@@ -53,9 +53,8 @@ if (
}
}
};
const initialTime = Date.now();
getCurrentTime = function() {
return Date.now() - initialTime;
return Date.now();
};
requestHostCallback = function(cb) {
if (_callback !== null) {
@@ -112,15 +111,10 @@
typeof requestIdleCallback === 'function' &&
typeof cancelIdleCallback === 'function';
if (
typeof performance === 'object' &&
typeof performance.now === 'function'
) {
getCurrentTime = () => performance.now();
} else {
const initialTime = Date.now();
getCurrentTime = () => Date.now() - initialTime;
}
getCurrentTime =
typeof performance === 'object' && typeof performance.now === 'function'
? () => performance.now()
: () => Date.now();
let isRAFLoopRunning = false;
let isMessageLoopRunning = false;
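
The hunk above reverts both clocks: the non-DOM fallback goes back to absolute Date.now(), and the DOM branch goes back to a plain performance.now()/Date.now() choice, dropping the shared "time since module load" origin. One plausible motivation for that origin shift, stated here as an assumption rather than something this commit explains, is that profiling timestamps were written into Int32Array slots, which cannot hold absolute epoch milliseconds:

// Assumption / illustration only: why relative timestamps fit where absolute ones do not.
const initialTime = Date.now();
console.log(Date.now());               // about 1.56e12 in August 2019, far above 2**31 - 1
console.log(Date.now() - initialTime); // small offsets from startup fit in a signed Int32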

@@ -411,13 +411,7 @@ const bundles = [
/******* React Scheduler (experimental) *******/
{
bundleTypes: [
NODE_DEV,
NODE_PROD,
FB_WWW_DEV,
FB_WWW_PROD,
FB_WWW_PROFILING,
],
bundleTypes: [NODE_DEV, NODE_PROD, FB_WWW_DEV, FB_WWW_PROD],
moduleType: ISOMORPHIC,
entry: 'scheduler',
global: 'Scheduler',

@@ -21,11 +21,6 @@ module.exports = {
process: true,
setImmediate: true,
Buffer: true,
// Scheduler profiling
SharedArrayBuffer: true,
Int32Array: true,
ArrayBuffer: true,
},
parserOptions: {
ecmaVersion: 5,

@@ -22,11 +22,6 @@ module.exports = {
// Node.js Server Rendering
setImmediate: true,
Buffer: true,
// Scheduler profiling
SharedArrayBuffer: true,
Int32Array: true,
ArrayBuffer: true,
},
parserOptions: {
ecmaVersion: 5,

@@ -21,11 +21,6 @@ module.exports = {
// Fabric. See https://github.com/facebook/react/pull/15490
// for more information
nativeFabricUIManager: true,
// Scheduler profiling
SharedArrayBuffer: true,
Int32Array: true,
ArrayBuffer: true,
},
parserOptions: {
ecmaVersion: 5,

@@ -24,11 +24,6 @@ module.exports = {
define: true,
require: true,
global: true,
// Scheduler profiling
SharedArrayBuffer: true,
Int32Array: true,
ArrayBuffer: true,
},
parserOptions: {
ecmaVersion: 5,