From 917d899c6b9e0e2a5ca7a864c64ce79140e2b212 Mon Sep 17 00:00:00 2001
From: Andrew Clark
Date: Tue, 13 Aug 2019 11:02:52 -0700
Subject: [PATCH] Expose shared array buffer with profiling info

Array contains
- the priority the Scheduler is currently running at
- the size of the queue
- the id of the currently running task
---
 .eslintrc.js                                  |  2 +
 .../npm/umd/scheduler.development.js          |  4 ++
 .../npm/umd/scheduler.production.min.js       |  4 ++
 .../npm/umd/scheduler.profiling.min.js        |  4 ++
 packages/scheduler/src/Scheduler.js           | 30 ++++++--
 .../scheduler/src/SchedulerFeatureFlags.js    |  1 +
 packages/scheduler/src/SchedulerPriorities.js |  3 +-
 packages/scheduler/src/SchedulerProfiling.js  | 53 +++++++++++++-
 .../SchedulerProfiling-test.internal.js       | 70 ++++++++++++++++++-
 .../src/forks/SchedulerFeatureFlags.www.js    |  1 +
 10 files changed, 162 insertions(+), 10 deletions(-)

diff --git a/.eslintrc.js b/.eslintrc.js
index de78bd074902c..87e243938d0b3 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -140,6 +140,8 @@ module.exports = {
   ],
 
   globals: {
+    SharedArrayBuffer: true,
+
     spyOnDev: true,
     spyOnDevAndProd: true,
     spyOnProd: true,
diff --git a/packages/scheduler/npm/umd/scheduler.development.js b/packages/scheduler/npm/umd/scheduler.development.js
index c3e461e8720dd..41a2412a35de2 100644
--- a/packages/scheduler/npm/umd/scheduler.development.js
+++ b/packages/scheduler/npm/umd/scheduler.development.js
@@ -144,5 +144,9 @@
       return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
         .Scheduler.unstable_UserBlockingPriority;
     },
+    get unstable_sharedProfilingBuffer() {
+      return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
+        .Scheduler.unstable_sharedProfilingBuffer;
+    },
   });
 });
diff --git a/packages/scheduler/npm/umd/scheduler.production.min.js b/packages/scheduler/npm/umd/scheduler.production.min.js
index 8daa4d998171e..6ea8e9df7fe43 100644
--- a/packages/scheduler/npm/umd/scheduler.production.min.js
+++ b/packages/scheduler/npm/umd/scheduler.production.min.js
@@ -138,5 +138,9 @@
       return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
         .Scheduler.unstable_UserBlockingPriority;
     },
+    get unstable_sharedProfilingBuffer() {
+      return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
+        .Scheduler.unstable_sharedProfilingBuffer;
+    },
   });
 });
diff --git a/packages/scheduler/npm/umd/scheduler.profiling.min.js b/packages/scheduler/npm/umd/scheduler.profiling.min.js
index 8daa4d998171e..6ea8e9df7fe43 100644
--- a/packages/scheduler/npm/umd/scheduler.profiling.min.js
+++ b/packages/scheduler/npm/umd/scheduler.profiling.min.js
@@ -138,5 +138,9 @@
       return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
         .Scheduler.unstable_UserBlockingPriority;
     },
+    get unstable_sharedProfilingBuffer() {
+      return global.React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
+        .Scheduler.unstable_sharedProfilingBuffer;
+    },
   });
 });
diff --git a/packages/scheduler/src/Scheduler.js b/packages/scheduler/src/Scheduler.js
index 1450ba372b477..ea4efdf6d0aa1 100644
--- a/packages/scheduler/src/Scheduler.js
+++ b/packages/scheduler/src/Scheduler.js
@@ -10,6 +10,7 @@
 
 import {
   enableSchedulerDebugging,
+  enableSharedProfilingBuffer,
   enableProfiling,
 } from './SchedulerFeatureFlags';
 import {
@@ -33,6 +34,7 @@ import {
   IdlePriority,
 } from './SchedulerPriorities';
 import {
+  sharedProfilingBuffer,
   markTaskRun,
   markTaskYield,
   markTaskCompleted,
@@ -83,6 +85,10 @@ function requestHostCallbackWithProfiling(cb) {
   }
 }
 
+// Expose a shared array buffer that contains profiling information.
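+// The buffer holds three Int32 slots (see SchedulerProfiling.js): index 0 is
+// the priority the Scheduler is currently running at, index 1 the id of the
+// currently running task, and index 2 the current size of the task queue.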
+export const unstable_sharedProfilingBuffer = + enableProfiling && enableSharedProfilingBuffer ? sharedProfilingBuffer : null; + const requestHostCallback = enableProfiling ? requestHostCallbackWithProfiling : requestHostCallbackWithoutProfiling; @@ -96,7 +102,10 @@ function flushTask(task, callback, currentTime) { markTaskYield(task); return continuationCallback; } else { - markTaskCompleted(task); + if (enableProfiling) { + markTaskCompleted(task); + task.isQueued = false; + } return null; } } @@ -113,7 +122,10 @@ function advanceTimers(currentTime) { pop(timerQueue); timer.sortIndex = timer.expirationTime; push(taskQueue, timer); - markTaskStart(timer); + if (enableProfiling) { + markTaskStart(timer); + timer.isQueued = true; + } } else { // Remaining timers are pending. return; @@ -201,7 +213,10 @@ function flushWork(hasTimeRemaining, initialTime) { } } catch (error) { if (currentTask !== null) { - markTaskErrored(currentTask); + if (enableProfiling) { + markTaskErrored(currentTask); + currentTask.isQueued = false; + } if (currentTask === peek(taskQueue)) { pop(taskQueue); } @@ -332,6 +347,7 @@ function unstable_scheduleCallback(priorityLevel, callback, options) { }; if (enableProfiling) { + newTask.isQueued = false; if (typeof options === 'object' && options !== null) { newTask.label = label; } @@ -355,7 +371,10 @@ function unstable_scheduleCallback(priorityLevel, callback, options) { } else { newTask.sortIndex = expirationTime; push(taskQueue, newTask); - markTaskStart(newTask); + if (enableProfiling) { + markTaskStart(newTask); + newTask.isQueued = true; + } // Schedule a host callback, if needed. If we're already performing work, // wait until the next time we yield. if (!isHostCallbackScheduled && !isPerformingWork) { @@ -384,8 +403,9 @@ function unstable_getFirstCallbackNode() { } function unstable_cancelCallback(task) { - if (enableProfiling && task.callback !== null) { + if (enableProfiling && task.isQueued) { markTaskCanceled(task); + task.isQueued = false; } if (task !== null && task === peek(taskQueue)) { pop(taskQueue); diff --git a/packages/scheduler/src/SchedulerFeatureFlags.js b/packages/scheduler/src/SchedulerFeatureFlags.js index 9611038ce39bd..c93c6f012e921 100644 --- a/packages/scheduler/src/SchedulerFeatureFlags.js +++ b/packages/scheduler/src/SchedulerFeatureFlags.js @@ -13,3 +13,4 @@ export const requestTimerEventBeforeFirstFrame = false; export const enableMessageLoopImplementation = false; export const enableProfiling = __PROFILE__; export const enableUserTimingAPI = false; +export const enableSharedProfilingBuffer = false; diff --git a/packages/scheduler/src/SchedulerPriorities.js b/packages/scheduler/src/SchedulerPriorities.js index b7ce82a60cdd3..1d46ae0e48cd7 100644 --- a/packages/scheduler/src/SchedulerPriorities.js +++ b/packages/scheduler/src/SchedulerPriorities.js @@ -7,9 +7,10 @@ * @flow */ -export type PriorityLevel = 1 | 2 | 3 | 4 | 5; +export type PriorityLevel = 0 | 1 | 2 | 3 | 4 | 5; // TODO: Use symbols? 
+export const NoPriority = 0; export const ImmediatePriority = 1; export const UserBlockingPriority = 2; export const NormalPriority = 3; diff --git a/packages/scheduler/src/SchedulerProfiling.js b/packages/scheduler/src/SchedulerProfiling.js index 168a989720ada..909e4db0b23f0 100644 --- a/packages/scheduler/src/SchedulerProfiling.js +++ b/packages/scheduler/src/SchedulerProfiling.js @@ -11,8 +11,11 @@ import type {PriorityLevel} from './SchedulerPriorities'; import { enableProfiling, enableUserTimingAPI as enableUserTimingAPIFeatureFlag, + enableSharedProfilingBuffer, } from './SchedulerFeatureFlags'; +import {NoPriority} from './SchedulerPriorities'; + const enableUserTimingAPI = enableUserTimingAPIFeatureFlag && typeof performance !== 'undefined' && @@ -22,8 +25,35 @@ const enableUserTimingAPI = let runIdCounter: number = 0; let mainThreadIdCounter: number = 0; +const length = 3; +const size = Int32Array.BYTES_PER_ELEMENT * length; +export const sharedProfilingBuffer = + // $FlowFixMe Flow doesn't know about SharedArrayBuffer + typeof SharedArrayBuffer === 'function' + ? new SharedArrayBuffer(size) + : // $FlowFixMe Flow doesn't know about ArrayBuffer + new ArrayBuffer(size); +const profilingInfo = enableSharedProfilingBuffer + ? new Int32Array(sharedProfilingBuffer) + : null; + +const PRIORITY = 0; +const CURRENT_TASK_ID = 1; +const QUEUE_SIZE = 2; + +if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[PRIORITY] = NoPriority; + // This is maintained with a counter, because the size of the priority queue + // array might include canceled tasks. + profilingInfo[QUEUE_SIZE] = 0; + profilingInfo[CURRENT_TASK_ID] = 0; +} + export function markTaskStart(task: {id: number}) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[QUEUE_SIZE]++; + } if (enableUserTimingAPI) { // Use extra field to track if delayed task starts. 
const taskStartMark = `SchedulerTask-${task.id}-Start`; @@ -39,6 +69,11 @@ export function markTaskCompleted(task: { label?: string, }) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[PRIORITY] = NoPriority; + profilingInfo[CURRENT_TASK_ID] = 0; + profilingInfo[QUEUE_SIZE]--; + } if (enableUserTimingAPI) { const info = JSON.stringify({ priorityLevel: task.priorityLevel, @@ -58,6 +93,9 @@ export function markTaskCanceled(task: { label?: string, }) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[QUEUE_SIZE]--; + } if (enableUserTimingAPI) { const info = JSON.stringify({ priorityLevel: task.priorityLevel, @@ -77,6 +115,11 @@ export function markTaskErrored(task: { label?: string, }) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[PRIORITY] = NoPriority; + profilingInfo[CURRENT_TASK_ID] = 0; + profilingInfo[QUEUE_SIZE]--; + } if (enableUserTimingAPI) { const info = JSON.stringify({ priorityLevel: task.priorityLevel, @@ -90,8 +133,12 @@ export function markTaskErrored(task: { } } -export function markTaskRun(task: {id: number}) { +export function markTaskRun(task: {id: number, priorityLevel: PriorityLevel}) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[PRIORITY] = task.priorityLevel; + profilingInfo[CURRENT_TASK_ID] = task.id; + } if (enableUserTimingAPI) { runIdCounter++; const runMark = `SchedulerTask-${task.id}-Run-${runIdCounter}`; @@ -103,6 +150,10 @@ export function markTaskRun(task: {id: number}) { export function markTaskYield(task: {id: number}) { if (enableProfiling) { + if (enableSharedProfilingBuffer && profilingInfo !== null) { + profilingInfo[PRIORITY] = NoPriority; + profilingInfo[CURRENT_TASK_ID] = 0; + } if (enableUserTimingAPI) { const yieldMark = `SchedulerTask-${task.id}-Yield-${runIdCounter}`; performance.mark(yieldMark); diff --git a/packages/scheduler/src/__tests__/SchedulerProfiling-test.internal.js b/packages/scheduler/src/__tests__/SchedulerProfiling-test.internal.js index 102f4333b9914..4e11fb873e6a6 100644 --- a/packages/scheduler/src/__tests__/SchedulerProfiling-test.internal.js +++ b/packages/scheduler/src/__tests__/SchedulerProfiling-test.internal.js @@ -13,6 +13,7 @@ 'use strict'; let Scheduler; +let sharedProfilingArray; // let runWithPriority; let ImmediatePriority; let UserBlockingPriority; @@ -51,8 +52,13 @@ describe('Scheduler', () => { performance = global.performance = createUserTimingPolyfill(); require('scheduler/src/SchedulerFeatureFlags').enableUserTimingAPI = true; + require('scheduler/src/SchedulerFeatureFlags').enableSharedProfilingBuffer = true; Scheduler = require('scheduler'); + sharedProfilingArray = new Int32Array( + Scheduler.unstable_sharedProfilingBuffer, + ); + // runWithPriority = Scheduler.unstable_runWithPriority; ImmediatePriority = Scheduler.unstable_ImmediatePriority; UserBlockingPriority = Scheduler.unstable_UserBlockingPriority; @@ -68,6 +74,12 @@ describe('Scheduler', () => { afterEach(() => { performance.assertAllUserTimingsAreCleared(); + if (sharedProfilingArray[2] !== 0) { + throw Error( + 'Test exited, but the shared profiling buffer indicates that a task ' + + 'is still running', + ); + } }); function createUserTimingPolyfill() { @@ -248,6 +260,42 @@ describe('Scheduler', () => { }; } + const PRIORITY = 0; + const CURRENT_TASK_ID = 1; + const QUEUE_SIZE = 2; + function getProfilingInfo() { + const 
queueSize = sharedProfilingArray[QUEUE_SIZE];
+
+    let priorityStr;
+    switch (sharedProfilingArray[PRIORITY]) {
+      case 0: {
+        return queueSize !== 0
+          ? 'Suspended, Queue Size: ' + queueSize
+          : 'Empty Queue';
+      }
+      case 1:
+        priorityStr = 'Immediate';
+        break;
+      case 2:
+        priorityStr = 'User-Blocking';
+        break;
+      case 3:
+        priorityStr = 'Normal';
+        break;
+      case 4:
+        priorityStr = 'Low';
+        break;
+      case 5:
+        priorityStr = 'Idle';
+        break;
+    }
+    return `Current Task: ${
+      sharedProfilingArray[CURRENT_TASK_ID]
+    }, Priority: ${priorityStr}, Queue Size: ${
+      sharedProfilingArray[QUEUE_SIZE]
+    }`;
+  }
+
   if (!__DEV__) {
     // The tests in this suite are dev only
     it("empty test so Jest doesn't complain that there are no tests in this file", () => {});
@@ -260,9 +308,11 @@
       NormalPriority,
       () => {
         Scheduler.unstable_advanceTime(300);
+        Scheduler.unstable_yieldValue(getProfilingInfo());
         scheduleCallback(
           UserBlockingPriority,
           () => {
+            Scheduler.unstable_yieldValue(getProfilingInfo());
             Scheduler.unstable_advanceTime(300);
           },
           {label: 'Bar'},
@@ -270,14 +320,24 @@
         Scheduler.unstable_advanceTime(100);
         Scheduler.unstable_yieldValue('Yield');
         return () => {
+          Scheduler.unstable_yieldValue(getProfilingInfo());
           Scheduler.unstable_advanceTime(300);
         };
       },
       {label: 'Foo'},
     );
-    expect(Scheduler).toFlushAndYieldThrough(['Yield']);
+    expect(Scheduler).toFlushAndYieldThrough([
+      'Current Task: 1, Priority: Normal, Queue Size: 1',
+      'Yield',
+    ]);
     Scheduler.unstable_advanceTime(100);
-    expect(Scheduler).toFlushWithoutYielding();
+    expect(Scheduler).toFlushAndYield([
+      'Current Task: 2, Priority: User-Blocking, Queue Size: 2',
+      'Current Task: 1, Priority: Normal, Queue Size: 1',
+    ]);
+
+    expect(getProfilingInfo()).toEqual('Empty Queue');
+
     expect(performance.printUserTimings()).toEqual(
       `
 !!! Main thread │ ██
@@ -289,6 +349,7 @@
 
   it('marks when a task is canceled', () => {
     const task = scheduleCallback(NormalPriority, () => {
+      Scheduler.unstable_yieldValue(getProfilingInfo());
       Scheduler.unstable_advanceTime(300);
       Scheduler.unstable_yieldValue('Yield');
       return () => {
@@ -297,7 +358,10 @@
       };
     });
 
-    expect(Scheduler).toFlushAndYieldThrough(['Yield']);
+    expect(Scheduler).toFlushAndYieldThrough([
+      'Current Task: 1, Priority: Normal, Queue Size: 1',
+      'Yield',
+    ]);
     Scheduler.unstable_advanceTime(100);
     cancelCallback(task);
diff --git a/packages/scheduler/src/forks/SchedulerFeatureFlags.www.js b/packages/scheduler/src/forks/SchedulerFeatureFlags.www.js
index 9ecd6dd749606..2460a7213681d 100644
--- a/packages/scheduler/src/forks/SchedulerFeatureFlags.www.js
+++ b/packages/scheduler/src/forks/SchedulerFeatureFlags.www.js
@@ -13,6 +13,7 @@ export const {
   requestTimerEventBeforeFirstFrame,
   enableMessageLoopImplementation,
   enableUserTimingAPI,
+  enableSharedProfilingBuffer,
 } = require('SchedulerFeatureFlags');
 
 export const enableProfiling = __PROFILE__;
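
A minimal sketch of how a consumer, for instance a DevTools frontend, might sample the buffer exposed by this patch. It assumes a profiling build with enableSharedProfilingBuffer switched on (the flag is off by default above), so unstable_sharedProfilingBuffer is non-null. The function name readSchedulerProfilingInfo is illustrative rather than part of the scheduler API; the slot indices follow the PRIORITY (0), CURRENT_TASK_ID (1), and QUEUE_SIZE (2) constants defined in SchedulerProfiling.js.

// Illustrative sketch, not part of the patch. Assumes a profiling build of
// the scheduler with enableSharedProfilingBuffer turned on.
const Scheduler = require('scheduler');

function readSchedulerProfilingInfo() {
  const buffer = Scheduler.unstable_sharedProfilingBuffer;
  if (buffer === null) {
    // enableProfiling or enableSharedProfilingBuffer is off in this build.
    return null;
  }
  // Wrap the (Shared)ArrayBuffer in the same Int32 view the scheduler writes to.
  const info = new Int32Array(buffer);
  return {
    priorityLevel: info[0], // 0 = NoPriority ... 5 = IdlePriority
    currentTaskId: info[1], // 0 when no task is currently running
    queueSize: info[2],     // number of tasks currently in the queue
  };
}

Because the backing store is a SharedArrayBuffer where the platform provides one (with a plain ArrayBuffer fallback, as in SchedulerProfiling.js), another thread holding a reference to it can poll the scheduler's current priority, task id, and queue size without any postMessage round trips.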