fix(performanceTimer): work in frames #4834
Changes from all commits: 2cd4e5e, 0201edf, 1cdff25, 206928d, 83c7d5e, cf29413
@@ -17,8 +17,8 @@ const performanceTimer = (() => {
       return window.performance.now();
     }
   }
-  let originalTime = null;
-  let lastRecordedTime = now();
+  let axeStartTime = now();
+  let axeStarted = false;

   /**
    * @typedef {utils.performanceTimer} Public API Methods
@@ -28,98 +28,154 @@ const performanceTimer = (() => {
      * @member {Function} start Kicks off performance timing for overall axe audit
      */
     start() {
+      this.reset();
+      axeStarted = true;
       this.mark('mark_axe_start');
     },

     /**
      * @member {Function} end Concludes performance timing, compares start/end marks
      */
     end() {
       this.mark('mark_axe_end');
-      this.measure('axe', 'mark_axe_start', 'mark_axe_end');
+      this.measure('axe', 'mark_axe_start', 'mark_axe_end', true);
       this.logMeasures('axe');
+      this.clearMark('mark_axe_start', 'mark_axe_end');
+      axeStarted = false;
     },

     /**
      * @member {Function} auditStart Starts an audit for a page or frame
      */
     auditStart() {
+      if (!axeStarted) {
+        this.reset(); // We're in a frame
+      }
       this.mark('mark_audit_start');
     },

     /**
      * @member {Function} auditEnd Ends an audit for a page or frame, logs measurement of start/end marks
      */
     auditEnd() {
       this.mark('mark_audit_end');
-      this.measure('audit_start_to_end', 'mark_audit_start', 'mark_audit_end');
+      this.measure(
+        'audit_start_to_end',
+        'mark_audit_start',
+        'mark_audit_end',
+        true
+      );
       // log audit/rule measures
       this.logMeasures();
+      this.clearMark('mark_audit_start', 'mark_audit_end');
     },
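
Aside: the `axeStarted` flag is what makes the timer work in frames. Since axe is injected into each frame separately, each frame has its own copy of this module state; `start()` only runs in the top-level context, so inside a frame `auditStart()` finds `axeStarted === false` and resets the timer itself. A minimal sketch of the two call paths, assuming `performanceTimer` as defined in this diff (the two wrapper functions are hypothetical, for illustration only):

```js
// Hypothetical wrappers illustrating the lifecycle this diff implements.
function runTopLevel(performanceTimer) {
  performanceTimer.start(); // reset(), axeStarted = true, 'mark_axe_start'
  performanceTimer.auditStart(); // axeStarted is true: no second reset
  // ... rules run ...
  performanceTimer.auditEnd(); // logs audit/rule measures, clears audit marks
  performanceTimer.end(); // measures 'axe', logs, clears marks, axeStarted = false
}

function runInFrame(performanceTimer) {
  // start() is never called inside a frame, so axeStarted is still false here
  performanceTimer.auditStart(); // takes the !axeStarted branch and calls reset()
  // ... rules run ...
  performanceTimer.auditEnd();
}
```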
     /**
      * @member {Function} mark Shims creating a new named time stamp, called a mark
      * @param {String} markName String name to record how long it took to get there.
      * A mark that already exists will be overwritten.
      *
      */
     mark(markName) {
-      if (window.performance && window.performance.mark !== undefined) {
+      if (window.performance?.mark) {
         window.performance.mark(markName);
       }
     },

     /**
      * @member {Function} measure Shims creating a measure to compare two marks, which can be logged
      * @param {String} measureName String name to log what is being compared.
      * Measures that already exist will be overwritten with a new time stamp.
      * @param {String} startMark String name of mark to start measuring
      * @param {String} endMark String name of mark to end measuring
      */
-    measure(measureName, startMark, endMark) {
-      if (window.performance && window.performance.measure !== undefined) {
-        window.performance.measure(measureName, startMark, endMark);
+    measure(measureName, startMark, endMark, keepMarks = false) {
+      if (!window.performance?.measure) {
+        return;
       }
+      window.performance.measure(measureName, startMark, endMark);
+      if (!keepMarks) {
+        this.clearMark(startMark, endMark);
+      }
     },
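
Aside: the shim above delegates to the browser's User Timing API; a measure is derived from two named marks, and clearing the marks afterwards (the new `keepMarks = false` default) keeps the entry buffer from accumulating stale marks across repeated runs. A minimal sketch of the underlying calls, assuming a browser where `window.performance` supports User Timing:

```js
// What measure('foo', 'foo_start', 'foo_end') boils down to:
performance.mark('foo_start');
// ... the work being timed ...
performance.mark('foo_end');
performance.measure('foo', 'foo_start', 'foo_end');

// With keepMarks left false, the shim then clears both marks:
performance.clearMarks('foo_start');
performance.clearMarks('foo_end');

// The measure entry itself is unaffected and can still be read back:
const [entry] = performance.getEntriesByName('foo', 'measure');
console.log(entry.duration); // milliseconds between the two marks
```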
     /**
-     * @member {Function} logMeasures Iterates through measures and logs any that exist
+     * @member {Function} logMeasures Logs previously captured measures in chronological order.
+     * Starts from the most recent start()/auditStart() call.
+     * @param {String|undefined} measureName If provided, will only log up to the first matching measure.
      */
     logMeasures(measureName) {
-      function logMeasure(req) {
-        log('Measure ' + req.name + ' took ' + req.duration + 'ms');
-      }
+      const last = arr => (Array.isArray(arr) ? arr[arr.length - 1] : arr);
+      const logMeasure = req => {
+        this._log('Measure ' + req.name + ' took ' + req.duration + 'ms');
+      };
       if (
-        window.performance &&
-        window.performance.getEntriesByType !== undefined
+        !window.performance?.getEntriesByType ||
+        !window.performance?.getEntriesByName
       ) {
-        // only output measures that were started after axe started, otherwise
-        // we get measures made by the page before axe ran (which is confusing)
-        const axeStart =
-          window.performance.getEntriesByName('mark_axe_start')[0];
-        const measures = window.performance
-          .getEntriesByType('measure')
-          .filter(measure => measure.startTime >= axeStart.startTime);
-        for (let i = 0; i < measures.length; ++i) {
-          const req = measures[i];
-          if (req.name === measureName) {
-            logMeasure(req);
-            return;
-          }
+        return;
       }
+      // only output measures that were started after axe started, otherwise
+      // we get measures made by the page before axe ran (which is confusing)
+      const axeStart =
+        last(window.performance.getEntriesByName('mark_axe_start')) ||
+        last(window.performance.getEntriesByName('mark_audit_start'));
+      if (!axeStart) {
+        this._log('Axe must be started before using performanceTimer');
+        return;
+      }
+
+      const measures = window.performance
+        .getEntriesByType('measure')
+        .filter(measure => measure.startTime >= axeStart.startTime);
+      for (let i = 0; i < measures.length; ++i) {
+        const req = measures[i];
+        if (req.name === measureName) {
+          logMeasure(req);
+          return;
+        } else if (!measureName) {
+          logMeasure(req);
+        }
+      }
     },
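
Aside: note the `last()` helper. `getEntriesByName()` returns entries in chronological order, so taking the last `mark_axe_start` (falling back to `mark_audit_start` inside frames) anchors logging to the most recent run, and the `startTime` filter drops any measures the page itself created before axe ran. A sketch of that selection logic in isolation, with illustrative values only:

```js
// After axe has run twice on the same page, the buffer holds two start marks.
const starts = performance.getEntriesByName('mark_axe_start');
const axeStart = starts[starts.length - 1]; // what last() picks: the newest run

// Only measures that began at or after that mark get logged.
const relevant = performance
  .getEntriesByType('measure')
  .filter(measure => measure.startTime >= axeStart.startTime);
```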
     /**
      * @member {Function} timeElapsed Records time since last audit
      * @return {DOMTimeStamp}
      */
     timeElapsed() {
-      return now() - lastRecordedTime;
+      const currentTime = now();
+      return currentTime - axeStartTime;
     },
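
Aside: with `lastRecordedTime` renamed to `axeStartTime` and only assigned in `reset()`, `timeElapsed()` now consistently reports time since the most recent `start()` (or a frame's `auditStart()`), matching the tests below. For example:

```js
performanceTimer.start();
// ... roughly 100ms of audit work ...
performanceTimer.timeElapsed(); // ≈ 100 (milliseconds since start())
```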
     /**
      * @member {Function} clearMark Clears a mark
      * @param {String} markName String name of mark to clear
      */
+    clearMark(...markNames) {
+      if (!window.performance?.clearMarks) {
+        return;
+      }
+
+      for (const markName of markNames) {
+        window.performance.clearMarks(markName);
+      }
+    },

Contributor comment on the clearMarks call: This should probably be equally as defensive about …

     /**
      * @member {Function} reset Resets the time for a new audit
      */
     reset() {
-      if (!originalTime) {
-        originalTime = now();
-      }
-      lastRecordedTime = now();
+      axeStartTime = now();
     },

+    /**
+     * Logs the message, available to override in testing
+     * @private
+     * @param {String} message
+     */
+    _log(message) {
+      log(message);
+    }
   };
 })();

New test file:
@@ -0,0 +1,148 @@
describe('performance timer', () => {
  const { performanceTimer } = axe.utils;
  const originalLog = performanceTimer._log;
  let messages = [];
  const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

  const getNumber = msg => {
    const match = msg.match(/([0-9.]+)ms/);
    return match ? parseFloat(match[1]) : 0;
  };

  beforeEach(() => {
    performanceTimer._log = msg => {
      messages.push(msg);
    };
  });

  afterEach(async () => {
    try {
      performanceTimer.end();
    } catch {
      // ignore - performanceTimer.end() was called in a test instead
    }
    try {
      performanceTimer.auditEnd();
    } catch {
      // ignore - performanceTimer.auditEnd() was called in a test instead
    }
    performanceTimer._log = originalLog;
    messages = [];
    // Force a next tick in the test to avoid flakiness
    await sleep(0);
  });

  it('warns using performanceTimer before axe started', () => {
    performanceTimer.mark('foo_start');
    performanceTimer.mark('foo_end');
    performanceTimer.measure('foo', 'foo_start', 'foo_end');
    performanceTimer.logMeasures('foo');
    assert.equal(messages.length, 1);
    assert.match(
      messages[0],
      /Axe must be started before using performanceTimer/
    );
  });

  it('measures time elapsed between marks', async () => {
    performanceTimer.start();
    performanceTimer.mark('foo_start');
    await sleep(100);
    performanceTimer.mark('foo_end');
    performanceTimer.measure('foo', 'foo_start', 'foo_end');
    performanceTimer.logMeasures('foo');

    assert.equal(messages.length, 1);
    assert.closeTo(getNumber(messages[0]), 100, 10);
  });

  it('measures time elapsed if auditStart() was called', () => {
    performanceTimer.auditStart();
    performanceTimer.mark('foo_start');
    performanceTimer.mark('foo_end');
    performanceTimer.measure('foo', 'foo_start', 'foo_end');
    performanceTimer.logMeasures('foo');

    assert.equal(messages.length, 1);
    assert.match(messages[0], /Measure foo took [0-9.]+ms/);
  });

  it('measures the time axe takes from .start() to .end()', () => {
    performanceTimer.start();
    performanceTimer.end();
    assert.equal(messages.length, 1);
    assert.match(messages[0], /Measure axe took [0-9.]+ms/);
  });

  it('measures the time axe takes from .auditStart() to .auditEnd()', () => {
    performanceTimer.auditStart();
    performanceTimer.auditEnd();
    assert.equal(messages.length, 1);
    assert.match(messages[0], /Measure audit_start_to_end took [0-9.]+ms/);
  });

  describe('logMeasures', () => {
    beforeEach(() => {
      performanceTimer.start();
    });

    it('logs a specific measure when provided', () => {
      performanceTimer.mark('foo_start');
      performanceTimer.mark('foo_end');
      performanceTimer.measure('foo', 'foo_start', 'foo_end');

      performanceTimer.mark('bar_start');
      performanceTimer.mark('bar_end');
      performanceTimer.measure('bar', 'bar_start', 'bar_end');

      // Not measured, should not show up
      performanceTimer.mark('baz_start');
      performanceTimer.mark('baz_end');

      performanceTimer.logMeasures('foo');
      assert.equal(messages.length, 1);
      assert.match(messages[0], /Measure foo took [0-9.]+ms/);
    });

    it('logs all measures if no measure name is provided', () => {
      performanceTimer.mark('foo_start');
      performanceTimer.mark('foo_end');
      performanceTimer.measure('foo', 'foo_start', 'foo_end');

      performanceTimer.mark('bar_start');
      performanceTimer.mark('bar_end');
      performanceTimer.measure('bar', 'bar_start', 'bar_end');

      // Not measured, should not show up
      performanceTimer.mark('baz_start');
      performanceTimer.mark('baz_end');

      performanceTimer.logMeasures();
      assert.equal(messages.length, 2);
      assert.match(messages[0], /Measure foo took [0-9.]+ms/);
      assert.match(messages[1], /Measure bar took [0-9.]+ms/);
    });
  });

  describe('timeElapsed', () => {
    it('returns the time elapsed since axe started', async () => {
      performanceTimer.start();
      await sleep(100);
      assert.closeTo(performanceTimer.timeElapsed(), 100, 10);
    });

    it('returns the time elapsed since auditStart() was called', async () => {
      performanceTimer.auditStart();
      await sleep(100);
      assert.closeTo(performanceTimer.timeElapsed(), 100, 10);
    });

    it('does not use auditStart if axe started', async () => {
      performanceTimer.start();
      await sleep(100);

      performanceTimer.auditStart();
      assert.closeTo(performanceTimer.timeElapsed(), 100, 10);
    });
  });
});

Contributor comment on the 10ms epsilon: I was a bit worried when first reading this about how reliable a 10ms epsilon would be, but I verified in a few circleci runs + 20 trials locally that it does seem like it passes reliably. 👍