Bug 1154239 - Rework PerformanceStats.jsm for modularity and asynchronicity. r=Mossop

This commit is contained in:
David Rajchenbach-Teller
2015-04-14 16:29:40 +02:00
parent 4dea6f1118
commit 6840ca9ae7
6 changed files with 640 additions and 270 deletions

View File

@@ -12,18 +12,26 @@ const { AddonManager } = Cu.import("resource://gre/modules/AddonManager.jsm", {}
const { AddonWatcher } = Cu.import("resource://gre/modules/AddonWatcher.jsm", {});
const { PerformanceStats } = Cu.import("resource://gre/modules/PerformanceStats.jsm", {});
const { Services } = Cu.import("resource://gre/modules/Services.jsm", {});
const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
const UPDATE_TOPIC = "about:performance-update-immediately";
// about:performance observes notifications on this topic.
// if a notification is sent, this causes the page to be updated immediately,
// regardless of whether the page is on pause.
const UPDATE_IMMEDIATELY_TOPIC = "about:performance-update-immediately";
// about:performance posts notifications on this topic whenever the page
// is updated.
const UPDATE_COMPLETE_TOPIC = "about:performance-update-complete";
/**
* The various measures we display.
*/
const MEASURES = [
{key: "longestDuration", percentOfDeltaT: false, label: "Jank level"},
{key: "totalUserTime", percentOfDeltaT: true, label: "User (%)"},
{key: "totalSystemTime", percentOfDeltaT: true, label: "System (%)"},
{key: "totalCPOWTime", percentOfDeltaT: true, label: "Cross-Process (%)"},
{key: "ticks", percentOfDeltaT: false, label: "Activations"},
{probe: "jank", key: "longestDuration", percentOfDeltaT: false, label: "Jank level"},
{probe: "jank", key: "totalUserTime", percentOfDeltaT: true, label: "User (%)"},
{probe: "jank", key: "totalSystemTime", percentOfDeltaT: true, label: "System (%)"},
{probe: "cpow", key: "totalCPOWTime", percentOfDeltaT: true, label: "Cross-Process (%)"},
{probe: "ticks",key: "ticks", percentOfDeltaT: false, label: "Activations"},
];
/**
@@ -78,6 +86,8 @@ let AutoUpdate = {
};
let State = {
_monitor: PerformanceStats.getMonitor(["jank", "cpow", "ticks"]),
/**
* @type{PerformanceData}
*/
@@ -97,29 +107,31 @@ let State = {
/**
* Fetch the latest information, compute diffs.
*
* @return {object} An object with the following fields:
* @return {Promise}
* @resolve An object with the following fields:
* - `components`: an array of `PerformanceDiff` representing
* the components, sorted by `longestDuration`, then by `totalUserTime`
* - `process`: a `PerformanceDiff` representing the entire process;
* - `deltaT`: the number of milliseconds elapsed since the data
* was last displayed.
*/
update: function () {
let snapshot = PerformanceStats.getSnapshot();
update: Task.async(function*() {
let snapshot = yield this._monitor.promiseSnapshot();
let newData = new Map();
let deltas = [];
for (let componentNew of snapshot.componentsData) {
let {name, addonId, isSystem} = componentNew;
let key = JSON.stringify({name, addonId, isSystem});
let componentOld = State._componentsData.get(key);
deltas.push(componentNew.substract(componentOld));
deltas.push(componentNew.subtract(componentOld));
newData.set(key, componentNew);
}
State._componentsData = newData;
let now = window.performance.now();
let process = snapshot.processData.subtract(State._processData);
let result = {
components: deltas.filter(x => x.ticks > 0),
process: snapshot.processData.substract(State._processData),
components: deltas.filter(x => x.ticks.ticks > 0),
process: snapshot.processData.subtract(State._processData),
deltaT: now - State._date
};
result.components.sort((a, b) => {
@@ -134,19 +146,20 @@ let State = {
State._processData = snapshot.processData;
State._date = now;
return result;
}
})
};
function update() {
updateLiveData();
updateSlowAddons();
}
let update = Task.async(function*() {
yield updateLiveData();
yield updateSlowAddons();
Services.obs.notifyObservers(null, UPDATE_COMPLETE_TOPIC, "");
});
/**
* Update the list of slow addons
*/
function updateSlowAddons() {
let updateSlowAddons = Task.async(function*() {
try {
let data = AddonWatcher.alerts;
if (data.size == 0) {
@@ -218,12 +231,12 @@ function updateSlowAddons() {
} catch (ex) {
console.error(ex);
}
}
});
/**
* Update the table of live data.
*/
function updateLiveData() {
let updateLiveData = Task.async(function*() {
try {
let dataElt = document.getElementById("liveData");
dataElt.innerHTML = "";
@@ -239,9 +252,9 @@ function updateLiveData() {
headerElt.appendChild(el);
}
let deltas = State.update();
let deltas = yield State.update();
for (let item of deltas.components) {
for (let item of [deltas.process, ...deltas.components]) {
let row = document.createElement("tr");
if (item.addonId) {
row.classList.add("addon");
@@ -253,13 +266,14 @@ function updateLiveData() {
dataElt.appendChild(row);
// Measures
for (let {key, percentOfDeltaT} of MEASURES) {
for (let {probe, key, percentOfDeltaT} of MEASURES) {
let el = document.createElement("td");
el.classList.add(key);
el.classList.add("contents");
row.appendChild(el);
let value = percentOfDeltaT ? Math.round(item[key] / deltas.deltaT) : item[key];
let rawValue = item[probe][key];
let value = percentOfDeltaT ? Math.round(rawValue / deltas.deltaT) : rawValue;
if (key == "longestDuration") {
value += 1;
el.classList.add("jank" + value);
@@ -286,19 +300,21 @@ function updateLiveData() {
} catch (ex) {
console.error(ex);
}
}
});
function go() {
// Compute initial state immediately, then wait a little
// before we start computing diffs and refreshing.
document.getElementById("playButton").addEventListener("click", () => AutoUpdate.start());
document.getElementById("pauseButton").addEventListener("click", () => AutoUpdate.stop());
document.getElementById("intervalDropdown").addEventListener("change", () => AutoUpdate.updateRefreshRate());
// Compute initial state immediately, then wait a little
// before we start computing diffs and refreshing.
State.update();
window.setTimeout(update, 500);
let observer = update;
Services.obs.addObserver(update, UPDATE_TOPIC, false);
window.addEventListener("unload", () => Services.obs.removeObserver(update, UPDATE_TOPIC));
Services.obs.addObserver(update, UPDATE_IMMEDIATELY_TOPIC, false);
window.addEventListener("unload", () => Services.obs.removeObserver(update, UPDATE_IMMEDIATELY_TOPIC));
}

View File

@@ -21,29 +21,33 @@ function frameScript() {
});
addMessageListener("aboutperformance-test:hasItems", ({data: title}) => {
Services.obs.notifyObservers(null, "about:performance-update-immediately", "");
let hasPlatform = false;
let hasTitle = false;
let observer = function() {
Services.obs.removeObserver(observer, "about:performance-update-complete");
let hasPlatform = false;
let hasTitle = false;
try {
let eltData = content.document.getElementById("liveData");
if (!eltData) {
return;
try {
let eltData = content.document.getElementById("liveData");
if (!eltData) {
return;
}
// Find if we have a row for "platform"
hasPlatform = eltData.querySelector("tr.platform") != null;
// Find if we have a row for our content page
let titles = [for (eltContent of eltData.querySelectorAll("td.contents.name")) eltContent.textContent];
hasTitle = titles.includes(title);
} catch (ex) {
Cu.reportError("Error in content: " + ex);
Cu.reportError(ex.stack);
} finally {
sendAsyncMessage("aboutperformance-test:hasItems", {hasPlatform, hasTitle});
}
// Find if we have a row for "platform"
hasPlatform = eltData.querySelector("tr.platform") != null;
// Find if we have a row for our content page
let titles = [for (eltContent of eltData.querySelectorAll("td.contents.name")) eltContent.textContent];
hasTitle = titles.includes(title);
} catch (ex) {
Cu.reportError("Error in content: " + ex);
Cu.reportError(ex.stack);
} finally {
sendAsyncMessage("aboutperformance-test:hasItems", {hasPlatform, hasTitle});
}
Services.obs.addObserver(observer, "about:performance-update-complete", false);
Services.obs.notifyObservers(null, "about:performance-update-immediately", "");
});
}

View File

@@ -13,6 +13,8 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "Preferences",
"resource://gre/modules/Preferences.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "Task",
"resource://gre/modules/Task.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "console",
"resource://gre/modules/devtools/Console.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "PerformanceStats",
@@ -23,7 +25,10 @@ XPCOMUtils.defineLazyServiceGetter(this, "Telemetry",
XPCOMUtils.defineLazyModuleGetter(this, "Services",
"resource://gre/modules/Services.jsm");
const FILTERS = ["longestDuration", "totalCPOWTime"];
const FILTERS = [
{probe: "jank", field: "longestDuration"},
{probe: "cpow", field: "totalCPOWTime"},
];
let AddonWatcher = {
_previousPerformanceIndicators: {},
@@ -35,6 +40,12 @@ let AddonWatcher = {
_stats: new Map(),
_timer: Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer),
_callback: null,
/**
* A performance monitor used to pull data from SpiderMonkey.
*
* @type {PerformanceStats Monitor}
*/
_monitor: null,
/**
* The interval at which we poll the available performance information
* to find out about possibly slow add-ons, in milliseconds.
@@ -103,8 +114,13 @@ let AddonWatcher = {
}
if (isPaused) {
this._timer.cancel();
if (this._monitor) {
// We don't need the probes anymore, release them.
this._monitor.dispose();
}
this._monitor = null;
} else {
PerformanceStats.init();
this._monitor = PerformanceStats.getMonitor([for (filter of FILTERS) filter.probe]);
this._timer.initWithCallback(this._checkAddons.bind(this), this._interval, Ci.nsITimer.TYPE_REPEATING_SLACK);
}
this._isPaused = isPaused;
@@ -123,94 +139,96 @@ let AddonWatcher = {
* slice.
*/
_checkAddons: function() {
try {
let snapshot = PerformanceStats.getSnapshot();
return Task.spawn(function*() {
try {
let snapshot = yield this._monitor.promiseSnapshot();
let limits = {
// By default, warn if we have a total time of 1s of CPOW per 15 seconds
totalCPOWTime: Math.round(Preferences.get("browser.addon-watch.limits.totalCPOWTime", 1000000) * this._interval / 15000),
// By default, warn if we have skipped 4 consecutive frames
// at least once during the latest slice.
longestDuration: Math.round(Math.log2(Preferences.get("browser.addon-watch.limits.longestDuration", 128))),
};
let limits = {
// By default, warn if we have a total time of 1s of CPOW per 15 seconds
totalCPOWTime: Math.round(Preferences.get("browser.addon-watch.limits.totalCPOWTime", 1000000) * this._interval / 15000),
// By default, warn if we have skipped 4 consecutive frames
// at least once during the latest slice.
longestDuration: Math.round(Math.log2(Preferences.get("browser.addon-watch.limits.longestDuration", 128))),
};
// By default, warn only after an add-on has been spotted misbehaving 3 times.
let tolerance = Preferences.get("browser.addon-watch.tolerance", 3);
// By default, warn only after an add-on has been spotted misbehaving 3 times.
let tolerance = Preferences.get("browser.addon-watch.tolerance", 3);
for (let item of snapshot.componentsData) {
let addonId = item.addonId;
if (!item.isSystem || !addonId) {
// We are only interested in add-ons.
continue;
}
if (this._ignoreList.has(addonId)) {
// This add-on has been explicitly put in the ignore list
// by the user. Don't waste time with it.
continue;
}
let previous = this._previousPerformanceIndicators[addonId];
this._previousPerformanceIndicators[addonId] = item;
for (let item of snapshot.componentsData) {
let addonId = item.addonId;
if (!item.isSystem || !addonId) {
// We are only interested in add-ons.
continue;
}
if (this._ignoreList.has(addonId)) {
// This add-on has been explicitly put in the ignore list
// by the user. Don't waste time with it.
continue;
}
let previous = this._previousPerformanceIndicators[addonId];
this._previousPerformanceIndicators[addonId] = item;
if (!previous) {
// This is the first time we see the addon, so we are probably
// executed right during/after startup. Performance is always
// weird during startup, with the JIT warming up, competition
// in disk access, etc. so we do not take this as a reason to
// display the slow addon warning.
continue;
}
// Report misbehaviors to Telemetry
let diff = item.substract(previous);
if (diff.longestDuration > 5) {
Telemetry.getKeyedHistogramById("MISBEHAVING_ADDONS_JANK_LEVEL").
add(addonId, diff.longestDuration);
}
if (diff.totalCPOWTime > 0) {
Telemetry.getKeyedHistogramById("MISBEHAVING_ADDONS_CPOW_TIME_MS").
add(addonId, diff.totalCPOWTime / 1000);
}
// Store misbehaviors for about:performance and other clients
let stats = this._stats.get(addonId);
if (!stats) {
stats = {
peaks: {},
alerts: {},
};
this._stats.set(addonId, stats);
}
// Report misbehaviors to the user.
for (let filter of FILTERS) {
let peak = stats.peaks[filter] || 0;
stats.peaks[filter] = Math.max(diff[filter], peak);
if (limits[filter] <= 0 || diff[filter] <= limits[filter]) {
if (!previous) {
// This is the first time we see the addon, so we are probably
// executed right during/after startup. Performance is always
// weird during startup, with the JIT warming up, competition
// in disk access, etc. so we do not take this as a reason to
// display the slow addon warning.
continue;
}
stats.alerts[filter] = (stats.alerts[filter] || 0) + 1;
// Report misbehaviors to Telemetry
if (stats.alerts[filter] % tolerance != 0) {
continue;
let diff = item.subtract(previous);
if ("jank" in diff && diff.jank.longestDuration > 5) {
Telemetry.getKeyedHistogramById("MISBEHAVING_ADDONS_JANK_LEVEL").
add(addonId, diff.jank.longestDuration);
}
if ("cpow" in diff && diff.cpow.totalCPOWTime > 0) {
Telemetry.getKeyedHistogramById("MISBEHAVING_ADDONS_CPOW_TIME_MS").
add(addonId, diff.cpow.totalCPOWTime / 1000);
}
try {
this._callback(addonId, filter);
} catch (ex) {
Cu.reportError("Error in AddonWatcher._checkAddons callback " + ex);
Cu.reportError(ex.stack);
// Store misbehaviors for about:performance and other clients
let stats = this._stats.get(addonId);
if (!stats) {
stats = {
peaks: {},
alerts: {},
};
this._stats.set(addonId, stats);
}
// Report misbehaviors to the user.
for (let {probe, field: filter} of FILTERS) {
let peak = stats.peaks[filter] || 0;
let value = diff[probe][filter];
stats.peaks[filter] = Math.max(value, peak);
if (limits[filter] <= 0 || value <= limits[filter]) {
continue;
}
stats.alerts[filter] = (stats.alerts[filter] || 0) + 1;
if (stats.alerts[filter] % tolerance != 0) {
continue;
}
try {
this._callback(addonId, filter);
} catch (ex) {
Cu.reportError("Error in AddonWatcher._checkAddons callback " + ex);
Cu.reportError(ex.stack);
}
}
}
} catch (ex) {
Cu.reportError("Error in AddonWatcher._checkAddons " + ex);
Cu.reportError(Task.Debugging.generateReadableStack(ex.stack));
}
} catch (ex) {
Cu.reportError("Error in AddonWatcher._checkAddons " + ex);
Cu.reportError(ex.stack);
}
}.bind(this));
},
ignoreAddonForSession: function(addonid) {
this._ignoreList.add(addonid);

View File

@@ -12,29 +12,425 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
/**
 * API for querying and examining performance data.
 *
 * This API exposes data from several probes implemented by the JavaScript VM.
 * See `PerformanceStats.getMonitor()` for information on how to monitor data
 * from one or more probes and `PerformanceData` for the information obtained
 * from the probes.
 *
 * Data is collected by "Performance Group". Typically, a Performance Group
 * is an add-on, or a frame, or the internals of the application.
 */
Cu.import("resource://gre/modules/XPCOMUtils.jsm", this);
Cu.import("resource://gre/modules/Services.jsm", this);
let performanceStatsService =
Cc["@mozilla.org/toolkit/performance-stats-service;1"].
getService(Ci.nsIPerformanceStatsService);
// The nsIPerformanceStatsService provides lower-level
// access to SpiderMonkey and the probes.
XPCOMUtils.defineLazyServiceGetter(this, "performanceStatsService",
"@mozilla.org/toolkit/performance-stats-service;1",
Ci.nsIPerformanceStatsService);
// The finalizer lets us automatically release (and when possible deactivate)
// probes when a monitor is garbage-collected.
XPCOMUtils.defineLazyServiceGetter(this, "finalizer",
"@mozilla.org/toolkit/finalizationwitness;1",
Ci.nsIFinalizationWitnessService
);
const PROPERTIES_NUMBERED = ["totalUserTime", "totalSystemTime", "totalCPOWTime", "ticks"];
// The topic used to notify that a PerformanceMonitor has been garbage-collected
// and that we can release/close the probes it holds.
const FINALIZATION_TOPIC = "performancemonitor-finalize";
const PROPERTIES_META_IMMUTABLE = ["name", "addonId", "isSystem"];
const PROPERTIES_META = [...PROPERTIES_META_IMMUTABLE, "windowId", "title"];
const PROPERTIES_FLAT = [...PROPERTIES_NUMBERED, ...PROPERTIES_META];
/**
 * Access to a low-level performance probe.
 *
 * Each probe is dedicated to some form of performance monitoring.
 * As each probe may have a performance impact, a probe is activated
 * only when a client has requested a PerformanceMonitor for this probe,
 * and deactivated once all clients are disposed of.
 */
function Probe(name, impl) {
  // `impl` is the underlying implementation, expected to provide
  // `isActive` (read/write), `extract`, `isEqual` and `substract`.
  this._impl = impl;
  this._name = name;
  // Number of clients currently holding this probe.
  this._counter = 0;
}
Probe.prototype = {
  /**
   * Acquire the probe on behalf of a client.
   *
   * If the probe was inactive, activate it. Note that activating a probe
   * can incur a memory or performance cost.
   */
  acquire() {
    if (this._counter === 0) {
      // First client: turn the underlying implementation on.
      this._impl.isActive = true;
    }
    this._counter += 1;
  },

  /**
   * Release the probe on behalf of a client.
   *
   * If this was the last client for this probe, deactivate it.
   */
  release() {
    this._counter -= 1;
    if (this._counter === 0) {
      // Last client gone: turn the underlying implementation off.
      this._impl.isActive = false;
    }
  },

  /**
   * Obtain data from this probe, once it is available.
   *
   * @param {nsIPerformanceStats} xpcom A xpcom object obtained from
   * SpiderMonkey. Only the fields updated by the low-level probe
   * are in a specified state.
   * @return {object} An object containing the data extracted from this
   * probe. Actual format depends on the probe.
   */
  extract(xpcom) {
    if (this._impl.isActive) {
      return this._impl.extract(xpcom);
    }
    throw new Error(`Probe is inactive: ${this._name}`);
  },

  /**
   * Compare two values previously returned by `extract()`.
   *
   * @param {object} a An object returned by `this.extract()`.
   * @param {object} b An object returned by `this.extract()`.
   *
   * @return {true} If `a` and `b` hold identical values.
   */
  isEqual(a, b) {
    if (a == null || b == null) {
      // Two absent values are equal; an absent and a present one are not.
      return a == null && b == null;
    }
    return this._impl.isEqual(a, b);
  },

  /**
   * Compute the delta between two values previously returned by
   * `extract()`.
   *
   * @param {object} a An object returned by `this.extract()`. May
   * NOT be `null`.
   * @param {object} b An object returned by `this.extract()`. May
   * be `null`.
   *
   * @return {object} An object representing `a - b`. If `b` is
   * `null`, this is `a`.
   */
  substract(a, b) {
    if (a == null) {
      throw new TypeError();
    }
    return b == null ? a : this._impl.substract(a, b);
  },

  /**
   * The name of the probe.
   */
  get name() {
    return this._name;
  }
};
// Utility function. Return the position of the last non-0 item in an
// array, or -1 if there isn't any such item.
function lastNonZero(array) {
  let index = array.length;
  while (--index >= 0) {
    if (array[index] != 0) {
      break;
    }
  }
  return index;
}
/**
 * The actual Probes implemented by SpiderMonkey.
 *
 * Each entry is a `Probe` whose implementation extracts one family of
 * measurements from an `nsIPerformanceStats` xpcom object.
 */
let Probes = {
  /**
   * A probe measuring jank.
   *
   * Data provided by this probe uses the following format:
   *
   * @field {number} totalUserTime The total amount of time spent executing
   * code in userland for this performance group, in µs.
   * @field {number} totalSystemTime The total amount of time spent in the
   * kernel for this performance group, in µs.
   * @field {Array<number>} durations An array containing at each position `i`
   * the number of times execution of this component has lasted at least `2^i`
   * milliseconds.
   * @field {number} longestDuration The index of the highest non-0 value in
   * `durations`, or -1 if all values are 0.
   */
  jank: new Probe("jank", {
    set isActive(x) { /* always active in the current implementation */ },
    get isActive() { return true; },
    extract: function(xpcom) {
      let durations = xpcom.getDurations();
      return {
        totalUserTime: xpcom.totalUserTime,
        totalSystemTime: xpcom.totalSystemTime,
        durations: durations,
        longestDuration: lastNonZero(durations)
      }
    },
    isEqual: function(a, b) {
      // invariant: `a` and `b` are both non-null
      if (a.totalUserTime != b.totalUserTime) {
        return false;
      }
      if (a.totalSystemTime != b.totalSystemTime) {
        return false;
      }
      for (let i = 0; i < a.durations.length; ++i) {
        if (a.durations[i] != b.durations[i]) {
          return false;
        }
      }
      return true;
    },
    substract: function(a, b) {
      // invariant: `a` and `b` are both non-null
      // NOTE(review): assumes `a.durations` and `b.durations` have the same
      // length — confirm against the xpcom provider.
      let result = {
        totalUserTime: a.totalUserTime - b.totalUserTime,
        totalSystemTime: a.totalSystemTime - b.totalSystemTime,
        durations: [],
        longestDuration: -1,
      };
      for (let i = 0; i < a.durations.length; ++i) {
        result.durations[i] = a.durations[i] - b.durations[i];
      }
      result.longestDuration = lastNonZero(result.durations);
      return result;
    }
  }),
  /**
   * A probe measuring CPOW activity.
   *
   * Data provided by this probe uses the following format:
   *
   * @field {number} totalCPOWTime The amount of wallclock time
   * spent executing blocking cross-process calls, in µs.
   */
  cpow: new Probe("cpow", {
    set isActive(x) { /* always active in the current implementation */ },
    get isActive() { return true; },
    extract: function(xpcom) {
      return {
        totalCPOWTime: xpcom.totalCPOWTime
      };
    },
    isEqual: function(a, b) {
      return a.totalCPOWTime == b.totalCPOWTime;
    },
    substract: function(a, b) {
      return {
        totalCPOWTime: a.totalCPOWTime - b.totalCPOWTime
      };
    }
  }),
  /**
   * A probe measuring activations, i.e. the number
   * of times code execution has entered a given
   * PerformanceGroup.
   *
   * Note that this probe is always active.
   *
   * Data provided by this probe uses the following format:
   * @field {number} ticks The number of times execution has entered
   * this performance group.
   */
  ticks: new Probe("ticks", {
    set isActive(x) { /* this probe cannot be deactivated */ },
    get isActive() { return true; },
    extract: function(xpcom) {
      return {
        ticks: xpcom.ticks
      };
    },
    isEqual: function(a, b) {
      return a.ticks == b.ticks;
    },
    substract: function(a, b) {
      return {
        ticks: a.ticks - b.ticks
      };
    }
  }),
};
/**
 * A monitor for a set of probes.
 *
 * Keeping probes active when they are unused is often a bad
 * idea for performance reasons. Upon destruction, or whenever
 * a client calls `dispose`, this monitor releases the probes,
 * which may let the system deactivate them.
 *
 * @param {Array<Probe>} probes The probes to hold on behalf of the client.
 */
function PerformanceMonitor(probes) {
  this._probes = probes;
  // Activate low-level features as needed.
  for (let probe of probes) {
    probe.acquire();
  }
  // A finalization witness. At some point after the garbage-collection of
  // `this` object, a notification of `FINALIZATION_TOPIC` will be triggered
  // with `id` as message.
  this._id = PerformanceMonitor.makeId();
  this._finalizer = finalizer.make(FINALIZATION_TOPIC, this._id);
  // Remember the probes under the monitor's id, so that finalization can
  // release them without keeping a strong reference to `this`.
  PerformanceMonitor._monitors.set(this._id, probes);
}
PerformanceMonitor.prototype = {
  /**
   * Return asynchronously a snapshot with the data
   * for each probe monitored by this PerformanceMonitor.
   *
   * All numeric values are non-negative and can only increase. Depending on
   * the probe and the underlying operating system, probes may not be available
   * immediately and may miss some activity.
   *
   * Clients should NOT expect that the first call to `promiseSnapshot()`
   * will return a `Snapshot` in which all values are 0. For most uses,
   * the appropriate scenario is to perform a first call to `promiseSnapshot()`
   * to obtain a baseline, and then watch evolution of the values by calling
   * `promiseSnapshot()` and `subtract()`.
   *
   * On the other hand, numeric values are also monotonic across several instances
   * of a PerformanceMonitor with the same probes.
   *   let a = PerformanceStats.getMonitor(someProbes);
   *   let snapshot1 = yield a.promiseSnapshot();
   *
   *   // ...
   *   let b = PerformanceStats.getMonitor(someProbes); // Same list of probes
   *   let snapshot2 = yield b.promiseSnapshot();
   *
   *   // all values of `snapshot2` are greater or equal to values of `snapshot1`.
   *
   * @return {Promise}
   * @resolve {Snapshot}
   * @throws {Error} If `dispose()` has already been called on this monitor.
   */
  promiseSnapshot: function() {
    if (!this._finalizer) {
      throw new Error("dispose() has already been called, this PerformanceMonitor is not usable anymore");
    }
    // Current implementation is actually synchronous.
    return Promise.resolve().then(() => new Snapshot({
      xpcom: performanceStatsService.getSnapshot(),
      probes: this._probes
    }));
  },
  /**
   * Release the probes used by this monitor.
   *
   * Releasing probes as soon as they are unused is a good idea, as some probes
   * cost CPU and/or memory.
   *
   * Idempotent: calling `dispose()` more than once is a no-op.
   */
  dispose: function() {
    if (!this._finalizer) {
      return;
    }
    // Prevent the finalization notification from firing for this monitor,
    // then release the probes immediately.
    this._finalizer.forget();
    PerformanceMonitor.dispose(this._id);
    // As a safeguard against double-release, reset everything to `null`
    this._probes = null;
    this._id = null;
    this._finalizer = null;
  }
};
/**
 * @type {Map<string, Array<Probe>>} A map from id (as produced by `makeId`)
 * to the list of probes held by the corresponding monitor. Used to release
 * the probes during finalization, without keeping a strong reference to the
 * monitor itself.
 */
PerformanceMonitor._monitors = new Map();
/**
 * Create a `PerformanceMonitor` for a list of probes, register it for
 * finalization.
 *
 * @param {Array<string>} probeNames The names of the probes to monitor,
 *   i.e. keys of `Probes`.
 * @return {PerformanceMonitor}
 * @throws {TypeError} If `probeNames` is not an array, or if one of its
 *   entries is not the name of an implemented probe.
 */
PerformanceMonitor.make = function(probeNames) {
  // Sanity checks
  if (!Array.isArray(probeNames)) {
    // Bug fix: the error message previously referenced the not-yet-declared
    // `probes`, raising a ReferenceError instead of the intended TypeError.
    throw new TypeError("Expected an array, got " + probeNames);
  }
  let probes = [];
  for (let probeName of probeNames) {
    if (!(probeName in Probes)) {
      // Bug fix: previously referenced undefined `k` instead of `probeName`.
      throw new TypeError("Probe not implemented: " + probeName);
    }
    probes.push(Probes[probeName]);
  }
  return new PerformanceMonitor(probes);
};
/**
 * Implementation of `dispose`.
 *
 * Deliberately attached to `PerformanceMonitor` itself rather than to
 * `PerformanceMonitor.prototype`: finalization must be able to release a
 * monitor's probes without holding a strong reference to the monitor
 * instance, which would defeat the purpose of finalization.
 *
 * @param {string} id The monitor's id, as produced by `makeId()`.
 * @throws {TypeError} If the monitor has already been disposed of.
 */
PerformanceMonitor.dispose = function(id) {
  let probes = PerformanceMonitor._monitors.get(id);
  if (probes === undefined) {
    throw new TypeError("`dispose()` has already been called on this monitor");
  }
  PerformanceMonitor._monitors.delete(id);
  probes.forEach(probe => probe.release());
};
// Each PerformanceMonitor receives a unique id, used to match
// finalization notifications back to the monitor's probes.
PerformanceMonitor._counter = 0;
PerformanceMonitor.makeId = function() {
  let id = "PerformanceMonitor-" + this._counter;
  this._counter += 1;
  return id;
};
// Once a `PerformanceMonitor` has been garbage-collected,
// release the probes unless `dispose()` has already been called.
// (An explicit `dispose()` calls `forget()` on the finalization witness,
// so this notification only fires for monitors that were never disposed.)
Services.obs.addObserver(function(subject, topic, value) {
  PerformanceMonitor.dispose(value);
}, FINALIZATION_TOPIC, false);
// Public API
this.PerformanceStats = {
  /**
   * Create a monitor for observing a set of performance probes.
   *
   * @param {Array<string>} probes The names of the probes to monitor,
   *   e.g. ["jank", "cpow", "ticks"].
   * @return {PerformanceMonitor} A monitor that should eventually be
   *   released with `dispose()` (or garbage-collected) so the underlying
   *   probes can be deactivated.
   */
  getMonitor: function(probes) {
    return PerformanceMonitor.make(probes);
  }
};
/**
* Information on a single performance group.
*
* This offers the following fields:
*
* @field {string} name The name of the component:
* @field {string} name The name of the performance group:
* - for the process itself, "<process>";
* - for platform code, "<platform>";
* - for an add-on, the identifier of the addon (e.g. "myaddon@foo.bar");
@@ -54,28 +450,22 @@ const PROPERTIES_FLAT = [...PROPERTIES_NUMBERED, ...PROPERTIES_META];
* @field {boolean} isSystem `true` if the component is a system component (i.e.
* an add-on or platform-code), `false` otherwise (i.e. a webpage).
*
* @field {number} totalUserTime The total amount of time spent executing code.
* @field {object|undefined} activations See the documentation of probe "ticks".
* `undefined` if this probe is not active.
*
* @field {number} totalSystemTime The total amount of time spent executing
* system calls.
* @field {object|undefined} jank See the documentation of probe "jank".
* `undefined` if this probe is not active.
*
* @field {number} totalCPOWTime The total amount of time spent waiting for
* blocking cross-process communications
*
* @field {number} ticks The number of times the JavaScript VM entered the code
* of this component to execute it.
*
* @field {Array<number>} durations An array containing at each position `i`
* the number of times execution of this component has lasted at least `2^i`
* milliseconds.
*
* All numeric values are non-negative and can only increase.
* @field {object|undefined} cpow See the documentation of probe "cpow".
* `undefined` if this probe is not active.
*/
function PerformanceData(xpcom) {
for (let k of PROPERTIES_FLAT) {
function PerformanceData({xpcom, probes}) {
for (let k of PROPERTIES_META) {
this[k] = xpcom[k];
}
this.durations = xpcom.getDurations();
for (let probe of probes) {
this[probe.name] = probe.extract(xpcom);
}
}
PerformanceData.prototype = {
/**
@@ -87,17 +477,12 @@ PerformanceData.prototype = {
if (!(to instanceof PerformanceData)) {
throw new TypeError();
}
for (let k of PROPERTIES_FLAT) {
if (this[k] != to[k]) {
for (let probeName of Object.keys(Probes)) {
let probe = Probes[probeName];
if (!probe.isEqual(this[probeName], to[probeName])) {
return false;
}
}
for (let i = 0; i < this.durations.length; ++i) {
if (to.durations[i] != this.durations[i]) {
return false;
}
}
return true;
},
/**
@@ -108,7 +493,7 @@ PerformanceData.prototype = {
*
* @return {PerformanceDiff} The performance usage between `to` and `this`.
*/
substract: function(to = null) {
subtract: function(to = null) {
return new PerformanceDiff(this, to);
}
};
@@ -117,57 +502,16 @@ PerformanceData.prototype = {
* The delta between two instances of `PerformanceData`.
*
* Used to monitor resource usage between two timestamps.
*
* @field {number} longestDuration An indication of the longest
* execution duration between two timestamps:
* - -1 == less than 1ms
* - 0 == [1, 2[ ms
* - 1 == [2, 4[ ms
* - 3 == [4, 8[ ms
* - 4 == [8, 16[ ms
* - ...
* - 7 == [128, ...] ms
*/
function PerformanceDiff(current, old = null) {
for (let k of PROPERTIES_META) {
this[k] = current[k];
}
if (old) {
if (!(old instanceof PerformanceData)) {
throw new TypeError();
}
if (current.durations.length != old.durations.length) {
throw new TypeError("Internal error: mismatched length for `durations`.");
}
this.durations = [];
this.longestDuration = -1;
for (let i = 0; i < current.durations.length; ++i) {
let delta = current.durations[i] - old.durations[i];
this.durations[i] = delta;
if (delta > 0) {
this.longestDuration = i;
}
}
for (let k of PROPERTIES_NUMBERED) {
this[k] = current[k] - old[k];
}
} else {
this.durations = current.durations.slice(0);
for (let k of PROPERTIES_NUMBERED) {
this[k] = current[k];
}
this.longestDuration = -1;
for (let i = this.durations.length - 1; i >= 0; --i) {
if (this.durations[i] > 0) {
this.longestDuration = i;
break;
}
for (let probeName of Object.keys(Probes)) {
let other = old ? old[probeName] : null;
if (probeName in current) {
this[probeName] = Probes[probeName].substract(current[probeName], other);
}
}
}
@@ -175,41 +519,16 @@ function PerformanceDiff(current, old = null) {
/**
* A snapshot of the performance usage of the process.
*/
function Snapshot(xpcom) {
function Snapshot({xpcom, probes}) {
this.componentsData = [];
let enumeration = xpcom.getComponentsData().enumerate();
while (enumeration.hasMoreElements()) {
let stat = enumeration.getNext().QueryInterface(Ci.nsIPerformanceStats);
this.componentsData.push(new PerformanceData(stat));
this.componentsData.push(new PerformanceData({xpcom: stat, probes}));
}
this.processData = new PerformanceData(xpcom.getProcessData());
this.processData = new PerformanceData({xpcom: xpcom.getProcessData(), probes});
}
this.PerformanceStats = {
/**
* Activate monitoring.
*/
init() {
//
// The implementation actually does nothing, as monitoring is
// initiated when loading the module.
//
// This function is actually provided as a gentle way to ensure
// that client code that imports `PerformanceStats` lazily
// does not forget to force the import, hence triggering
// actual load of the module.
//
},
/**
* Get a snapshot of the performance usage of the current process.
*
* @type {Snapshot}
*/
getSnapshot() {
return new Snapshot(performanceStatsService.getSnapshot());
},
};
// In the current implementation, all probes are always active.
performanceStatsService.isStopwatchActive = true;

View File

@@ -29,9 +29,13 @@ function frameScript() {
// Make sure that the stopwatch is now active.
performanceStatsService.isStopwatchActive = true;
let monitor = PerformanceStats.getMonitor(["jank", "cpow", "ticks"]);
addMessageListener("compartments-test:getStatistics", () => {
try {
sendAsyncMessage("compartments-test:getStatistics", PerformanceStats.getSnapshot());
monitor.promiseSnapshot().then(snapshot => {
sendAsyncMessage("compartments-test:getStatistics", snapshot);
});
} catch (ex) {
Cu.reportError("Error in content: " + ex);
Cu.reportError(ex.stack);
@@ -99,20 +103,25 @@ function monotinicity_tester(source, testName) {
for (let k of ["name", "addonId", "isSystem"]) {
SilentAssert.equal(prev[k], next[k], `Sanity check (${testName}): ${k} hasn't changed.`);
}
for (let k of ["totalUserTime", "totalSystemTime", "totalCPOWTime", "ticks"]) {
SilentAssert.equal(typeof next[k], "number", `Sanity check (${testName}): ${k} is a number.`);
SilentAssert.leq(prev[k], next[k], `Sanity check (${testName}): ${k} is monotonic.`);
SilentAssert.leq(0, next[k], `Sanity check (${testName}): ${k} is >= 0.`)
for (let [probe, k] of [
["jank", "totalUserTime"],
["jank", "totalSystemTime"],
["cpow", "totalCPOWTime"],
["ticks", "ticks"]
]) {
SilentAssert.equal(typeof next[probe][k], "number", `Sanity check (${testName}): ${k} is a number.`);
SilentAssert.leq(prev[probe][k], next[probe][k], `Sanity check (${testName}): ${k} is monotonic.`);
SilentAssert.leq(0, next[probe][k], `Sanity check (${testName}): ${k} is >= 0.`)
}
SilentAssert.equal(prev.durations.length, next.durations.length);
for (let i = 0; i < next.durations.length; ++i) {
SilentAssert.ok(typeof next.durations[i] == "number" && next.durations[i] >= 0,
SilentAssert.equal(prev.jank.durations.length, next.jank.durations.length);
for (let i = 0; i < next.jank.durations.length; ++i) {
SilentAssert.ok(typeof next.jank.durations[i] == "number" && next.jank.durations[i] >= 0,
`Sanity check (${testName}): durations[${i}] is a non-negative number.`);
SilentAssert.leq(prev.durations[i], next.durations[i],
`Sanity check (${testName}): durations[${i}] is monotonic.`)
SilentAssert.leq(prev.jank.durations[i], next.jank.durations[i],
`Sanity check (${testName}): durations[${i}] is monotonic.`);
}
for (let i = 0; i < next.durations.length - 1; ++i) {
SilentAssert.leq(next.durations[i + 1], next.durations[i],
for (let i = 0; i < next.jank.durations.length - 1; ++i) {
SilentAssert.leq(next.jank.durations[i + 1], next.jank.durations[i],
`Sanity check (${testName}): durations[${i}] >= durations[${i + 1}].`)
}
};
@@ -138,8 +147,12 @@ function monotinicity_tester(source, testName) {
let set = new Set();
let map = new Map();
for (let item of snapshot.componentsData) {
for (let k of ["totalUserTime", "totalSystemTime", "totalCPOWTime"]) {
SilentAssert.leq(item[k], snapshot.processData[k],
for (let [probe, k] of [
["jank", "totalUserTime"],
["jank", "totalSystemTime"],
["cpow", "totalCPOWTime"]
]) {
SilentAssert.leq(item[probe][k], snapshot.processData[probe][k],
`Sanity check (${testName}): component has a lower ${k} than process`);
}
@@ -170,8 +183,10 @@ function monotinicity_tester(source, testName) {
}
add_task(function* test() {
let monitor = PerformanceStats.getMonitor(["jank", "cpow", "ticks"]);
info("Extracting initial state");
let stats0 = PerformanceStats.getSnapshot();
let stats0 = yield monitor.promiseSnapshot();
Assert.notEqual(stats0.componentsData.length, 0, "There is more than one component");
Assert.ok(!stats0.componentsData.find(stat => stat.name.indexOf(URL) != -1),
"The url doesn't appear yet");
@@ -189,7 +204,7 @@ add_task(function* test() {
info("Deactivating sanity checks under Windows (bug 1151240)");
} else {
info("Setting up sanity checks");
monotinicity_tester(() => PerformanceStats.getSnapshot(), "parent process");
monotinicity_tester(() => monitor.promiseSnapshot(), "parent process");
monotinicity_tester(() => promiseContentResponseOrNull(browser, "compartments-test:getStatistics", null), "content process" );
}
@@ -230,11 +245,11 @@ add_task(function* test() {
info("Searching by title, we didn't find the main frame");
continue;
}
info("Searching by title, we found the main frame");
info(`Total user time: ${parent.totalUserTime}`);
if (skipTotalUserTime || parent.totalUserTime > 1000) {
if (skipTotalUserTime || parent.jank.totalUserTime > 1000) {
break;
} else {
info(`Not enough CPU time detected: ${parent.jank.totalUserTime}`)
}
}

View File

@@ -10,14 +10,12 @@ function run_test() {
run_next_test();
}
let monitor = PerformanceStats.getMonitor(["ticks", "jank", "cpow"]);
let promiseStatistics = Task.async(function*(name) {
yield Promise.resolve(); // Make sure that we wait until
// statistics have been updated.
let snapshot = PerformanceStats.getSnapshot();
do_print("Statistics: " + name);
do_print(JSON.stringify(snapshot.processData, null, "\t"));
do_print(JSON.stringify(snapshot.componentsData, null, "\t"));
return snapshot;
return monitor.promiseSnapshot();
});
let promiseSetMonitoring = Task.async(function*(to) {
@@ -105,13 +103,13 @@ add_task(function* test_measure() {
if (skipPrecisionTests) {
do_print("Skipping totalUserTime check under Windows XP, as timer is not always updated by the OS.")
} else {
Assert.ok(process2.totalUserTime - process1.totalUserTime >= 10000, `At least 10ms counted for process time (${process2.totalUserTime - process1.totalUserTime})`);
Assert.ok(process2.jank.totalUserTime - process1.jank.totalUserTime >= 10000, `At least 10ms counted for process time (${process2.jank.totalUserTime - process1.jank.totalUserTime})`);
}
Assert.equal(process2.totalCPOWTime, process1.totalCPOWTime, "We haven't used any CPOW time during the first burn");
Assert.equal(process4.totalUserTime, process3.totalUserTime, "After deactivating the stopwatch, we didn't count any time");
Assert.equal(process4.totalCPOWTime, process3.totalCPOWTime, "After deactivating the stopwatch, we didn't count any CPOW time");
Assert.equal(process2.jank.totalCPOWTime, process1.jank.totalCPOWTime, "We haven't used any CPOW time during the first burn");
Assert.equal(process4.jank.totalUserTime, process3.jank.totalUserTime, "After deactivating the stopwatch, we didn't count any time");
Assert.equal(process4.jank.totalCPOWTime, process3.jank.totalCPOWTime, "After deactivating the stopwatch, we didn't count any CPOW time");
let builtin1 = getBuiltinStatistics(stats1) || { totalUserTime: 0, totalCPOWTime: 0 };
let builtin1 = getBuiltinStatistics(stats1) || { jank: { totalUserTime: 0 }, cpow: { totalCPOWTime: 0 } };
let builtin2 = getBuiltinStatistics(stats2);
let builtin3 = getBuiltinStatistics(stats3);
let builtin4 = getBuiltinStatistics(stats4);
@@ -122,10 +120,10 @@ add_task(function* test_measure() {
if (skipPrecisionTests) {
do_print("Skipping totalUserTime check under Windows XP, as timer is not always updated by the OS.")
} else {
Assert.ok(builtin2.totalUserTime - builtin1.totalUserTime >= 10000, `At least 10ms counted for built-in statistics (${builtin2.totalUserTime - builtin1.totalUserTime})`);
Assert.ok(builtin2.jank.totalUserTime - builtin1.jank.totalUserTime >= 10000, `At least 10ms counted for built-in statistics (${builtin2.jank.totalUserTime - builtin1.jank.totalUserTime})`);
}
Assert.equal(builtin2.totalCPOWTime, builtin1.totalCPOWTime, "We haven't used any CPOW time during the first burn for the built-in");
Assert.equal(builtin2.totalCPOWTime, builtin1.totalCPOWTime, "No CPOW for built-in statistics");
Assert.equal(builtin4.totalUserTime, builtin3.totalUserTime, "After deactivating the stopwatch, we didn't count any time for the built-in");
Assert.equal(builtin4.totalCPOWTime, builtin3.totalCPOWTime, "After deactivating the stopwatch, we didn't count any CPOW time for the built-in");
Assert.equal(builtin2.jank.totalCPOWTime, builtin1.jank.totalCPOWTime, "We haven't used any CPOW time during the first burn for the built-in");
Assert.equal(builtin2.jank.totalCPOWTime, builtin1.jank.totalCPOWTime, "No CPOW for built-in statistics");
Assert.equal(builtin4.jank.totalUserTime, builtin3.jank.totalUserTime, "After deactivating the stopwatch, we didn't count any time for the built-in");
Assert.equal(builtin4.jank.totalCPOWTime, builtin3.jank.totalCPOWTime, "After deactivating the stopwatch, we didn't count any CPOW time for the built-in");
});