Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1772097 - Part 2: Use plain object for lazy getter in services/common/. r=markh
Differential Revision: https://phabricator.services.mozilla.com/D147917
Parent
9b42b427a2
Commit
f3d348703f
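
The whole patch applies one mechanical pattern: instead of hanging lazy getters off each JSM's global `this`, every module now creates a plain object named `lazy`, defines its getters on that object, and reads them as `lazy.log`, `lazy.CryptoUtils`, and so on. A minimal before/after sketch of that pattern, using the XPCOMUtils/ChromeUtils getter helpers that appear in the hunks below (the "Hawk" logger name and the CryptoUtils URL come from the diff; the getter body is simplified for illustration):

// Before: lazy members are defined on the module's global `this`.
XPCOMUtils.defineLazyGetter(this, "log", function() {
  return Log.repository.getLogger("Hawk"); // simplified getter body
});
ChromeUtils.defineModuleGetter(
  this,
  "CryptoUtils",
  "resource://services-crypto/utils.js"
);
log.debug("ready");                // bare global reference
CryptoUtils.computeHAWK(/* ... */); // bare global reference

// After: lazy members live on a plain object, so every use is explicit.
const lazy = {};
XPCOMUtils.defineLazyGetter(lazy, "log", function() {
  return Log.repository.getLogger("Hawk"); // simplified getter body
});
ChromeUtils.defineModuleGetter(
  lazy,
  "CryptoUtils",
  "resource://services-crypto/utils.js"
);
lazy.log.debug("ready");
lazy.CryptoUtils.computeHAWK(/* ... */);
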
@@ -50,7 +50,9 @@ const PREF_LOG_LEVEL = "services.common.hawk.log.appender.dump";
 // identifiable info, credentials, etc) will be logged.
 const PREF_LOG_SENSITIVE_DETAILS = "services.common.hawk.log.sensitive";
 
-XPCOMUtils.defineLazyGetter(this, "log", function() {
+const lazy = {};
+
+XPCOMUtils.defineLazyGetter(lazy, "log", function() {
   let log = Log.repository.getLogger("Hawk");
   // We set the log itself to "debug" and set the level from the preference to
   // the appender. This allows other things to send the logs to different
@@ -74,7 +76,7 @@ XPCOMUtils.defineLazyGetter(this, "log", function() {
 
 // A boolean to indicate if personally identifiable information (or anything
 // else sensitive, such as credentials) should be logged.
-XPCOMUtils.defineLazyGetter(this, "logPII", function() {
+XPCOMUtils.defineLazyGetter(lazy, "logPII", function() {
   try {
     return Services.prefs.getBoolPref(PREF_LOG_SENSITIVE_DETAILS);
   } catch (_) {
@@ -158,11 +160,11 @@ HawkClient.prototype = {
     try {
       let serverDateMsec = Date.parse(dateString);
       this._localtimeOffsetMsec = serverDateMsec - this.now();
-      log.debug(
+      lazy.log.debug(
         "Clock offset vs " + this.host + ": " + this._localtimeOffsetMsec
       );
     } catch (err) {
-      log.warn("Bad date header in server response: " + dateString);
+      lazy.log.warn("Bad date header in server response: " + dateString);
     }
   },
 
@@ -227,7 +229,7 @@ HawkClient.prototype = {
       // Keep a reference to the error, log a message about it, and return the
      // response anyway.
       error = e;
-      log.warn("hawk request error", error);
+      lazy.log.warn("hawk request error", error);
       return request.response;
     });
 
@@ -238,7 +240,7 @@ HawkClient.prototype = {
 
     let status = restResponse.status;
 
-    log.debug(
+    lazy.log.debug(
       "(Response) " +
         path +
         ": code: " +
@@ -246,8 +248,8 @@ HawkClient.prototype = {
         " - Status text: " +
         restResponse.statusText
     );
-    if (logPII) {
-      log.debug("Response text", restResponse.body);
+    if (lazy.logPII) {
+      lazy.log.debug("Response text", restResponse.body);
     }
 
     // All responses may have backoff headers, which are a server-side safety
@@ -266,7 +268,7 @@ HawkClient.prototype = {
     if (status === 401 && retryOK && !("retry-after" in restResponse.headers)) {
       // Retry once if we were rejected due to a bad timestamp.
       // Clock offset is adjusted already in the top of this function.
-      log.debug("Received 401 for " + path + ": retrying");
+      lazy.log.debug("Received 401 for " + path + ": retrying");
       return this.request(
         path,
         method,
@@ -324,7 +326,7 @@ HawkClient.prototype = {
     try {
       backoffInterval = parseInt(headerVal, 10);
     } catch (ex) {
-      log.error(
+      lazy.log.error(
         "hawkclient response had invalid backoff value in '" +
           headerName +
           "' header: " +

@@ -24,8 +24,10 @@ const { Credentials } = ChromeUtils.import(
   "resource://gre/modules/Credentials.jsm"
 );
 
+const lazy = {};
+
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "CryptoUtils",
   "resource://services-crypto/utils.js"
 );
@@ -94,7 +96,11 @@ HAWKAuthenticatedRESTRequest.prototype = {
         payload: (data && JSON.stringify(data)) || "",
         contentType,
       };
-      let header = await CryptoUtils.computeHAWK(this.uri, method, options);
+      let header = await lazy.CryptoUtils.computeHAWK(
+        this.uri,
+        method,
+        options
+      );
       this.setHeader("Authorization", header.field);
     }
 
@@ -134,7 +140,7 @@ HAWKAuthenticatedRESTRequest.prototype = {
  */
 async function deriveHawkCredentials(tokenHex, context, size = 96) {
   let token = CommonUtils.hexToBytes(tokenHex);
-  let out = await CryptoUtils.hkdfLegacy(
+  let out = await lazy.CryptoUtils.hkdfLegacy(
     token,
     undefined,
     Credentials.keyWord(context),

@@ -3,20 +3,22 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict;";
 
+const lazy = {};
+
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "Services",
   "resource://gre/modules/Services.jsm"
 );
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "FileUtils",
   "resource://gre/modules/FileUtils.jsm"
 );
-ChromeUtils.defineModuleGetter(this, "Log", "resource://gre/modules/Log.jsm");
-ChromeUtils.defineModuleGetter(this, "OS", "resource://gre/modules/osfile.jsm");
+ChromeUtils.defineModuleGetter(lazy, "Log", "resource://gre/modules/Log.jsm");
+ChromeUtils.defineModuleGetter(lazy, "OS", "resource://gre/modules/osfile.jsm");
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "CommonUtils",
   "resource://services-common/utils.js"
 );
@@ -63,7 +65,7 @@ const PR_UINT32_MAX = 0xffffffff;
  * during logging. Instead, one can periodically consume the input stream and
 * e.g. write it to disk asynchronously.
  */
-class StorageStreamAppender extends Log.Appender {
+class StorageStreamAppender extends lazy.Log.Appender {
   constructor(formatter) {
     super(formatter);
     this._name = "StorageStreamAppender";
@@ -150,7 +152,7 @@ class FlushableStorageAppender extends StorageStreamAppender {
   }
 
   append(message) {
-    if (message.level >= Log.Level.Error) {
+    if (message.level >= lazy.Log.Level.Error) {
       this.sawError = true;
     }
     StorageStreamAppender.prototype.append.call(this, message);
@@ -199,16 +201,16 @@ class FlushableStorageAppender extends StorageStreamAppender {
     );
     binaryStream.setInputStream(inputStream);
 
-    let outputDirectory = OS.Path.join(
-      OS.Constants.Path.profileDir,
+    let outputDirectory = lazy.OS.Path.join(
+      lazy.OS.Constants.Path.profileDir,
       ...subdirArray
     );
-    await OS.File.makeDir(outputDirectory, {
+    await lazy.OS.File.makeDir(outputDirectory, {
       ignoreExisting: true,
-      from: OS.Constants.Path.profileDir,
+      from: lazy.OS.Constants.Path.profileDir,
     });
-    let fullOutputFileName = OS.Path.join(outputDirectory, outputFileName);
-    let output = await OS.File.open(fullOutputFileName, { write: true });
+    let fullOutputFileName = lazy.OS.Path.join(outputDirectory, outputFileName);
+    let output = await lazy.OS.File.open(fullOutputFileName, { write: true });
     try {
       while (true) {
         let available = binaryStream.available();
@@ -253,9 +255,9 @@ LogManager.prototype = {
     this.logFilePrefix = logFilePrefix;
     if (!formatter) {
       // Create a formatter and various appenders to attach to the logs.
-      formatter = new Log.BasicFormatter();
-      consoleAppender = new Log.ConsoleAppender(formatter);
-      dumpAppender = new Log.DumpAppender(formatter);
+      formatter = new lazy.Log.BasicFormatter();
+      consoleAppender = new lazy.Log.ConsoleAppender(formatter);
+      dumpAppender = new lazy.Log.DumpAppender(formatter);
     }
 
     allBranches.add(this._prefs._branchStr);
@@ -268,7 +270,7 @@ LogManager.prototype = {
       findSmallest = false
     ) => {
       let observer = newVal => {
-        let level = Log.Level[newVal] || defaultLevel;
+        let level = lazy.Log.Level[newVal] || defaultLevel;
         if (findSmallest) {
           // As some of our appenders have global impact (ie, there is only one
           // place 'dump' goes to), we need to find the smallest value from all
@@ -277,7 +279,7 @@ LogManager.prototype = {
           // dump=Error, we need to keep dump=Debug so consumerA is respected.
           for (let branch of allBranches) {
             let lookPrefBranch = new Preferences(branch);
-            let lookVal = Log.Level[lookPrefBranch.get(prefName)];
+            let lookVal = lazy.Log.Level[lookPrefBranch.get(prefName)];
             if (lookVal && lookVal < level) {
               level = lookVal;
             }
@@ -295,13 +297,13 @@ LogManager.prototype = {
     this._observeConsolePref = setupAppender(
       consoleAppender,
       "log.appender.console",
-      Log.Level.Fatal,
+      lazy.Log.Level.Fatal,
       true
     );
     this._observeDumpPref = setupAppender(
       dumpAppender,
       "log.appender.dump",
-      Log.Level.Error,
+      lazy.Log.Level.Error,
       true
     );
 
@@ -312,18 +314,18 @@ LogManager.prototype = {
     this._observeStreamPref = setupAppender(
       fapp,
       "log.appender.file.level",
-      Log.Level.Debug
+      lazy.Log.Level.Debug
     );
 
     // now attach the appenders to all our logs.
     for (let logName of logNames) {
-      let log = Log.repository.getLogger(logName);
+      let log = lazy.Log.repository.getLogger(logName);
       for (let appender of [fapp, dumpAppender, consoleAppender]) {
         log.addAppender(appender);
       }
     }
     // and use the first specified log as a "root" for our log.
-    this._log = Log.repository.getLogger(logNames[0] + ".LogManager");
+    this._log = lazy.Log.repository.getLogger(logNames[0] + ".LogManager");
   },
 
   /**
@@ -443,8 +445,11 @@ LogManager.prototype = {
   // determine if that file should be removed.
   async _deleteLogFiles(cbShouldDelete) {
     this._cleaningUpFileLogs = true;
-    let logDir = FileUtils.getDir("ProfD", this._logFileSubDirectoryEntries);
-    let iterator = new OS.File.DirectoryIterator(logDir.path);
+    let logDir = lazy.FileUtils.getDir(
+      "ProfD",
+      this._logFileSubDirectoryEntries
+    );
+    let iterator = new lazy.OS.File.DirectoryIterator(logDir.path);
 
     await iterator.forEach(async entry => {
       // Note that we don't check this.logFilePrefix is in the name - we cleanup
@@ -458,7 +463,7 @@ LogManager.prototype = {
       }
       try {
         // need to call .stat() as the enumerator doesn't give that to us on *nix.
-        let info = await OS.File.stat(entry.path);
+        let info = await lazy.OS.File.stat(entry.path);
         if (!cbShouldDelete(info)) {
           return;
         }
@@ -469,7 +474,7 @@ LogManager.prototype = {
             info.lastModificationDate.getTime() +
             ")"
         );
-        await OS.File.remove(entry.path);
+        await lazy.OS.File.remove(entry.path);
         this._log.trace("Deleted " + entry.name);
       } catch (ex) {
         this._log.debug(
@@ -488,7 +493,7 @@ LogManager.prototype = {
     this._cleaningUpFileLogs = false;
     this._log.debug("Done deleting files.");
     // This notification is used only for tests.
-    Services.obs.notifyObservers(
+    lazy.Services.obs.notifyObservers(
       null,
       "services-tests:common:log-manager:cleanup-logs"
     );

@@ -18,8 +18,10 @@ const { CommonUtils } = ChromeUtils.import(
   "resource://services-common/utils.js"
 );
 
+const lazy = {};
+
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "CryptoUtils",
   "resource://services-crypto/utils.js"
 );
@@ -710,7 +712,7 @@ TokenAuthenticatedRESTRequest.prototype = {
   __proto__: RESTRequest.prototype,
 
   async dispatch(method, data) {
-    let sig = await CryptoUtils.computeHTTPMACSHA1(
+    let sig = await lazy.CryptoUtils.computeHTTPMACSHA1(
       this.authToken.id,
       this.authToken.key,
       method,

@@ -9,23 +9,24 @@ var EXPORTED_SYMBOLS = ["UptakeTelemetry", "Policy"];
 const { XPCOMUtils } = ChromeUtils.import(
   "resource://gre/modules/XPCOMUtils.jsm"
 );
+const lazy = {};
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "AppConstants",
   "resource://gre/modules/AppConstants.jsm"
 );
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "ClientID",
   "resource://gre/modules/ClientID.jsm"
 );
 ChromeUtils.defineModuleGetter(
-  this,
+  lazy,
   "Services",
   "resource://gre/modules/Services.jsm"
 );
 
-XPCOMUtils.defineLazyGetter(this, "CryptoHash", () => {
+XPCOMUtils.defineLazyGetter(lazy, "CryptoHash", () => {
   return Components.Constructor(
     "@mozilla.org/security/hash;1",
     "nsICryptoHash",
@@ -34,7 +35,7 @@ XPCOMUtils.defineLazyGetter(this, "CryptoHash", () => {
 });
 
 XPCOMUtils.defineLazyPreferenceGetter(
-  this,
+  lazy,
   "gSampleRate",
   "services.common.uptake.sampleRate"
 );
@@ -52,7 +53,7 @@ var Policy = {
   _clientIDHash: null,
 
   getClientID() {
-    return ClientID.getClientID();
+    return lazy.ClientID.getClientID();
   },
 
   /**
@@ -72,7 +73,7 @@ var Policy = {
   async _doComputeClientIDHash() {
     const clientID = await this.getClientID();
     let byteArr = new TextEncoder().encode(clientID);
-    let hash = new CryptoHash("sha256");
+    let hash = new lazy.CryptoHash("sha256");
     hash.update(byteArr, byteArr.length);
     const bytes = hash.finish(false);
     let rem = 0;
@@ -83,7 +84,7 @@ var Policy = {
   },
 
   getChannel() {
-    return AppConstants.MOZ_UPDATE_CHANNEL;
+    return lazy.AppConstants.MOZ_UPDATE_CHANNEL;
   },
 };
 
@@ -185,21 +186,24 @@ class UptakeTelemetry {
     // Contrary to histograms, Telemetry Events are not enabled by default.
     // Enable them on first call to `report()`.
     if (!this._eventsEnabled) {
-      Services.telemetry.setEventRecordingEnabled(TELEMETRY_EVENTS_ID, true);
+      lazy.Services.telemetry.setEventRecordingEnabled(
+        TELEMETRY_EVENTS_ID,
+        true
+      );
       this._eventsEnabled = true;
     }
 
     const hash = await UptakeTelemetry.Policy.getClientIDHash();
     const channel = UptakeTelemetry.Policy.getChannel();
     const shouldSendEvent =
-      !["release", "esr"].includes(channel) || hash < gSampleRate;
+      !["release", "esr"].includes(channel) || hash < lazy.gSampleRate;
     if (shouldSendEvent) {
       // The Event API requires `extra` values to be of type string. Force it!
       const extraStr = Object.keys(extra).reduce((acc, k) => {
         acc[k] = extra[k].toString();
         return acc;
       }, {});
-      Services.telemetry.recordEvent(
+      lazy.Services.telemetry.recordEvent(
         TELEMETRY_EVENTS_ID,
         "uptake",
         component,

@@ -9,7 +9,8 @@ const { XPCOMUtils } = ChromeUtils.import(
   "resource://gre/modules/XPCOMUtils.jsm"
 );
 const { Log } = ChromeUtils.import("resource://gre/modules/Log.jsm");
-ChromeUtils.defineModuleGetter(this, "OS", "resource://gre/modules/osfile.jsm");
+const lazy = {};
+ChromeUtils.defineModuleGetter(lazy, "OS", "resource://gre/modules/osfile.jsm");
 
 var CommonUtils = {
   /*
@@ -430,7 +431,7 @@ var CommonUtils = {
   * @return a promise that resolves to the JSON contents of the named file.
   */
   readJSON(path) {
-    return OS.File.read(path, { encoding: "utf-8" }).then(data => {
+    return lazy.OS.File.read(path, { encoding: "utf-8" }).then(data => {
       return JSON.parse(data);
     });
   },
@@ -444,7 +445,7 @@ var CommonUtils = {
   */
   writeJSON(contents, path) {
     let data = JSON.stringify(contents);
-    return OS.File.writeAtomic(path, data, {
+    return lazy.OS.File.writeAtomic(path, data, {
       encoding: "utf-8",
       tmpPath: path + ".tmp",
     });