Merge mozilla-central to mozilla-inbound

Carsten "Tomcat" Book 2016-11-25 16:14:48 +01:00
Parent 4173ebaa20 a666835bf5
Commit 671beb0867
116 changed files with 16781 additions and 15121 deletions

View File

@ -1,6 +1,16 @@
# Checks run by clang-tidy over Mozilla code.
# The following checks are currently enabled:
# * misc-use-override
Checks: '-*,misc-use-override'
# * modernize-raw-string-literal -
# Replace string literals containing escaped characters with raw string literals
# * modernize-use-bool-literals
# Replace integer literals which are cast to bool
# * modernize-loop-convert
# Converts for(...; ...; ...) loops to use the new range-based loops in C++11
# * modernize-use-default
# Replace default bodies of special member functions with = default;
# * modernize-use-override
# Use C++11's override and remove virtual where applicable
Checks: '-*, modernize-raw-string-literal, modernize-use-bool-literals, modernize-loop-convert, modernize-use-default, modernize-use-override'

View File

@ -118,8 +118,12 @@ devtools/server/actors/**
!devtools/server/actors/styles.js
!devtools/server/actors/webbrowser.js
!devtools/server/actors/webextension.js
!devtools/server/actors/webextension-inspected-window.js
devtools/server/performance/**
devtools/server/tests/**
devtools/server/tests/browser/**
!devtools/server/tests/browser/browser_webextension_inspected_window.js
devtools/server/tests/mochitest/**
devtools/server/tests/unit/**
devtools/shared/*.js
!devtools/shared/async-storage.js
!devtools/shared/async-utils.js

View File

@ -1,5 +1,5 @@
<?xml version='1.0' encoding='UTF-8'?>
<blocklist lastupdate="1479994717581" xmlns="http://www.mozilla.org/2006/addons-blocklist">
<blocklist lastupdate="1480081945919" xmlns="http://www.mozilla.org/2006/addons-blocklist">
<emItems>
<emItem blockID="i545" id="superlrcs@svenyor.net">
<prefs/>

View File

@ -5631,7 +5631,7 @@ function middleMousePaste(event) {
function stripUnsafeProtocolOnPaste(pasteData) {
// Don't allow pasting javascript URIs since we don't support
// LOAD_FLAGS_DISALLOW_INHERIT_PRINCIPAL for those.
return pasteData.replace(/^(?:\s*javascript:)+/i, "");
return pasteData.replace(/\r?\n/g, "").replace(/^(?:\s*javascript:)+/i, "");
}
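// Illustrative sketch, not part of the patch (helper name is hypothetical):
// the added replace(/\r?\n/g, "") pass collapses pasted text such as
// "java\nscript:alert(1)" into "javascript:alert(1)" before the
// protocol-stripping regex runs, so a javascript: prefix split across lines
// is stripped as well.
function stripUnsafeProtocolOnPasteSketch(pasteData) {
  return pasteData.replace(/\r?\n/g, "").replace(/^(?:\s*javascript:)+/i, "");
}
// stripUnsafeProtocolOnPasteSketch("java\nscript:alert(1)")    -> "alert(1)"
// stripUnsafeProtocolOnPasteSketch(" javascript:javascript:x") -> "x"
// stripUnsafeProtocolOnPasteSketch("https://example.com/")     -> "https://example.com/"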
// handleDroppedLink has the following 2 overloads:

View File

@ -701,38 +701,51 @@ file, You can obtain one at http://mozilla.org/MPL/2.0/.
]]></body>
</method>
<method name="onDragOver">
<parameter name="aEvent"/>
<body>
var types = aEvent.dataTransfer.types;
if (types.includes("application/x-moz-file") ||
types.includes("text/x-moz-url") ||
types.includes("text/uri-list") ||
types.includes("text/unicode"))
aEvent.preventDefault();
</body>
</method>
<method name="onDrop">
<method name="_getDroppableLink">
<parameter name="aEvent"/>
<body><![CDATA[
let links = browserDragAndDrop.dropLinks(aEvent);
// The URL bar automatically handles inputs with newline characters,
// so we can get away with treating text/x-moz-url flavours as text/plain.
if (links.length > 0 && links[0].url) {
let url = links[0].url;
aEvent.preventDefault();
this.value = url;
SetPageProxyState("invalid");
this.focus();
let url = links[0].url;
let strippedURL = stripUnsafeProtocolOnPaste(url);
if (strippedURL != url) {
aEvent.stopImmediatePropagation();
return null;
}
try {
urlSecurityCheck(url,
gBrowser.contentPrincipal,
Ci.nsIScriptSecurityManager.DISALLOW_INHERIT_PRINCIPAL);
} catch (ex) {
return;
return null;
}
return url;
}
return null;
]]></body>
</method>
<method name="onDragOver">
<parameter name="aEvent"/>
<body><![CDATA[
// We don't need the link here, so we ignore the return value.
if (!this._getDroppableLink(aEvent)) {
aEvent.dataTransfer.dropEffect = "none";
}
]]></body>
</method>
<method name="onDrop">
<parameter name="aEvent"/>
<body><![CDATA[
let url = this._getDroppableLink(aEvent);
if (url) {
this.value = url;
SetPageProxyState("invalid");
this.focus();
this.handleCommand();
}
]]></body>
@ -924,7 +937,7 @@ file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Unfortunately we're not allowed to set the bits being pasted
// so cancel this event:
aEvent.preventDefault();
aEvent.stopPropagation();
aEvent.stopImmediatePropagation();
this.inputField.value = oldStart + pasteData + oldEnd;
// Fix up cursor/selection:

View File

@ -52,9 +52,13 @@ stubPreparedMessages.set("asdf()", new ConsoleMessage({
"type": "undefined"
},
"repeat": 1,
"repeatId": "{\"id\":null,\"allowRepeating\":true,\"source\":\"javascript\",\"timeStamp\":1479159921377,\"type\":\"result\",\"level\":\"error\",\"messageText\":\"ReferenceError: asdf is not defined\",\"parameters\":{\"type\":\"undefined\"},\"repeatId\":null,\"stacktrace\":null,\"frame\":null,\"groupId\":null,\"exceptionDocURL\":\"https://developer.mozilla.org/docs/Web/JavaScript/Reference/Errors/Not_defined?utm_source=mozilla&utm_medium=firefox-console-errors&utm_campaign=default\",\"userProvidedStyles\":null}",
"repeatId": "{\"id\":null,\"allowRepeating\":true,\"source\":\"javascript\",\"timeStamp\":1479159921377,\"type\":\"result\",\"level\":\"error\",\"messageText\":\"ReferenceError: asdf is not defined\",\"parameters\":{\"type\":\"undefined\"},\"repeatId\":null,\"stacktrace\":null,\"frame\":{\"source\":\"debugger eval code\",\"line\":1,\"column\":1},\"groupId\":null,\"exceptionDocURL\":\"https://developer.mozilla.org/docs/Web/JavaScript/Reference/Errors/Not_defined?utm_source=mozilla&utm_medium=firefox-console-errors&utm_campaign=default\",\"userProvidedStyles\":null}",
"stacktrace": null,
"frame": null,
"frame": {
"source": "debugger eval code",
"line": 1,
"column": 1
},
"groupId": null,
"exceptionDocURL": "https://developer.mozilla.org/docs/Web/JavaScript/Reference/Errors/Not_defined?utm_source=mozilla&utm_medium=firefox-console-errors&utm_campaign=default",
"userProvidedStyles": null
@ -72,9 +76,13 @@ stubPreparedMessages.set("1 + @", new ConsoleMessage({
"type": "undefined"
},
"repeat": 1,
"repeatId": "{\"id\":null,\"allowRepeating\":true,\"source\":\"javascript\",\"timeStamp\":1479159921399,\"type\":\"result\",\"level\":\"error\",\"messageText\":\"SyntaxError: illegal character\",\"parameters\":{\"type\":\"undefined\"},\"repeatId\":null,\"stacktrace\":null,\"frame\":null,\"groupId\":null,\"userProvidedStyles\":null}",
"repeatId": "{\"id\":null,\"allowRepeating\":true,\"source\":\"javascript\",\"timeStamp\":1479159921399,\"type\":\"result\",\"level\":\"error\",\"messageText\":\"SyntaxError: illegal character\",\"parameters\":{\"type\":\"undefined\"},\"repeatId\":null,\"stacktrace\":null,\"frame\":{\"source\":\"debugger eval code\",\"line\":1,\"column\":4},\"groupId\":null,\"userProvidedStyles\":null}",
"stacktrace": null,
"frame": null,
"frame": {
"source": "debugger eval code",
"line": 1,
"column": 4
},
"groupId": null,
"userProvidedStyles": null
}));
@ -128,7 +136,11 @@ stubPackets.set("asdf()", {
},
"exceptionMessage": "ReferenceError: asdf is not defined",
"exceptionDocURL": "https://developer.mozilla.org/docs/Web/JavaScript/Reference/Errors/Not_defined?utm_source=mozilla&utm_medium=firefox-console-errors&utm_campaign=default",
"frame": null,
"frame": {
"source": "debugger eval code",
"line": 1,
"column": 1
},
"helperResult": null
});
@ -158,7 +170,11 @@ stubPackets.set("1 + @", {
}
},
"exceptionMessage": "SyntaxError: illegal character",
"frame": null,
"frame": {
"source": "debugger eval code",
"line": 1,
"column": 4
},
"helperResult": null
});

View File

@ -23,9 +23,8 @@ add_task(function* () {
ok(request, "Page load was logged");
let client = hud.ui.webConsoleClient;
let args = [request.actor];
const postData = yield getPacket(client, "getRequestPostData", args);
const responseContent = yield getPacket(client, "getResponseContent", args);
const postData = yield client.getRequestPostData(request.actor);
const responseContent = yield client.getResponseContent(request.actor);
is(request.request.url, TEST_NETWORK_REQUEST_URI,
"Logged network entry is page load");

View File

@ -39,9 +39,8 @@ add_task(function* testPageLoad() {
ok(request, "Page load was logged");
let client = hud.ui.webConsoleClient;
let args = [request.actor];
const postData = yield getPacket(client, "getRequestPostData", args);
const responseContent = yield getPacket(client, "getResponseContent", args);
const postData = yield client.getRequestPostData(request.actor);
const responseContent = yield client.getResponseContent(request.actor);
is(request.request.url, TEST_NETWORK_REQUEST_URI,
"Logged network entry is page load");
@ -65,9 +64,8 @@ add_task(function* testXhrGet() {
ok(request, "testXhrGet() was logged");
let client = hud.ui.webConsoleClient;
let args = [request.actor];
const postData = yield getPacket(client, "getRequestPostData", args);
const responseContent = yield getPacket(client, "getResponseContent", args);
const postData = yield client.getRequestPostData(request.actor);
const responseContent = yield client.getResponseContent(request.actor);
is(request.request.method, "GET", "Method is correct");
ok(!postData.postData.text, "No request body was sent");
@ -89,9 +87,8 @@ add_task(function* testXhrPost() {
ok(request, "testXhrPost() was logged");
let client = hud.ui.webConsoleClient;
let args = [request.actor];
const postData = yield getPacket(client, "getRequestPostData", args);
const responseContent = yield getPacket(client, "getResponseContent", args);
const postData = yield client.getRequestPostData(request.actor);
const responseContent = yield client.getResponseContent(request.actor);
is(request.request.method, "POST", "Method is correct");
is(postData.postData.text, "Hello world!", "Request body was logged");
@ -120,9 +117,8 @@ add_task(function* testFormSubmission() {
ok(request, "testFormSubmission() was logged");
let client = hud.ui.webConsoleClient;
let args = [request.actor];
const postData = yield getPacket(client, "getRequestPostData", args);
const responseContent = yield getPacket(client, "getResponseContent", args);
const postData = yield client.getRequestPostData(request.actor);
const responseContent = yield client.getResponseContent(request.actor);
is(request.request.method, "POST", "Method is correct");
isnot(postData.postData.text

View File

@ -1790,21 +1790,6 @@ function getSourceActor(sources, URL) {
return item && item.value;
}
/**
* Make a request against an actor and resolve with the packet.
* @param object client
* The client to use when making the request.
* @param function requestType
* The client request function to run.
* @param array args
* The arguments to pass into the function.
*/
function getPacket(client, requestType, args) {
return new Promise(resolve => {
client[requestType](...args, packet => resolve(packet));
});
}
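// Sketch, not part of the patch (wrapper name is hypothetical): getPacket
// existed only to promisify callback-style client requests.  With the
// WebConsoleClient methods now returning the request object directly (see the
// webconsole.js changes below), the tests above yield the call itself instead
// of going through this helper.
function* fetchRequestDetails(client, request) {
  // old style: yield getPacket(client, "getRequestPostData", [request.actor]);
  const postData = yield client.getRequestPostData(request.actor);
  const responseContent = yield client.getResponseContent(request.actor);
  return { postData, responseContent };
}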
/**
* Verify that clicking on a link from a popup notification message tries to
* open the expected URL.

View File

@ -63,6 +63,7 @@ DevToolsModules(
'webaudio.js',
'webbrowser.js',
'webconsole.js',
'webextension-inspected-window.js',
'webextension.js',
'webgl.js',
'worker.js',

View File

@ -0,0 +1,469 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const protocol = require("devtools/shared/protocol");
const {Ci, Cu, Cr} = require("chrome");
const Services = require("Services");
const {
XPCOMUtils,
} = Cu.import("resource://gre/modules/XPCOMUtils.jsm", {});
const {
webExtensionInspectedWindowSpec,
} = require("devtools/shared/specs/webextension-inspected-window");
function CustomizedReload(params) {
this.docShell = params.tabActor.window
.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDocShell);
this.docShell.QueryInterface(Ci.nsIWebProgress);
this.inspectedWindowEval = params.inspectedWindowEval;
this.callerInfo = params.callerInfo;
this.ignoreCache = params.ignoreCache;
this.injectedScript = params.injectedScript;
this.userAgent = params.userAgent;
this.customizedReloadWindows = new WeakSet();
}
CustomizedReload.prototype = {
QueryInterface: XPCOMUtils.generateQI([Ci.nsIWebProgressListener,
Ci.nsISupportsWeakReference,
Ci.nsISupports]),
get window() {
return this.docShell.DOMWindow;
},
get webNavigation() {
return this.docShell
.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIWebNavigation);
},
start() {
if (!this.waitForReloadCompleted) {
this.waitForReloadCompleted = new Promise((resolve, reject) => {
this.resolveReloadCompleted = resolve;
this.rejectReloadCompleted = reject;
if (this.userAgent) {
this.docShell.customUserAgent = this.userAgent;
}
let reloadFlags = Ci.nsIWebNavigation.LOAD_FLAGS_NONE;
if (this.ignoreCache) {
reloadFlags |= Ci.nsIWebNavigation.LOAD_FLAGS_BYPASS_CACHE;
}
try {
if (this.injectedScript) {
// Listen to the newly created document elements only if there is an
// injectedScript to evaluate.
Services.obs.addObserver(this, "document-element-inserted", false);
}
// Watch the loading progress and clear the current CustomizedReload once the
// page has been reloaded (or if its reloading has been interrupted).
this.docShell.addProgressListener(this,
Ci.nsIWebProgress.NOTIFY_STATE_DOCUMENT);
this.webNavigation.reload(reloadFlags);
} catch (err) {
// Cancel the injected script listener if the reload fails
// (which will also report the error by rejecting the promise).
this.stop(err);
}
});
}
return this.waitForReloadCompleted;
},
observe(subject, topic, data) {
if (topic !== "document-element-inserted") {
return;
}
const document = subject;
const window = document && document.defaultView;
// Filter out non interesting documents.
if (!document || !document.location || !window) {
return;
}
let subjectDocShell = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIWebNavigation)
.QueryInterface(Ci.nsIDocShell);
// Keep track of the set of window objects where we are going to inject
// the injectedScript: the top level window and all its descendant
// that are still of type content (filtering out loaded XUL pages, if any).
if (window == this.window) {
this.customizedReloadWindows.add(window);
} else if (subjectDocShell.sameTypeParent) {
let parentWindow = subjectDocShell.sameTypeParent
.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindow);
if (parentWindow && this.customizedReloadWindows.has(parentWindow)) {
this.customizedReloadWindows.add(window);
}
}
if (this.customizedReloadWindows.has(window)) {
const {
apiErrorResult
} = this.inspectedWindowEval(this.callerInfo, this.injectedScript, {}, window);
// Log only apiErrorResult, because no one is waiting for the
// injectedScript result, and any exception is going to be logged
// in the inspectedWindow webconsole.
if (apiErrorResult) {
console.error(
"Unexpected Error in injectedScript during inspectedWindow.reload for",
`${this.callerInfo.url}:${this.callerInfo.lineNumber}`,
apiErrorResult
);
}
}
},
onStateChange(webProgress, request, state, status) {
if (webProgress.DOMWindow !== this.window) {
return;
}
if (state & Ci.nsIWebProgressListener.STATE_STOP) {
if (status == Cr.NS_BINDING_ABORTED) {
// The customized reload has been interrupted and we can clear
// the CustomizedReload and reject the promise.
const url = this.window.location.href;
this.stop(new Error(
`devtools.inspectedWindow.reload on ${url} has been interrupted`
));
} else {
// Once the top level frame has been loaded, we can clear the customized reload
// and resolve the promise.
this.stop();
}
}
},
stop(error) {
if (this.stopped) {
return;
}
this.docShell.removeProgressListener(this);
if (this.injectedScript) {
Services.obs.removeObserver(this, "document-element-inserted", false);
}
// Reset the customized user agent.
if (this.userAgent && this.docShell.customUserAgent == this.userAgent) {
this.docShell.customUserAgent = null;
}
if (error) {
this.rejectReloadCompleted(error);
} else {
this.resolveReloadCompleted();
}
this.stopped = true;
}
};
var WebExtensionInspectedWindowActor = protocol.ActorClassWithSpec(
webExtensionInspectedWindowSpec,
{
/**
* Created the WebExtension InspectedWindow actor
*/
initialize(conn, tabActor) {
protocol.Actor.prototype.initialize.call(this, conn);
this.tabActor = tabActor;
},
destroy(conn) {
protocol.Actor.prototype.destroy.call(this, conn);
if (this.customizedReload) {
this.customizedReload.stop(
new Error("WebExtensionInspectedWindowActor destroyed")
);
delete this.customizedReload;
}
if (this._dbg) {
this._dbg.enabled = false;
delete this._dbg;
}
},
isSystemPrincipal(window) {
const principal = window.document.nodePrincipal;
return Services.scriptSecurityManager.isSystemPrincipal(principal);
},
get dbg() {
if (this._dbg) {
return this._dbg;
}
this._dbg = this.tabActor.makeDebugger();
return this._dbg;
},
get window() {
return this.tabActor.window;
},
get webNavigation() {
return this.tabActor.webNavigation;
},
/**
* Reload the target tab, optionally bypass cache, customize the userAgent and/or
* inject a script in targeted document or any of its sub-frame.
*
* @param {webExtensionCallerInfo} callerInfo
* the addonId and the url (the addon base url or the url of the actual caller
* filename and lineNumber) used to log useful debugging information in the
* produced error logs and eval stack trace.
*
* @param {webExtensionReloadOptions} options
* used to optionally enable the reload customizations.
* @param {boolean|undefined} options.ignoreCache
* enable/disable the cache bypass headers.
* @param {string|undefined} options.userAgent
* customize the userAgent during the page reload.
* @param {string|undefined} options.injectedScript
* evaluate the provided javascript code in the top level and every sub-frame
* created during the page reload, before any other script in the page has been
* executed.
*/
reload(callerInfo, {ignoreCache, userAgent, injectedScript}) {
if (this.isSystemPrincipal(this.window)) {
console.error("Ignored inspectedWindow.reload on system principal target for " +
`${callerInfo.url}:${callerInfo.lineNumber}`);
return {};
}
const delayedReload = () => {
// This won't work while the browser is shutting down and we don't really
// care.
if (Services.startup.shuttingDown) {
return;
}
if (injectedScript || userAgent) {
if (this.customizedReload) {
// TODO(rpl): check what chrome does, and evaluate if queue the new reload
// after the current one has been completed.
console.error(
"Reload already in progress. Ignored inspectedWindow.reload for " +
`${callerInfo.url}:${callerInfo.lineNumber}`
);
return;
}
try {
this.customizedReload = new CustomizedReload({
tabActor: this.tabActor,
inspectedWindowEval: this.eval.bind(this),
callerInfo, injectedScript, userAgent, ignoreCache,
});
this.customizedReload.start()
.then(() => {
delete this.customizedReload;
})
.catch(err => {
delete this.customizedReload;
throw err;
});
} catch (err) {
// Cancel the customized reload (if any) on exception during the
// reload setup.
if (this.customizedReload) {
this.customizedReload.stop(err);
}
throw err;
}
} else {
// If there is no custom user agent and/or injected script, then
// we can reload the target without subscribing any observer/listener.
let reloadFlags = Ci.nsIWebNavigation.LOAD_FLAGS_NONE;
if (ignoreCache) {
reloadFlags |= Ci.nsIWebNavigation.LOAD_FLAGS_BYPASS_CACHE;
}
this.webNavigation.reload(reloadFlags);
}
};
// Execute the reload in a dispatched runnable, so that we can
// return the reply to the caller before the reload is actually
// started.
Services.tm.currentThread.dispatch(delayedReload, 0);
return {};
},
/**
* Evaluate the provided javascript code in a target window (that is always the
* tabActor window when called through RDP protocol, or the passed customTargetWindow
* when called directly from the CustomizedReload instances).
*
* @param {webExtensionCallerInfo} callerInfo
* the addonId and the url (the addon base url or the url of the actual caller
* filename and lineNumber) used to log useful debugging information in the
* produced error logs and eval stack trace.
*
* @param {string} expression
* the javascript code to be evaluated in the target window
*
* @param {webExtensionEvalOptions} evalOptions
* used to optionally enable the eval customizations.
* NOTE: none of the eval options is currently implemented, they will be already
* reported as unsupported by the WebExtensions schema validation wrappers, but
* an additional level of error reporting is going to be applied here, so that
* if the server and the client have different ideas of which option is supported
* the eval call result will contain detailed informations (in the format usually
* expected for errors not raised in the evaluated javascript code).
*
* @param {DOMWindow|undefined} customTargetWindow
* Used in the CustomizedReload instances to evaluate the `injectedScript`
* javascript code in every sub-frame of the target window during the tab reload.
* NOTE: this parameter is not part of the RDP protocol exposed by this actor, when
* it is called over the remote debugging protocol the target window is always
* `tabActor.window`.
*/
eval(callerInfo, expression, options, customTargetWindow) {
const window = customTargetWindow || this.window;
if (Object.keys(options).length > 0) {
return {
exceptionInfo: {
isError: true,
code: "E_PROTOCOLERROR",
description: "Inspector protocol error: %s",
details: [
"The inspectedWindow.eval options are currently not supported",
],
},
};
}
if (!window) {
return {
exceptionInfo: {
isError: true,
code: "E_PROTOCOLERROR",
description: "Inspector protocol error: %s",
details: [
"The target window is not defined. inspectedWindow.eval not executed.",
],
},
};
}
if (this.isSystemPrincipal(window)) {
// On denied JS evaluation, report it using the same data format
// used in the corresponding chrome API method to report issues that are
// not exceptions raised in the evaluated javascript code.
return {
exceptionInfo: {
isError: true,
code: "E_PROTOCOLERROR",
description: "Inspector protocol error: %s",
details: [
"This target has a system principal. inspectedWindow.eval denied.",
],
},
};
}
const dbgWindow = this.dbg.makeGlobalObjectReference(window);
let evalCalledFrom = callerInfo.url;
if (callerInfo.lineNumber) {
evalCalledFrom += `:${callerInfo.lineNumber}`;
}
// TODO(rpl): add $0 and inspect(...) bindings (Bug 1300590)
const result = dbgWindow.executeInGlobalWithBindings(expression, {}, {
url: `debugger eval called from ${evalCalledFrom} - eval code`,
});
let evalResult;
if (result) {
if ("return" in result) {
evalResult = result.return;
} else if ("yield" in result) {
evalResult = result.yield;
} else if ("throw" in result) {
const throwErr = result.throw;
// XXXworkers: Calling unsafeDereference() returns an object with no
// toString method in workers. See Bug 1215120.
const unsafeDereference = throwErr && (typeof throwErr === "object") &&
throwErr.unsafeDereference();
const message = unsafeDereference && unsafeDereference.toString ?
unsafeDereference.toString() : String(throwErr);
const stack = unsafeDereference && unsafeDereference.stack ?
unsafeDereference.stack : null;
return {
exceptionInfo: {
isException: true,
value: `${message}\n\t${stack}`,
},
};
}
} else {
// TODO(rpl): can the result of executeInGlobalWithBinding be null or
// undefined? (which means that it is not a return, a yield or a throw).
console.error("Unexpected empty inspectedWindow.eval result for",
`${callerInfo.url}:${callerInfo.lineNumber}`);
}
if (evalResult) {
try {
if (evalResult && typeof evalResult === "object") {
evalResult = evalResult.unsafeDereference();
}
evalResult = JSON.parse(JSON.stringify(evalResult));
} catch (err) {
// The evaluation result cannot be sent over the RDP Protocol,
// report it as with the same data format used in the corresponding
// chrome API method.
return {
exceptionInfo: {
isError: true,
code: "E_PROTOCOLERROR",
description: "Inspector protocol error: %s",
details: [
String(err),
],
},
};
}
}
return {value: evalResult};
}
}
);
exports.WebExtensionInspectedWindowActor = WebExtensionInspectedWindowActor;
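// Sketch, not part of the patch (caller name is hypothetical): the two reply
// shapes eval() above can produce.  A successful evaluation resolves to
// { value: <json> }; denied or failed evaluations resolve to an exceptionInfo
// object in the chrome-API-compatible format used above.
function handleEvalReply(reply) {
  if (reply.exceptionInfo) {
    if (reply.exceptionInfo.isException) {
      // An exception raised by the evaluated code: message and stack in `value`.
      console.error("evaluated code threw:", reply.exceptionInfo.value);
    } else {
      // An API-level error: code/description/details, e.g. E_PROTOCOLERROR.
      console.error("eval rejected:", reply.exceptionInfo.details);
    }
    return undefined;
  }
  return reply.value;
}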

View File

@ -569,6 +569,11 @@ var DebuggerServer = {
constructor: "EmulationActor",
type: { tab: true }
});
this.registerModule("devtools/server/actors/webextension-inspected-window", {
prefix: "webExtensionInspectedWindow",
constructor: "WebExtensionInspectedWindowActor",
type: { tab: true }
});
},
/**

View File

@ -9,6 +9,7 @@ support-files =
doc_force_gc.html
doc_innerHTML.html
doc_perf.html
inspectedwindow-reload-target.sjs
navigate-first.html
navigate-second.html
storage-cookies-same-name.html
@ -97,3 +98,4 @@ skip-if = e10s # Bug 1183605 - devtools/server/tests/browser/ tests are still di
[browser_directorscript_actors.js]
skip-if = e10s # Bug 1183605 - devtools/server/tests/browser/ tests are still disabled in E10S
[browser_register_actor.js]
[browser_webextension_inspected_window.js]

View File

@ -0,0 +1,364 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
const {
WebExtensionInspectedWindowFront
} = require("devtools/shared/fronts/webextension-inspected-window");
const TEST_RELOAD_URL = `${MAIN_DOMAIN}/inspectedwindow-reload-target.sjs`;
const FAKE_CALLER_INFO = {
url: "moz-extension://fake-webextension-uuid/fake-caller-script.js",
lineNumber: 1,
addonId: "fake-webextension-uuid",
};
function* setup(pageUrl) {
yield addTab(pageUrl);
initDebuggerServer();
const client = new DebuggerClient(DebuggerServer.connectPipe());
const form = yield connectDebuggerClient(client);
const [, tabClient] = yield client.attachTab(form.actor);
const [, consoleClient] = yield client.attachConsole(form.consoleActor, []);
const inspectedWindowFront = new WebExtensionInspectedWindowFront(client, form);
return {
client, form,
tabClient, consoleClient,
inspectedWindowFront,
};
}
function* teardown({client}) {
yield client.close();
DebuggerServer.destroy();
gBrowser.removeCurrentTab();
}
function waitForNextTabNavigated(client) {
return new Promise(resolve => {
client.addListener("tabNavigated", function tabNavigatedListener(evt, pkt) {
if (pkt.state == "stop" && pkt.isFrameSwitching == false) {
client.removeListener("tabNavigated", tabNavigatedListener);
resolve();
}
});
});
}
function consoleEvalJS(consoleClient, jsCode) {
return new Promise(resolve => {
consoleClient.evaluateJS(jsCode, resolve);
});
}
// Script used as the injectedScript option in the inspectedWindow.reload tests.
function injectedScript() {
if (!window.pageScriptExecutedFirst) {
window.addEventListener("DOMContentLoaded", function listener() {
window.removeEventListener("DOMContentLoaded", listener);
if (document.querySelector("pre")) {
document.querySelector("pre").textContent = "injected script executed first";
}
});
}
}
// Script evaluated in the target tab, to collect the results of injectedScript
// evaluation in the inspectedWindow.reload tests.
function collectEvalResults() {
let results = [];
let iframeDoc = document;
while (iframeDoc) {
if (iframeDoc.querySelector("pre")) {
results.push(iframeDoc.querySelector("pre").textContent);
}
const iframe = iframeDoc.querySelector("iframe");
iframeDoc = iframe ? iframe.contentDocument : null;
}
return JSON.stringify(results);
}
add_task(function* test_successfull_inspectedWindowEval_result() {
const {client, inspectedWindowFront} = yield setup(MAIN_DOMAIN);
const result = yield inspectedWindowFront.eval(FAKE_CALLER_INFO, "window.location", {});
ok(result.value, "Got a result from inspectedWindow eval");
is(result.value.href, MAIN_DOMAIN,
"Got the expected window.location.href property value");
is(result.value.protocol, "http:",
"Got the expected window.location.protocol property value");
yield teardown({client});
});
add_task(function* test_error_inspectedWindowEval_result() {
const {client, inspectedWindowFront} = yield setup(MAIN_DOMAIN);
const result = yield inspectedWindowFront.eval(FAKE_CALLER_INFO, "window", {});
ok(!result.value, "Got a null result from inspectedWindow eval");
ok(result.exceptionInfo.isError, "Got an API Error result from inspectedWindow eval");
ok(!result.exceptionInfo.isException, "An error isException is false as expected");
is(result.exceptionInfo.code, "E_PROTOCOLERROR",
"Got the expected 'code' property in the error result");
is(result.exceptionInfo.description, "Inspector protocol error: %s",
"Got the expected 'description' property in the error result");
is(result.exceptionInfo.details.length, 1,
"The 'details' array property should contains 1 element");
ok(result.exceptionInfo.details[0].includes("cyclic object value"),
"Got the expected content in the error results's details");
yield teardown({client});
});
add_task(function* test_system_principal_denied_error_inspectedWindowEval_result() {
const {client, inspectedWindowFront} = yield setup("about:addons");
const result = yield inspectedWindowFront.eval(FAKE_CALLER_INFO, "window", {});
ok(!result.value, "Got a null result from inspectedWindow eval");
ok(result.exceptionInfo.isError,
"Got an API Error result from inspectedWindow eval on a system principal page");
is(result.exceptionInfo.code, "E_PROTOCOLERROR",
"Got the expected 'code' property in the error result");
is(result.exceptionInfo.description, "Inspector protocol error: %s",
"Got the expected 'description' property in the error result");
is(result.exceptionInfo.details.length, 1,
"The 'details' array property should contains 1 element");
is(result.exceptionInfo.details[0],
"This target has a system principal. inspectedWindow.eval denied.",
"Got the expected content in the error results's details");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowEval_result() {
const {client, inspectedWindowFront} = yield setup(MAIN_DOMAIN);
const result = yield inspectedWindowFront.eval(
FAKE_CALLER_INFO, "throw Error('fake eval error');", {});
ok(result.exceptionInfo.isException, "Got an exception as expected");
ok(!result.value, "Got an undefined eval value");
ok(!result.exceptionInfo.isError, "An exception should not be isError=true");
ok(result.exceptionInfo.value.includes("Error: fake eval error"),
"Got the expected exception message");
const expectedCallerInfo =
`called from ${FAKE_CALLER_INFO.url}:${FAKE_CALLER_INFO.lineNumber}`;
ok(result.exceptionInfo.value.includes(expectedCallerInfo),
"Got the expected caller info in the exception message");
const expectedStack = `eval code:1:7`;
ok(result.exceptionInfo.value.includes(expectedStack),
"Got the expected stack trace in the exception message");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowReload() {
const {
client, consoleClient, inspectedWindowFront,
} = yield setup(`${TEST_RELOAD_URL}?test=cache`);
// Test reload with bypassCache=false.
const waitForNoBypassCacheReload = waitForNextTabNavigated(client);
const reloadResult = yield inspectedWindowFront.reload(FAKE_CALLER_INFO,
{ignoreCache: false});
ok(!reloadResult, "Got the expected undefined result from inspectedWindow reload");
yield waitForNoBypassCacheReload;
const noBypassCacheEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(noBypassCacheEval.result, "empty cache headers",
"Got the expected result with reload forceBypassCache=false");
// Test reload with bypassCache=true.
const waitForForceBypassCacheReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO, {ignoreCache: true});
yield waitForForceBypassCacheReload;
const forceBypassCacheEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(forceBypassCacheEval.result, "no-cache:no-cache",
"Got the expected result with reload forceBypassCache=true");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowReload_customUserAgent() {
const {
client, consoleClient, inspectedWindowFront,
} = yield setup(`${TEST_RELOAD_URL}?test=user-agent`);
// Test reload with custom userAgent.
const waitForCustomUserAgentReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO,
{userAgent: "Customized User Agent"});
yield waitForCustomUserAgentReload;
const customUserAgentEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(customUserAgentEval.result, "Customized User Agent",
"Got the expected result on reload with a customized userAgent");
// Test reload with no custom userAgent.
const waitForNoCustomUserAgentReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO, {});
yield waitForNoCustomUserAgentReload;
const noCustomUserAgentEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(noCustomUserAgentEval.result, window.navigator.userAgent,
"Got the expected result with reload without a customized userAgent");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowReload_injectedScript() {
const {
client, consoleClient, inspectedWindowFront,
} = yield setup(`${TEST_RELOAD_URL}?test=injected-script&frames=3`);
// Test reload with an injectedScript.
const waitForInjectedScriptReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO,
{injectedScript: `new ${injectedScript}`});
yield waitForInjectedScriptReload;
const injectedScriptEval = yield consoleEvalJS(consoleClient,
`(${collectEvalResults})()`);
const expectedResult = (new Array(4)).fill("injected script executed first");
SimpleTest.isDeeply(JSON.parse(injectedScriptEval.result), expectedResult,
"Got the expected result on reload with an injected script");
// Test reload without an injectedScript.
const waitForNoInjectedScriptReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO, {});
yield waitForNoInjectedScriptReload;
const noInjectedScriptEval = yield consoleEvalJS(consoleClient,
`(${collectEvalResults})()`);
const newExpectedResult = (new Array(4)).fill("injected script NOT executed");
SimpleTest.isDeeply(JSON.parse(noInjectedScriptEval.result), newExpectedResult,
"Got the expected result on reload with no injected script");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowReload_multiple_calls() {
const {
client, consoleClient, inspectedWindowFront,
} = yield setup(`${TEST_RELOAD_URL}?test=user-agent`);
// Test reload with custom userAgent three times (and then
// check that only the first one has affected the page reload.
const waitForCustomUserAgentReload = waitForNextTabNavigated(client);
inspectedWindowFront.reload(FAKE_CALLER_INFO, {userAgent: "Customized User Agent 1"});
inspectedWindowFront.reload(FAKE_CALLER_INFO, {userAgent: "Customized User Agent 2"});
yield waitForCustomUserAgentReload;
const customUserAgentEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(customUserAgentEval.result, "Customized User Agent 1",
"Got the expected result on reload with a customized userAgent");
// Test reload with no custom userAgent.
const waitForNoCustomUserAgentReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO, {});
yield waitForNoCustomUserAgentReload;
const noCustomUserAgentEval = yield consoleEvalJS(consoleClient,
"document.body.textContent");
is(noCustomUserAgentEval.result, window.navigator.userAgent,
"Got the expected result with reload without a customized userAgent");
yield teardown({client});
});
add_task(function* test_exception_inspectedWindowReload_stopped() {
const {
client, consoleClient, inspectedWindowFront,
} = yield setup(`${TEST_RELOAD_URL}?test=injected-script&frames=3`);
// Test reload on a page that calls window.stop() immediately during the page loading
const waitForPageLoad = waitForNextTabNavigated(client);
yield inspectedWindowFront.eval(FAKE_CALLER_INFO,
"window.location += '&stop=windowStop'");
info("Load a webpage that calls 'window.stop()' while is still loading");
yield waitForPageLoad;
info("Starting a reload with an injectedScript");
const waitForInjectedScriptReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO,
{injectedScript: `new ${injectedScript}`});
yield waitForInjectedScriptReload;
const injectedScriptEval = yield consoleEvalJS(consoleClient,
`(${collectEvalResults})()`);
// The page should have stopped during the reload and only one injected script
// is expected.
const expectedResult = (new Array(1)).fill("injected script executed first");
SimpleTest.isDeeply(JSON.parse(injectedScriptEval.result), expectedResult,
"The injected script has been executed on the 'stopped' page reload");
// Reload again with no options.
info("Reload the tab again without any reload options");
const waitForNoInjectedScriptReload = waitForNextTabNavigated(client);
yield inspectedWindowFront.reload(FAKE_CALLER_INFO, {});
yield waitForNoInjectedScriptReload;
const noInjectedScriptEval = yield consoleEvalJS(consoleClient,
`(${collectEvalResults})()`);
// The page should have stopped during the reload and no injected script should
// have been executed during this second reload (or it would mean that the previous
// customized reload was still pending and has wrongly affected the second reload)
const newExpectedResult = (new Array(1)).fill("injected script NOT executed");
SimpleTest.isDeeply(
JSON.parse(noInjectedScriptEval.result), newExpectedResult,
"No injectedScript should have been evaluated during the second reload"
);
yield teardown({client});
});
// TODO: check eval with $0 binding once implemented (Bug 1300590)

View File

@ -0,0 +1,75 @@
Components.utils.importGlobalProperties(["URLSearchParams"]);
function handleRequest(request, response) {
let params = new URLSearchParams(request.queryString);
switch(params.get("test")) {
case "cache":
handleCacheTestRequest(request, response);
break;
case "user-agent":
handleUserAgentTestRequest(request, response);
break;
case "injected-script":
handleInjectedScriptTestRequest(request, response, params);
break;
}
}
function handleCacheTestRequest(request, response) {
response.setHeader("Content-Type", "text/plain; charset=UTF-8", false);
if (request.hasHeader("pragma") && request.hasHeader("cache-control")) {
response.write(`${request.getHeader("pragma")}:${request.getHeader("cache-control")}`);
} else {
response.write("empty cache headers");
}
}
function handleUserAgentTestRequest(request, response) {
response.setHeader("Content-Type", "text/plain; charset=UTF-8", false);
if (request.hasHeader("user-agent")) {
response.write(request.getHeader("user-agent"));
} else {
response.write("no user agent header");
}
}
function handleInjectedScriptTestRequest(request, response, params) {
response.setHeader("Content-Type", "text/html; charset=UTF-8", false);
const frames = parseInt(params.get("frames"));
let content = "";
if (frames > 0) {
// Output an iframe in seamless mode, so that there is an higher chance that in case
// of test failures we get a screenshot where the nested iframes are all visible.
content = `<iframe seamless src="?test=injected-script&frames=${frames - 1}"></iframe>`;
}
if (params.get("stop") == "windowStop") {
content = "<script>window.stop();</script>" + content;
}
response.write(`<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<style>
iframe { width: 100%; height: ${frames * 150}px; }
</style>
</head>
<body>
<h1>IFRAME ${frames}</h1>
<pre>injected script NOT executed</pre>
<script>
window.pageScriptExecutedFirst = true;
</script>
${content}
</body>
</html>
`);
}

View File

@ -37,5 +37,6 @@ DevToolsModules(
'stylesheets.js',
'timeline.js',
'webaudio.js',
'webextension-inspected-window.js',
'webgl.js'
)

View File

@ -0,0 +1,27 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
webExtensionInspectedWindowSpec,
} = require("devtools/shared/specs/webextension-inspected-window");
const protocol = require("devtools/shared/protocol");
/**
* The corresponding Front object for the WebExtensionInspectedWindowActor.
*/
const WebExtensionInspectedWindowFront = protocol.FrontClassWithSpec(
webExtensionInspectedWindowSpec,
{
initialize: function (client, { webExtensionInspectedWindowActor }) {
protocol.Front.prototype.initialize.call(this, client, {
actor: webExtensionInspectedWindowActor
});
this.manage(this);
}
}
);
exports.WebExtensionInspectedWindowFront = WebExtensionInspectedWindowFront;

View File

@ -45,6 +45,7 @@ DevToolsModules(
'stylesheets.js',
'timeline.js',
'webaudio.js',
'webextension-inspected-window.js',
'webgl.js',
'worker.js'
)

View File

@ -0,0 +1,106 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
Arg,
RetVal,
generateActorSpec,
types,
} = require("devtools/shared/protocol");
/**
* Sent with the eval and reload requests, used to inform the
* webExtensionInspectedWindowActor about the caller information
* to be able to evaluate code as being executed from the caller
* WebExtension sources, or log errors with information that can
* help the addon developer to more easily identify the affected
* lines in his own addon code.
*/
types.addDictType("webExtensionCallerInfo", {
// Information related to the line of code that has originated
// the request.
url: "string",
lineNumber: "nullable:number",
// The called addonId.
addonId: "string",
});
/**
* RDP type related to the inspectedWindow.eval method request.
*/
types.addDictType("webExtensionEvalOptions", {
frameURL: "nullable:string",
contextSecurityOrigin: "nullable:string",
useContentScriptContext: "nullable:boolean",
});
/**
* RDP type related to the inspectedWindow.eval method result errors.
*
* This type has been modelled on the same data format
* used in the corresponding chrome API method.
*/
types.addDictType("webExtensionEvalExceptionInfo", {
// The following properties are set if the error has not occurred
// in the evaluated JS code.
isError: "nullable:boolean",
code: "nullable:string",
description: "nullable:string",
details: "nullable:array:json",
// The following properties are set if the error has occurred
// in the evaluated JS code.
isException: "nullable:string",
value: "nullable:string",
});
/**
* RDP type related to the inspectedWindow.eval method result.
*/
types.addDictType("webExtensionEvalResult", {
// The following properties are set if the evaluation has been
// completed successfully.
value: "nullable:json",
// The following properties are set if the evalutation has been
// completed with errors.
exceptionInfo: "nullable:webExtensionEvalExceptionInfo",
});
/**
* RDP type related to the inspectedWindow.reload method request.
*/
types.addDictType("webExtensionReloadOptions", {
ignoreCache: "nullable:boolean",
userAgent: "nullable:string",
injectedScript: "nullable:string",
});
const webExtensionInspectedWindowSpec = generateActorSpec({
typeName: "webExtensionInspectedWindow",
methods: {
reload: {
request: {
webExtensionCallerInfo: Arg(0, "webExtensionCallerInfo"),
options: Arg(1, "webExtensionReloadOptions"),
},
},
eval: {
request: {
webExtensionCallerInfo: Arg(0, "webExtensionCallerInfo"),
expression: Arg(1, "string"),
options: Arg(2, "webExtensionEvalOptions"),
},
response: {
evalResult: RetVal("webExtensionEvalResult"),
},
},
},
});
exports.webExtensionInspectedWindowSpec = webExtensionInspectedWindowSpec;
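// Sketch with an assumed packet layout, not part of the patch: roughly what
// an inspectedWindow.eval request built from the spec above might look like
// on the wire (actor ID and caller info are placeholders).
const exampleEvalRequest = {
  to: "<webExtensionInspectedWindowActorID>",
  type: "eval",
  webExtensionCallerInfo: {
    url: "moz-extension://fake-uuid/fake-caller-script.js",
    lineNumber: 1,
    addonId: "fake-uuid",
  },
  expression: "window.location.href",
  options: {},
};
// The reply is then expected to carry the RetVal under "evalResult", i.e.
// { from: <actor>, evalResult: { value: ... } } or
// { from: <actor>, evalResult: { exceptionInfo: ... } }.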

View File

@ -182,6 +182,8 @@ WebConsoleClient.prototype = {
* this.CACHED_MESSAGES for known types.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getCachedMessages: function (types, onResponse) {
let packet = {
@ -189,7 +191,7 @@ WebConsoleClient.prototype = {
type: "getCachedMessages",
messageTypes: types,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -199,13 +201,15 @@ WebConsoleClient.prototype = {
* The WebConsoleObjectActor ID to send the request to.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
inspectObjectProperties: function (actor, onResponse) {
let packet = {
to: actor,
type: "inspectProperties",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -244,6 +248,8 @@ WebConsoleClient.prototype = {
* exists. This is used by helper functions that can
* reference the currently selected node in the Inspector,
* like $0.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
evaluateJS: function (string, onResponse, options = {}) {
let packet = {
@ -256,7 +262,7 @@ WebConsoleClient.prototype = {
selectedNodeActor: options.selectedNodeActor,
selectedObjectActor: options.selectedObjectActor,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -266,8 +272,7 @@ WebConsoleClient.prototype = {
evaluateJSAsync: function (string, onResponse, options = {}) {
// Pre-37 servers don't support async evaluation.
if (!this.traits.evaluateJSAsync) {
this.evaluateJS(string, onResponse, options);
return;
return this.evaluateJS(string, onResponse, options);
}
let packet = {
@ -281,12 +286,23 @@ WebConsoleClient.prototype = {
selectedObjectActor: options.selectedObjectActor,
};
this._client.request(packet, response => {
// Null check this in case the client has been detached while waiting
// for a response.
if (this.pendingEvaluationResults) {
this.pendingEvaluationResults.set(response.resultID, onResponse);
}
return new Promise((resolve, reject) => {
this._client.request(packet, response => {
// Null check this in case the client has been detached while waiting
// for a response.
if (this.pendingEvaluationResults) {
this.pendingEvaluationResults.set(response.resultID, resp => {
if (onResponse) {
onResponse(resp);
}
if (resp.error) {
reject(resp);
} else {
resolve(resp);
}
});
}
});
});
},
@ -326,6 +342,8 @@ WebConsoleClient.prototype = {
* The function invoked when the response is received.
* @param string frameActor
* The id of the frame actor that made the call.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
autocomplete: function (string, cursor, onResponse, frameActor) {
let packet = {
@ -335,18 +353,21 @@ WebConsoleClient.prototype = {
cursor: cursor,
frameActor: frameActor,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
* Clear the cache of messages (page errors and console API calls).
*
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
clearMessagesCache: function () {
let packet = {
to: this._actor,
type: "clearMessagesCache",
};
this._client.request(packet);
return this._client.request(packet);
},
/**
@ -356,6 +377,8 @@ WebConsoleClient.prototype = {
* An array with the preferences you want to retrieve.
* @param function [onResponse]
* Optional function to invoke when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getPreferences: function (preferences, onResponse) {
let packet = {
@ -363,7 +386,7 @@ WebConsoleClient.prototype = {
type: "getPreferences",
preferences: preferences,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -373,6 +396,8 @@ WebConsoleClient.prototype = {
* An object with the preferences you want to change.
* @param function [onResponse]
* Optional function to invoke when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
setPreferences: function (preferences, onResponse) {
let packet = {
@ -380,7 +405,7 @@ WebConsoleClient.prototype = {
type: "setPreferences",
preferences: preferences,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -390,13 +415,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getRequestHeaders: function (actor, onResponse) {
let packet = {
to: actor,
type: "getRequestHeaders",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -406,13 +433,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getRequestCookies: function (actor, onResponse) {
let packet = {
to: actor,
type: "getRequestCookies",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -422,13 +451,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getRequestPostData: function (actor, onResponse) {
let packet = {
to: actor,
type: "getRequestPostData",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -438,13 +469,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getResponseHeaders: function (actor, onResponse) {
let packet = {
to: actor,
type: "getResponseHeaders",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -454,13 +487,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getResponseCookies: function (actor, onResponse) {
let packet = {
to: actor,
type: "getResponseCookies",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -470,13 +505,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getResponseContent: function (actor, onResponse) {
let packet = {
to: actor,
type: "getResponseContent",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -486,13 +523,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getEventTimings: function (actor, onResponse) {
let packet = {
to: actor,
type: "getEventTimings",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -502,13 +541,15 @@ WebConsoleClient.prototype = {
* The NetworkEventActor ID.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
getSecurityInfo: function (actor, onResponse) {
let packet = {
to: actor,
type: "getSecurityInfo",
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -518,6 +559,8 @@ WebConsoleClient.prototype = {
* The details of the HTTP request.
* @param function onResponse
* The function invoked when the response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
sendHTTPRequest: function (data, onResponse) {
let packet = {
@ -525,7 +568,7 @@ WebConsoleClient.prototype = {
type: "sendHTTPRequest",
request: data
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -537,6 +580,8 @@ WebConsoleClient.prototype = {
* known listeners.
* @param function onResponse
* Function to invoke when the server response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
startListeners: function (listeners, onResponse) {
let packet = {
@ -544,7 +589,7 @@ WebConsoleClient.prototype = {
type: "startListeners",
listeners: listeners,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
/**
@ -556,6 +601,8 @@ WebConsoleClient.prototype = {
* known listeners.
* @param function onResponse
* Function to invoke when the server response is received.
* @return request
* Request object that implements both Promise and EventEmitter interfaces
*/
stopListeners: function (listeners, onResponse) {
let packet = {
@ -563,7 +610,7 @@ WebConsoleClient.prototype = {
type: "stopListeners",
listeners: listeners,
};
this._client.request(packet, onResponse);
return this._client.request(packet, onResponse);
},
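// Sketch, not part of the patch (variable names are placeholders): every
// method above now returns the request object, which implements both the
// Promise and EventEmitter interfaces, so callers may keep the callback style
// or chain on the returned request.
webConsoleClient.getRequestHeaders(actor, response => {
  // existing callback style, unchanged
});
webConsoleClient.getRequestHeaders(actor).then(response => {
  // promise style, enabled by the `return this._client.request(...)` changes
});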
/**

View File

@ -24,13 +24,11 @@ let {MAX_AUTOCOMPLETE_ATTEMPTS,MAX_AUTOCOMPLETIONS} = require("devtools/shared/w
// evaluateJS and once with evaluateJSAsync.
let evaluatingSync = true;
function evaluateJS(input, options = {}) {
return new Promise((resolve, reject) => {
if (evaluatingSync) {
gState.client.evaluateJS(input, resolve, options);
} else {
gState.client.evaluateJSAsync(input, resolve, options);
}
});
if (evaluatingSync) {
return gState.client.evaluateJS(input, null, options);
} else {
return gState.client.evaluateJSAsync(input, null, options);
}
}
function startTest()

View File

@ -136,6 +136,11 @@ TextTrack::GetId(nsAString& aId) const
void
TextTrack::AddCue(TextTrackCue& aCue)
{
TextTrack* oldTextTrack = aCue.GetTrack();
if (oldTextTrack) {
ErrorResult dummy;
oldTextTrack->RemoveCue(aCue, dummy);
}
mCueList->AddCue(aCue);
aCue.SetTrack(this);
if (mTextTrackList) {

View File

@ -812,6 +812,7 @@ skip-if = appname == "seamonkey"
[test_preserve_playbackrate_after_ui_play.html]
[test_progress.html]
[test_reactivate.html]
skip-if = true # see bug 1319725
[test_readyState.html]
[test_referer.html]
[test_replay_metadata.html]

View File

@ -204,6 +204,9 @@ MP4VideoInfo::Update(const mp4parse_track_info* track,
mDisplay.height = video->display_height;
mImage.width = video->image_width;
mImage.height = video->image_height;
if (video->extra_data.data) {
mExtraData->AppendElements(video->extra_data.data, video->extra_data.length);
}
}
#endif

View File

@ -309,6 +309,10 @@ MP4Metadata::GetTrackInfo(mozilla::TrackInfo::TrackType aType,
VideoInfo *videoRust = infoRust->GetAsVideoInfo(), *video = info->GetAsVideoInfo();
MOZ_DIAGNOSTIC_ASSERT(videoRust->mDisplay == video->mDisplay);
MOZ_DIAGNOSTIC_ASSERT(videoRust->mImage == video->mImage);
MOZ_DIAGNOSTIC_ASSERT(*videoRust->mExtraData == *video->mExtraData);
// mCodecSpecificConfig is for video/mp4-es, not video/avc. Since video/mp4-es
// is supported on b2g only, it could be removed from TrackInfo.
MOZ_DIAGNOSTIC_ASSERT(*videoRust->mCodecSpecificConfig == *video->mCodecSpecificConfig);
break;
}
default:

View File

@ -69,6 +69,7 @@ typedef struct mp4parse_track_video_info {
uint32_t display_height;
uint16_t image_width;
uint16_t image_height;
mp4parse_byte_data extra_data;
} mp4parse_track_video_info;
typedef struct mp4parse_fragment_info {

File diff suppressed because it is too large. Load Diff

View File

@ -148,8 +148,8 @@ fn read_truncated_ftyp() {
let mut context = MediaContext::new();
match read_mp4(&mut stream, &mut context) {
Err(Error::UnexpectedEOF) => (),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -593,8 +593,8 @@ fn serialize_opus_header() {
};
let mut v = Vec::<u8>::new();
super::serialize_opus_header(&opus, &mut v).unwrap();
assert!(v.len() == 19);
assert!(v == vec![
assert_eq!(v.len(), 19);
assert_eq!(v, vec![
0x4f, 0x70, 0x75, 0x73, 0x48,0x65, 0x61, 0x64,
0x01, 0x01, 0x56, 0x01,
0xc0, 0x5d, 0x00, 0x00,
@ -615,8 +615,8 @@ fn serialize_opus_header() {
};
let mut v = Vec::<u8>::new();
super::serialize_opus_header(&opus, &mut v).unwrap();
assert!(v.len() == 27);
assert!(v == vec![
assert_eq!(v.len(), 27);
assert_eq!(v, vec![
0x4f, 0x70, 0x75, 0x73, 0x48,0x65, 0x61, 0x64,
0x01, 0x06, 0x98, 0x00,
0x80, 0xbb, 0x00, 0x00,
@ -645,8 +645,8 @@ fn avcc_limit() {
let mut track = super::Track::new(0);
match super::read_video_sample_entry(&mut stream, &mut track) {
Err(Error::InvalidData(s)) => assert_eq!(s, "avcC box exceeds BUF_SIZE_LIMIT"),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -671,8 +671,8 @@ fn esds_limit() {
let mut track = super::Track::new(0);
match super::read_audio_sample_entry(&mut stream, &mut track) {
Err(Error::InvalidData(s)) => assert_eq!(s, "esds box exceeds BUF_SIZE_LIMIT"),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -697,8 +697,8 @@ fn esds_limit_2() {
let mut track = super::Track::new(0);
match super::read_audio_sample_entry(&mut stream, &mut track) {
Err(Error::UnexpectedEOF) => (),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -713,8 +713,8 @@ fn read_elst_zero_entries() {
let mut stream = iter.next_box().unwrap().unwrap();
match super::read_elst(&mut stream) {
Err(Error::InvalidData(s)) => assert_eq!(s, "invalid edit count"),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -741,8 +741,8 @@ fn read_edts_bogus() {
let mut track = super::Track::new(0);
match super::read_edts(&mut stream, &mut track) {
Err(Error::InvalidData(s)) => assert_eq!(s, "expected additional edit"),
Ok(_) => assert!(false, "expected an error result"),
_ => assert!(false, "expected a different error result"),
Ok(_) => panic!("expected an error result"),
_ => panic!("expected a different error result"),
}
}
@ -827,7 +827,7 @@ fn skip_padding_in_stsd() {
fn read_qt_wave_atom() {
let esds = make_fullbox(BoxSize::Auto, b"esds", 0, |s| {
s.B8(0x03) // elementary stream descriptor tag
.B8(0x0b) // esds length
.B8(0x12) // esds length
.append_repeated(0, 2)
.B8(0x00) // flags
.B8(0x04) // decoder config descriptor tag

View file

@ -78,8 +78,8 @@ fn public_api() {
mp4::AudioCodecSpecific::FLACSpecificBox(flac) => {
// STREAMINFO block must be present and first.
assert!(flac.blocks.len() > 0);
assert!(flac.blocks[0].block_type == 0);
assert!(flac.blocks[0].data.len() == 34);
assert_eq!(flac.blocks[0].block_type, 0);
assert_eq!(flac.blocks[0].data.len(), 34);
"FLAC"
}
mp4::AudioCodecSpecific::OpusSpecificBox(opus) => {

View file

@ -141,7 +141,7 @@ pub struct mp4parse_track_audio_info {
// TODO(kinetik):
// int32_t profile;
// int32_t extended_profile; // check types
codec_specific_config: mp4parse_byte_data,
pub codec_specific_config: mp4parse_byte_data,
}
#[repr(C)]
@ -150,9 +150,7 @@ pub struct mp4parse_track_video_info {
pub display_height: u32,
pub image_width: u16,
pub image_height: u16,
// TODO(kinetik):
// extra_data
// codec_specific_config
pub extra_data: mp4parse_byte_data,
}
#[repr(C)]
@ -336,7 +334,7 @@ fn media_time_to_us(time: MediaScaledTime, scale: MediaTimeScale) -> Option<u64>
}
fn track_time_to_us(time: TrackScaledTime, scale: TrackTimeScale) -> Option<u64> {
assert!(time.1 == scale.1);
assert_eq!(time.1, scale.1);
let microseconds_per_second = 1000000;
rational_scale(time.0, scale.0, microseconds_per_second)
}
@ -460,11 +458,11 @@ pub unsafe extern fn mp4parse_get_track_audio_info(parser: *mut mp4parse_parser,
match audio.codec_specific {
AudioCodecSpecific::ES_Descriptor(ref v) => {
if v.codec_specific_config.len() > std::u32::MAX as usize {
if v.codec_esds.len() > std::u32::MAX as usize {
return MP4PARSE_ERROR_INVALID;
}
(*info).codec_specific_config.length = v.codec_specific_config.len() as u32;
(*info).codec_specific_config.data = v.codec_specific_config.as_ptr();
(*info).codec_specific_config.length = v.codec_esds.len() as u32;
(*info).codec_specific_config.data = v.codec_esds.as_ptr();
if let Some(rate) = v.audio_sample_rate {
(*info).sample_rate = rate;
}
@ -547,6 +545,13 @@ pub unsafe extern fn mp4parse_get_track_video_info(parser: *mut mp4parse_parser,
(*info).image_width = video.width;
(*info).image_height = video.height;
match video.codec_specific {
VideoCodecSpecific::AVCConfig(ref avc) => {
(*info).extra_data.set_data(avc);
},
_ => {},
}
MP4PARSE_OK
}
@ -686,9 +691,9 @@ fn get_track_count_null_parser() {
unsafe {
let mut count: u32 = 0;
let rv = mp4parse_get_track_count(std::ptr::null(), std::ptr::null_mut());
assert!(rv == MP4PARSE_ERROR_BADARG);
assert_eq!(rv, MP4PARSE_ERROR_BADARG);
let rv = mp4parse_get_track_count(std::ptr::null(), &mut count);
assert!(rv == MP4PARSE_ERROR_BADARG);
assert_eq!(rv, MP4PARSE_ERROR_BADARG);
}
}
@ -737,6 +742,7 @@ fn arg_validation() {
display_height: 0,
image_width: 0,
image_height: 0,
extra_data: mp4parse_byte_data::default(),
};
assert_eq!(MP4PARSE_ERROR_BADARG, mp4parse_get_track_video_info(std::ptr::null_mut(), 0, &mut dummy_video));
@ -781,6 +787,7 @@ fn arg_validation_with_parser() {
display_height: 0,
image_width: 0,
image_height: 0,
extra_data: mp4parse_byte_data::default(),
};
assert_eq!(MP4PARSE_ERROR_BADARG, mp4parse_get_track_video_info(parser, 0, &mut dummy_video));
@ -807,7 +814,7 @@ fn get_track_count_poisoned_parser() {
let mut count: u32 = 0;
let rv = mp4parse_get_track_count(parser, &mut count);
assert!(rv == MP4PARSE_ERROR_BADARG);
assert_eq!(rv, MP4PARSE_ERROR_BADARG);
}
}
@ -852,6 +859,7 @@ fn arg_validation_with_data() {
display_height: 0,
image_width: 0,
image_height: 0,
extra_data: mp4parse_byte_data::default(),
};
assert_eq!(MP4PARSE_OK, mp4parse_get_track_video_info(parser, 0, &mut video));
assert_eq!(video.display_width, 320);
@ -883,7 +891,8 @@ fn arg_validation_with_data() {
let mut video = mp4parse_track_video_info { display_width: 0,
display_height: 0,
image_width: 0,
image_height: 0 };
image_height: 0,
extra_data: mp4parse_byte_data::default(),};
assert_eq!(MP4PARSE_ERROR_BADARG, mp4parse_get_track_video_info(parser, 3, &mut video));
assert_eq!(video.display_width, 0);
assert_eq!(video.display_height, 0);

View file

@ -24,6 +24,34 @@ class Variant;
namespace detail {
template <typename...>
struct FirstTypeIsInRest;
template <typename First>
struct FirstTypeIsInRest<First> : FalseType {};
template <typename First, typename Second, typename... Rest>
struct FirstTypeIsInRest<First, Second, Rest...>
{
static constexpr bool value =
IsSame<First, Second>::value ||
FirstTypeIsInRest<First, Rest...>::value;
};
template <typename...>
struct TypesAreDistinct;
template <>
struct TypesAreDistinct<> : TrueType { };
template<typename First, typename... Rest>
struct TypesAreDistinct<First, Rest...>
{
static constexpr bool value =
!FirstTypeIsInRest<First, Rest...>::value &&
TypesAreDistinct<Rest...>::value;
};
// MaxSizeOf computes the maximum sizeof(T) for each T in Ts.
template<typename T, typename... Ts>
@ -428,6 +456,7 @@ struct AsVariantTemporary
template<typename... Ts>
class MOZ_INHERIT_TYPE_ANNOTATIONS_FROM_TEMPLATE_ARGS Variant
{
static_assert(detail::TypesAreDistinct<Ts...>::value, "Variant with duplicate types is not supported");
using Tag = typename detail::VariantTag<Ts...>::Type;
using Impl = detail::VariantImplementation<Tag, 0, Ts...>;
using RawData = AlignedStorage<detail::MaxSizeOf<Ts...>::size>;

View file

@ -1931,6 +1931,9 @@ public abstract class GeckoApp
// Remember interaction
final UrlAnnotations urlAnnotations = BrowserDB.from(getApplicationContext()).getUrlAnnotations();
urlAnnotations.insertHomeScreenShortcut(getContentResolver(), aURI, true);
// After the shortcut is created, go to the device home screen.
ActivityUtils.goToHomeScreen(this);
}
private Bitmap getLauncherIcon(Bitmap aSource, int size) {

View file

@ -16,6 +16,7 @@ public class SiteIdentity {
private MixedMode mMixedModeActive;
private MixedMode mMixedModeDisplay;
private TrackingMode mTrackingMode;
private boolean mSecurityException;
private String mHost;
private String mOwner;
private String mSupplemental;
@ -129,6 +130,7 @@ public class SiteIdentity {
public void resetIdentity() {
mSecurityMode = SecurityMode.UNKNOWN;
mSecurityException = false;
mOrigin = null;
mHost = null;
mOwner = null;
@ -143,6 +145,7 @@ public class SiteIdentity {
mMixedModeActive = MixedMode.UNKNOWN;
mMixedModeDisplay = MixedMode.UNKNOWN;
mTrackingMode = TrackingMode.UNKNOWN;
mSecurityException = false;
}
void update(JSONObject identityData) {
@ -187,6 +190,8 @@ public class SiteIdentity {
mCountry = identityData.optString("country", null);
mVerifier = identityData.optString("verifier", null);
mSecure = identityData.optBoolean("secure", false);
mSecurityException = identityData.optBoolean("securityException", false);
} catch (Exception e) {
resetIdentity();
}
@ -243,6 +248,10 @@ public class SiteIdentity {
return mMixedModeDisplay;
}
public boolean isSecurityException() {
return mSecurityException;
}
public TrackingMode getTrackingMode() {
return mTrackingMode;
}

View file

@ -214,8 +214,9 @@ final class JellyBeanAsyncCodec implements AsyncCodec {
int result = mCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
if (result >= 0) {
mCallbackSender.notifyInputBuffer(result);
schedulePollingIfNotCanceled(BufferPoller.MSG_POLL_INPUT_BUFFERS);
} else if (result != MediaCodec.INFO_TRY_AGAIN_LATER) {
} else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
mBufferPoller.schedulePollingIfNotCanceled(BufferPoller.MSG_POLL_INPUT_BUFFERS);
} else {
mCallbackSender.notifyError(result);
}
}
@ -229,9 +230,6 @@ final class JellyBeanAsyncCodec implements AsyncCodec {
mOutputEnded = true;
}
mCallbackSender.notifyOutputBuffer(result, info);
if (!hasMessages(MSG_POLL_INPUT_BUFFERS)) {
schedulePollingIfNotCanceled(MSG_POLL_INPUT_BUFFERS);
}
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mOutputBuffers = mCodec.getOutputBuffers();
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
@ -326,8 +324,10 @@ final class JellyBeanAsyncCodec implements AsyncCodec {
mInputEnded = false;
mOutputEnded = false;
mInputBuffers = mCodec.getInputBuffers();
for (int i = 0; i < mInputBuffers.length; i++) {
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
}
mOutputBuffers = mCodec.getOutputBuffers();
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
}
@Override
@ -344,8 +344,8 @@ final class JellyBeanAsyncCodec implements AsyncCodec {
return;
}
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_OUTPUT_BUFFERS);
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
}
@Override
@ -399,7 +399,9 @@ final class JellyBeanAsyncCodec implements AsyncCodec {
mOutputEnded = false;
cancelPendingTasks();
mCodec.flush();
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
for (int i = 0; i < mInputBuffers.length; i++) {
mBufferPoller.schedulePolling(BufferPoller.MSG_POLL_INPUT_BUFFERS);
}
}
private void cancelPendingTasks() {

View file

@ -30,6 +30,7 @@ import org.mozilla.gecko.icons.IconCallback;
import org.mozilla.gecko.icons.IconResponse;
import org.mozilla.gecko.icons.Icons;
import org.mozilla.gecko.Experiments;
import org.mozilla.gecko.util.ActivityUtils;
import org.mozilla.gecko.util.ThreadUtils;
/**
@ -123,23 +124,14 @@ public class HomeScreenPrompt extends Locales.LocaleAwareActivity implements Ico
Telemetry.sendUIEvent(TelemetryContract.Event.ACTION, TelemetryContract.Method.BUTTON, TELEMETRY_EXTRA);
goToHomeScreen();
ActivityUtils.goToHomeScreen(HomeScreenPrompt.this);
finish();
}
});
}
/**
* Finish this activity and launch the default home screen activity.
*/
private void goToHomeScreen() {
Intent intent = new Intent(Intent.ACTION_MAIN);
intent.addCategory(Intent.CATEGORY_HOME);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
finish();
}
private void loadShortcutIcon() {
Icons.with(this)

View file

@ -363,6 +363,14 @@ public class SiteIdentityPopup extends AnchoredPopup implements GeckoEventListen
mSecurityState.setText(R.string.identity_connection_insecure);
mSecurityState.setTextColor(ContextCompat.getColor(mContext, R.color.placeholder_active_grey));
} else if (siteIdentity.isSecurityException()) {
mIcon.setImageResource(R.drawable.lock_inactive);
setSecurityStateIcon(R.drawable.warning_major, 1);
mSecurityState.setText(R.string.identity_connection_insecure);
mSecurityState.setTextColor(ContextCompat.getColor(mContext, R.color.placeholder_active_grey));
} else {
// Connection is secure.
mIcon.setImageResource(R.drawable.lock_secure);

View file

@ -359,16 +359,20 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
final MixedMode activeMixedMode;
final MixedMode displayMixedMode;
final TrackingMode trackingMode;
final boolean securityException;
if (siteIdentity == null) {
securityMode = SecurityMode.UNKNOWN;
activeMixedMode = MixedMode.UNKNOWN;
displayMixedMode = MixedMode.UNKNOWN;
trackingMode = TrackingMode.UNKNOWN;
securityException = false;
} else {
securityMode = siteIdentity.getSecurityMode();
activeMixedMode = siteIdentity.getMixedModeActive();
displayMixedMode = siteIdentity.getMixedModeDisplay();
trackingMode = siteIdentity.getTrackingMode();
securityException = siteIdentity.isSecurityException();
}
// This is a bit tricky, but we have one icon and three potential indicators.
@ -387,6 +391,8 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
if (AboutPages.isTitlelessAboutPage(tab.getURL())) {
// We always want to just show a search icon on about:home
imageLevel = LEVEL_SEARCH_ICON;
} else if (securityException) {
imageLevel = LEVEL_WARNING_MINOR;
} else if (trackingMode == TrackingMode.TRACKING_CONTENT_LOADED) {
imageLevel = LEVEL_SHIELD_DISABLED;
} else if (trackingMode == TrackingMode.TRACKING_CONTENT_BLOCKED) {

View file

@ -5918,9 +5918,10 @@ var IdentityHandler = {
if (this._lastLocation.hostname &&
this._overrideService.hasMatchingOverride(this._lastLocation.hostname,
(this._lastLocation.port || 443),
iData.cert, {}, {}))
iData.cert, {}, {})) {
result.verifier = Strings.browser.GetStringFromName("identity.identified.verified_by_you");
result.securityException = true;
}
return result;
},

View file

@ -6,6 +6,7 @@
package org.mozilla.gecko.util;
import android.app.Activity;
import android.content.Intent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
@ -56,4 +57,16 @@ public class ActivityUtils {
final int flags = window.getAttributes().flags;
return ((flags & WindowManager.LayoutParams.FLAG_FULLSCREEN) != 0);
}
/**
* Finish this activity and launch the default home screen activity.
*/
public static void goToHomeScreen(Activity activity) {
Intent intent = new Intent(Intent.ACTION_MAIN);
intent.addCategory(Intent.CATEGORY_HOME);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
activity.startActivity(intent);
}
}

View file

@ -1 +1 @@
version = "1.1"
version = "1.2.1"

View file

@ -5,8 +5,9 @@
'Commands exposed to commandlines'
import logging
from optparse import OptionParser, make_option
from argparse import ArgumentParser
from compare_locales import version
from compare_locales.paths import EnumerateApp
from compare_locales.compare import compareApp, compareDirs
from compare_locales.webapps import compare_web_app
@ -17,37 +18,35 @@ class BaseCommand(object):
This handles command line parsing, and general sugar for setuptools
entry_points.
"""
options = [
make_option('-v', '--verbose', action='count', dest='v', default=0,
help='Make more noise'),
make_option('-q', '--quiet', action='count', dest='q', default=0,
help='Make less noise'),
make_option('-m', '--merge',
help='''Use this directory to stage merged files,
use {ab_CD} to specify a different directory for each locale'''),
]
data_option = make_option('--data', choices=['text', 'exhibit', 'json'],
default='text',
help='''Choose data and format (one of text,
exhibit, json); text: (default) Show which files miss which strings, together
with warnings and errors. Also prints a summary; json: Serialize the internal
tree, useful for tools. Also always succeeds; exhibit: Serialize the summary
data in a json useful for Exhibit
''')
def __init__(self):
self.parser = None
def get_parser(self):
"""Get an OptionParser, with class docstring as usage, and
self.options.
"""Get an ArgumentParser, with class docstring as description.
"""
parser = OptionParser()
parser.set_usage(self.__doc__)
for option in self.options:
parser.add_option(option)
parser = ArgumentParser(description=self.__doc__)
parser.add_argument('--version', action='version',
version='%(prog)s ' + version)
parser.add_argument('-v', '--verbose', action='count', dest='v',
default=0, help='Make more noise')
parser.add_argument('-q', '--quiet', action='count', dest='q',
default=0, help='Make less noise')
parser.add_argument('-m', '--merge',
help='''Use this directory to stage merged files,
use {ab_CD} to specify a different directory for each locale''')
return parser
def add_data_argument(self, parser):
parser.add_argument('--data', choices=['text', 'exhibit', 'json'],
default='text',
help='''Choose data and format (one of text,
exhibit, json); text: (default) Show which files miss which strings, together
with warnings and errors. Also prints a summary; json: Serialize the internal
tree, useful for tools. Also always succeeds; exhibit: Serialize the summary
data in a json useful for Exhibit
''')
@classmethod
def call(cls):
"""Entry_point for setuptools.
@ -60,15 +59,15 @@ data in a json useful for Exhibit
def handle_(self):
"""The instance part of the classmethod call."""
self.parser = self.get_parser()
(options, args) = self.parser.parse_args()
args = self.parser.parse_args()
# log as verbose or quiet as we want, warn by default
logging.basicConfig()
logging.getLogger().setLevel(logging.WARNING -
(options.v - options.q)*10)
observer = self.handle(args, options)
print observer.serialize(type=options.data).encode('utf-8', 'replace')
(args.v - args.q) * 10)
observer = self.handle(args)
print observer.serialize(type=args.data).encode('utf-8', 'replace')
def handle(self, args, options):
def handle(self, args):
"""Subclasses need to implement this method for the actual
command handling.
"""
@ -76,39 +75,42 @@ data in a json useful for Exhibit
class CompareLocales(BaseCommand):
"""usage: %prog [options] l10n.ini l10n_base_dir [locale ...]
Check the localization status of a gecko application.
"""Check the localization status of a gecko application.
The first argument is a path to the l10n.ini file for the application,
followed by the base directory of the localization repositories.
Then you pass in the list of locale codes you want to compare. If no
locales are given, the list of locales will be taken from the all-locales file
of the application\'s l10n.ini."""
options = BaseCommand.options + [
make_option('--clobber-merge', action="store_true", default=False,
dest='clobber',
help="""WARNING: DATALOSS.
def get_parser(self):
parser = super(CompareLocales, self).get_parser()
parser.add_argument('ini_file', metavar='l10n.ini',
help='INI file for the project')
parser.add_argument('l10n_base_dir', metavar='l10n-base-dir',
help='Parent directory of localizations')
parser.add_argument('locales', nargs='*', metavar='locale-code',
help='Locale code and top-level directory of '
'each localization')
parser.add_argument('--clobber-merge', action="store_true",
default=False, dest='clobber',
help="""WARNING: DATALOSS.
Use this option with care. If specified, the merge directory will
be clobbered for each module. That means, the subdirectory will
be completely removed, any files that were there are lost.
Be careful to specify the right merge directory when using this option."""),
make_option('-r', '--reference', default='en-US', dest='reference',
help='Explicitly set the reference '
'localization. [default: en-US]'),
BaseCommand.data_option
]
Be careful to specify the right merge directory when using this option.""")
parser.add_argument('-r', '--reference', default='en-US',
dest='reference',
help='Explicitly set the reference '
'localization. [default: en-US]')
self.add_data_argument(parser)
return parser
def handle(self, args, options):
if len(args) < 2:
self.parser.error('Need to pass in list of languages')
inipath, l10nbase = args[:2]
locales = args[2:]
app = EnumerateApp(inipath, l10nbase, locales)
app.reference = options.reference
def handle(self, args):
app = EnumerateApp(args.ini_file, args.l10n_base_dir, args.locales)
app.reference = args.reference
try:
observer = compareApp(app, merge_stage=options.merge,
clobber=options.clobber)
observer = compareApp(app, merge_stage=args.merge,
clobber=args.clobber)
except (OSError, IOError), exc:
print "FAIL: " + str(exc)
self.parser.exit(2)
@ -116,39 +118,38 @@ Be careful to specify the right merge directory when using this option."""),
class CompareDirs(BaseCommand):
"""usage: %prog [options] reference localization
Check the localization status of a directory tree.
"""Check the localization status of a directory tree.
The first argument is a path to the reference data, the second is the
localization to be tested."""
options = BaseCommand.options + [
BaseCommand.data_option
]
def get_parser(self):
parser = super(CompareDirs, self).get_parser()
parser.add_argument('reference')
parser.add_argument('localization')
self.add_data_argument(parser)
return parser
def handle(self, args, options):
if len(args) != 2:
self.parser.error('Reference and localization required')
reference, locale = args
observer = compareDirs(reference, locale, merge_stage=options.merge)
def handle(self, args):
observer = compareDirs(args.reference, args.localization,
merge_stage=args.merge)
return observer
class CompareWebApp(BaseCommand):
"""usage: %prog [options] webapp [locale locale]
Check the localization status of a gaia-style web app.
"""Check the localization status of a gaia-style web app.
The first argument is the directory of the web app.
Following arguments explicitly state the locales to test.
If none are given, test all locales in manifest.webapp or files."""
options = BaseCommand.options[:-1] + [
BaseCommand.data_option]
def get_parser(self):
parser = super(CompareWebApp, self).get_parser()
parser.add_argument('webapp')
parser.add_argument('locales', nargs='*', metavar='locale-code',
help='Locale code and top-level directory of '
'each localization')
self.add_data_argument(parser)
return parser
def handle(self, args, options):
if len(args) < 1:
self.parser.error('Webapp directory required')
basedir = args[0]
locales = args[1:]
observer = compare_web_app(basedir, locales)
def handle(self, args):
observer = compare_web_app(args.webapp, args.locales)
return observer
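A minimal usage sketch for the argparse-based commands; the module path and the input paths below are assumptions for illustration, not part of this patch, but the positional arguments mirror the add_argument() calls above:

import sys
# assumed module location of the commands shown above
from compare_locales.commands import CompareLocales

# equivalent to: compare-locales l10n.ini /path/to/l10n-base de fr
sys.argv = ['compare-locales', 'l10n.ini', '/path/to/l10n-base', 'de', 'fr']
CompareLocales.call()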

View file

@ -383,13 +383,13 @@ class ContentComparer:
self.merge_stage = merge_stage
def merge(self, ref_entities, ref_map, ref_file, l10n_file, missing,
skips, p):
skips, ctx, canMerge, encoding):
outfile = os.path.join(self.merge_stage, l10n_file.module,
l10n_file.file)
outdir = os.path.dirname(outfile)
if not os.path.isdir(outdir):
os.makedirs(outdir)
if not p.canMerge:
if not canMerge:
shutil.copyfile(ref_file.fullpath, outfile)
print "copied reference to " + outfile
return
@ -402,16 +402,16 @@ class ContentComparer:
if not isinstance(skip, parser.Junk)])
if skips:
# we need to skip a few erroneous blocks in the input, copy by hand
f = codecs.open(outfile, 'wb', p.encoding)
f = codecs.open(outfile, 'wb', encoding)
offset = 0
for skip in skips:
chunk = skip.span
f.write(p.contents[offset:chunk[0]])
f.write(ctx.contents[offset:chunk[0]])
offset = chunk[1]
f.write(p.contents[offset:])
f.write(ctx.contents[offset:])
else:
shutil.copyfile(l10n_file.fullpath, outfile)
f = codecs.open(outfile, 'ab', p.encoding)
f = codecs.open(outfile, 'ab', encoding)
print "adding to " + outfile
def ensureNewline(s):
@ -458,20 +458,10 @@ class ContentComparer:
try:
p.readContents(l10n.getContents())
l10n_entities, l10n_map = p.parse()
l10n_ctx = p.ctx
except Exception, e:
self.notify('error', l10n, str(e))
return
lines = []
def _getLine(offset):
if not lines:
lines.append(0)
for m in self.nl.finditer(p.contents):
lines.append(m.end())
for i in xrange(len(lines), 0, -1):
if offset >= lines[i - 1]:
return (i, offset - lines[i - 1])
return (1, offset)
l10n_list = l10n_map.keys()
l10n_list.sort()
@ -501,9 +491,10 @@ class ContentComparer:
if isinstance(l10n_entities[l10n_map[item_or_pair]],
parser.Junk):
junk = l10n_entities[l10n_map[item_or_pair]]
params = (junk.val,) + junk.span
params = (junk.val,) + junk.position() + junk.position(-1)
self.notify('error', l10n,
'Unparsed content "%s" at %d-%d' % params)
'Unparsed content "%s" from line %d colum %d'
' to line %d column %d' % params)
if self.merge_stage is not None:
skips.append(junk)
elif self.notify('obsoleteEntity', l10n,
@ -528,17 +519,17 @@ class ContentComparer:
for tp, pos, msg, cat in checker.check(refent, l10nent):
# compute real src position, if first line,
# col needs adjustment
_l, _offset = _getLine(l10nent.val_span[0])
if isinstance(pos, tuple):
_l, col = l10nent.value_position()
# line, column
if pos[0] == 1:
col = pos[1] + _offset
col = col + pos[1]
else:
col = pos[1]
_l += pos[0] - 1
_l += pos[0] - 1
else:
_l, col = _getLine(l10nent.val_span[0] + pos)
# skip error entities when merging
_l, col = l10nent.value_position(pos)
# skip error entities when merging
if tp == 'error' and self.merge_stage is not None:
skips.append(l10nent)
self.notify(tp, l10n,
@ -548,7 +539,10 @@ class ContentComparer:
if missing:
self.notify('missing', l10n, missing)
if self.merge_stage is not None and (missings or skips):
self.merge(ref[0], ref[1], ref_file, l10n, missings, skips, p)
self.merge(
ref[0], ref[1], ref_file,
l10n, missings, skips, l10n_ctx,
p.canMerge, p.encoding)
if report:
self.notify('report', l10n, report)
if obsolete:
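The column adjustment in the checker loop above can be read in isolation; a small standalone sketch, assuming the same 1-based (line, column) tuples that value_position() returns:

def absolute_position(value_pos, checker_pos):
    # value_pos: where the value starts in the file, from value_position()
    # checker_pos: position reported by the checker, relative to the value
    line, col = value_pos
    rel_line, rel_col = checker_pos
    if rel_line == 1:
        # hit on the value's first line: shift by the value's start column
        return (line, col + rel_col)
    return (line + rel_line - 1, rel_col)

print absolute_position((3, 10), (1, 4))  # -> (3, 14)
print absolute_position((3, 10), (2, 5))  # -> (4, 5)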

View file

@ -3,76 +3,93 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import bisect
import codecs
import logging
from HTMLParser import HTMLParser
__constructors = []
class Entity(object):
class EntityBase(object):
'''
Abstraction layer for a localizable entity.
Currently supported are grammars of the form:
1: pre white space
2: pre comments
3: entity definition
4: entity key (name)
5: entity value
6: post comment (and white space) in the same line (dtd only)
2: entity definition
3: entity key (name)
4: entity value
5: post white space
<--[1]
<!-- pre comments --> <--[2]
<!ENTITY key "value"> <!-- comment -->
<!ENTITY key "value">
<-------[3]---------><------[6]------>
<-------[2]--------->
'''
def __init__(self, contents, pp,
span, pre_ws_span, pre_comment_span, def_span,
def __init__(self, ctx, pp, pre_comment,
span, pre_ws_span, def_span,
key_span, val_span, post_span):
self.contents = contents
self.ctx = ctx
self.span = span
self.pre_ws_span = pre_ws_span
self.pre_comment_span = pre_comment_span
self.def_span = def_span
self.key_span = key_span
self.val_span = val_span
self.post_span = post_span
self.pp = pp
self.pre_comment = pre_comment
pass
def position(self, offset=0):
"""Get the 1-based line and column of the character
with given offset into the Entity.
If offset is negative, return the end of the Entity.
"""
if offset < 0:
pos = self.span[1]
else:
pos = self.span[0] + offset
return self.ctx.lines(pos)[0]
def value_position(self, offset=0):
"""Get the 1-based line and column of the character
with given offset into the value.
If offset is negative, return the end of the value.
"""
if offset < 0:
pos = self.val_span[1]
else:
pos = self.val_span[0] + offset
return self.ctx.lines(pos)[0]
# getter helpers
def get_all(self):
return self.contents[self.span[0]:self.span[1]]
return self.ctx.contents[self.span[0]:self.span[1]]
def get_pre_ws(self):
return self.contents[self.pre_ws_span[0]:self.pre_ws_span[1]]
def get_pre_comment(self):
return self.contents[self.pre_comment_span[0]:
self.pre_comment_span[1]]
return self.ctx.contents[self.pre_ws_span[0]:self.pre_ws_span[1]]
def get_def(self):
return self.contents[self.def_span[0]:self.def_span[1]]
return self.ctx.contents[self.def_span[0]:self.def_span[1]]
def get_key(self):
return self.contents[self.key_span[0]:self.key_span[1]]
return self.ctx.contents[self.key_span[0]:self.key_span[1]]
def get_val(self):
return self.pp(self.contents[self.val_span[0]:self.val_span[1]])
return self.pp(self.ctx.contents[self.val_span[0]:self.val_span[1]])
def get_raw_val(self):
return self.contents[self.val_span[0]:self.val_span[1]]
return self.ctx.contents[self.val_span[0]:self.val_span[1]]
def get_post(self):
return self.contents[self.post_span[0]:self.post_span[1]]
return self.ctx.contents[self.post_span[0]:self.post_span[1]]
# getters
all = property(get_all)
pre_ws = property(get_pre_ws)
pre_comment = property(get_pre_comment)
definition = property(get_def)
key = property(get_key)
val = property(get_val)
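To make the span layout concrete, a short sketch that runs a single DTD entity through the public parser API (the file name and input string are illustrative only):

from compare_locales import parser

p = parser.getParser('foo.dtd')
p.readContents('  <!ENTITY key "value">\n')
entity = list(p)[0]
print repr(entity.pre_ws)      # '  '                    -- pre white space
print repr(entity.definition)  # '<!ENTITY key "value">' -- entity definition
print repr(entity.key)         # 'key'
print repr(entity.val)         # 'value'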
@ -83,6 +100,32 @@ class Entity(object):
return self.key
class Entity(EntityBase):
pass
class Comment(EntityBase):
def __init__(self, ctx, span, pre_ws_span, def_span,
post_span):
self.ctx = ctx
self.span = span
self.pre_ws_span = pre_ws_span
self.def_span = def_span
self.post_span = post_span
self.pp = lambda v: v
@property
def key(self):
return None
@property
def val(self):
return None
def __repr__(self):
return self.all
class Junk(object):
'''
An almost-Entity, representing junk data that we didn't parse.
@ -91,16 +134,28 @@ class Junk(object):
'''
junkid = 0
def __init__(self, contents, span):
self.contents = contents
def __init__(self, ctx, span):
self.ctx = ctx
self.span = span
self.pre_ws = self.pre_comment = self.definition = self.post = ''
self.pre_ws = self.definition = self.post = ''
self.__class__.junkid += 1
self.key = '_junk_%d_%d-%d' % (self.__class__.junkid, span[0], span[1])
def position(self, offset=0):
"""Get the 1-based line and column of the character
with given offset into the Entity.
If offset is negative, return the end of the Entity.
"""
if offset < 0:
pos = self.span[1]
else:
pos = self.span[0] + offset
return self.ctx.lines(pos)[0]
# getter helpers
def get_all(self):
return self.contents[self.span[0]:self.span[1]]
return self.ctx.contents[self.span[0]:self.span[1]]
# getters
all = property(get_all)
@ -110,26 +165,65 @@ class Junk(object):
return self.key
class Whitespace(EntityBase):
'''Entity-like object representing an empty file with whitespace,
if allowed
'''
def __init__(self, ctx, span):
self.ctx = ctx
self.key_span = self.val_span = self.span = span
self.def_span = self.pre_ws_span = (span[0], span[0])
self.post_span = (span[1], span[1])
self.pp = lambda v: v
def __repr__(self):
return self.raw_val
class Parser:
canMerge = True
tail = re.compile('\s+\Z')
class Context(object):
"Fixture for content and line numbers"
def __init__(self, contents):
self.contents = contents
self._lines = None
def lines(self, *positions):
# return line and column tuples, 1-based
if self._lines is None:
nl = re.compile('\n', re.M)
self._lines = [m.end()
for m in nl.finditer(self.contents)]
line_nrs = [bisect.bisect(self._lines, p) for p in positions]
# compute columns
pos_ = [
(1 + line, 1 + p - (self._lines[line-1] if line else 0))
for line, p in zip(line_nrs, positions)]
return pos_
def __init__(self):
if not hasattr(self, 'encoding'):
self.encoding = 'utf-8'
pass
self.ctx = None
self.last_comment = None
def readFile(self, file):
f = codecs.open(file, 'r', self.encoding)
try:
self.contents = f.read()
except UnicodeDecodeError, e:
(logging.getLogger('locales')
.error("Can't read file: " + file + '; ' + str(e)))
self.contents = u''
f.close()
with open(file, 'rU') as f:
try:
self.readContents(f.read())
except UnicodeDecodeError, e:
(logging.getLogger('locales')
.error("Can't read file: " + file + '; ' + str(e)))
def readContents(self, contents):
(self.contents, length) = codecs.getdecoder(self.encoding)(contents)
'''Read contents and create parsing context.
contents are in native encoding, but with normalized line endings.
'''
(contents, length) = codecs.getdecoder(self.encoding)(contents)
self.ctx = Parser.Context(contents)
def parse(self):
l = []
@ -143,52 +237,57 @@ class Parser:
return val
def __iter__(self):
contents = self.contents
return self.walk(onlyEntities=True)
def walk(self, onlyEntities=False):
if not self.ctx:
# loading file failed, or we just didn't load anything
return
ctx = self.ctx
contents = ctx.contents
offset = 0
self.header, offset = self.getHeader(contents, offset)
self.footer = ''
entity, offset = self.getEntity(contents, offset)
entity, offset = self.getEntity(ctx, offset)
while entity:
yield entity
entity, offset = self.getEntity(contents, offset)
f = self.reFooter.match(contents, offset)
if f:
self.footer = f.group()
offset = f.end()
if (not onlyEntities or
type(entity) is Entity or
type(entity) is Junk):
yield entity
entity, offset = self.getEntity(ctx, offset)
if len(contents) > offset:
yield Junk(contents, (offset, len(contents)))
pass
yield Junk(ctx, (offset, len(contents)))
def getHeader(self, contents, offset):
header = ''
h = self.reHeader.match(contents)
if h:
header = h.group()
offset = h.end()
return (header, offset)
def getEntity(self, contents, offset):
m = self.reKey.match(contents, offset)
def getEntity(self, ctx, offset):
m = self.reKey.match(ctx.contents, offset)
if m:
offset = m.end()
entity = self.createEntity(contents, m)
entity = self.createEntity(ctx, m)
return (entity, offset)
# first check if footer has a non-empty match,
# 'cause then we don't find junk
m = self.reFooter.match(contents, offset)
if m and m.end() > offset:
return (None, offset)
m = self.reKey.search(contents, offset)
m = self.reComment.match(ctx.contents, offset)
if m:
# we didn't match, but search, so there's junk between offset
# and start. We'll match() on the next turn
junkend = m.start()
return (Junk(contents, (offset, junkend)), junkend)
return (None, offset)
offset = m.end()
self.last_comment = Comment(ctx, *[m.span(i) for i in xrange(4)])
return (self.last_comment, offset)
return self.getTrailing(ctx, offset, self.reKey, self.reComment)
def createEntity(self, contents, m):
return Entity(contents, self.postProcessValue,
*[m.span(i) for i in xrange(7)])
def getTrailing(self, ctx, offset, *expressions):
junkend = None
for exp in expressions:
m = exp.search(ctx.contents, offset)
if m:
junkend = min(junkend, m.start()) if junkend else m.start()
if junkend is None:
if self.tail.match(ctx.contents, offset):
white_end = len(ctx.contents)
return (Whitespace(ctx, (offset, white_end)), white_end)
else:
return (None, offset)
return (Junk(ctx, (offset, junkend)), junkend)
def createEntity(self, ctx, m):
pre_comment = unicode(self.last_comment) if self.last_comment else ''
self.last_comment = ''
return Entity(ctx, self.postProcessValue, pre_comment,
*[m.span(i) for i in xrange(6)])
def getParser(path):
@ -230,22 +329,20 @@ class DTDParser(Parser):
# [#x0300-#x036F] | [#x203F-#x2040]
NameChar = NameStartChar + ur'\-\.0-9' + u'\xB7\u0300-\u036F\u203F-\u2040'
Name = '[' + NameStartChar + '][' + NameChar + ']*'
reKey = re.compile('(?:(?P<pre>\s*)(?P<precomment>(?:' + XmlComment +
'\s*)*)(?P<entity><!ENTITY\s+(?P<key>' + Name +
reKey = re.compile('(?:(?P<pre>\s*)(?P<entity><!ENTITY\s+(?P<key>' + Name +
')\s+(?P<val>\"[^\"]*\"|\'[^\']*\'?)\s*>)'
'(?P<post>[ \t]*(?:' + XmlComment + '\s*)*\n?)?)',
re.DOTALL)
'(?P<post>\s*)?)',
re.DOTALL | re.M)
# add BOM to DTDs, details in bug 435002
reHeader = re.compile(u'^\ufeff?'
u'(\s*<!--.*(http://mozilla.org/MPL/2.0/|'
u'LICENSE BLOCK)([^-]+-)*[^-]+-->)?', re.S)
reFooter = re.compile('\s*(<!--([^-]+-)*[^-]+-->\s*)*$')
rePE = re.compile('(?:(\s*)((?:' + XmlComment + '\s*)*)'
'(<!ENTITY\s+%\s+(' + Name +
')\s+SYSTEM\s+(\"[^\"]*\"|\'[^\']*\')\s*>\s*%' + Name +
';)([ \t]*(?:' + XmlComment + '\s*)*\n?)?)')
reHeader = re.compile(u'^\ufeff')
reComment = re.compile('(\s*)(<!--(-?[%s])*?-->)(\s*)' % CharMinusDash,
re.S)
rePE = re.compile(u'(?:(\s*)'
u'(<!ENTITY\s+%\s+(' + Name +
u')\s+SYSTEM\s+(\"[^\"]*\"|\'[^\']*\')\s*>\s*%' + Name +
u';)([ \t]*(?:' + XmlComment + u'\s*)*\n?)?)')
def getEntity(self, contents, offset):
def getEntity(self, ctx, offset):
'''
Overload Parser.getEntity to special-case ParsedEntities.
Just check for a parsed entity if that method claims junk.
@ -253,20 +350,26 @@ class DTDParser(Parser):
<!ENTITY % foo SYSTEM "url">
%foo;
'''
entity, inneroffset = Parser.getEntity(self, contents, offset)
if offset is 0 and self.reHeader.match(ctx.contents):
offset += 1
entity, inneroffset = Parser.getEntity(self, ctx, offset)
if (entity and isinstance(entity, Junk)) or entity is None:
m = self.rePE.match(contents, offset)
m = self.rePE.match(ctx.contents, offset)
if m:
inneroffset = m.end()
entity = Entity(contents, self.postProcessValue,
*[m.span(i) for i in xrange(7)])
self.last_comment = ''
entity = Entity(ctx, self.postProcessValue, '',
*[m.span(i) for i in xrange(6)])
return (entity, inneroffset)
def createEntity(self, contents, m):
def createEntity(self, ctx, m):
valspan = m.span('val')
valspan = (valspan[0]+1, valspan[1]-1)
return Entity(contents, self.postProcessValue, m.span(),
m.span('pre'), m.span('precomment'),
pre_comment = unicode(self.last_comment) if self.last_comment else ''
self.last_comment = ''
return Entity(ctx, self.postProcessValue, pre_comment,
m.span(),
m.span('pre'),
m.span('entity'), m.span('key'), valspan,
m.span('post'))
@ -278,30 +381,30 @@ class PropertiesParser(Parser):
def __init__(self):
self.reKey = re.compile('^(\s*)'
'((?:[#!].*?\n\s*)*)'
'([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', re.M)
self.reHeader = re.compile('^\s*([#!].*\s*)+')
self.reFooter = re.compile('\s*([#!].*\s*)*$')
self.reComment = re.compile('(\s*)(((?:[#!][^\n]*\n?)+))', re.M)
self._escapedEnd = re.compile(r'\\+$')
self._trailingWS = re.compile(r'[ \t]*$')
self._trailingWS = re.compile(r'\s*(?:\n|\Z)', re.M)
Parser.__init__(self)
def getHeader(self, contents, offset):
header = ''
h = self.reHeader.match(contents, offset)
if h:
candidate = h.group()
if 'http://mozilla.org/MPL/2.0/' in candidate or \
'LICENSE BLOCK' in candidate:
header = candidate
offset = h.end()
return (header, offset)
def getEntity(self, contents, offset):
def getEntity(self, ctx, offset):
# overwritten to parse values line by line
contents = ctx.contents
m = self.reComment.match(contents, offset)
if m:
spans = [m.span(i) for i in xrange(3)]
start_trailing = offset = m.end()
while offset < len(contents):
m = self._trailingWS.match(contents, offset)
if not m:
break
offset = m.end()
spans.append((start_trailing, offset))
self.last_comment = Comment(ctx, *spans)
return (self.last_comment, offset)
m = self.reKey.match(contents, offset)
if m:
offset = m.end()
startline = offset = m.end()
while True:
endval = nextline = contents.find('\n', offset)
if nextline == -1:
@ -315,26 +418,24 @@ class PropertiesParser(Parser):
# backslashes at end of line, if 2*n, not escaped
if len(_e.group()) % 2 == 0:
break
startline = offset
# strip trailing whitespace
ws = self._trailingWS.search(contents, m.end(), offset)
ws = self._trailingWS.search(contents, startline)
if ws:
endval -= ws.end() - ws.start()
entity = Entity(contents, self.postProcessValue,
endval = ws.start()
offset = ws.end()
pre_comment = (unicode(self.last_comment) if self.last_comment
else '')
self.last_comment = ''
entity = Entity(ctx, self.postProcessValue, pre_comment,
(m.start(), offset), # full span
m.span(1), # leading whitespan
m.span(2), # leading comment span
(m.start(3), offset), # entity def span
m.span(3), # key span
(m.start(2), offset), # entity def span
m.span(2), # key span
(m.end(), endval), # value span
(offset, offset)) # post comment span, empty
return (entity, offset)
m = self.reKey.search(contents, offset)
if m:
# we didn't match, but search, so there's junk between offset
# and start. We'll match() on the next turn
junkend = m.start()
return (Junk(contents, (offset, junkend)), junkend)
return (None, offset)
return self.getTrailing(ctx, offset, self.reKey, self.reComment)
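The line-continuation rule used above is easy to get backwards, so here is a standalone sketch of just that check (it mirrors the _escapedEnd test, not the full parser):

import re

_escaped_end = re.compile(r'\\+$')

def value_continues(line):
    # a .properties value spills onto the next line only when the line ends
    # in an odd number of backslashes; an even count means the last
    # backslash is itself escaped and the value stops here
    m = _escaped_end.search(line)
    return bool(m) and len(m.group()) % 2 == 1

print value_continues('one = first line \\')    # True, value continues
print value_continues('two = ends in \\\\')     # False, escaped backslash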
def postProcessValue(self, val):
@ -349,18 +450,77 @@ class PropertiesParser(Parser):
return val
class DefinesInstruction(EntityBase):
'''Entity-like object representing processing instructions in inc files
'''
def __init__(self, ctx, span, pre_ws_span, def_span, val_span, post_span):
self.ctx = ctx
self.span = span
self.pre_ws_span = pre_ws_span
self.def_span = def_span
self.key_span = self.val_span = val_span
self.post_span = post_span
self.pp = lambda v: v
def __repr__(self):
return self.raw_val
class DefinesParser(Parser):
# can't merge, #unfilter needs to be the last item, which we don't support
canMerge = False
tail = re.compile(r'(?!)') # never match
def __init__(self):
self.reKey = re.compile('^(\s*)((?:^#(?!define\s).*\s*)*)'
'(#define[ \t]+(\w+)[ \t]+(.*?))([ \t]*$\n?)',
self.reComment = re.compile(
'((?:[ \t]*\n)*)'
'((?:^# .*?(?:\n|\Z))+)'
'((?:[ \t]*(?:\n|\Z))*)', re.M)
self.reKey = re.compile('((?:[ \t]*\n)*)'
'(#define[ \t]+(\w+)(?:[ \t](.*?))?(?:\n|\Z))'
'((?:[ \t]*(?:\n|\Z))*)',
re.M)
self.reHeader = re.compile('^\s*(#(?!define\s).*\s*)*')
self.reFooter = re.compile('\s*(#(?!define\s).*\s*)*$', re.M)
self.rePI = re.compile('((?:[ \t]*\n)*)'
'(#(\w+)[ \t]+(.*?)(?:\n|\Z))'
'((?:[ \t]*(?:\n|\Z))*)',
re.M)
Parser.__init__(self)
def getEntity(self, ctx, offset):
contents = ctx.contents
m = self.reComment.match(contents, offset)
if m:
offset = m.end()
self.last_comment = Comment(ctx, *[m.span(i) for i in xrange(4)])
return (self.last_comment, offset)
m = self.reKey.match(contents, offset)
if m:
offset = m.end()
return (self.createEntity(ctx, m), offset)
m = self.rePI.match(contents, offset)
if m:
offset = m.end()
return (DefinesInstruction(ctx, *[m.span(i) for i in xrange(5)]),
offset)
return self.getTrailing(ctx, offset,
self.reComment, self.reKey, self.rePI)
class IniSection(EntityBase):
'''Entity-like object representing sections in ini files
'''
def __init__(self, ctx, span, pre_ws_span, def_span, val_span, post_span):
self.ctx = ctx
self.span = span
self.pre_ws_span = pre_ws_span
self.def_span = def_span
self.key_span = self.val_span = val_span
self.post_span = post_span
self.pp = lambda v: v
def __repr__(self):
return self.raw_val
class IniParser(Parser):
'''
@ -373,149 +533,40 @@ class IniParser(Parser):
...
'''
def __init__(self):
self.reHeader = re.compile('^((?:\s*|[;#].*)\n)*\[.+?\]\n', re.M)
self.reKey = re.compile('(\s*)((?:[;#].*\n\s*)*)((.+?)=(.*))(\n?)')
self.reFooter = re.compile('\s*([;#].*\s*)*$')
self.reComment = re.compile(
'((?:[ \t]*\n)*)'
'((?:^[;#].*?(?:\n|\Z))+)'
'((?:[ \t]*(?:\n|\Z))*)', re.M)
self.reSection = re.compile(
'((?:[ \t]*\n)*)'
'(\[(.*?)\])'
'((?:[ \t]*(?:\n|\Z))*)', re.M)
self.reKey = re.compile(
'((?:[ \t]*\n)*)'
'((.+?)=(.*))'
'((?:[ \t]*(?:\n|\Z))*)', re.M)
Parser.__init__(self)
DECL, COMMENT, START, END, CONTENT = range(5)
class BookmarksParserInner(HTMLParser):
class Token(object):
_type = None
content = ''
def __str__(self):
return self.content
class DeclToken(Token):
_type = DECL
def __init__(self, decl):
self.content = decl
pass
def __str__(self):
return '<!%s>' % self.content
pass
class CommentToken(Token):
_type = COMMENT
def __init__(self, comment):
self.content = comment
pass
def __str__(self):
return '<!--%s-->' % self.content
pass
class StartToken(Token):
_type = START
def __init__(self, tag, attrs, content):
self.tag = tag
self.attrs = dict(attrs)
self.content = content
pass
pass
class EndToken(Token):
_type = END
def __init__(self, tag):
self.tag = tag
pass
def __str__(self):
return '</%s>' % self.tag.upper()
pass
class ContentToken(Token):
_type = CONTENT
def __init__(self, content):
self.content = content
pass
pass
def __init__(self):
HTMLParser.__init__(self)
self.tokens = []
def parse(self, contents):
self.tokens = []
self.feed(contents)
self.close()
return self.tokens
# Called when we hit an end DL tag to reset the folder selections
def handle_decl(self, decl):
self.tokens.append(self.DeclToken(decl))
# Called when we hit an end DL tag to reset the folder selections
def handle_comment(self, comment):
self.tokens.append(self.CommentToken(comment))
def handle_starttag(self, tag, attrs):
self.tokens.append(self.StartToken(tag, attrs,
self.get_starttag_text()))
# Called when text data is encountered
def handle_data(self, data):
if self.tokens[-1]._type == CONTENT:
self.tokens[-1].content += data
else:
self.tokens.append(self.ContentToken(data))
def handle_charref(self, data):
self.handle_data('&#%s;' % data)
def handle_entityref(self, data):
self.handle_data('&%s;' % data)
# Called when we hit an end DL tag to reset the folder selections
def handle_endtag(self, tag):
self.tokens.append(self.EndToken(tag))
class BookmarksParser(Parser):
canMerge = False
class BMEntity(object):
def __init__(self, key, val):
self.key = key
self.val = val
def __iter__(self):
p = BookmarksParserInner()
tks = p.parse(self.contents)
i = 0
k = []
for i in xrange(len(tks)):
t = tks[i]
if t._type == START:
k.append(t.tag)
keys = t.attrs.keys()
keys.sort()
for attrname in keys:
yield self.BMEntity('.'.join(k) + '.@' + attrname,
t.attrs[attrname])
if i + 1 < len(tks) and tks[i+1]._type == CONTENT:
i += 1
t = tks[i]
v = t.content.strip()
if v:
yield self.BMEntity('.'.join(k), v)
elif t._type == END:
k.pop()
def getEntity(self, ctx, offset):
contents = ctx.contents
m = self.reComment.match(contents, offset)
if m:
offset = m.end()
self.last_comment = Comment(ctx, *[m.span(i) for i in xrange(4)])
return (self.last_comment, offset)
m = self.reSection.match(contents, offset)
if m:
offset = m.end()
return (IniSection(ctx, *[m.span(i) for i in xrange(5)]), offset)
m = self.reKey.match(contents, offset)
if m:
offset = m.end()
return (self.createEntity(ctx, m), offset)
return self.getTrailing(ctx, offset,
self.reComment, self.reSection, self.reKey)
__constructors = [('\\.dtd$', DTDParser()),
('\\.properties$', PropertiesParser()),
('\\.ini$', IniParser()),
('\\.inc$', DefinesParser()),
('bookmarks\\.html$', BookmarksParser())]
('\\.inc$', DefinesParser())]

View file

@ -9,7 +9,7 @@ from itertools import izip_longest
from pkg_resources import resource_string
import re
from compare_locales.parser import getParser
from compare_locales import parser
class ParserTestMixin():
@ -20,7 +20,7 @@ class ParserTestMixin():
def setUp(self):
'''Create a parser for this test.
'''
self.parser = getParser(self.filename)
self.parser = parser.getParser(self.filename)
def tearDown(self):
'tear down this test'
@ -38,12 +38,13 @@ class ParserTestMixin():
of reference keys and values.
'''
self.parser.readContents(content)
entities = [entity for entity in self.parser]
entities = list(self.parser.walk())
for entity, ref in izip_longest(entities, refs):
self.assertTrue(entity, 'excess reference entity')
self.assertTrue(ref, 'excess parsed entity')
self.assertEqual(entity.val, ref[1])
if ref[0].startswith('_junk'):
self.assertTrue(re.match(ref[0], entity.key))
else:
self.assertTrue(entity, 'excess reference entity ' + unicode(ref))
self.assertTrue(ref, 'excess parsed entity ' + unicode(entity))
if isinstance(entity, parser.Entity):
self.assertEqual(entity.key, ref[0])
self.assertEqual(entity.val, ref[1])
else:
self.assertEqual(type(entity).__name__, ref[0])
self.assertIn(ref[1], entity.all)

View file

@ -6,7 +6,7 @@
import unittest
from compare_locales.checks import getChecker
from compare_locales.parser import getParser, Entity
from compare_locales.parser import getParser, Parser, Entity
from compare_locales.paths import File
@ -239,14 +239,16 @@ class TestAndroid(unittest.TestCase):
u"\\u0022, or put string in apostrophes."
def getEntity(self, v):
return Entity(v, lambda s: s, (0, len(v)), (), (0, 0), (), (),
ctx = Parser.Context(v)
return Entity(ctx, lambda s: s, '', (0, len(v)), (), (), (),
(0, len(v)), ())
def getDTDEntity(self, v):
v = v.replace('"', '&quot;')
return Entity('<!ENTITY foo "%s">' % v,
lambda s: s,
(0, len(v) + 16), (), (0, 0), (), (9, 12),
ctx = Parser.Context('<!ENTITY foo "%s">' % v)
return Entity(ctx,
lambda s: s, '',
(0, len(v) + 16), (), (), (9, 12),
(14, len(v) + 14), ())
def test_android_dtd(self):

View file

@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from compare_locales.tests import ParserTestMixin
mpl2 = '''\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
'''
class TestDefinesParser(ParserTestMixin, unittest.TestCase):
filename = 'defines.inc'
def testBrowser(self):
self._test(mpl2 + '''#filter emptyLines
#define MOZ_LANGPACK_CREATOR mozilla.org
# If non-English locales wish to credit multiple contributors, uncomment this
# variable definition and use the format specified.
# #define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
#unfilter emptyLines
''', (
('Comment', mpl2),
('DefinesInstruction', 'filter emptyLines'),
('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
('Comment', '#define'),
('DefinesInstruction', 'unfilter emptyLines')))
def testBrowserWithContributors(self):
self._test(mpl2 + '''#filter emptyLines
#define MOZ_LANGPACK_CREATOR mozilla.org
# If non-English locales wish to credit multiple contributors, uncomment this
# variable definition and use the format specified.
#define MOZ_LANGPACK_CONTRIBUTORS <em:contributor>Joe Solon</em:contributor>
#unfilter emptyLines
''', (
('Comment', mpl2),
('DefinesInstruction', 'filter emptyLines'),
('MOZ_LANGPACK_CREATOR', 'mozilla.org'),
('Comment', 'non-English'),
('MOZ_LANGPACK_CONTRIBUTORS',
'<em:contributor>Joe Solon</em:contributor>'),
('DefinesInstruction', 'unfilter emptyLines')))
def testCommentWithNonAsciiCharacters(self):
self._test(mpl2 + '''#filter emptyLines
# e.g. #define seamonkey_l10n <DT><A HREF="http://www.mozilla.cz/produkty/seamonkey/">SeaMonkey v češtině</a>
#define seamonkey_l10n_long
#unfilter emptyLines
''', (
('Comment', mpl2),
('DefinesInstruction', 'filter emptyLines'),
('Comment', u'češtině'),
('seamonkey_l10n_long', ''),
('DefinesInstruction', 'unfilter emptyLines')))
def testToolkit(self):
self._test('''#define MOZ_LANG_TITLE English (US)
''', (
('MOZ_LANG_TITLE', 'English (US)'),))
def testToolkitEmpty(self):
self._test('', tuple())
def test_empty_file(self):
'''Test that empty files generate errors.
defines.inc files are interesting that way, as their
content is added to the generated file.
'''
self._test('\n', (('Junk', '\n'),))
self._test('\n\n', (('Junk', '\n\n'),))
self._test(' \n\n', (('Junk', ' \n\n'),))
if __name__ == '__main__':
unittest.main()

View file

@ -8,7 +8,7 @@
import unittest
import re
from compare_locales.parser import getParser
from compare_locales import parser
from compare_locales.tests import ParserTestMixin
@ -30,9 +30,9 @@ class TestDTD(ParserTestMixin, unittest.TestCase):
'''
quoteRef = (
('good.one', 'one'),
('_junk_\\d_25-56$', '<!ENTITY bad.one "bad " quote">'),
('Junk', '<!ENTITY bad.one "bad " quote">'),
('good.two', 'two'),
('_junk_\\d_82-119$', '<!ENTITY bad.two "bad "quoted" word">'),
('Junk', '<!ENTITY bad.two "bad "quoted" word">'),
('good.three', 'three'),
('good.four', 'good \' quote'),
('good.five', 'good \'quoted\' word'),)
@ -62,25 +62,68 @@ class TestDTD(ParserTestMixin, unittest.TestCase):
<!ENTITY commented "out">
-->
''',
(('first', 'string'), ('second', 'string')))
(('first', 'string'), ('second', 'string'),
('Comment', 'out')))
def test_license_header(self):
p = getParser('foo.dtd')
p = parser.getParser('foo.dtd')
p.readContents(self.resource('triple-license.dtd'))
for e in p:
self.assertEqual(e.key, 'foo')
self.assertEqual(e.val, 'value')
self.assert_('MPL' in p.header)
entities = list(p.walk())
self.assert_(isinstance(entities[0], parser.Comment))
self.assertIn('MPL', entities[0].all)
e = entities[1]
self.assert_(isinstance(e, parser.Entity))
self.assertEqual(e.key, 'foo')
self.assertEqual(e.val, 'value')
self.assertEqual(len(entities), 2)
p.readContents('''\
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this file,
- You can obtain one at http://mozilla.org/MPL/2.0/. -->
<!ENTITY foo "value">
''')
for e in p:
self.assertEqual(e.key, 'foo')
self.assertEqual(e.val, 'value')
self.assert_('MPL' in p.header)
entities = list(p.walk())
self.assert_(isinstance(entities[0], parser.Comment))
self.assertIn('MPL', entities[0].all)
e = entities[1]
self.assert_(isinstance(e, parser.Entity))
self.assertEqual(e.key, 'foo')
self.assertEqual(e.val, 'value')
self.assertEqual(len(entities), 2)
def testBOM(self):
self._test(u'\ufeff<!ENTITY foo.label "stuff">'.encode('utf-8'),
(('foo.label', 'stuff'),))
def test_trailing_whitespace(self):
self._test('<!ENTITY foo.label "stuff">\n \n',
(('foo.label', 'stuff'),))
def test_unicode_comment(self):
self._test('<!-- \xe5\x8f\x96 -->',
(('Comment', u'\u53d6'),))
def test_empty_file(self):
self._test('', tuple())
self._test('\n', (('Whitespace', '\n'),))
self._test('\n\n', (('Whitespace', '\n\n'),))
self._test(' \n\n', (('Whitespace', ' \n\n'),))
def test_positions(self):
self.parser.readContents('''\
<!ENTITY one "value">
<!ENTITY two "other
escaped value">
''')
one, two = list(self.parser)
self.assertEqual(one.position(), (1, 1))
self.assertEqual(one.value_position(), (1, 16))
self.assertEqual(one.position(-1), (2, 1))
self.assertEqual(two.position(), (2, 1))
self.assertEqual(two.value_position(), (2, 16))
self.assertEqual(two.value_position(-1), (3, 14))
self.assertEqual(two.value_position(10), (3, 5))
if __name__ == '__main__':
unittest.main()

View file

@ -23,23 +23,30 @@ class TestIniParser(ParserTestMixin, unittest.TestCase):
self._test('''; This file is in the UTF-8 encoding
[Strings]
TitleText=Some Title
''', (('TitleText', 'Some Title'),))
self.assert_('UTF-8' in self.parser.header)
''', (
('Comment', 'UTF-8 encoding'),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),))
def testMPL2_Space_UTF(self):
self._test(mpl2 + '''
; This file is in the UTF-8 encoding
[Strings]
TitleText=Some Title
''', (('TitleText', 'Some Title'),))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('Comment', 'UTF-8'),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),))
def testMPL2_Space(self):
self._test(mpl2 + '''
[Strings]
TitleText=Some Title
''', (('TitleText', 'Some Title'),))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),))
def testMPL2_MultiSpace(self):
self._test(mpl2 + '''\
@ -48,26 +55,33 @@ TitleText=Some Title
[Strings]
TitleText=Some Title
''', (('TitleText', 'Some Title'),))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('Comment', 'more comments'),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),))
def testMPL2_JunkBeforeCategory(self):
self._test(mpl2 + '''\
Junk
[Strings]
TitleText=Some Title
''', (('_junk_\\d+_0-213$', mpl2 + '''\
Junk
[Strings]'''), ('TitleText', 'Some Title')))
self.assert_('MPL' not in self.parser.header)
''', (
('Comment', mpl2),
('Junk', 'Junk'),
('IniSection', 'Strings'),
('TitleText', 'Some Title')))
def test_TrailingComment(self):
self._test(mpl2 + '''
[Strings]
TitleText=Some Title
;Stray trailing comment
''', (('TitleText', 'Some Title'),))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),
('Comment', 'Stray trailing')))
def test_SpacedTrailingComments(self):
self._test(mpl2 + '''
@ -77,8 +91,11 @@ TitleText=Some Title
;Stray trailing comment
;Second stray comment
''', (('TitleText', 'Some Title'),))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),
('Comment', 'Second stray comment')))
def test_TrailingCommentsAndJunk(self):
self._test(mpl2 + '''
@ -89,14 +106,13 @@ TitleText=Some Title
Junk
;Second stray comment
''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-284$', '''\
;Stray trailing comment
Junk
;Second stray comment
''')))
self.assert_('MPL' in self.parser.header)
''', (
('Comment', mpl2),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),
('Comment', 'Stray trailing'),
('Junk', 'Junk'),
('Comment', 'Second stray comment')))
def test_JunkInbetweenEntries(self):
self._test(mpl2 + '''
@ -106,10 +122,18 @@ TitleText=Some Title
Junk
Good=other string
''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-236$', '''\
''', (
('Comment', mpl2),
('IniSection', 'Strings'),
('TitleText', 'Some Title'),
('Junk', 'Junk'),
('Good', 'other string')))
Junk'''), ('Good', 'other string')))
self.assert_('MPL' in self.parser.header)
def test_empty_file(self):
self._test('', tuple())
self._test('\n', (('Whitespace', '\n'),))
self._test('\n\n', (('Whitespace', '\n\n'),))
self._test(' \n\n', (('Whitespace', ' \n\n'),))
if __name__ == '__main__':
unittest.main()

View file

@ -13,7 +13,6 @@ from compare_locales.compare import ContentComparer
class ContentMixin(object):
maxDiff = None # we got big dictionaries to compare
extension = None # OVERLOAD
def reference(self, content):
@ -29,6 +28,7 @@ class TestProperties(unittest.TestCase, ContentMixin):
extension = '.properties'
def setUp(self):
self.maxDiff = None
self.tmp = mkdtemp()
os.mkdir(os.path.join(self.tmp, "merge"))
@ -98,7 +98,8 @@ eff = effVal""")
self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
self.localized("""bar = %S lBar
self.localized("""\
bar = %S lBar
eff = leffVal
""")
cc = ContentComparer()
@ -116,7 +117,7 @@ eff = leffVal
('l10n.properties',
{'value': {
'error': [u'argument 1 `S` should be `d` '
u'at line 1, column 6 for bar'],
u'at line 1, column 7 for bar'],
'missingEntity': [u'foo']}}
)
]}
@ -160,6 +161,7 @@ class TestDTD(unittest.TestCase, ContentMixin):
extension = '.dtd'
def setUp(self):
self.maxDiff = None
self.tmp = mkdtemp()
os.mkdir(os.path.join(self.tmp, "merge"))
@ -248,7 +250,9 @@ class TestDTD(unittest.TestCase, ContentMixin):
('l10n.dtd',
{'value': {
'error': [u'Unparsed content "<!ENTY bar '
u'\'gimmick\'>" at 23-44'],
u'\'gimmick\'>" '
u'from line 2 column 1 to '
u'line 2 column 22'],
'missingEntity': [u'bar']}}
)
]}

View file

@ -0,0 +1,44 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from compare_locales import parser
class TestParserContext(unittest.TestCase):
def test_lines(self):
"Test that Parser.Context.lines returns 1-based tuples"
ctx = parser.Parser.Context('''first line
second line
third line
''')
self.assertEqual(
ctx.lines(0, 1),
[(1, 1), (1, 2)]
)
self.assertEqual(
ctx.lines(len('first line')),
[(1, len('first line') + 1)]
)
self.assertEqual(
ctx.lines(len('first line') + 1),
[(2, 1)]
)
self.assertEqual(
ctx.lines(len(ctx.contents)),
[(4, 1)]
)
def test_empty_parser(self):
p = parser.Parser()
entities, _map = p.parse()
self.assertListEqual(
entities,
[]
)
self.assertDictEqual(
_map,
{}
)
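
The tests above pin down a 1-based (line, column) convention for Parser.Context.lines. As a standalone illustration (plain JavaScript, not the compare_locales implementation; the helper name and sample text are made up), the sketch below shows one way an absolute character offset maps to such a tuple:

// Illustrative sketch only: map a 0-based character offset in `text`
// to a 1-based (line, column) pair, mirroring what the tests assert.
function offsetToLineCol(text, offset) {
  let line = 1;
  let lineStart = 0;
  for (let i = 0; i < offset; i++) {
    if (text[i] === "\n") {
      line += 1;
      lineStart = i + 1;
    }
  }
  return [line, offset - lineStart + 1];
}

const sample = "first line\nsecond line\nthird line\n";
console.log(offsetToLineCol(sample, 0));                       // [1, 1]
console.log(offsetToLineCol(sample, "first line".length + 1)); // [2, 1]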

View file

@ -24,7 +24,7 @@ and still has another line coming
('one_line', 'This is one line'),
('two_line', u'This is the first of two lines'),
('one_line_trailing', u'This line ends in \\'),
('_junk_\\d+_113-126$', 'and has junk\n'),
('Junk', 'and has junk\n'),
('two_lines_triple', 'This line is one of two and ends in \\'
'and still has another line coming')))
@ -63,8 +63,7 @@ and an end''', (('bar', 'one line with a # part that looks like a comment '
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
foo=value
''', (('foo', 'value'),))
self.assert_('MPL' in self.parser.header)
''', (('Comment', 'MPL'), ('foo', 'value')))
def test_escapes(self):
self.parser.readContents(r'''
@ -88,8 +87,64 @@ second = string
#
#commented out
''', (('first', 'string'), ('second', 'string')))
''', (('first', 'string'), ('second', 'string'),
('Comment', 'commented out')))
def test_trailing_newlines(self):
self._test('''\
foo = bar
\x20\x20
''', (('foo', 'bar'),))
def test_just_comments(self):
self._test('''\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# LOCALIZATION NOTE These strings are used inside the Promise debugger
# which is available as a panel in the Debugger.
''', (('Comment', 'MPL'), ('Comment', 'LOCALIZATION NOTE')))
def test_just_comments_without_trailing_newline(self):
self._test('''\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# LOCALIZATION NOTE These strings are used inside the Promise debugger
# which is available as a panel in the Debugger.''', (
('Comment', 'MPL'), ('Comment', 'LOCALIZATION NOTE')))
def test_trailing_comment_and_newlines(self):
self._test('''\
# LOCALIZATION NOTE These strings are used inside the Promise debugger
# which is available as a panel in the Debugger.
''', (('Comment', 'LOCALIZATION NOTE'),))
def test_empty_file(self):
self._test('', tuple())
self._test('\n', (('Whitespace', '\n'),))
self._test('\n\n', (('Whitespace', '\n\n'),))
self._test(' \n\n', (('Whitespace', ' \n\n'),))
def test_positions(self):
self.parser.readContents('''\
one = value
two = other \\
escaped value
''')
one, two = list(self.parser)
self.assertEqual(one.position(), (1, 1))
self.assertEqual(one.value_position(), (1, 7))
self.assertEqual(two.position(), (2, 1))
self.assertEqual(two.value_position(), (2, 7))
self.assertEqual(two.value_position(-1), (3, 14))
self.assertEqual(two.value_position(10), (3, 3))
if __name__ == '__main__':
unittest.main()

View file

@ -83,7 +83,7 @@ class Manifest(object):
except (ValueError, IOError), e:
if self.watcher:
self.watcher.notify('error', self.file, str(e))
return False
return {}
return self.extract_manifest_strings(manifest)
def extract_manifest_strings(self, manifest_fragment):

View file

@ -1165,4 +1165,4 @@ static const TransportSecurityPreload kPublicKeyPinningPreloadList[] = {
static const int32_t kUnknownId = -1;
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1488464530999000);
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1488551716626000);

View file

@ -184,9 +184,10 @@ for var in ('DLL_PREFIX', 'DLL_SUFFIX'):
DEFINES['CERT_AddTempCertToPerm'] = '__CERT_AddTempCertToPerm'
USE_LIBS += [
'crmf',
]
if not CONFIG['MOZ_SYSTEM_NSS']:
USE_LIBS += [
'crmf',
]
include('/ipc/chromium/chromium-config.mozbuild')

View file

@ -16,7 +16,6 @@
1password.com: did not receive HSTS header
1whw.co.uk: could not connect to host
206rc.net: max-age too low: 2592000
20hs.cn: did not receive HSTS header
24hourpaint.com: did not receive HSTS header
25daysof.io: could not connect to host
2859cc.com: could not connect to host
@ -27,6 +26,7 @@
360gradus.com: did not receive HSTS header
365.or.jp: did not receive HSTS header
3chit.cf: could not connect to host
404.sh: could not connect to host
420dongstorm.com: could not connect to host
42ms.org: could not connect to host
4455software.com: did not receive HSTS header
@ -116,7 +116,6 @@ agrios.de: did not receive HSTS header
agro-id.gov.ua: could not connect to host
ahabingo.com: did not receive HSTS header
ahri.ovh: could not connect to host
aidanmontare.net: could not connect to host
aidanwoods.com: did not receive HSTS header
airbnb.com: did not receive HSTS header
aircomms.com: did not receive HSTS header
@ -141,9 +140,9 @@ alarmsystemreviews.com: did not receive HSTS header
albertopimienta.com: did not receive HSTS header
alcazaar.com: could not connect to host
alecvannoten.be: did not receive HSTS header
alenan.org: could not connect to host
alessandro.pw: did not receive HSTS header
alethearose.com: did not receive HSTS header
alexandra-schulze.de: could not connect to host
alexandre.sh: did not receive HSTS header
alexhaydock.co.uk: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
alexisabarca.com: did not receive HSTS header
@ -163,6 +162,7 @@ alpha.irccloud.com: could not connect to host
alphabit-secure.com: could not connect to host
alphabuild.io: did not receive HSTS header
alphalabs.xyz: could not connect to host
alterbaum.net: did not receive HSTS header
altfire.ca: could not connect to host
altmv.com: max-age too low: 7776000
amaforums.org: could not connect to host
@ -193,6 +193,7 @@ andymartin.cc: did not receive HSTS header
anfsanchezo.co: could not connect to host
anfsanchezo.me: could not connect to host
anghami.com: did not receive HSTS header
anime.my: could not connect to host
animeday.ml: could not connect to host
animesfusion.com.br: could not connect to host
animesharp.com: could not connect to host
@ -241,6 +242,7 @@ aradulconteaza.ro: could not connect to host
aran.me.uk: did not receive HSTS header
arboineuropa.nl: did not receive HSTS header
arbu.eu: max-age too low: 2419200
argh.io: could not connect to host
arlen.se: could not connect to host
armory.consulting: could not connect to host
armory.supplies: could not connect to host
@ -257,12 +259,13 @@ asc16.com: could not connect to host
asdpress.cn: could not connect to host
aserver.co: could not connect to host
asmui.ga: could not connect to host
asrob.eu: did not receive HSTS header
asrob.eu: could not connect to host
ass.org.au: did not receive HSTS header
assdecoeur.org: could not connect to host
asset-alive.com: did not receive HSTS header
asset-alive.net: did not receive HSTS header
astrolpost.com: could not connect to host
astromelody.com: could not connect to host
atavio.at: could not connect to host
atavio.ch: could not connect to host
atavio.de: did not receive HSTS header
@ -273,9 +276,11 @@ athul.xyz: did not receive HSTS header
atlex.nl: did not receive HSTS header
atomik.pro: could not connect to host
atop.io: could not connect to host
attilagyorffy.com: could not connect to host
attimidesigns.com: did not receive HSTS header
au.search.yahoo.com: max-age too low: 172800
aubiosales.com: did not receive HSTS header
aufmerksamkeitsstudie.com: could not connect to host
aujapan.ru: could not connect to host
aurainfosec.com.au: could not connect to host
ausnah.me: could not connect to host
@ -287,7 +292,6 @@ auto-serwis.zgorzelec.pl: did not receive HSTS header
auto4trade.nl: could not connect to host
autojuhos.sk: did not receive HSTS header
autokovrik-diskont.ru: did not receive HSTS header
automacity.com: could not connect to host
autotsum.com: could not connect to host
autumnwindsagility.com: could not connect to host
auverbox.ovh: did not receive HSTS header
@ -296,6 +300,7 @@ av.de: did not receive HSTS header
avec-ou-sans-ordonnance.fr: could not connect to host
avinet.com: max-age too low: 0
awg-mode.de: did not receive HSTS header
awxg.com: could not connect to host
axado.com.br: did not receive HSTS header
axeny.com: did not receive HSTS header
az.search.yahoo.com: did not receive HSTS header
@ -321,6 +326,7 @@ barely.sexy: did not receive HSTS header
bashcode.ninja: could not connect to host
basicsolutionsus.com: did not receive HSTS header
bassh.net: did not receive HSTS header
baum.ga: did not receive HSTS header
baumstark.ca: did not receive HSTS header
bazarstupava.sk: did not receive HSTS header
bcbsmagentprofile.com: could not connect to host
@ -329,7 +335,6 @@ bckp.de: could not connect to host
bcm.com.au: max-age too low: 0
bcnx.de: max-age too low: 0
bcsytv.com: could not connect to host
bdikaros-network.net: could not connect to host
be.search.yahoo.com: did not receive HSTS header
beach-inspector.com: did not receive HSTS header
beachi.es: could not connect to host
@ -356,9 +361,11 @@ bermytraq.bm: [Exception... "Component returned failure code: 0x80004005 (NS_ERR
berrymark.be: max-age too low: 0
besixdouze.world: could not connect to host
betafive.net: could not connect to host
betlander.com: did not receive HSTS header
betnet.fr: could not connect to host
betplanning.it: did not receive HSTS header
bets.de: did not receive HSTS header
bettercrypto.org: could not connect to host
bettween.com: could not connect to host
betz.ro: did not receive HSTS header
beulahtabernacle.com: could not connect to host
@ -434,7 +441,7 @@ bodybuilding-legends.com: could not connect to host
bodyweightsolution.com: did not receive HSTS header
boensou.com: did not receive HSTS header
bogosity.se: could not connect to host
bohan.life: could not connect to host
bohan.life: did not receive HSTS header
boltdata.io: did not receive HSTS header
bonapp.restaurant: could not connect to host
bonfi.net: did not receive HSTS header
@ -486,7 +493,6 @@ buhler.pro: did not receive HSTS header
buildsaver.co.za: did not receive HSTS header
built.by: did not receive HSTS header
bulletpoint.cz: did not receive HSTS header
bulmafox.com: could not connect to host
bumarkamoda.com: could not connect to host
bunaken.asia: could not connect to host
burian-server.cz: could not connect to host
@ -514,7 +520,6 @@ cabarave.com: could not connect to host
cabusar.fr: could not connect to host
caconnect.org: could not connect to host
cadao.me: did not receive HSTS header
cadmail.nl: could not connect to host
cadusilva.com: did not receive HSTS header
cafe-scientifique.org.ec: could not connect to host
caim.cz: did not receive HSTS header
@ -522,13 +527,11 @@ cajapopcorn.com: did not receive HSTS header
cake.care: could not connect to host
calgaryconstructionjobs.com: could not connect to host
calix.com: max-age too low: 0
call.me: did not receive HSTS header
calltrackingreports.com: could not connect to host
calvin.me: max-age too low: 2592000
calvinallen.net: could not connect to host
calyxinstitute.org: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
camolist.com: could not connect to host
campbellsoftware.co.uk: could not connect to host
canadiangamblingchoice.com: did not receive HSTS header
cancelmyprofile.com: did not receive HSTS header
candicontrols.com: did not receive HSTS header
@ -614,13 +617,12 @@ chris-web.info: could not connect to host
chriskyrouac.com: could not connect to host
christiaandruif.nl: could not connect to host
christianbro.gq: could not connect to host
christophheich.me: could not connect to host
christophheich.me: did not receive HSTS header
chrisupjohn.com: could not connect to host
chrome-devtools-frontend.appspot.com: did not receive HSTS header (error ignored - included regardless)
chrome.google.com: did not receive HSTS header (error ignored - included regardless)
chroniclesofgeorge.com: did not receive HSTS header
chua.cf: could not connect to host
chxdf.net: did not receive HSTS header
cidr.ml: could not connect to host
cigarblogs.net: could not connect to host
cigi.site: could not connect to host
@ -695,7 +697,6 @@ compalytics.com: could not connect to host
comparejewelleryprices.co.uk: could not connect to host
completionist.audio: could not connect to host
computeremergency.com.au: did not receive HSTS header
computersystems.guru: could not connect to host
concord-group.co.jp: did not receive HSTS header
condesaelectronics.com: max-age too low: 0
confirm365.com: could not connect to host
@ -721,8 +722,8 @@ correctpaardbatterijnietje.nl: did not receive HSTS header
corruption-mc.net: could not connect to host
corruption-rsps.net: could not connect to host
corruption-server.net: could not connect to host
corzntin.fr: could not connect to host
count.sh: could not connect to host
couragefound.org: could not connect to host
couragewhispers.ca: did not receive HSTS header
coursdeprogrammation.com: could not connect to host
coursella.com: did not receive HSTS header
@ -741,7 +742,6 @@ crazyhotseeds.com: did not receive HSTS header
creativephysics.ml: could not connect to host
creativeplayuk.com: did not receive HSTS header
crendontech.com: could not connect to host
crestasantos.com: could not connect to host
crestoncottage.com: could not connect to host
criena.net: could not connect to host
critical.today: could not connect to host
@ -755,7 +755,6 @@ crtvmgmt.com: could not connect to host
crudysql.com: could not connect to host
cruzr.xyz: could not connect to host
crypt.guru: could not connect to host
cryptearth.de: could not connect to host
cryptify.eu: did not receive HSTS header
cryptobells.com: could not connect to host
cryptobin.org: could not connect to host
@ -763,7 +762,6 @@ cryptojar.io: did not receive HSTS header
cryptoki.fr: max-age too low: 7776000
cryptopush.com: did not receive HSTS header
crysadm.com: max-age too low: 1
crystalchandelierservices.com: could not connect to host
crystalclassics.co.uk: did not receive HSTS header
csapak.com: max-age too low: 0
csawctf.poly.edu: could not connect to host
@ -779,6 +777,7 @@ cubeserver.eu: could not connect to host
cubewano.com: could not connect to host
cujanovic.com: did not receive HSTS header
cumshots-video.ru: could not connect to host
cupi.co: did not receive HSTS header
cupidmentor.com: did not receive HSTS header
curroapp.com: could not connect to host
custe.rs: could not connect to host
@ -788,10 +787,10 @@ cybershambles.com: could not connect to host
cycleluxembourg.lu: did not receive HSTS header
cydia-search.io: could not connect to host
cyphertite.com: could not connect to host
cysec.biz: could not connect to host
dad256.tk: could not connect to host
dah5.com: did not receive HSTS header
dailystormerpodcasts.com: did not receive HSTS header
daimadi.com: could not connect to host
dakrib.net: could not connect to host
dalingk.co: could not connect to host
dango.in: did not receive HSTS header
@ -799,7 +798,6 @@ daniel-steuer.de: did not receive HSTS header
danieldk.eu: did not receive HSTS header
danielworthy.com: did not receive HSTS header
danijobs.com: could not connect to host
dannyrohde.de: could not connect to host
danpiel.net: could not connect to host
danrl.de: did not receive HSTS header
daolerp.xyz: could not connect to host
@ -811,6 +809,7 @@ darkengine.io: could not connect to host
darkhole.cn: could not connect to host
darknebula.space: could not connect to host
darkpony.ru: could not connect to host
darrenellis.xyz: did not receive HSTS header
dashburst.com: did not receive HSTS header
dashnimorad.com: did not receive HSTS header
data-abundance.com: could not connect to host
@ -874,7 +873,6 @@ deux.solutions: could not connect to host
deuxsol.com: could not connect to host
deuxsolutions.com: could not connect to host
devcu.net: did not receive HSTS header
devdesco.com: could not connect to host
devh.de: did not receive HSTS header
devincrow.me: could not connect to host
devinfo.net: did not receive HSTS header
@ -890,13 +888,13 @@ digitaldaddy.net: could not connect to host
digitalriver.tk: could not connect to host
digitalskillswap.com: could not connect to host
dinamoelektrik.com: max-age too low: 0
dinkum.online: could not connect to host
dirk-weise.de: could not connect to host
discoveringdocker.com: did not receive HSTS header
discovery.lookout.com: did not receive HSTS header
dislocated.de: did not receive HSTS header
dissimulo.me: could not connect to host
dittvertshus.no: could not connect to host
diycc.org: could not connect to host
dizihocasi.com: did not receive HSTS header
dizorg.net: could not connect to host
dj4et.de: did not receive HSTS header
@ -914,12 +912,10 @@ doeswindowssuckforeveryoneorjustme.com: could not connect to host
dogbox.se: did not receive HSTS header
dohosting.ru: could not connect to host
dokan.online: could not connect to host
doku-gilde.de: could not connect to host
dollarstore24.com: could not connect to host
dollywiki.co.uk: could not connect to host
dolphin-cloud.com: could not connect to host
dolphincorp.co.uk: could not connect to host
domadillo.com: could not connect to host
domaris.de: did not receive HSTS header
dominique-mueller.de: did not receive HSTS header
donttrustrobots.nl: could not connect to host
@ -961,6 +957,7 @@ duch.cloud: could not connect to host
duesee.org: could not connect to host
duria.de: max-age too low: 3600
dwhd.org: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
dworzak.ch: could not connect to host
dycontrol.de: could not connect to host
dylanscott.com.au: did not receive HSTS header
dymersion.com: did not receive HSTS header
@ -996,6 +993,7 @@ edp-collaborative.com: max-age too low: 2500
eduvance.in: did not receive HSTS header
eeqj.com: could not connect to host
efficienthealth.com: did not receive HSTS header
effortlesshr.com: did not receive HSTS header
egg-ortho.ch: did not receive HSTS header
egit.co: could not connect to host
ego-world.org: could not connect to host
@ -1007,6 +1005,8 @@ ekostecki.de: did not receive HSTS header
el-soul.com: did not receive HSTS header
elaintehtaat.fi: did not receive HSTS header
elanguest.pl: could not connect to host
elbetech.net: could not connect to host
eldinhadzic.com: could not connect to host
electricianforum.co.uk: could not connect to host
electromc.com: could not connect to host
elemprendedor.com.ve: could not connect to host
@ -1032,10 +1032,6 @@ enargia.jp: max-age too low: 0
encode.space: did not receive HSTS header
encoder.pw: could not connect to host
encrypted.google.com: did not receive HSTS header (error ignored - included regardless)
endohaus.ca: could not connect to host
endohaus.com: could not connect to host
endohaus.eu: could not connect to host
endohaus.us: could not connect to host
endzeit-architekten.com: did not receive HSTS header
engelwerbung.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
englishforums.com: could not connect to host
@ -1063,6 +1059,7 @@ erawanarifnugroho.com: did not receive HSTS header
eressea.xyz: did not receive HSTS header
ericbond.net: could not connect to host
ericyl.com: did not receive HSTS header
ernesto.at: could not connect to host
eromixx.com: did not receive HSTS header
erotalia.es: could not connect to host
eroticen.com: did not receive HSTS header
@ -1075,6 +1072,7 @@ esclear.de: did not receive HSTS header
escotour.com: did not receive HSTS header
esec.rs: did not receive HSTS header
espra.com: could not connect to host
esquonic.com: could not connect to host
essexcosmeticdentists.co.uk: did not receive HSTS header
essexghosthunters.co.uk: did not receive HSTS header
estilosapeca.com: could not connect to host
@ -1091,7 +1089,7 @@ euph.eu: could not connect to host
euren.se: could not connect to host
euroshop24.net: could not connect to host
evantage.org: could not connect to host
evdenevenakliyatankara.pw: could not connect to host
evdenevenakliyatankara.pw: did not receive HSTS header
everybooks.com: max-age too low: 60
everylab.org: could not connect to host
evi.be: did not receive HSTS header
@ -1138,6 +1136,7 @@ fasset.jp: could not connect to host
fastopen.ml: could not connect to host
fatgeekflix.net: could not connect to host
fatherhood.gov: did not receive HSTS header
fatlossguide.xyz: could not connect to host
fatox.de: could not connect to host
fatwin.pw: could not connect to host
fayolle.info: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
@ -1244,6 +1243,7 @@ frontmin.com: did not receive HSTS header
frost-ci.xyz: could not connect to host
fruitusers.com: could not connect to host
frusky.net: could not connect to host
fsf.moe: could not connect to host
ftctele.com: did not receive HSTS header
fuckgfw233.org: could not connect to host
fugle.de: could not connect to host
@ -1259,6 +1259,7 @@ futureyouhealth.com: did not receive HSTS header
fx-rk.com: did not receive HSTS header
fysiohaenraets.nl: did not receive HSTS header
fzn.io: could not connect to host
g01.in.ua: could not connect to host
g2a.co: did not receive HSTS header
g2g.com: did not receive HSTS header
g4w.co: did not receive HSTS header (error ignored - included regardless)
@ -1289,7 +1290,7 @@ gatilagata.com.br: did not receive HSTS header
gdpventure.com: max-age too low: 0
gedankenbude.info: did not receive HSTS header
geekbundle.org: did not receive HSTS header
geekcast.co.uk: could not connect to host
geekcast.co.uk: did not receive HSTS header
geli-graphics.com: did not receive HSTS header
gem-indonesia.net: could not connect to host
genuu.com: could not connect to host
@ -1358,6 +1359,7 @@ gogold-g.com: could not connect to host
gold24.in: did not receive HSTS header
goldendata.io: could not connect to host
goldminer.ga: could not connect to host
goldpros.com: did not receive HSTS header
golocal-media.de: did not receive HSTS header
gonzalosanchez.mx: did not receive HSTS header
goodenough.nz: did not receive HSTS header
@ -1365,7 +1367,6 @@ goodwin43.ru: did not receive HSTS header
google: could not connect to host (error ignored - included regardless)
googlemail.com: did not receive HSTS header (error ignored - included regardless)
googleplex.com: did not receive HSTS header (error ignored - included regardless)
goolok.com: did not receive HSTS header
gorilla-gym.site: could not connect to host
goto.google.com: did not receive HSTS header (error ignored - included regardless)
gottcode.org: did not receive HSTS header
@ -1380,6 +1381,8 @@ graph.no: did not receive HSTS header
gravity-net.de: could not connect to host
graycell.net: could not connect to host
grazetech.com: could not connect to host
greboid.co.uk: could not connect to host
greboid.com: could not connect to host
greenhillantiques.co.uk: did not receive HSTS header
greenvines.com.tw: did not receive HSTS header
gregorytlee.me: did not receive HSTS header
@ -1434,6 +1437,7 @@ hakugin.org: could not connect to host
halo.red: could not connect to host
hancc.net: did not receive HSTS header
hanfu.la: could not connect to host
hang333.pw: could not connect to host
hannover-banditen.de: did not receive HSTS header
hao2taiwan.com: max-age too low: 0
haozi.me: could not connect to host
@ -1476,6 +1480,7 @@ helpmebuild.com: did not receive HSTS header
hemdal.se: could not connect to host
hencagon.com: could not connect to host
henriknoerr.com: could not connect to host
hepteract.us: did not receive HSTS header
hermes-net.de: did not receive HSTS header
herpaderp.net: did not receive HSTS header
herzbotschaft.de: did not receive HSTS header
@ -1509,7 +1514,6 @@ horosho.in: could not connect to host
horseboners.xxx: did not receive HSTS header
hortifarm.ro: did not receive HSTS header
hosted-service.com: did not receive HSTS header
hostedbgp.net: could not connect to host
hostedtalkgadget.google.com: did not receive HSTS header (error ignored - included regardless)
hostgarou.com: did not receive HSTS header
hostinaus.com.au: could not connect to host
@ -1529,11 +1533,11 @@ http418.xyz: could not connect to host
httpstatuscode418.xyz: could not connect to host
hu.search.yahoo.com: did not receive HSTS header
huarongdao.com: did not receive HSTS header
hugofs.com: could not connect to host
hugosleep.com.au: did not receive HSTS header
humblefinances.com: could not connect to host
humeurs.net: could not connect to host
humpteedumptee.in: did not receive HSTS header
huntshomeinspections.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
hurricanelabs.com: did not receive HSTS header
hydra.ws: could not connect to host
hyper69.com: did not receive HSTS header
@ -1700,6 +1704,7 @@ jamesbywater.me.uk: could not connect to host
jamesconroyfinn.com: did not receive HSTS header
jamesdoell.com: could not connect to host
jamesdoylephoto.com: did not receive HSTS header
jamesf.xyz: could not connect to host
jamesmorrison.me: did not receive HSTS header
jamourtney.com: could not connect to host
jan27.org: did not receive HSTS header
@ -1730,7 +1735,6 @@ jbradaric.me: could not connect to host
jcch.de: could not connect to host
jcor.me: did not receive HSTS header
jctf.io: could not connect to host
jdh8.org: could not connect to host
jeff393.com: could not connect to host
jefftickle.com: could not connect to host
jenjoit.de: could not connect to host
@ -1758,7 +1762,6 @@ jinbo123.com: did not receive HSTS header
jkb.pics: could not connect to host
jkbuster.com: could not connect to host
jmdekker.it: could not connect to host
jmk.hu: did not receive HSTS header
joakimalgroy.com: could not connect to host
jobmedic.com: did not receive HSTS header
joedavison.me: could not connect to host
@ -1789,7 +1792,6 @@ jualautoclave.com: did not receive HSTS header
jualssh.com: could not connect to host
juliamweber.de: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
julian-kipka.de: did not receive HSTS header
julibear.com: could not connect to host
jumbox.xyz: could not connect to host
junaos.xyz: did not receive HSTS header
junge-selbsthilfe.info: could not connect to host
@ -1802,7 +1804,6 @@ juwairen.cn: could not connect to host
jvoice.net: could not connect to host
jwilsson.me: could not connect to host
jxm.in: could not connect to host
jznet.org: did not receive HSTS header
k-dev.de: could not connect to host
ka-clan.com: could not connect to host
kabuabc.com: did not receive HSTS header
@ -1815,6 +1816,7 @@ kamikano.com: could not connect to host
kaplatz.is: could not connect to host
kapucini.si: max-age too low: 0
karaoketonight.com: could not connect to host
kasilag.me: did not receive HSTS header
katiaetdavid.fr: could not connect to host
katproxy.online: could not connect to host
katproxy.site: could not connect to host
@ -1842,7 +1844,6 @@ kgxtech.com: max-age too low: 2592000
kickass.al: could not connect to host
kid-dachau.de: did not receive HSTS header
kiel-media.de: did not receive HSTS header
kilianvalkhof.com: did not receive HSTS header
kimpost.org: could not connect to host
kinderwagen-test24.de: could not connect to host
kingmanhall.org: could not connect to host
@ -1850,10 +1851,10 @@ kinnon.enterprises: could not connect to host
kinogb.net: max-age too low: 0
kionetworks.com: did not receive HSTS header
kipira.com: could not connect to host
kirbear.com: could not connect to host
kirkforcongress.com: could not connect to host
kirkforsenate.com: did not receive HSTS header
kirkpatrickdavis.com: could not connect to host
kisa.io: could not connect to host
kisalt.im: did not receive HSTS header
kissart.net: could not connect to host
kissflow.com: did not receive HSTS header
@ -1891,7 +1892,6 @@ kr.search.yahoo.com: did not receive HSTS header
kraynik.com: could not connect to host
kredite.sale: could not connect to host
kriegt.es: could not connect to host
krislamoureux.com: could not connect to host
krmela.com: could not connect to host
kroetenfuchs.de: could not connect to host
kropkait.pl: could not connect to host
@ -1962,7 +1962,6 @@ leopold.email: could not connect to host
leopoldina.net: did not receive HSTS header
leopotamgroup.com: could not connect to host
leovanna.co.uk: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
leppis-it.de: could not connect to host
lerasenglish.com: did not receive HSTS header
lerner.moscow: did not receive HSTS header
les-corsaires.net: could not connect to host
@ -1996,7 +1995,6 @@ lifestylehunter.co.uk: did not receive HSTS header
lifetimemoneymachine.com: did not receive HSTS header
lightarmory.com: could not connect to host
lightpaste.com: could not connect to host
likeaross.com: could not connect to host
lillpopp.eu: max-age too low: 10
lilpwny.com: could not connect to host
limalama.eu: max-age too low: 1
@ -2005,7 +2003,7 @@ limpido.it: could not connect to host
lindberg.io: did not receive HSTS header
lingros-test.tk: could not connect to host
linguaquote.com: did not receive HSTS header
linkenheil.org: could not connect to host
linkages.org: could not connect to host
linmi.cc: did not receive HSTS header
linorman1997.me: could not connect to host
linuxeyecandy.com: could not connect to host
@ -2022,6 +2020,7 @@ loafbox.com: could not connect to host
locktheirphone.com: could not connect to host
locomotive.ca: did not receive HSTS header
login.corp.google.com: max-age too low: 7776000 (error ignored - included regardless)
login.gov: could not connect to host
loginseite.com: could not connect to host
loli.bz: could not connect to host
lolicore.ch: could not connect to host
@ -2075,7 +2074,6 @@ luxwatch.com: could not connect to host
lv.search.yahoo.com: did not receive HSTS header
lzkill.com: could not connect to host
m-ali.xyz: did not receive HSTS header
m-plan.com: did not receive HSTS header
m.gparent.org: could not connect to host
m.nu: did not receive HSTS header
m3-gmbh.de: did not receive HSTS header
@ -2085,6 +2083,7 @@ maartenvandekamp.nl: did not receive HSTS header
macbolo.com: could not connect to host
macchaberrycream.com: could not connect to host
macchedil.com: did not receive HSTS header
macgeneral.de: did not receive HSTS header
madars.org: did not receive HSTS header
maddin.ga: could not connect to host
madebymagnitude.com: did not receive HSTS header
@ -2211,11 +2210,10 @@ mijn-email.org: could not connect to host
mikaelemilsson.net: did not receive HSTS header
mikeburns.com: could not connect to host
mikeg.de: did not receive HSTS header
mikek.work: did not receive HSTS header
mikeology.org: could not connect to host
mikonmaa.fi: could not connect to host
miku.be: could not connect to host
miku.hatsune.my: could not connect to host
miku.hatsune.my: max-age too low: 5184000
milesgeek.com: did not receive HSTS header
mindoktor.se: did not receive HSTS header
minecraftserverz.com: could not connect to host
@ -2264,6 +2262,7 @@ moparscape.org: did not receive HSTS header
mor.gl: could not connect to host
morbitzer.de: did not receive HSTS header
morethanadream.lv: could not connect to host
morganino.eu: could not connect to host
moriz.net: could not connect to host
morningcalculation.com: could not connect to host
morotech.com.br: max-age too low: 2592000
@ -2329,7 +2328,6 @@ mystudy.me: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR
myvirtualserver.com: max-age too low: 2592000
myzone.com: did not receive HSTS header
mziulu.me: could not connect to host
n-soft.info: could not connect to host
n0psled.nl: could not connect to host
n2x.in: could not connect to host
n4l.pw: could not connect to host
@ -2358,8 +2356,6 @@ ncc60205.info: could not connect to host
ncpc.gov: could not connect to host
nct.org.uk: max-age too low: 1
nctx.co.uk: did not receive HSTS header
ndtblog.com: could not connect to host
ndtmarket.place: could not connect to host
near.st: did not receive HSTS header
neel.ch: could not connect to host
neftaly.com: did not receive HSTS header
@ -2407,7 +2403,7 @@ nicestresser.fr: could not connect to host
nicky.io: did not receive HSTS header
nicolasbettag.me: could not connect to host
niconiconi.xyz: could not connect to host
niconode.com: could not connect to host
niconode.com: did not receive HSTS header
nien.chat: could not connect to host
nightx.uk: could not connect to host
niho.jp: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
@ -2424,7 +2420,6 @@ nmctest.net: could not connect to host
nnya.cat: did not receive HSTS header
no17sifangjie.cc: could not connect to host
nocallaghan.com: could not connect to host
nocit.dk: could not connect to host
nocs.cn: did not receive HSTS header
noctinus.tk: could not connect to host
nodari.com.ar: did not receive HSTS header
@ -2482,7 +2477,7 @@ nutsandboltsmedia.com: did not receive HSTS header
nwgh.org: max-age too low: 86400
nyantec.com: did not receive HSTS header
nysepho.pw: could not connect to host
nystart.no: did not receive HSTS header
nystart.no: could not connect to host
nz.search.yahoo.com: max-age too low: 172800
nzb.cat: max-age too low: 7776000
o0o.one: did not receive HSTS header
@ -2493,7 +2488,7 @@ occentus.net: did not receive HSTS header
ochaken.cf: could not connect to host
octothorpe.club: could not connect to host
odifi.com: could not connect to host
odin.xxx: could not connect to host
odin.xxx: did not receive HSTS header
oe8.bet: could not connect to host
ofcourselanguages.com: could not connect to host
offshore-firma.org: could not connect to host
@ -2619,7 +2614,6 @@ paste.linode.com: could not connect to host
pastebin.linode.com: could not connect to host
pastenib.com: could not connect to host
paster.li: did not receive HSTS header
pastie.se: did not receive HSTS header
patientinsight.net: could not connect to host
patrz.eu: could not connect to host
patt.us: did not receive HSTS header
@ -2633,6 +2627,8 @@ payroll.ch: did not receive HSTS header
pbapp.net: did not receive HSTS header
pbprint.ru: max-age too low: 0
pc-nf.de: could not connect to host
pcfeuerwehr.de: did not receive HSTS header
pcfun.net: did not receive HSTS header
pchax.net: did not receive HSTS header
peissen.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
pekkapikkarainen.fi: could not connect to host
@ -2673,7 +2669,7 @@ pippen.io: could not connect to host
pir9.com: max-age too low: 2592000
piratedb.com: could not connect to host
piratedot.com: could not connect to host
piratelist.online: did not receive HSTS header
piratelist.online: could not connect to host
piratenlogin.de: could not connect to host
pirati.cz: max-age too low: 604800
pirlitu.com: did not receive HSTS header
@ -2709,6 +2705,7 @@ poleartschool.com: could not connect to host
polimat.org: could not connect to host
politically-incorrect.xyz: could not connect to host
politologos.org: could not connect to host
polymathematician.com: could not connect to host
polypho.nyc: could not connect to host
pompompoes.com: could not connect to host
pontokay.com.br: did not receive HSTS header
@ -2721,6 +2718,7 @@ postcodewise.co.uk: did not receive HSTS header
posterspy.com: did not receive HSTS header
postscheduler.org: could not connect to host
posylka.de: did not receive HSTS header
potatofrom.space: could not connect to host
poussinooz.fr: could not connect to host
povitria.net: could not connect to host
power-of-interest.com: did not receive HSTS header
@ -2744,12 +2742,13 @@ privacyrup.net: could not connect to host
prnt.li: did not receive HSTS header
pro-zone.com: could not connect to host
prodpad.com: did not receive HSTS header
production.vn: did not receive HSTS header
production.vn: could not connect to host
professionalboundaries.com: did not receive HSTS header
profi-durchgangsmelder.de: did not receive HSTS header
profundr.com: could not connect to host
progg.no: could not connect to host
prohostonline.fi: could not connect to host
proj.org.cn: could not connect to host
promecon-gmbh.de: did not receive HSTS header
prontolight.com: did not receive HSTS header
prosocialmachines.com: could not connect to host
@ -2760,7 +2759,6 @@ proximato.com: could not connect to host
proxybay.al: could not connect to host
proxybay.club: could not connect to host
proxybay.info: did not receive HSTS header
prxio.date: could not connect to host
prxio.site: could not connect to host
prytkov.com: did not receive HSTS header
psw.academy: did not receive HSTS header
@ -2782,6 +2780,7 @@ pypi-mirrors.org: could not connect to host
pypi-status.org: could not connect to host
pyplo.org: did not receive HSTS header
pypt.lt: did not receive HSTS header
pzme.me: did not receive HSTS header
q2.si: did not receive HSTS header
qingxuan.info: max-age too low: 864000
qinxi1992.com: did not receive HSTS header
@ -2796,7 +2795,6 @@ quantumcourse.org: did not receive HSTS header
queercoders.com: did not receive HSTS header
quemmeliga.com: could not connect to host
questsandrewards.com: could not connect to host
quotehex.com: did not receive HSTS header
quranserver.net: could not connect to host
qvi.st: did not receive HSTS header
qwaser.fr: could not connect to host
@ -2814,7 +2812,7 @@ randomcage.com: did not receive HSTS header
rankthespot.com: did not receive HSTS header
rapidresearch.me: could not connect to host
rapidthunder.io: could not connect to host
rasing.me: did not receive HSTS header
rasing.me: could not connect to host
ratajczak.fr: could not connect to host
raulfraile.net: could not connect to host
rawet.se: did not receive HSTS header
@ -2848,12 +2846,12 @@ relisten.nl: did not receive HSTS header
remitatm.com: could not connect to host
remodela.com.ve: could not connect to host
renem.net: did not receive HSTS header
renideo.fr: did not receive HSTS header
renlong.org: could not connect to host
renrenss.com: did not receive HSTS header
rent-a-coder.de: did not receive HSTS header
rentcarassist.com: could not connect to host
renteater.com: could not connect to host
repaxan.com: could not connect to host
replacemychina.com: did not receive HSTS header
reprolife.co.uk: max-age too low: 0
res-rheingau.de: did not receive HSTS header
@ -2890,11 +2888,9 @@ rk6.cz: could not connect to host
rkmantpur.org: did not receive HSTS header
rme.li: did not receive HSTS header
roan24.pl: did not receive HSTS header
robandjanine.com: could not connect to host
robertglastra.com: could not connect to host
robigalia.org: did not receive HSTS header
robin.info: could not connect to host
robspc.repair: could not connect to host
robtex.org: did not receive HSTS header
rochman.id: could not connect to host
rocksberg.net: did not receive HSTS header
@ -2977,10 +2973,8 @@ saveaward.gov: could not connect to host
saveyour.biz: did not receive HSTS header
sawamura-rental.com: did not receive HSTS header
sb-group.dk: did not receive HSTS header
sbiewald.de: could not connect to host
sby.de: did not receive HSTS header
sc4le.com: could not connect to host
scannabi.com: could not connect to host
schmitz.link: could not connect to host
schnell-gold.com: could not connect to host
schoop.me: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
@ -3001,7 +2995,6 @@ scriptict.nl: could not connect to host
sdmoscow.ru: could not connect to host
sdrobs.com: did not receive HSTS header
sdsl-speedtest.de: could not connect to host
sealbaker.com: could not connect to host
search-one.de: did not receive HSTS header
sebastian-lutsch.de: could not connect to host
sebster.com: did not receive HSTS header
@ -3047,6 +3040,7 @@ servercode.ca: did not receive HSTS header
serverdensity.io: did not receive HSTS header
servergno.me: did not receive HSTS header
seryo.moe: could not connect to host
sesha.co.za: could not connect to host
setphaserstostun.org: could not connect to host
setuid.de: could not connect to host
setuid.io: did not receive HSTS header
@ -3156,7 +3150,7 @@ socialhams.net: did not receive HSTS header
socialhead.io: could not connect to host
socialspirit.com.br: did not receive HSTS header
sockeye.cc: could not connect to host
socomponents.co.uk: could not connect to host
socomponents.co.uk: did not receive HSTS header
sodiao.cc: could not connect to host
sogeek.me: did not receive HSTS header
solidfuelappliancespares.co.uk: did not receive HSTS header
@ -3168,12 +3162,14 @@ somethingnew.xyz: did not receive HSTS header
songzhuolun.com: did not receive HSTS header
sonic.sk: max-age too low: 0
sonicrainboom.rocks: did not receive HSTS header
sorenstudios.com: could not connect to host
sotar.us: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
sotor.de: did not receive HSTS header
soulboy.io: did not receive HSTS header
soulema.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
soulfulglamour.uk: could not connect to host
sourcelair.com: did not receive HSTS header
sourceway.de: could not connect to host
southside-crew.club: could not connect to host
southworcestershiregpservices.co.uk: could not connect to host
souyar.de: could not connect to host
@ -3212,7 +3208,7 @@ sproutconnections.com: did not receive HSTS header
sprybear.com: did not receive HSTS header
square.gs: could not connect to host
squatldf.org: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
sqzryang.com: max-age too low: 604800
sqzryang.com: did not receive HSTS header
srevilak.net: did not receive HSTS header
srna.sk: could not connect to host
srrr.ca: could not connect to host
@ -3275,6 +3271,7 @@ subbing.work: could not connect to host
subdimension.org: did not receive HSTS header
subrosa.io: could not connect to host
subtitle.rip: could not connect to host
succ.in: could not connect to host
sudo.li: did not receive HSTS header
suite73.org: could not connect to host
suitocracy.com: did not receive HSTS header
@ -3315,7 +3312,7 @@ szaszm.tk: max-age too low: 0
t.facebook.com: did not receive HSTS header
taabe.xyz: did not receive HSTS header
tablet.facebook.com: did not receive HSTS header
tacomafia.net: did not receive HSTS header
tacomafia.net: could not connect to host
tadigitalstore.com: could not connect to host
tafoma.com: did not receive HSTS header
tageau.com: could not connect to host
@ -3416,7 +3413,6 @@ theurbanyoga.com: did not receive HSTS header
thevintagenews.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
thezonders.com: did not receive HSTS header
thierfreund.de: could not connect to host
thingies.site: could not connect to host
thinkcoding.de: could not connect to host
thinlyveiledcontempt.com: could not connect to host
thirdpartytrade.com: did not receive HSTS header
@ -3459,13 +3455,13 @@ tobiasmathes.name: could not connect to host
tobiasofficial.at: could not connect to host
todo.is: did not receive HSTS header
todobazar.es: could not connect to host
tokage.me: could not connect to host
tokyopopline.com: did not receive HSTS header
tollmanz.com: did not receive HSTS header
tomberek.info: could not connect to host
tomeara.net: could not connect to host
tomharling.co.uk: max-age too low: 86400
tomharling.uk: max-age too low: 86400
tomharris.tech: could not connect to host
tomlankhorst.nl: did not receive HSTS header
tommsy.com: did not receive HSTS header
tonburi.jp: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
@ -3536,7 +3532,6 @@ tyrelius.com: did not receive HSTS header
tyroproducts.eu: did not receive HSTS header
tzappa.net: could not connect to host
u-blox.com: did not receive HSTS header
u03.fr: could not connect to host
ua.search.yahoo.com: did not receive HSTS header
ubicloud.de: could not connect to host
ublox.com: did not receive HSTS header
@ -3592,6 +3587,7 @@ usercare.com: did not receive HSTS header
userify.com: did not receive HSTS header
ustr.gov: max-age too low: 86400
utumno.ch: could not connect to host
uvarov.pw: could not connect to host
uy.search.yahoo.com: did not receive HSTS header
uz.search.yahoo.com: did not receive HSTS header
uzmandroid.com: could not connect to host
@ -3618,7 +3614,7 @@ vcdove.com: did not receive HSTS header
vcr.re: could not connect to host
veblen.com: could not connect to host
vechkasov.ru: did not receive HSTS header
vemokin.net: did not receive HSTS header
vemokin.net: could not connect to host
venixplays-stream.ml: could not connect to host
verifikatorindonesia.com: could not connect to host
vermontcareergateway.org: could not connect to host
@ -3635,12 +3631,14 @@ videomuz.com: did not receive HSTS header
vidz.ga: could not connect to host
vieaw.com: could not connect to host
viktorsvantesson.net: did not receive HSTS header
villenvinkit.com: could not connect to host
vincentkooijman.at: did not receive HSTS header
vincentkooijman.nl: did not receive HSTS header
vintageheartcoffee.com: did not receive HSTS header
vio.no: did not receive HSTS header
viperdns.com: could not connect to host
vipmusic.ga: could not connect to host
vipnettikasinoklubi.com: could not connect to host
visitbroadstairs.com: could not connect to host
vissanum.com: did not receive HSTS header
vistarait.com: did not receive HSTS header
@ -3748,7 +3746,6 @@ whyworldhot.com: could not connect to host
wienholding.at: max-age too low: 0
wieninternational.at: could not connect to host
wiire.me: could not connect to host
wikileaks.com: could not connect to host
wilf1rst.com: could not connect to host
william.si: did not receive HSTS header
willosagiede.com: did not receive HSTS header
@ -3905,7 +3902,6 @@ yuko.moe: could not connect to host
yukontec.com: [Exception... "Component returned failure code: 0x80004005 (NS_ERROR_FAILURE) [nsISiteSecurityService.processHeader]" nsresult: "0x80004005 (NS_ERROR_FAILURE)" location: "JS frame :: /builds/slave/m-cen-l64-periodicupdate-00000/getHSTSPreloadList.js :: processStsHeader :: line 121" data: no]
yunzhu.li: did not receive HSTS header
yunzhu.org: could not connect to host
yutabon.com: could not connect to host
yux.io: did not receive HSTS header
yzal.io: could not connect to host
z33.ch: did not receive HSTS header
@ -3935,7 +3931,7 @@ zera.com.au: could not connect to host
zett4.me: could not connect to host
zeytin.pro: could not connect to host
zh.search.yahoo.com: did not receive HSTS header
zhaojin97.cn: max-age too low: 604800
zhaojin97.cn: did not receive HSTS header
zhendingresources.com: max-age too low: 0
zigcore.com.br: could not connect to host
zirtue.io: could not connect to host

Diff not shown because of its large size.

View file

@ -144,6 +144,8 @@ this.setBasicCredentials =
// Return an identity configuration suitable for testing with our identity
// providers. |overrides| can specify overrides for any default values.
// |server| is optional, but if specified, will be used to form the cluster
// URL for the FxA identity.
this.makeIdentityConfig = function(overrides) {
// first setup the defaults.
let result = {
@ -249,51 +251,60 @@ this.configureFxAccountIdentity = function(authService,
authService._account = config.fxaccount.user.email;
}
this.configureIdentity = async function(identityOverrides) {
let config = makeIdentityConfig(identityOverrides);
this.configureIdentity = async function(identityOverrides, server) {
let config = makeIdentityConfig(identityOverrides, server);
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
if (server) {
ns.Service.serverURL = server.baseURI;
}
ns.Service._clusterManager = ns.Service.identity.createClusterManager(ns.Service);
if (ns.Service.identity instanceof BrowserIDManager) {
// do the FxAccounts thang...
// If a server was specified, ensure FxA has a correct cluster URL available.
if (server && !config.fxaccount.token.endpoint) {
let ep = server.baseURI;
if (!ep.endsWith("/")) {
ep += "/";
}
ep += "1.1/" + config.username + "/";
config.fxaccount.token.endpoint = ep;
}
configureFxAccountIdentity(ns.Service.identity, config);
await ns.Service.identity.initializeWithCurrentIdentity();
// need to wait until this identity manager is readyToAuthenticate.
await ns.Service.identity.whenReadyToAuthenticate.promise;
// and cheat to avoid requiring each test do an explicit login - give it
// a cluster URL.
if (config.fxaccount.token.endpoint) {
ns.Service.clusterURL = config.fxaccount.token.endpoint;
}
return;
}
// old style identity provider.
if (server) {
ns.Service.clusterURL = server.baseURI + "/";
}
ns.Service.identity.username = config.username;
ns.Service._updateCachedURLs();
setBasicCredentials(config.username, config.sync.password, config.sync.syncKey);
}
this.SyncTestingInfrastructure = function (server, username, password, syncKey) {
this.SyncTestingInfrastructure = async function (server, username, password) {
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
ensureLegacyIdentityManager();
let config = makeIdentityConfig();
// XXX - hacks for the sync identity provider.
if (username)
config.username = username;
if (password)
config.sync.password = password;
if (syncKey)
config.sync.syncKey = syncKey;
let cb = Async.makeSpinningCallback();
configureIdentity(config).then(cb, cb);
cb.wait();
let i = server.identity;
let uri = i.primaryScheme + "://" + i.primaryHost + ":" +
i.primaryPort + "/";
ns.Service.serverURL = uri;
ns.Service.clusterURL = uri;
this.logStats = initTestLogging();
this.fakeFilesystem = new FakeFilesystemService({});
this.fakeGUIDService = new FakeGUIDService();
this.fakeCryptoService = new FakeCryptoService();
let config = makeIdentityConfig({ username, password });
await configureIdentity(config, server);
return {
logStats: initTestLogging(),
fakeFilesystem: new FakeFilesystemService({}),
fakeGUIDService: new FakeGUIDService(),
fakeCryptoService: new FakeCryptoService(),
}
}
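
The endpoint construction in configureIdentity above (ensure a trailing slash, then append "1.1/<username>/") is easy to get subtly wrong, so here is a standalone sketch of the same string handling; the base URI and username are made-up values, and this is not the Sync test helper itself:

// Standalone sketch of the cluster-URL endpoint construction used above.
function makeSyncEndpoint(baseURI, username) {
  let ep = baseURI;
  if (!ep.endsWith("/")) {
    ep += "/";
  }
  return ep + "1.1/" + username + "/";
}

console.log(makeSyncEndpoint("http://localhost:8080", "johndoe"));
// -> "http://localhost:8080/1.1/johndoe/"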
/**

View file

@ -377,7 +377,7 @@ this.BrowserIDManager.prototype = {
* Changes will not persist unless persistSyncCredentials() is called.
*/
set basicPassword(value) {
throw "basicPassword setter should be not used in BrowserIDManager";
throw new Error("basicPassword setter should be not used in BrowserIDManager");
},
/**

View file

@ -192,6 +192,8 @@ MIN_PASS_LENGTH: 8,
DEVICE_TYPE_DESKTOP: "desktop",
DEVICE_TYPE_MOBILE: "mobile",
SQLITE_MAX_VARIABLE_NUMBER: 999,
})) {
this[key] = val;
this.EXPORTED_SYMBOLS.push(key);

View file

@ -161,19 +161,25 @@ Tracker.prototype = {
return true;
},
removeChangedID: function (id) {
if (!id) {
this._log.warn("Attempted to remove undefined ID to tracker");
removeChangedID: function (...ids) {
if (!ids.length || this.ignoreAll) {
return false;
}
if (this.ignoreAll || this._ignored.includes(id)) {
return false;
}
if (this.changedIDs[id] != null) {
this._log.trace("Removing changed ID " + id);
delete this.changedIDs[id];
this.saveChangedIDs();
for (let id of ids) {
if (!id) {
this._log.warn("Attempted to remove undefined ID from tracker");
continue;
}
if (this._ignored.includes(id)) {
this._log.debug(`Not removing ignored ID ${id} from tracker`);
continue;
}
if (this.changedIDs[id] != null) {
this._log.trace("Removing changed ID " + id);
delete this.changedIDs[id];
}
}
this.saveChangedIDs();
return true;
},
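
Since the new removeChangedID accepts any number of ids and persists once per call, a minimal self-contained sketch of that batch-and-save-once pattern may help; MiniTracker below is a made-up stand-in, not the Sync Tracker itself:

// Made-up stand-in that mirrors the batch-removal pattern above:
// skip falsy and ignored ids, delete the rest, save once at the end.
class MiniTracker {
  constructor() {
    this.changedIDs = { a: 1, b: 2, c: 3 };
    this._ignored = ["b"];
    this.saves = 0;
  }
  saveChangedIDs() { this.saves++; }
  removeChangedID(...ids) {
    if (!ids.length) {
      return false;
    }
    for (let id of ids) {
      if (!id || this._ignored.includes(id)) {
        continue;              // undefined or ignored ids are skipped
      }
      if (this.changedIDs[id] != null) {
        delete this.changedIDs[id];
      }
    }
    this.saveChangedIDs();     // one save covers the whole batch
    return true;
  }
}

let t = new MiniTracker();
t.removeChangedID("a", undefined, "b", "c");
console.log(Object.keys(t.changedIDs)); // ["b"] - the ignored id is kept
console.log(t.saves);                   // 1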

View file

@ -64,6 +64,50 @@ HistoryEngine.prototype = {
notifyHistoryObservers("onEndUpdateBatch");
}
},
pullNewChanges() {
let modifiedGUIDs = Object.keys(this._tracker.changedIDs);
if (!modifiedGUIDs.length) {
return new Changeset();
}
let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase)
.DBConnection;
// Filter out hidden pages and `TRANSITION_FRAMED_LINK` visits. These are
// excluded when rendering the history menu, so we use the same constraints
// for Sync. We also don't want to sync `TRANSITION_EMBED` visits, but those
// aren't stored in the database.
for (let startIndex = 0;
startIndex < modifiedGUIDs.length;
startIndex += SQLITE_MAX_VARIABLE_NUMBER) {
let chunkLength = Math.min(SQLITE_MAX_VARIABLE_NUMBER,
modifiedGUIDs.length - startIndex);
let query = `
SELECT DISTINCT p.guid FROM moz_places p
JOIN moz_historyvisits v ON p.id = v.place_id
WHERE p.guid IN (${new Array(chunkLength).fill("?").join(",")}) AND
(p.hidden = 1 OR v.visit_type IN (0,
${PlacesUtils.history.TRANSITION_FRAMED_LINK}))
`;
let statement = db.createAsyncStatement(query);
try {
for (let i = 0; i < chunkLength; i++) {
statement.bindByIndex(i, modifiedGUIDs[startIndex + i]);
}
let results = Async.querySpinningly(statement, ["guid"]);
let guids = results.map(result => result.guid);
this._tracker.removeChangedID(...guids);
} finally {
statement.finalize();
}
}
return new Changeset(this._tracker.changedIDs);
},
};
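The chunking above exists because SQLite caps the number of bound parameters per statement (SQLITE_MAX_VARIABLE_NUMBER, 999 in the constants hunk earlier). A minimal, self-contained sketch of the same idea:

// Split a GUID list into slices small enough to bind in one statement.
function* guidChunks(guids, limit = 999) {
  for (let start = 0; start < guids.length; start += limit) {
    yield guids.slice(start, start + limit);
  }
}
// For each chunk, the query builds one "?" placeholder per GUID:
//   WHERE p.guid IN (${new Array(chunk.length).fill("?").join(",")})
// so 2500 GUIDs become three statements binding 999, 999 and 502 values.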
function HistoryStore(name, engine) {
@ -72,7 +116,7 @@ function HistoryStore(name, engine) {
// Explicitly nullify our references to our cached services so we don't leak
Svc.Obs.add("places-shutdown", function() {
for (let query in this._stmts) {
let stmt = this._stmts;
let stmt = this._stmts[query];
stmt.finalize();
}
this._stmts = {};
@ -165,12 +209,18 @@ HistoryStore.prototype = {
_urlCols: ["url", "title", "frecency"],
get _allUrlStm() {
return this._getStmt(
"SELECT url " +
"FROM moz_places " +
"WHERE last_visit_date > :cutoff_date " +
"ORDER BY frecency DESC " +
"LIMIT :max_results");
// Filter out hidden pages and framed link visits. See `pullNewChanges`
// for more info.
return this._getStmt(`
SELECT DISTINCT p.url
FROM moz_places p
JOIN moz_historyvisits v ON p.id = v.place_id
WHERE p.last_visit_date > :cutoff_date AND
p.hidden = 0 AND
v.visit_type NOT IN (0,
${PlacesUtils.history.TRANSITION_FRAMED_LINK})
ORDER BY frecency DESC
LIMIT :max_results`);
},
_allUrlCols: ["url"],

Просмотреть файл

@ -43,25 +43,25 @@ const EHTestsCommon = {
"/1.1/janedoe/info/collections": handler_401,
// Maintenance or overloaded (503 + Retry-After) at info/collections.
"/maintenance/1.1/broken.info/info/collections": EHTestsCommon.service_unavailable,
"/1.1/broken.info/info/collections": EHTestsCommon.service_unavailable,
// Maintenance or overloaded (503 + Retry-After) at meta/global.
"/maintenance/1.1/broken.meta/storage/meta/global": EHTestsCommon.service_unavailable,
"/maintenance/1.1/broken.meta/info/collections": collectionsHelper.handler,
"/1.1/broken.meta/storage/meta/global": EHTestsCommon.service_unavailable,
"/1.1/broken.meta/info/collections": collectionsHelper.handler,
// Maintenance or overloaded (503 + Retry-After) at crypto/keys.
"/maintenance/1.1/broken.keys/storage/meta/global": upd("meta", global.handler()),
"/maintenance/1.1/broken.keys/info/collections": collectionsHelper.handler,
"/maintenance/1.1/broken.keys/storage/crypto/keys": EHTestsCommon.service_unavailable,
"/1.1/broken.keys/storage/meta/global": upd("meta", global.handler()),
"/1.1/broken.keys/info/collections": collectionsHelper.handler,
"/1.1/broken.keys/storage/crypto/keys": EHTestsCommon.service_unavailable,
// Maintenance or overloaded (503 + Retry-After) at wiping collection.
"/maintenance/1.1/broken.wipe/info/collections": collectionsHelper.handler,
"/maintenance/1.1/broken.wipe/storage/meta/global": upd("meta", global.handler()),
"/maintenance/1.1/broken.wipe/storage/crypto/keys":
"/1.1/broken.wipe/info/collections": collectionsHelper.handler,
"/1.1/broken.wipe/storage/meta/global": upd("meta", global.handler()),
"/1.1/broken.wipe/storage/crypto/keys":
upd("crypto", (new ServerWBO("keys")).handler()),
"/maintenance/1.1/broken.wipe/storage": EHTestsCommon.service_unavailable,
"/maintenance/1.1/broken.wipe/storage/clients": upd("clients", clientsColl.handler()),
"/maintenance/1.1/broken.wipe/storage/catapult": EHTestsCommon.service_unavailable
"/1.1/broken.wipe/storage": EHTestsCommon.service_unavailable,
"/1.1/broken.wipe/storage/clients": upd("clients", clientsColl.handler()),
"/1.1/broken.wipe/storage/catapult": EHTestsCommon.service_unavailable
});
},
@ -93,9 +93,7 @@ const EHTestsCommon = {
},
async setUp(server) {
await configureIdentity({ username: "johndoe" });
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity({ username: "johndoe" }, server);
return EHTestsCommon.generateAndUploadKeys()
},

Просмотреть файл

@ -295,7 +295,7 @@ function assert_success_ping(ping) {
ok(!!ping);
assert_valid_ping(ping);
ping.syncs.forEach(record => {
ok(!record.failureReason);
ok(!record.failureReason, JSON.stringify(record.failureReason));
equal(undefined, record.status);
greater(record.engines.length, 0);
for (let e of record.engines) {

Просмотреть файл

@ -143,7 +143,7 @@ add_test(function test_get_changed_ids() {
advance_test();
});
add_test(function test_disabled_install_semantics() {
add_task(async function test_disabled_install_semantics() {
_("Ensure that syncing a disabled add-on preserves proper state.");
// This is essentially a test for bug 712542, which snuck into the original
@ -151,12 +151,11 @@ add_test(function test_disabled_install_semantics() {
// disabled state and incoming syncGUID is preserved, even on the next sync.
const USER = "foo";
const PASSWORD = "password";
const PASSPHRASE = "abcdeabcdeabcdeabcdeabcdea";
const ADDON_ID = "addon1@tests.mozilla.org";
let server = new SyncServer();
server.start();
new SyncTestingInfrastructure(server.server, USER, PASSWORD, PASSPHRASE);
await SyncTestingInfrastructure(server, USER, PASSWORD);
generateNewKeys(Service.collectionKeys);
@ -199,9 +198,7 @@ add_test(function test_disabled_install_semantics() {
// At this point the non-restartless extension should be staged for install.
// Don't need this server any more.
let cb = Async.makeSpinningCallback();
amoServer.stop(cb);
cb.wait();
await promiseStopServer(amoServer);
// We ensure the reconciler has recorded the proper ID and enabled state.
let addon = reconciler.getAddonStateFromSyncGUID(id);
@ -224,7 +221,7 @@ add_test(function test_disabled_install_semantics() {
do_check_eq(ADDON_ID, payload.addonID);
do_check_false(payload.enabled);
server.stop(advance_test);
promiseStopServer(server);
});
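promiseStopServer is used throughout this patch in place of the spinning-callback and run_next_test shutdown dances; assuming it simply wraps the callback-style server.stop() shown above, the helper amounts to:

// Hypothetical body; the real helper lives in the shared sync test utilities.
function promiseStopServer(server) {
  return new Promise(resolve => server.stop(resolve));
}
// Usage, as in the converted test above:
//   await promiseStopServer(amoServer);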
add_test(function cleanup() {

Просмотреть файл

@ -23,7 +23,7 @@ const store = engine._store;
store._log.level = Log.Level.Trace;
engine._log.level = Log.Level.Trace;
function setup() {
async function setup() {
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
@ -32,10 +32,16 @@ function setup() {
generateNewKeys(Service.collectionKeys);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
// The bookmarks engine *always* tracks changes, meaning we might try
// and sync due to the bookmarks we ourselves create! Worse, because we
// do an engine sync only, there's no locking - so we end up with multiple
// syncs running. Neuter that by making the threshold very large.
Service.scheduler.syncThreshold = 10000000;
Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup...
return { server, collection };
@ -130,7 +136,7 @@ async function validate(collection, expectedFailures = []) {
add_task(async function test_dupe_bookmark() {
_("Ensure that a bookmark we consider a dupe is handled correctly.");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -181,7 +187,7 @@ add_task(async function test_dupe_bookmark() {
add_task(async function test_dupe_reparented_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -247,7 +253,7 @@ add_task(async function test_dupe_reparented_bookmark() {
add_task(async function test_dupe_reparented_locally_changed_bookmark() {
_("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -324,7 +330,7 @@ add_task(async function test_dupe_reparented_to_earlier_appearing_parent_bookmar
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"appears in the same sync before the dupe item");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -401,7 +407,7 @@ add_task(async function test_dupe_reparented_to_later_appearing_parent_bookmark(
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally as we process the child, but does appear in the same sync");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -478,7 +484,7 @@ add_task(async function test_dupe_reparented_to_future_arriving_parent_bookmark(
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally and doesn't appear in this Sync is handled correctly");
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
@ -598,7 +604,7 @@ add_task(async function test_dupe_empty_folder() {
_("Ensure that an empty folder we consider a dupe is handled correctly.");
// Empty folders aren't particularly interesting in practice (as that seems
// an edge-case) but duping folders with items is broken - bug 1293163.
let { server, collection } = this.setup();
let { server, collection } = await this.setup();
try {
// The folder we will end up duping away.

Просмотреть файл

@ -51,7 +51,7 @@ add_task(async function test_delete_invalid_roots_from_server() {
let store = engine._store;
let tracker = engine._tracker;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
@ -108,7 +108,7 @@ add_task(async function test_change_during_sync() {
let store = engine._store;
let tracker = engine._tracker;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
@ -256,7 +256,7 @@ add_task(async function test_change_during_sync() {
add_task(async function bad_record_allIDs() {
let server = new SyncServer();
server.start();
let syncTesting = new SyncTestingInfrastructure(server.server);
let syncTesting = await SyncTestingInfrastructure(server);
_("Ensure that bad Places queries don't cause an error in getAllIDs.");
let engine = new BookmarksEngine(Service);
@ -285,6 +285,12 @@ add_task(async function bad_record_allIDs() {
});
function serverForFoo(engine) {
// The bookmarks engine *always* tracks changes, meaning we might try
// and sync due to the bookmarks we ourselves create! Worse, because we
// do an engine sync only, there's no locking - so we end up with multiple
// syncs running. Neuter that by making the threshold very large.
Service.scheduler.syncThreshold = 10000000;
return serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
@ -298,7 +304,7 @@ add_task(async function test_processIncoming_error_orderChildren() {
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
@ -368,7 +374,7 @@ add_task(async function test_restorePromptsReupload() {
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
@ -538,7 +544,7 @@ add_task(async function test_mismatched_types() {
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
_("GUID: " + store.GUIDForId(6, true));
@ -583,7 +589,7 @@ add_task(async function test_bookmark_guidMap_fail() {
let server = serverForFoo(engine);
let coll = server.user("foo").collection("bookmarks");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
// Add one item to the server.
let itemID = PlacesUtils.bookmarks.createFolder(

Просмотреть файл

@ -15,10 +15,9 @@ function prepareBookmarkItem(collection, id) {
return b;
}
function run_test() {
ensureLegacyIdentityManager();
Service.identity.username = "john@example.com";
Service.identity.syncKey = "abcdeabcdeabcdeabcdeabcdea";
add_task(async function test_bookmark_record() {
await configureIdentity();
generateNewKeys(Service.collectionKeys);
let keyBundle = Service.identity.syncKeyBundle;
@ -45,4 +44,4 @@ function run_test() {
do_check_eq(payload.stuff, "my payload here");
do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
do_check_neq(payload, bookmarkItem.payload); // wrap.data.payload is the encrypted one
}
});

Просмотреть файл

@ -59,7 +59,7 @@ function serverForFoo(engine) {
// handled locally.
add_task(async function test_annotation_uploaded() {
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let startCount = smartBookmarkCount();
@ -168,13 +168,13 @@ add_task(async function test_annotation_uploaded() {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_smart_bookmarks_duped() {
add_task(async function test_smart_bookmarks_duped() {
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let parent = PlacesUtils.toolbarFolderId;
let uri =
@ -219,7 +219,7 @@ add_test(function test_smart_bookmarks_duped() {
} finally {
// Clean up.
store.wipe();
server.stop(do_test_finished);
await promiseStopServer(server);
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
}
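The same mechanical conversion recurs across these files; a side-by-side sketch of the pattern (test body elided):

// Before: callback-style, ends the run explicitly.
add_test(function test_something() {
  // ... assertions ...
  server.stop(run_next_test);
});

// After: task-style, awaits shutdown via the promiseStopServer sketch above.
add_task(async function test_something() {
  // ... assertions ...
  await promiseStopServer(server);
});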

Просмотреть файл

@ -37,7 +37,19 @@ function check_record_version(user, id) {
equal("1.5", cleartext.protocols[1]);
}
add_test(function test_bad_hmac() {
// Compare two command arrays, taking into account that a flowID attribute
// must exist and be unique among the commands, but isn't specified in
// "expected" because its value isn't known.
function compareCommands(actual, expected, description) {
let tweakedActual = JSON.parse(JSON.stringify(actual));
tweakedActual.map(elt => delete elt.flowID);
deepEqual(tweakedActual, expected, description);
// each item must have a unique flowID.
let allIDs = new Set(actual.map(elt => elt.flowID).filter(fid => !!fid));
equal(allIDs.size, actual.length, "all items have unique IDs");
}
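A hypothetical call, mirroring how the converted clients tests use it (the command name and args here are illustrative only):

compareCommands(clientRecord.commands,
                [{ command: "displayURI", args: [uri, senderID, "A page title"] }],
                "client should have exactly one displayURI command queued");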
add_task(async function test_bad_hmac() {
_("Ensure that Clients engine deletes corrupt records.");
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
@ -81,10 +93,8 @@ add_test(function test_bad_hmac() {
}
try {
ensureLegacyIdentityManager();
let passphrase = "abcdeabcdeabcdeabcdeabcdea";
Service.serverURL = server.baseURI;
Service.login("foo", "ilovejane", passphrase);
await configureIdentity({username: "foo"}, server);
Service.login("foo");
generateNewKeys(Service.collectionKeys);
@ -174,7 +184,7 @@ add_test(function test_bad_hmac() {
} finally {
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
@ -193,7 +203,7 @@ add_test(function test_properties() {
}
});
add_test(function test_full_sync() {
add_task(async function test_full_sync() {
_("Ensure that Clients engine fetches all records for each sync.");
let now = Date.now() / 1000;
@ -206,7 +216,7 @@ add_test(function test_full_sync() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let activeID = Utils.makeGUID();
@ -261,12 +271,12 @@ add_test(function test_full_sync() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_sync() {
add_task(async function test_sync() {
_("Ensure that Clients engine uploads a new client record once a week.");
let contents = {
@ -278,7 +288,7 @@ add_test(function test_sync() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
function clientWBO() {
@ -316,7 +326,7 @@ add_test(function test_sync() {
} finally {
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
@ -514,7 +524,7 @@ add_test(function test_process_incoming_commands() {
ok(!engine.processIncomingCommands());
});
add_test(function test_filter_duplicate_names() {
add_task(async function test_filter_duplicate_names() {
_("Ensure that we exclude clients with identical names that haven't synced in a week.");
let now = Date.now() / 1000;
@ -527,7 +537,7 @@ add_test(function test_filter_duplicate_names() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
// Synced recently.
@ -665,12 +675,12 @@ add_test(function test_filter_duplicate_names() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_command_sync() {
add_task(async function test_command_sync() {
_("Ensure that commands are synced across clients.");
engine._store.wipe();
@ -683,7 +693,7 @@ add_test(function test_command_sync() {
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let user = server.user("foo");
let remoteId = Utils.makeGUID();
@ -743,7 +753,7 @@ add_test(function test_command_sync() {
let collection = server.getCollection("foo", "clients");
collection.remove(remoteId);
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
@ -870,7 +880,7 @@ add_test(function test_optional_client_fields() {
run_next_test();
});
add_test(function test_merge_commands() {
add_task(async function test_merge_commands() {
_("Verifies local commands for remote clients are merged with the server's");
let now = Date.now() / 1000;
@ -883,7 +893,7 @@ add_test(function test_merge_commands() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let desktopID = Utils.makeGUID();
@ -944,12 +954,12 @@ add_test(function test_merge_commands() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_duplicate_remote_commands() {
add_task(async function test_duplicate_remote_commands() {
_("Verifies local commands for remote clients are sent only once (bug 1289287)");
let now = Date.now() / 1000;
@ -962,7 +972,7 @@ add_test(function test_duplicate_remote_commands() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let desktopID = Utils.makeGUID();
@ -1014,12 +1024,12 @@ add_test(function test_duplicate_remote_commands() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_upload_after_reboot() {
add_task(async function test_upload_after_reboot() {
_("Multiple downloads, reboot, then upload (bug 1289287)");
let now = Date.now() / 1000;
@ -1032,7 +1042,7 @@ add_test(function test_upload_after_reboot() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let deviceBID = Utils.makeGUID();
@ -1105,12 +1115,12 @@ add_test(function test_upload_after_reboot() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_keep_cleared_commands_after_reboot() {
add_task(async function test_keep_cleared_commands_after_reboot() {
_("Download commands, fail upload, reboot, then apply new commands (bug 1289287)");
let now = Date.now() / 1000;
@ -1123,7 +1133,7 @@ add_test(function test_keep_cleared_commands_after_reboot() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let deviceBID = Utils.makeGUID();
@ -1223,12 +1233,12 @@ add_test(function test_keep_cleared_commands_after_reboot() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_deleted_commands() {
add_task(async function test_deleted_commands() {
_("Verifies commands for a deleted client are discarded");
let now = Date.now() / 1000;
@ -1241,7 +1251,7 @@ add_test(function test_deleted_commands() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let activeID = Utils.makeGUID();
@ -1292,12 +1302,12 @@ add_test(function test_deleted_commands() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_send_uri_ack() {
add_task(async function test_send_uri_ack() {
_("Ensure a sent URI is deleted when the client syncs");
let now = Date.now() / 1000;
@ -1310,7 +1320,7 @@ add_test(function test_send_uri_ack() {
let server = serverForUsers({"foo": "password"}, contents);
let user = server.user("foo");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
try {
@ -1355,12 +1365,12 @@ add_test(function test_send_uri_ack() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});
add_test(function test_command_sync() {
add_task(async function test_command_sync() {
_("Notify other clients when writing their record.");
engine._store.wipe();
@ -1373,7 +1383,7 @@ add_test(function test_command_sync() {
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let user = server.user("foo");
let collection = server.getCollection("foo", "clients");
@ -1427,7 +1437,7 @@ add_test(function test_command_sync() {
try {
server.deleteCollections("foo");
} finally {
server.stop(run_next_test);
await promiseStopServer(server);
}
}
});

Просмотреть файл

@ -7,17 +7,14 @@ Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
function run_test() {
add_task(async function test_clients_escape() {
_("Set up test fixtures.");
ensureLegacyIdentityManager();
Service.identity.username = "john@example.com";
Service.clusterURL = "http://fakebase/";
await configureIdentity();
let baseUri = "http://fakebase/1.1/foo/storage/";
let pubUri = baseUri + "keys/pubkey";
let privUri = baseUri + "keys/privkey";
Service.identity.syncKey = "abcdeabcdeabcdeabcdeabcdea";
let keyBundle = Service.identity.syncKeyBundle;
let engine = Service.clientsEngine;
@ -61,4 +58,4 @@ function run_test() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});

Просмотреть файл

@ -24,8 +24,6 @@ add_identity_test(this, async function test_missing_crypto_collection() {
};
}
await configureIdentity({username: "johndoe"});
let handlers = {
"/1.1/johndoe/info/collections": maybe_empty(johnHelper.handler),
"/1.1/johndoe/storage/crypto/keys": johnU("crypto", new ServerWBO("keys").handler()),
@ -41,7 +39,7 @@ add_identity_test(this, async function test_missing_crypto_collection() {
johnU(coll, new ServerCollection({}, true).handler());
}
let server = httpd_setup(handlers);
Service.serverURL = server.baseURI;
await configureIdentity({username: "johndoe"}, server);
try {
let fresh = 0;

Просмотреть файл

@ -14,8 +14,6 @@ Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
add_task(async function test_locally_changed_keys() {
let passphrase = "abcdeabcdeabcdeabcdeabcdea";
let hmacErrorCount = 0;
function counting(f) {
return function() {
@ -53,9 +51,9 @@ add_task(async function test_locally_changed_keys() {
getBrowserState: () => JSON.stringify(myTabs)
};
setBasicCredentials("johndoe", "password", passphrase);
Service.serverURL = server.baseURI;
Service.clusterURL = server.baseURI;
await configureIdentity({ username: "johndoe" }, server);
// We aren't doing a .login yet, so fudge the cluster URL.
Service.clusterURL = Service.identity._token.endpoint;
Service.engineManager.register(HistoryEngine);
Service.engineManager.unregister("addons");
@ -82,7 +80,7 @@ add_task(async function test_locally_changed_keys() {
do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
// Check that login works.
do_check_true(Service.login("johndoe", "ilovejane", passphrase));
do_check_true(Service.login("johndoe"));
do_check_true(Service.isLoggedIn);
// Sync should upload records.
@ -208,8 +206,6 @@ function run_test() {
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
validate_all_future_pings();
ensureLegacyIdentityManager();
run_next_test();
}

Просмотреть файл

@ -8,7 +8,7 @@ Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/rotaryengine.js");
Cu.import("resource://testing-common/services/sync/utils.js");
add_test(function test_processIncoming_abort() {
add_task(async function test_processIncoming_abort() {
_("An abort exception, raised in applyIncoming, will abort _processIncoming.");
let engine = new RotaryEngine(Service);
@ -21,7 +21,7 @@ add_test(function test_processIncoming_abort() {
"/1.1/foo/storage/rotary": collection.handler()
});
new SyncTestingInfrastructure(server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
_("Create some server data.");
@ -59,7 +59,7 @@ add_test(function test_processIncoming_abort() {
do_check_eq(err, undefined);
server.stop(run_next_test);
await promiseStopServer(server);
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
});

Просмотреть файл

@ -52,8 +52,6 @@ function run_test() {
Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
ensureLegacyIdentityManager();
run_next_test();
}
@ -101,7 +99,7 @@ add_identity_test(this, async function test_401_logout() {
}
// Make sync fail due to login rejected.
await configureIdentity({username: "janedoe"});
await configureIdentity({username: "janedoe"}, server);
Service._updateCachedURLs();
_("Starting first sync.");
@ -406,21 +404,19 @@ add_identity_test(this, function test_shouldReportLoginFailureWithNoCluster() {
do_check_false(errorHandler.shouldReportError());
});
// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
// an fxaccounts environment?
add_task(async function test_login_syncAndReportErrors_non_network_error() {
// Test non-network errors are reported
// when calling syncAndReportErrors
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
Service.identity.resetSyncKey();
let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
await promiseObserved;
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
clean();
await promiseStopServer(server);
@ -452,25 +448,24 @@ add_identity_test(this, async function test_sync_syncAndReportErrors_non_network
do_check_eq(Status.sync, CREDENTIALS_CHANGED);
// If we clean this tick, telemetry won't get the right error
await promiseStopServer(server);
await promiseNextTick();
clean();
await promiseStopServer(server);
});
// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
// an fxaccounts environment?
add_task(async function test_login_syncAndReportErrors_prolonged_non_network_error() {
// Test prolonged, non-network errors are
// reported when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
Service.identity.resetSyncKey();
let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
await promiseObserved;
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
clean();
await promiseStopServer(server);
@ -502,8 +497,9 @@ add_identity_test(this, async function test_sync_syncAndReportErrors_prolonged_n
do_check_eq(Status.sync, CREDENTIALS_CHANGED);
// If we clean this tick, telemetry won't get the right error
await promiseStopServer(server);
await promiseNextTick();
clean();
await promiseStopServer(server);
});
add_identity_test(this, async function test_login_syncAndReportErrors_network_error() {
@ -581,7 +577,7 @@ add_task(async function test_login_prolonged_non_network_error() {
// Test prolonged, non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
Service.identity.resetSyncKey();
let promiseObserved = promiseOneObserver("weave:ui:login:error");
@ -620,8 +616,8 @@ add_task(async function test_sync_prolonged_non_network_error() {
await promiseObserved;
do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
do_check_true(errorHandler.didReportProlongedError);
await promiseStopServer(server);
clean();
await promiseStopServer(server);
});
add_identity_test(this, async function test_login_prolonged_network_error() {
@ -663,14 +659,14 @@ add_task(async function test_login_non_network_error() {
// Test non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
Service.identity.resetSyncKey();
let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
await promiseObserved;
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
do_check_false(errorHandler.didReportProlongedError);
clean();
@ -767,8 +763,8 @@ add_identity_test(this, async function test_sync_server_maintenance_error() {
do_check_eq(Status.sync, SERVER_MAINTENANCE);
do_check_false(errorHandler.didReportProlongedError);
await promiseStopServer(server);
clean();
await promiseStopServer(server);
});
add_identity_test(this, async function test_info_collections_login_server_maintenance_error() {
@ -776,10 +772,7 @@ add_identity_test(this, async function test_info_collections_login_server_mainte
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
Service.username = "broken.info";
await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.info"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -817,9 +810,7 @@ add_identity_test(this, async function test_meta_global_login_server_maintenance
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.meta"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {

Просмотреть файл

@ -52,8 +52,6 @@ function run_test() {
Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
ensureLegacyIdentityManager();
run_next_test();
}
@ -71,9 +69,7 @@ add_identity_test(this, async function test_crypto_keys_login_server_maintenance
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
// Force re-download of keys
Service.collectionKeys.clear();
@ -144,9 +140,7 @@ add_identity_test(this, async function test_info_collections_login_prolonged_ser
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.info"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -178,9 +172,7 @@ add_identity_test(this, async function test_meta_global_login_prolonged_server_m
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.meta"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -212,9 +204,7 @@ add_identity_test(this, async function test_download_crypto_keys_login_prolonged
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
// Force re-download of keys
Service.collectionKeys.clear();
@ -247,9 +237,7 @@ add_identity_test(this, async function test_upload_crypto_keys_login_prolonged_s
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -282,9 +270,7 @@ add_identity_test(this, async function test_wipeServer_login_prolonged_server_ma
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.wipe"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -317,9 +303,7 @@ add_identity_test(this, async function test_wipeRemote_prolonged_server_maintena
let server = EHTestsCommon.sync_httpd_setup();
server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.wipe"}, server);
EHTestsCommon.generateAndUploadKeys();
let engine = engineManager.get("catapult");
@ -387,9 +371,7 @@ add_identity_test(this, async function test_info_collections_login_syncAndReport
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.info"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -422,9 +404,7 @@ add_identity_test(this, async function test_meta_global_login_syncAndReportError
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.meta"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -457,9 +437,7 @@ add_identity_test(this, async function test_download_crypto_keys_login_syncAndRe
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
// Force re-download of keys
Service.collectionKeys.clear();
@ -494,9 +472,7 @@ add_identity_test(this, async function test_upload_crypto_keys_login_syncAndRepo
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -529,9 +505,7 @@ add_identity_test(this, async function test_wipeServer_login_syncAndReportErrors
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.wipe"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -563,9 +537,7 @@ add_identity_test(this, async function test_wipeRemote_syncAndReportErrors_serve
// wiping all remote devices.
let server = EHTestsCommon.sync_httpd_setup();
await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.wipe"}, server);
EHTestsCommon.generateAndUploadKeys();
let engine = engineManager.get("catapult");
@ -635,9 +607,7 @@ add_identity_test(this, async function test_info_collections_login_syncAndReport
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.info"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -672,9 +642,7 @@ add_identity_test(this, async function test_meta_global_login_syncAndReportError
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.meta"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -709,9 +677,7 @@ add_identity_test(this, async function test_download_crypto_keys_login_syncAndRe
let server = EHTestsCommon.sync_httpd_setup();
await EHTestsCommon.setUp(server);
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
// Force re-download of keys
Service.collectionKeys.clear();
@ -748,9 +714,7 @@ add_identity_test(this, async function test_upload_crypto_keys_login_syncAndRepo
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.keys"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@ -785,9 +749,7 @@ add_identity_test(this, async function test_wipeServer_login_syncAndReportErrors
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
await configureIdentity({username: "broken.wipe"}, server);
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {

Просмотреть файл

@ -44,9 +44,7 @@ function sync_httpd_setup(infoHandler) {
}
async function setUp(server) {
await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity({username: "johndoe"}, server);
new FakeCryptoService();
}

Просмотреть файл

@ -54,9 +54,7 @@ function sync_httpd_setup() {
}
async function setUp(server) {
await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity({username: "johndoe"}, server);
new FakeCryptoService();
}

Просмотреть файл

@ -16,7 +16,7 @@ add_test(function test_setup() {
PlacesTestUtils.clearHistory().then(run_next_test);
});
add_test(function test_processIncoming_mobile_history_batched() {
add_task(async function test_processIncoming_mobile_history_batched() {
_("SyncEngine._processIncoming works on history engine.");
let FAKE_DOWNLOAD_LIMIT = 100;
@ -37,7 +37,7 @@ add_test(function test_processIncoming_mobile_history_batched() {
"/1.1/foo/storage/history": collection.handler()
});
new SyncTestingInfrastructure(server);
await SyncTestingInfrastructure(server);
// Let's create some 234 server side history records. They're all at least
// 10 minutes old.
@ -132,11 +132,10 @@ add_test(function test_processIncoming_mobile_history_batched() {
}
} finally {
PlacesTestUtils.clearHistory().then(() => {
server.stop(do_test_finished);
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
});
await PlacesTestUtils.clearHistory();
await promiseStopServer(server);
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
}
});

Просмотреть файл

@ -1,6 +1,7 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
Cu.import("resource://gre/modules/PlacesDBUtils.jsm");
Cu.import("resource://gre/modules/PlacesUtils.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://services-sync/engines.js");
@ -9,17 +10,6 @@ Cu.import("resource://services-sync/engines/history.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
function onScoreUpdated(callback) {
Svc.Obs.add("weave:engine:score:updated", function observer() {
Svc.Obs.remove("weave:engine:score:updated", observer);
try {
callback();
} catch (ex) {
do_throw(ex);
}
});
}
Service.engineManager.clear();
Service.engineManager.register(HistoryEngine);
var engine = Service.engineManager.get("history");
@ -28,37 +18,18 @@ var tracker = engine._tracker;
// Don't write out by default.
tracker.persistChangedIDs = false;
var _counter = 0;
function addVisit() {
let uriString = "http://getfirefox.com/" + _counter++;
async function addVisit(suffix, referrer = null, transition = PlacesUtils.history.TRANSITION_LINK) {
let uriString = "http://getfirefox.com/" + suffix;
let uri = Utils.makeURI(uriString);
_("Adding visit for URI " + uriString);
let place = {
uri: uri,
visits: [ {
visitDate: Date.now() * 1000,
transitionType: PlacesUtils.history.TRANSITION_LINK
} ]
};
let cb = Async.makeSpinningCallback();
PlacesUtils.asyncHistory.updatePlaces(place, {
handleError: function () {
_("Error adding visit for " + uriString);
cb(new Error("Error adding history entry"));
},
handleResult: function () {
},
handleCompletion: function () {
_("Added visit for " + uriString);
cb();
}
await PlacesTestUtils.addVisits({
uri,
visitDate: Date.now() * 1000,
transition,
referrer,
});
// Spin the event loop to embed this async call in a sync API.
cb.wait();
return uri;
}
@ -68,99 +39,145 @@ function run_test() {
run_next_test();
}
add_test(function test_empty() {
async function verifyTrackerEmpty() {
let changes = engine.pullNewChanges();
equal(changes.count(), 0);
equal(tracker.score, 0);
}
async function verifyTrackedCount(expected) {
let changes = engine.pullNewChanges();
equal(changes.count(), expected);
}
async function verifyTrackedItems(tracked) {
let changes = engine.pullNewChanges();
let trackedIDs = new Set(changes.ids());
for (let guid of tracked) {
ok(changes.has(guid), `${guid} should be tracked`);
ok(changes.getModifiedTimestamp(guid) > 0,
`${guid} should have a modified time`);
trackedIDs.delete(guid);
}
equal(trackedIDs.size, 0, `Unhandled tracked IDs: ${
JSON.stringify(Array.from(trackedIDs))}`);
}
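These helpers lean on a small Changeset surface; the shape below is inferred purely from their usage in this patch, not a definitive definition:

// Assumed surface, for orientation only.
let changes = engine.pullNewChanges();
equal(typeof changes.count(), "number");   // size of the changeset
let ids = new Set(changes.ids());          // ids() is iterable, as verifyTrackedItems assumes
ok(!changes.has("nonexistent-guid"));      // membership check
// changes.getModifiedTimestamp(guid) is expected to be > 0 once a GUID is tracked.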
async function startTracking() {
Svc.Obs.notify("weave:engine:start-tracking");
}
async function stopTracking() {
Svc.Obs.notify("weave:engine:stop-tracking");
}
async function resetTracker() {
tracker.clearChangedIDs();
tracker.resetScore();
}
async function cleanup() {
await PlacesTestUtils.clearHistory();
await resetTracker();
await stopTracking();
}
add_task(async function test_empty() {
_("Verify we've got an empty, disabled tracker to work with.");
do_check_empty(tracker.changedIDs);
do_check_eq(tracker.score, 0);
await verifyTrackerEmpty();
do_check_false(tracker._isTracking);
run_next_test();
await cleanup();
});
add_test(function test_not_tracking(next) {
add_task(async function test_not_tracking() {
_("Create history item. Won't show because we haven't started tracking yet");
addVisit();
Utils.nextTick(function() {
do_check_empty(tracker.changedIDs);
do_check_eq(tracker.score, 0);
run_next_test();
});
await addVisit("not_tracking");
await verifyTrackerEmpty();
await cleanup();
});
add_test(function test_start_tracking() {
add_task(async function test_start_tracking() {
_("Add hook for save completion.");
tracker.persistChangedIDs = true;
tracker.onSavedChangedIDs = function () {
_("changedIDs written to disk. Proceeding.");
// Turn this back off.
tracker.persistChangedIDs = false;
delete tracker.onSavedChangedIDs;
run_next_test();
};
let savePromise = new Promise(resolve => {
tracker.persistChangedIDs = true;
tracker.onSavedChangedIDs = function () {
// Turn this back off.
tracker.persistChangedIDs = false;
delete tracker.onSavedChangedIDs;
resolve();
};
});
_("Tell the tracker to start tracking changes.");
onScoreUpdated(function() {
_("Score updated in test_start_tracking.");
do_check_attribute_count(tracker.changedIDs, 1);
do_check_eq(tracker.score, SCORE_INCREMENT_SMALL);
});
await startTracking();
let scorePromise = promiseOneObserver("weave:engine:score:updated");
await addVisit("start_tracking");
await scorePromise;
Svc.Obs.notify("weave:engine:start-tracking");
addVisit();
_("Score updated in test_start_tracking.");
await verifyTrackedCount(1);
do_check_eq(tracker.score, SCORE_INCREMENT_SMALL);
await savePromise;
_("changedIDs written to disk. Proceeding.");
await cleanup();
});
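promiseOneObserver replaces the removed onScoreUpdated helper; assuming it mirrors that helper's add/remove-observer dance, a minimal sketch is:

// Hypothetical body; the shared test utilities provide the real one.
function promiseOneObserver(topic) {
  return new Promise(resolve => {
    Svc.Obs.add(topic, function onObserved(subject, data) {
      Svc.Obs.remove(topic, onObserved);
      resolve({ subject, data });
    });
  });
}
// e.g. let scorePromise = promiseOneObserver("weave:engine:score:updated");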
add_test(function test_start_tracking_twice() {
_("Verifying preconditions from test_start_tracking.");
do_check_attribute_count(tracker.changedIDs, 1);
add_task(async function test_start_tracking_twice() {
_("Verifying preconditions.");
await startTracking();
await addVisit("start_tracking_twice1");
await verifyTrackedCount(1);
do_check_eq(tracker.score, SCORE_INCREMENT_SMALL);
_("Notifying twice won't do any harm.");
onScoreUpdated(function() {
_("Score updated in test_start_tracking_twice.");
do_check_attribute_count(tracker.changedIDs, 2);
do_check_eq(tracker.score, 2 * SCORE_INCREMENT_SMALL);
run_next_test();
});
await startTracking();
let scorePromise = promiseOneObserver("weave:engine:score:updated");
await addVisit("start_tracking_twice2");
await scorePromise;
Svc.Obs.notify("weave:engine:start-tracking");
addVisit();
_("Score updated in test_start_tracking_twice.");
await verifyTrackedCount(2);
do_check_eq(tracker.score, 2 * SCORE_INCREMENT_SMALL);
await cleanup();
});
add_test(function test_track_delete() {
add_task(async function test_track_delete() {
_("Deletions are tracked.");
// This isn't present because we weren't tracking when it was visited.
let uri = Utils.makeURI("http://getfirefox.com/0");
await addVisit("track_delete");
let uri = Utils.makeURI("http://getfirefox.com/track_delete");
let guid = engine._store.GUIDForUri(uri);
do_check_false(guid in tracker.changedIDs);
await verifyTrackerEmpty();
onScoreUpdated(function() {
do_check_true(guid in tracker.changedIDs);
do_check_attribute_count(tracker.changedIDs, 3);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE + 2 * SCORE_INCREMENT_SMALL);
run_next_test();
});
do_check_eq(tracker.score, 2 * SCORE_INCREMENT_SMALL);
await startTracking();
let scorePromise = promiseOneObserver("weave:engine:score:updated");
PlacesUtils.history.removePage(uri);
await scorePromise;
await verifyTrackedItems([guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
await cleanup();
});
add_test(function test_dont_track_expiration() {
add_task(async function test_dont_track_expiration() {
_("Expirations are not tracked.");
let uriToExpire = addVisit();
let uriToExpire = await addVisit("to_expire");
let guidToExpire = engine._store.GUIDForUri(uriToExpire);
let uriToRemove = addVisit();
let uriToRemove = await addVisit("to_remove");
let guidToRemove = engine._store.GUIDForUri(uriToRemove);
tracker.clearChangedIDs();
do_check_false(guidToExpire in tracker.changedIDs);
do_check_false(guidToRemove in tracker.changedIDs);
await resetTracker();
await verifyTrackerEmpty();
onScoreUpdated(function() {
do_check_false(guidToExpire in tracker.changedIDs);
do_check_true(guidToRemove in tracker.changedIDs);
do_check_attribute_count(tracker.changedIDs, 1);
run_next_test();
});
await startTracking();
let scorePromise = promiseOneObserver("weave:engine:score:updated");
// Observe expiration.
Services.obs.addObserver(function onExpiration(aSubject, aTopic, aData) {
@ -174,30 +191,61 @@ add_test(function test_dont_track_expiration() {
Cc["@mozilla.org/places/expiration;1"]
.getService(Ci.nsIObserver)
.observe(null, "places-debug-start-expiration", 1);
await scorePromise;
await verifyTrackedItems([guidToRemove]);
await cleanup();
});
add_test(function test_stop_tracking() {
add_task(async function test_stop_tracking() {
_("Let's stop tracking again.");
tracker.clearChangedIDs();
Svc.Obs.notify("weave:engine:stop-tracking");
addVisit();
Utils.nextTick(function() {
do_check_empty(tracker.changedIDs);
run_next_test();
});
await stopTracking();
await addVisit("stop_tracking");
await verifyTrackerEmpty();
await cleanup();
});
add_test(function test_stop_tracking_twice() {
add_task(async function test_stop_tracking_twice() {
await stopTracking();
await addVisit("stop_tracking_twice1");
_("Notifying twice won't do any harm.");
Svc.Obs.notify("weave:engine:stop-tracking");
addVisit();
Utils.nextTick(function() {
do_check_empty(tracker.changedIDs);
run_next_test();
});
await stopTracking();
await addVisit("stop_tracking_twice2");
await verifyTrackerEmpty();
await cleanup();
});
add_test(function cleanup() {
_("Clean up.");
PlacesTestUtils.clearHistory().then(run_next_test);
add_task(async function test_filter_hidden() {
await startTracking();
_("Add visit; should be hidden by the redirect");
let hiddenURI = await addVisit("hidden");
let hiddenGUID = engine._store.GUIDForUri(hiddenURI);
_(`Hidden visit GUID: ${hiddenGUID}`);
_("Add redirect visit; should be tracked");
let trackedURI = await addVisit("redirect", hiddenURI,
PlacesUtils.history.TRANSITION_REDIRECT_PERMANENT);
let trackedGUID = engine._store.GUIDForUri(trackedURI);
_(`Tracked visit GUID: ${trackedGUID}`);
_("Add visit for framed link; should be ignored");
let embedURI = await addVisit("framed_link", null,
PlacesUtils.history.TRANSITION_FRAMED_LINK);
let embedGUID = engine._store.GUIDForUri(embedURI);
_(`Framed link visit GUID: ${embedGUID}`);
_("Run Places maintenance to mark redirect visit as hidden");
let maintenanceFinishedPromise =
promiseOneObserver("places-maintenance-finished");
PlacesDBUtils.maintenanceOnIdle();
await maintenanceFinishedPromise;
await verifyTrackedItems([trackedGUID]);
await cleanup();
});

Просмотреть файл

@ -20,10 +20,6 @@ var hmacErrorCount = 0;
function shared_setup() {
hmacErrorCount = 0;
// Do not instantiate SyncTestingInfrastructure; we need real crypto.
ensureLegacyIdentityManager();
setBasicCredentials("foo", "foo", "aabcdeabcdeabcdeabcdeabcde");
// Make sure RotaryEngine is the only one we sync.
Service.engineManager._engines = {};
Service.engineManager.register(RotaryEngine);
@ -79,7 +75,9 @@ add_task(async function hmac_error_during_404() {
};
let server = sync_httpd_setup(handlers);
Service.serverURL = server.baseURI;
// Do not instantiate SyncTestingInfrastructure; we need real crypto.
await configureIdentity({ username: "foo" }, server);
Service.login();
try {
_("Syncing.");
@ -102,7 +100,7 @@ add_task(async function hmac_error_during_404() {
}
});
add_test(function hmac_error_during_node_reassignment() {
add_task(async function hmac_error_during_node_reassignment() {
_("Attempt to replicate an HMAC error during node reassignment.");
let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
@ -156,7 +154,9 @@ add_test(function hmac_error_during_node_reassignment() {
};
let server = sync_httpd_setup(handlers);
Service.serverURL = server.baseURI;
// Do not instantiate SyncTestingInfrastructure; we need real crypto.
await configureIdentity({ username: "foo" }, server);
_("Syncing.");
// First hit of clients will 401. This will happen after meta/global and
// keys -- i.e., in the middle of the sync, but before RotaryEngine.
@ -202,44 +202,46 @@ add_test(function hmac_error_during_node_reassignment() {
}
_("Make sure that syncing again causes recovery.");
onSyncFinished = function() {
_("== First sync done.");
_("---------------------------");
await new Promise(resolve => {
onSyncFinished = function() {
_("== Second (automatic) sync done.");
hasData = rotaryColl.wbo("flying") ||
rotaryColl.wbo("scotsman");
hasKeys = keysWBO.modified;
do_check_true(!hasData == !hasKeys);
_("== First sync done.");
_("---------------------------");
onSyncFinished = function() {
_("== Second (automatic) sync done.");
hasData = rotaryColl.wbo("flying") ||
rotaryColl.wbo("scotsman");
hasKeys = keysWBO.modified;
do_check_true(!hasData == !hasKeys);
// Kick off another sync. Can't just call it, because we're inside the
// lock...
Utils.nextTick(function() {
_("Now a fresh sync will get no HMAC errors.");
_("Partially resetting client, as if after a restart, and forcing redownload.");
Service.collectionKeys.clear();
engine.lastSync = 0;
hmacErrorCount = 0;
// Kick off another sync. Can't just call it, because we're inside the
// lock...
Utils.nextTick(function() {
_("Now a fresh sync will get no HMAC errors.");
_("Partially resetting client, as if after a restart, and forcing redownload.");
Service.collectionKeys.clear();
engine.lastSync = 0;
hmacErrorCount = 0;
onSyncFinished = function() {
// Two rotary items, one client record... no errors.
do_check_eq(hmacErrorCount, 0)
onSyncFinished = function() {
// Two rotary items, one client record... no errors.
do_check_eq(hmacErrorCount, 0)
Svc.Obs.remove("weave:service:sync:finish", obs);
Svc.Obs.remove("weave:service:sync:error", obs);
Svc.Obs.remove("weave:service:sync:finish", obs);
Svc.Obs.remove("weave:service:sync:error", obs);
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
server.stop(run_next_test);
};
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
server.stop(resolve);
};
Service.sync();
},
this);
Service.sync();
},
this);
};
};
};
onwards();
onwards();
});
});
function run_test() {

View file

@ -41,9 +41,7 @@ function sync_httpd_setup() {
}
async function setUp(server) {
await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity({username: "johndoe"}, server);
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);

View file

@ -27,13 +27,10 @@ function prepareCryptoWrap(collection, id) {
return w;
}
function run_test() {
add_task(async function test_records_crypto() {
let server;
do_test_pending();
ensureLegacyIdentityManager();
Service.identity.username = "john@example.com";
Service.identity.syncKey = "a-abcde-abcde-abcde-abcde-abcde";
await configureIdentity({ username: "john@example.com" });
let keyBundle = Service.identity.syncKeyBundle;
try {
@ -177,6 +174,6 @@ function run_test() {
log.info("Done!");
}
finally {
server.stop(do_test_finished);
await promiseStopServer(server);
}
}
});
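The old server.stop(do_test_finished) teardown becomes await promiseStopServer(server) here and in most of the files below. A plausible shape for that helper, assuming it only promisifies the callback-style stop():

function promiseStopServer(server) {
  // Resolve once the HTTP test server has fully shut down.
  return new Promise(resolve => server.stop(resolve));
}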

View file

@ -79,7 +79,6 @@ function test_fetch() {
function run_test() {
initTestLogging("Trace");
ensureLegacyIdentityManager();
test_toJSON();
test_fetch();

View file

@ -26,7 +26,8 @@ function uaHandler(f) {
};
}
function run_test() {
add_task(async function setup() {
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
meta_global = new ServerWBO('global');
server = httpd_setup({
@ -34,10 +35,7 @@ function run_test() {
"/1.1/johndoe/storage/meta/global": uaHandler(meta_global.handler()),
});
ensureLegacyIdentityManager();
setBasicCredentials("johndoe", "ilovejane");
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity({ username: "johndoe" }, server);
_("Server URL: " + server.baseURI);
// Note this string is missing the trailing ".destkop" as the test
@ -47,11 +45,11 @@ function run_test() {
" FxSync/" + WEAVE_VERSION + "." +
Services.appinfo.appBuildID;
run_next_test();
}
})
add_test(function test_fetchInfo() {
_("Testing _fetchInfo.");
Service.login();
Service._fetchInfo();
_("User-Agent: " + ua);
do_check_eq(ua, expectedUA + ".desktop");

View file

@ -43,8 +43,8 @@ function sync_httpd_setup() {
return httpd_setup(handlers);
}
function setUp(server) {
new SyncTestingInfrastructure(server, "johndoe", "ilovejane", "sekrit");
async function setUp(server) {
await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
}
function run_test() {
@ -78,24 +78,23 @@ add_test(function test_tracker_score_updated() {
}
});
add_test(function test_sync_triggered() {
add_task(async function test_sync_triggered() {
let server = sync_httpd_setup();
setUp(server);
await setUp(server);
Service.login();
Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
_("Sync completed!");
server.stop(run_next_test);
});
do_check_eq(Status.login, LOGIN_SUCCEEDED);
tracker.score += SCORE_INCREMENT_XLARGE;
await promiseOneObserver("weave:service:sync:finish")
await promiseStopServer(server);
});
add_test(function test_clients_engine_sync_triggered() {
add_task(async function test_clients_engine_sync_triggered() {
_("Ensure that client engine score changes trigger a sync.");
// The clients engine is not registered like other engines. Therefore,
@ -103,25 +102,22 @@ add_test(function test_clients_engine_sync_triggered() {
// global score tracker gives it that treatment. See bug 676042 for more.
let server = sync_httpd_setup();
setUp(server);
await setUp(server);
Service.login();
const TOPIC = "weave:service:sync:finish";
Svc.Obs.add(TOPIC, function onSyncFinish() {
Svc.Obs.remove(TOPIC, onSyncFinish);
_("Sync due to clients engine change completed.");
server.stop(run_next_test);
});
Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
do_check_eq(Status.login, LOGIN_SUCCEEDED);
Service.clientsEngine._tracker.score += SCORE_INCREMENT_XLARGE;
await promiseOneObserver("weave:service:sync:finish");
_("Sync due to clients engine change completed.");
await promiseStopServer(server);
});
add_test(function test_incorrect_credentials_sync_not_triggered() {
add_task(async function test_incorrect_credentials_sync_not_triggered() {
_("Ensure that score changes don't trigger a sync if Status.login != LOGIN_SUCCEEDED.");
let server = sync_httpd_setup();
setUp(server);
await setUp(server);
// Ensure we don't actually try to sync.
function onSyncStart() {
@ -129,21 +125,20 @@ add_test(function test_incorrect_credentials_sync_not_triggered() {
}
Svc.Obs.add("weave:service:sync:start", onSyncStart);
// First wait >100ms (nsITimers can take up to that much time to fire, so
// we can account for the timer in delayedAutoconnect) and then one event
// loop tick (to account for a possible call to weave:service:sync:start).
Utils.namedTimer(function() {
Utils.nextTick(function() {
Svc.Obs.remove("weave:service:sync:start", onSyncStart);
do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
Service.startOver();
server.stop(run_next_test);
});
}, 150, {}, "timer");
// Faking incorrect credentials to prevent score update.
Status.login = LOGIN_FAILED_LOGIN_REJECTED;
tracker.score += SCORE_INCREMENT_XLARGE;
// First wait >100ms (nsITimers can take up to that much time to fire, so
// we can account for the timer in delayedAutoconnect) and then one event
// loop tick (to account for a possible call to weave:service:sync:start).
await promiseNamedTimer(150, {}, "timer");
await promiseNextTick();
Svc.Obs.remove("weave:service:sync:start", onSyncStart);
do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
Service.startOver();
await promiseStopServer(server);
});
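The timing dance in test_incorrect_credentials_sync_not_triggered now goes through two awaitable helpers. Assumed shapes, simply wrapping the Utils.namedTimer and Utils.nextTick calls visible in the removed lines:

function promiseNamedTimer(wait, thisObj, name) {
  // One-shot named timer: resolve after wait milliseconds.
  return new Promise(resolve => Utils.namedTimer(resolve, wait, thisObj, name));
}

function promiseNextTick() {
  // Resolve on the next event-loop tick.
  return new Promise(resolve => Utils.nextTick(resolve));
}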

View file

@ -7,13 +7,12 @@ Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/fakeservices.js");
Cu.import("resource://testing-common/services/sync/utils.js");
function test_urls() {
add_task(async function test_urls() {
_("URL related Service properties correspond to preference settings.");
try {
ensureLegacyIdentityManager();
do_check_true(!!Service.serverURL); // actual value may change
do_check_eq(Service.clusterURL, "");
do_check_eq(Service.userBaseURL, undefined);
do_check_false(Service.userBaseURL);
do_check_eq(Service.infoURL, undefined);
do_check_eq(Service.storageURL, undefined);
do_check_eq(Service.metaURL, undefined);
@ -23,12 +22,12 @@ function test_urls() {
// Since we don't have a cluster URL yet, these will still not be defined.
do_check_eq(Service.infoURL, undefined);
do_check_eq(Service.userBaseURL, undefined);
do_check_false(Service.userBaseURL);
do_check_eq(Service.storageURL, undefined);
do_check_eq(Service.metaURL, undefined);
Service.serverURL = "http://weave.server/";
Service.clusterURL = "http://weave.cluster/";
Service.clusterURL = "http://weave.cluster/1.1/johndoe/";
do_check_eq(Service.userBaseURL, "http://weave.cluster/1.1/johndoe/");
do_check_eq(Service.infoURL,
@ -60,9 +59,6 @@ function test_urls() {
do_check_eq(Service.identity.username, null);
_("The 'serverURL' attributes updates/resets preferences.");
// Identical value doesn't do anything
Service.serverURL = Service.serverURL;
do_check_eq(Service.clusterURL, "http://weave.cluster/");
Service.serverURL = "http://different.auth.node/";
do_check_eq(Svc.Prefs.get("serverURL"), "http://different.auth.node/");
@ -71,10 +67,10 @@ function test_urls() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});
function test_syncID() {
add_test(function test_syncID() {
_("Service.syncID is auto-generated, corresponds to preference.");
new FakeGUIDService();
@ -92,10 +88,11 @@ function test_syncID() {
} finally {
Svc.Prefs.resetBranch("");
new FakeGUIDService();
run_next_test();
}
}
});
function test_locked() {
add_test(function test_locked() {
_("The 'locked' attribute can be toggled with lock() and unlock()");
// Defaults to false
@ -109,10 +106,5 @@ function test_locked() {
Service.unlock();
do_check_eq(Service.locked, false);
}
function run_test() {
test_urls();
test_syncID();
test_locked();
}
run_next_test();
});

View file

@ -9,7 +9,8 @@ Cu.import("resource://testing-common/services/sync/utils.js");
function login_handling(handler) {
return function (request, response) {
if (basic_auth_matches(request, "johndoe", "ilovejane")) {
if (request.hasHeader("Authorization") &&
request.getHeader("Authorization").includes('Hawk id="id"')) {
handler(request, response);
} else {
let body = "Unauthorized";
@ -19,7 +20,7 @@ function login_handling(handler) {
};
}
function run_test() {
add_task(async function run_test() {
let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
@ -27,7 +28,6 @@ function run_test() {
let upd = collectionsHelper.with_updated_collection;
let collections = collectionsHelper.collections;
do_test_pending();
let server = httpd_setup({
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
"/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()),
@ -38,7 +38,7 @@ function run_test() {
try {
_("Set up test fixtures.");
new SyncTestingInfrastructure(server, "johndoe", "ilovejane", "foo");
await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
Service.scheduler.globalScore = GLOBAL_SCORE;
// Avoid daily ping
Svc.Prefs.set("lastPing", Math.floor(Date.now() / 1000));
@ -54,7 +54,8 @@ function run_test() {
do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
_("Simulate having changed the password somewhere else.");
Service.identity.basicPassword = "ilovejosephine";
Service.identity._token.id = "somethingelse";
Service.identity.unlockAndVerifyAuthState = () => Promise.resolve(LOGIN_FAILED_LOGIN_REJECTED);
_("Let's try to sync.");
Service.sync();
@ -79,6 +80,6 @@ function run_test() {
} finally {
Svc.Prefs.resetBranch("");
server.stop(do_test_finished);
await promiseStopServer(server);
}
}
});

View file

@ -57,7 +57,7 @@ function sync_httpd_setup(handlers) {
return httpd_setup(handlers);
}
function setUp() {
async function setUp() {
syncedEngines = [];
let engine = Service.engineManager.get("steam");
engine.enabled = true;
@ -70,8 +70,7 @@ function setUp() {
let server = sync_httpd_setup({
"/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
});
new SyncTestingInfrastructure(server, "johndoe", "ilovejane",
"abcdeabcdeabcdeabcdeabcdea");
await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
return server;
}
@ -84,9 +83,9 @@ function run_test() {
run_next_test();
}
add_test(function test_noEngines() {
add_task(async function test_noEngines() {
_("Test: An empty array of engines to sync does nothing.");
let server = setUp();
let server = await setUp();
try {
_("Sync with no engines specified.");
@ -95,13 +94,13 @@ add_test(function test_noEngines() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_oneEngine() {
add_task(async function test_oneEngine() {
_("Test: Only one engine is synced.");
let server = setUp();
let server = await setUp();
try {
@ -111,13 +110,13 @@ add_test(function test_oneEngine() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_bothEnginesSpecified() {
add_task(async function test_bothEnginesSpecified() {
_("Test: All engines are synced when specified in the correct order (1).");
let server = setUp();
let server = await setUp();
try {
_("Sync with both engines specified.");
@ -126,13 +125,13 @@ add_test(function test_bothEnginesSpecified() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_bothEnginesSpecified() {
add_task(async function test_bothEnginesSpecified() {
_("Test: All engines are synced when specified in the correct order (2).");
let server = setUp();
let server = await setUp();
try {
_("Sync with both engines specified.");
@ -141,13 +140,13 @@ add_test(function test_bothEnginesSpecified() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_bothEnginesDefault() {
add_task(async function test_bothEnginesDefault() {
_("Test: All engines are synced when nothing is specified.");
let server = setUp();
let server = await setUp();
try {
Service.sync();
@ -155,6 +154,6 @@ add_test(function test_bothEnginesDefault() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});

View file

@ -68,9 +68,8 @@ function sync_httpd_setup(handlers) {
return httpd_setup(handlers);
}
function setUp(server) {
new SyncTestingInfrastructure(server, "johndoe", "ilovejane",
"abcdeabcdeabcdeabcdeabcdea");
async function setUp(server) {
await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
// Ensure that the server has valid keys so that logging in will work and not
// result in a server wipe, rendering many of these tests useless.
generateNewKeys(Service.collectionKeys);
@ -91,14 +90,14 @@ function run_test() {
run_next_test();
}
add_test(function test_newAccount() {
add_task(async function test_newAccount() {
_("Test: New account does not disable locally enabled engines.");
let engine = Service.engineManager.get("steam");
let server = sync_httpd_setup({
"/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
"/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
});
setUp(server);
await setUp(server);
try {
_("Engine is enabled from the beginning.");
@ -113,11 +112,11 @@ add_test(function test_newAccount() {
do_check_true(engine.enabled);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_enabledLocally() {
add_task(async function test_enabledLocally() {
_("Test: Engine is disabled on remote clients and enabled locally");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -128,7 +127,7 @@ add_test(function test_enabledLocally() {
"/1.1/johndoe/storage/meta/global": metaWBO.handler(),
"/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
});
setUp(server);
await setUp(server);
try {
_("Enable engine locally.");
@ -144,11 +143,11 @@ add_test(function test_enabledLocally() {
do_check_true(engine.enabled);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_disabledLocally() {
add_task(async function test_disabledLocally() {
_("Test: Engine is enabled on remote clients and disabled locally");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -164,7 +163,7 @@ add_test(function test_disabledLocally() {
"/1.1/johndoe/storage/meta/global": metaWBO.handler(),
"/1.1/johndoe/storage/steam": steamCollection.handler()
});
setUp(server);
await setUp(server);
try {
_("Disable engine locally.");
@ -186,11 +185,11 @@ add_test(function test_disabledLocally() {
do_check_false(engine.enabled);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_disabledLocally_wipe503() {
add_task(async function test_disabledLocally_wipe503() {
_("Test: Engine is enabled on remote clients and disabled locally");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -213,7 +212,7 @@ add_test(function test_disabledLocally_wipe503() {
"/1.1/johndoe/storage/meta/global": metaWBO.handler(),
"/1.1/johndoe/storage/steam": service_unavailable
});
setUp(server);
await setUp(server);
_("Disable engine locally.");
Service._ignorePrefObserver = true;
@ -221,20 +220,18 @@ add_test(function test_disabledLocally_wipe503() {
Service._ignorePrefObserver = false;
engine.enabled = false;
Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
Svc.Obs.remove("weave:ui:sync:error", onSyncError);
do_check_eq(Service.status.sync, SERVER_MAINTENANCE);
Service.startOver();
server.stop(run_next_test);
});
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
_("Sync.");
Service.errorHandler.syncAndReportErrors();
await promiseObserved;
do_check_eq(Service.status.sync, SERVER_MAINTENANCE);
Service.startOver();
await promiseStopServer(server);
});
add_test(function test_enabledRemotely() {
add_task(async function test_enabledRemotely() {
_("Test: Engine is disabled locally and enabled on a remote client");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -251,7 +248,7 @@ add_test(function test_enabledRemotely() {
"/1.1/johndoe/storage/steam":
upd("steam", new ServerWBO("steam", {}).handler())
});
setUp(server);
await setUp(server);
// We need to be very careful how we do this, so that we don't trigger a
// fresh start!
@ -274,11 +271,11 @@ add_test(function test_enabledRemotely() {
do_check_eq(metaWBO.data.engines.steam.syncID, engine.syncID);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_disabledRemotelyTwoClients() {
add_task(async function test_disabledRemotelyTwoClients() {
_("Test: Engine is enabled locally and disabled on a remote client... with two clients.");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -292,7 +289,7 @@ add_test(function test_disabledRemotelyTwoClients() {
"/1.1/johndoe/storage/steam":
upd("steam", new ServerWBO("steam", {}).handler())
});
setUp(server);
await setUp(server);
try {
_("Enable engine locally.");
@ -318,11 +315,11 @@ add_test(function test_disabledRemotelyTwoClients() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_disabledRemotely() {
add_task(async function test_disabledRemotely() {
_("Test: Engine is enabled locally and disabled on a remote client");
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
@ -333,7 +330,7 @@ add_test(function test_disabledRemotely() {
"/1.1/johndoe/storage/meta/global": metaWBO.handler(),
"/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler()
});
setUp(server);
await setUp(server);
try {
_("Enable engine locally.");
@ -349,11 +346,11 @@ add_test(function test_disabledRemotely() {
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_dependentEnginesEnabledLocally() {
add_task(async function test_dependentEnginesEnabledLocally() {
_("Test: Engine is disabled on remote clients and enabled locally");
Service.syncID = "abcdefghij";
let steamEngine = Service.engineManager.get("steam");
@ -366,7 +363,7 @@ add_test(function test_dependentEnginesEnabledLocally() {
"/1.1/johndoe/storage/steam": new ServerWBO("steam", {}).handler(),
"/1.1/johndoe/storage/stirling": new ServerWBO("stirling", {}).handler()
});
setUp(server);
await setUp(server);
try {
_("Enable engine locally. Doing it on one is enough.");
@ -384,11 +381,11 @@ add_test(function test_dependentEnginesEnabledLocally() {
do_check_true(stirlingEngine.enabled);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
add_test(function test_dependentEnginesDisabledLocally() {
add_task(async function test_dependentEnginesDisabledLocally() {
_("Test: Two dependent engines are enabled on remote clients and disabled locally");
Service.syncID = "abcdefghij";
let steamEngine = Service.engineManager.get("steam");
@ -410,7 +407,7 @@ add_test(function test_dependentEnginesDisabledLocally() {
"/1.1/johndoe/storage/steam": steamCollection.handler(),
"/1.1/johndoe/storage/stirling": stirlingCollection.handler()
});
setUp(server);
await setUp(server);
try {
_("Disable engines locally. Doing it on one is enough.");
@ -437,6 +434,6 @@ add_test(function test_dependentEnginesDisabledLocally() {
do_check_false(stirlingEngine.enabled);
} finally {
Service.startOver();
server.stop(run_next_test);
await promiseStopServer(server);
}
});
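One detail worth noting in test_disabledLocally_wipe503 above: the observer promise is created before syncAndReportErrors() runs, so a notification fired during the call cannot be missed. The pattern in isolation:

// Subscribe first, act second, await last; reversing the first two steps
// could drop a synchronously delivered notification.
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
Service.errorHandler.syncAndReportErrors();
await promiseObserved;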

View file

@ -60,7 +60,7 @@ add_identity_test(this, async function test_wipeServer_list_success() {
try {
await setUpTestFixtures(server);
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
_("Confirm initial environment.");
do_check_false(steam_coll.deleted);
@ -94,7 +94,7 @@ add_identity_test(this, async function test_wipeServer_list_503() {
try {
await setUpTestFixtures(server);
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
_("Confirm initial environment.");
do_check_false(steam_coll.deleted);
@ -142,7 +142,7 @@ add_identity_test(this, async function test_wipeServer_all_success() {
await setUpTestFixtures(server);
_("Try deletion.");
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
let returnedTimestamp = Service.wipeServer();
do_check_true(deleted);
do_check_eq(returnedTimestamp, serverTimestamp);
@ -174,7 +174,7 @@ add_identity_test(this, async function test_wipeServer_all_404() {
await setUpTestFixtures(server);
_("Try deletion.");
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
let returnedTimestamp = Service.wipeServer();
do_check_true(deleted);
do_check_eq(returnedTimestamp, serverTimestamp);
@ -203,7 +203,7 @@ add_identity_test(this, async function test_wipeServer_all_503() {
_("Try deletion.");
let error;
try {
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
Service.wipeServer();
do_throw("Should have thrown!");
} catch (ex) {

View file

@ -10,12 +10,13 @@ function makeSteamEngine() {
return new SyncEngine('Steam', Service);
}
var server;
var server = httpd_setup({});
function test_url_attributes() {
add_task(async function test_url_attributes() {
_("SyncEngine url attributes");
let syncTesting = new SyncTestingInfrastructure(server);
Service.clusterURL = "https://cluster/";
let syncTesting = await SyncTestingInfrastructure(server);
Service.clusterURL = "https://cluster/1.1/foo/";
let engine = makeSteamEngine();
try {
do_check_eq(engine.storageURL, "https://cluster/1.1/foo/storage/");
@ -24,11 +25,11 @@ function test_url_attributes() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});
function test_syncID() {
add_task(async function test_syncID() {
_("SyncEngine.syncID corresponds to preference");
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
@ -44,11 +45,11 @@ function test_syncID() {
} finally {
Svc.Prefs.resetBranch("");
}
}
})
function test_lastSync() {
add_task(async function test_lastSync() {
_("SyncEngine.lastSync and SyncEngine.lastSyncLocal correspond to preferences");
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
@ -74,11 +75,11 @@ function test_lastSync() {
} finally {
Svc.Prefs.resetBranch("");
}
}
})
function test_toFetch() {
add_task(async function test_toFetch() {
_("SyncEngine.toFetch corresponds to file on disk");
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
const filename = "weave/toFetch/steam.json";
let engine = makeSteamEngine();
try {
@ -104,11 +105,11 @@ function test_toFetch() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});
function test_previousFailed() {
add_task(async function test_previousFailed() {
_("SyncEngine.previousFailed corresponds to file on disk");
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
const filename = "weave/failed/steam.json";
let engine = makeSteamEngine();
try {
@ -134,11 +135,11 @@ function test_previousFailed() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});
function test_resetClient() {
add_task(async function test_resetClient() {
_("SyncEngine.resetClient resets lastSync and toFetch");
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
@ -159,9 +160,9 @@ function test_resetClient() {
} finally {
Svc.Prefs.resetBranch("");
}
}
});
function test_wipeServer() {
add_task(async function test_wipeServer() {
_("SyncEngine.wipeServer deletes server data and resets the client.");
let engine = makeSteamEngine();
@ -170,7 +171,7 @@ function test_wipeServer() {
let server = httpd_setup({
"/1.1/foo/storage/steam": steamCollection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
do_test_pending();
try {
@ -188,17 +189,8 @@ function test_wipeServer() {
server.stop(do_test_finished);
Svc.Prefs.resetBranch("");
}
}
});
function run_test() {
server = httpd_setup({});
test_url_attributes();
test_syncID();
test_lastSync();
test_toFetch();
test_previousFailed();
test_resetClient();
test_wipeServer();
server.stop(run_next_test);
}
add_task(async function finish() {
await promiseStopServer(server);
});
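SyncTestingInfrastructure is now awaited and no longer takes a passphrase argument. A hedged sketch of what the awaitable form presumably does, reusing the configureIdentity helper that appears elsewhere in this patch; the real implementation may set up additional state:

async function SyncTestingInfrastructure(server, username = "foo", password = "password") {
  // Point the Service at the test server and install fake credentials.
  await configureIdentity({ username }, server);
  return { server, username, password };
}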

View file

@ -21,9 +21,9 @@ function clean() {
Service.recordManager.clearCache();
}
function cleanAndGo(server) {
async function cleanAndGo(server) {
clean();
server.stop(run_next_test);
await promiseStopServer(server);
}
async function promiseClean(server) {
@ -38,7 +38,7 @@ function configureService(server, username, password) {
Service.identity.basicPassword = password || "password";
}
function createServerAndConfigureClient() {
async function createServerAndConfigureClient() {
let engine = new RotaryEngine(Service);
let contents = {
@ -54,9 +54,7 @@ function createServerAndConfigureClient() {
server.createContents(USER, contents);
server.start();
Service.serverURL = server.baseURI;
Service.clusterURL = server.baseURI;
Service.identity.username = USER;
await SyncTestingInfrastructure(server, USER);
Service._updateCachedURLs();
return [engine, server, USER];
@ -82,7 +80,7 @@ function run_test() {
* different scenarios below.
*/
add_test(function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
add_task(async function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
_("SyncEngine._syncStartup resets sync and wipes server data if there's no or an outdated global record");
// Some server side data that's going to be wiped
@ -98,7 +96,7 @@ add_test(function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
@ -130,11 +128,11 @@ add_test(function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
do_check_eq(collection.payload("scotsman"), undefined);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_syncStartup_serverHasNewerVersion() {
add_task(async function test_syncStartup_serverHasNewerVersion() {
_("SyncEngine._syncStartup ");
let global = new ServerWBO('global', {engines: {rotary: {version: 23456}}});
@ -142,7 +140,7 @@ add_test(function test_syncStartup_serverHasNewerVersion() {
"/1.1/foo/storage/meta/global": global.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
@ -159,16 +157,16 @@ add_test(function test_syncStartup_serverHasNewerVersion() {
do_check_eq(error.failureCode, VERSION_OUT_OF_DATE);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_syncStartup_syncIDMismatchResetsClient() {
add_task(async function test_syncStartup_syncIDMismatchResetsClient() {
_("SyncEngine._syncStartup resets sync if syncIDs don't match");
let server = sync_httpd_setup({});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
// global record with a different syncID than our engine has
@ -195,12 +193,12 @@ add_test(function test_syncStartup_syncIDMismatchResetsClient() {
do_check_eq(engine.lastSync, 0);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_emptyServer() {
add_task(async function test_processIncoming_emptyServer() {
_("SyncEngine._processIncoming working with an empty server backend");
let collection = new ServerCollection();
@ -208,7 +206,7 @@ add_test(function test_processIncoming_emptyServer() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
@ -219,12 +217,12 @@ add_test(function test_processIncoming_emptyServer() {
do_check_eq(engine.lastSync, 0);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_createFromServer() {
add_task(async function test_processIncoming_createFromServer() {
_("SyncEngine._processIncoming creates new records from server data");
// Some server records that will be downloaded
@ -247,7 +245,7 @@ add_test(function test_processIncoming_createFromServer() {
"/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
generateNewKeys(Service.collectionKeys);
@ -280,12 +278,12 @@ add_test(function test_processIncoming_createFromServer() {
do_check_eq(engine._store.items['../pathological'], "Pathological Case");
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_reconcile() {
add_task(async function test_processIncoming_reconcile() {
_("SyncEngine._processIncoming updates local records");
let collection = new ServerCollection();
@ -332,7 +330,7 @@ add_test(function test_processIncoming_reconcile() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
@ -391,16 +389,16 @@ add_test(function test_processIncoming_reconcile() {
// The 'nukeme' record marked as deleted is removed.
do_check_eq(engine._store.items.nukeme, undefined);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_reconcile_local_deleted() {
add_task(async function test_processIncoming_reconcile_local_deleted() {
_("Ensure local, duplicate ID is deleted on server.");
// When a duplicate is resolved, the local ID (which is never taken) should
// be deleted on the server.
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -427,13 +425,13 @@ add_test(function test_processIncoming_reconcile_local_deleted() {
do_check_eq(1, collection.count());
do_check_neq(undefined, collection.wbo("DUPE_INCOMING"));
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_reconcile_equivalent() {
add_task(async function test_processIncoming_reconcile_equivalent() {
_("Ensure proper handling of incoming records that match local.");
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -450,17 +448,17 @@ add_test(function test_processIncoming_reconcile_equivalent() {
do_check_attribute_count(engine._store.items, 1);
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_reconcile_locally_deleted_dupe_new() {
add_task(async function test_processIncoming_reconcile_locally_deleted_dupe_new() {
_("Ensure locally deleted duplicate record newer than incoming is handled.");
// This is a somewhat complicated test. It ensures that if a client receives
// a modified record for an item that is deleted locally but with a different
// ID that the incoming record is ignored. This is a corner case for record
// handling, but it needs to be supported.
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -489,16 +487,16 @@ add_test(function test_processIncoming_reconcile_locally_deleted_dupe_new() {
let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
do_check_true(payload.deleted);
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_reconcile_locally_deleted_dupe_old() {
add_task(async function test_processIncoming_reconcile_locally_deleted_dupe_old() {
_("Ensure locally deleted duplicate record older than incoming is restored.");
// This is similar to the above test except it tests the condition where the
// incoming record is newer than the local deletion, therefore overriding it.
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -528,13 +526,13 @@ add_test(function test_processIncoming_reconcile_locally_deleted_dupe_old() {
let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
do_check_eq("incoming", payload.denomination);
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_reconcile_changed_dupe() {
add_task(async function test_processIncoming_reconcile_changed_dupe() {
_("Ensure that locally changed duplicate record is handled properly.");
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -565,15 +563,15 @@ add_test(function test_processIncoming_reconcile_changed_dupe() {
let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
do_check_eq("local", payload.denomination);
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_reconcile_changed_dupe_new() {
add_task(async function test_processIncoming_reconcile_changed_dupe_new() {
_("Ensure locally changed duplicate record older than incoming is ignored.");
// This test is similar to the above except the incoming record is younger
// than the local record. The incoming record should be authoritative.
let [engine, server, user] = createServerAndConfigureClient();
let [engine, server, user] = await createServerAndConfigureClient();
let now = Date.now() / 1000 - 10;
engine.lastSync = now;
@ -602,10 +600,10 @@ add_test(function test_processIncoming_reconcile_changed_dupe_new() {
do_check_neq(undefined, wbo);
let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
do_check_eq("incoming", payload.denomination);
cleanAndGo(server);
await cleanAndGo(server);
});
add_test(function test_processIncoming_mobile_batchSize() {
add_task(async function test_processIncoming_mobile_batchSize() {
_("SyncEngine._processIncoming doesn't fetch everything at once on mobile clients");
Svc.Prefs.set("client.type", "mobile");
@ -634,7 +632,7 @@ add_test(function test_processIncoming_mobile_batchSize() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
let meta_global = Service.recordManager.set(engine.metaURL,
@ -671,7 +669,7 @@ add_test(function test_processIncoming_mobile_batchSize() {
}
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
@ -709,7 +707,7 @@ add_task(async function test_processIncoming_store_toFetch() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -743,7 +741,7 @@ add_task(async function test_processIncoming_store_toFetch() {
});
add_test(function test_processIncoming_resume_toFetch() {
add_task(async function test_processIncoming_resume_toFetch() {
_("toFetch and previousFailed items left over from previous syncs are fetched on the next sync, along with new items.");
Service.identity.username = "foo";
@ -782,7 +780,7 @@ add_test(function test_processIncoming_resume_toFetch() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -807,12 +805,12 @@ add_test(function test_processIncoming_resume_toFetch() {
do_check_eq(engine._store.items.failed2, "Record No. 2");
do_check_eq(engine.previousFailed.length, 0);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_applyIncomingBatchSize_smaller() {
add_task(async function test_processIncoming_applyIncomingBatchSize_smaller() {
_("Ensure that a number of incoming items less than applyIncomingBatchSize is still applied.");
Service.identity.username = "foo";
@ -840,7 +838,7 @@ add_test(function test_processIncoming_applyIncomingBatchSize_smaller() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -862,12 +860,12 @@ add_test(function test_processIncoming_applyIncomingBatchSize_smaller() {
do_check_eq(engine.previousFailed[1], "record-no-8");
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_applyIncomingBatchSize_multiple() {
add_task(async function test_processIncoming_applyIncomingBatchSize_multiple() {
_("Ensure that incoming items are applied according to applyIncomingBatchSize.");
Service.identity.username = "foo";
@ -896,7 +894,7 @@ add_test(function test_processIncoming_applyIncomingBatchSize_multiple() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -915,12 +913,12 @@ add_test(function test_processIncoming_applyIncomingBatchSize_multiple() {
do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE * 3);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_notify_count() {
add_task(async function test_processIncoming_notify_count() {
_("Ensure that failed records are reported only once.");
Service.identity.username = "foo";
@ -948,7 +946,7 @@ add_test(function test_processIncoming_notify_count() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -1004,12 +1002,12 @@ add_test(function test_processIncoming_notify_count() {
Svc.Obs.remove("weave:engine:sync:applied", onApplied);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_previousFailed() {
add_task(async function test_processIncoming_previousFailed() {
_("Ensure that failed records are retried.");
Service.identity.username = "foo";
Svc.Prefs.set("client.type", "mobile");
@ -1038,7 +1036,7 @@ add_test(function test_processIncoming_previousFailed() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -1090,12 +1088,12 @@ add_test(function test_processIncoming_previousFailed() {
do_check_eq(engine._store.items['record-no-12'], "Record No. 12");
do_check_eq(engine._store.items['record-no-13'], "Record No. 13");
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_processIncoming_failed_records() {
add_task(async function test_processIncoming_failed_records() {
_("Ensure that failed records from _reconcile and applyIncomingBatch are refetched.");
Service.identity.username = "foo";
@ -1154,7 +1152,7 @@ add_test(function test_processIncoming_failed_records() {
"/1.1/foo/storage/rotary": recording_handler(collection)
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -1224,7 +1222,7 @@ add_test(function test_processIncoming_failed_records() {
do_check_eq(batchDownload(BOGUS_RECORDS.length), 4);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
@ -1267,7 +1265,7 @@ add_task(async function test_processIncoming_decrypt_failed() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -1310,7 +1308,7 @@ add_task(async function test_processIncoming_decrypt_failed() {
});
add_test(function test_uploadOutgoing_toEmptyServer() {
add_task(async function test_uploadOutgoing_toEmptyServer() {
_("SyncEngine._uploadOutgoing uploads new records to server");
Service.identity.username = "foo";
@ -1324,7 +1322,7 @@ add_test(function test_uploadOutgoing_toEmptyServer() {
"/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
@ -1364,12 +1362,12 @@ add_test(function test_uploadOutgoing_toEmptyServer() {
do_check_eq(collection.payload("flying"), undefined);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_uploadOutgoing_huge() {
add_task(async function test_uploadOutgoing_huge() {
Service.identity.username = "foo";
let collection = new ServerCollection();
collection._wbos.flying = new ServerWBO('flying');
@ -1380,7 +1378,7 @@ add_test(function test_uploadOutgoing_huge() {
"/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
@ -1429,7 +1427,7 @@ add_task(async function test_uploadOutgoing_failed() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
engine.lastSync = 123; // needs to be non-zero so that tracker is queried
@ -1482,7 +1480,7 @@ add_task(async function test_uploadOutgoing_failed() {
POST requests. More comprehensive unit-tests for this "batching" are in
test_postqueue.js.
*/
add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
add_task(async function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
_("SyncEngine._uploadOutgoing uploads in batches of MAX_UPLOAD_RECORDS");
Service.identity.username = "foo";
@ -1524,7 +1522,7 @@ add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
try {
@ -1543,11 +1541,11 @@ add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
do_check_eq(noOfUploads, Math.ceil(234/MAX_UPLOAD_RECORDS));
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_uploadOutgoing_largeRecords() {
add_task(async function test_uploadOutgoing_largeRecords() {
_("SyncEngine._uploadOutgoing throws on records larger than MAX_UPLOAD_BYTES");
Service.identity.username = "foo";
@ -1569,7 +1567,7 @@ add_test(function test_uploadOutgoing_largeRecords() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
try {
engine._syncStartup();
@ -1581,17 +1579,17 @@ add_test(function test_uploadOutgoing_largeRecords() {
}
ok(!!error);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_syncFinish_noDelete() {
add_task(async function test_syncFinish_noDelete() {
_("SyncEngine._syncFinish resets tracker's score");
let server = httpd_setup({});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
engine._delete = {}; // Nothing to delete
engine._tracker.score = 100;
@ -1603,7 +1601,7 @@ add_test(function test_syncFinish_noDelete() {
});
add_test(function test_syncFinish_deleteByIds() {
add_task(async function test_syncFinish_deleteByIds() {
_("SyncEngine._syncFinish deletes server records slated for deletion (list of record IDs).");
Service.identity.username = "foo";
@ -1621,7 +1619,7 @@ add_test(function test_syncFinish_deleteByIds() {
let server = httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
@ -1638,12 +1636,12 @@ add_test(function test_syncFinish_deleteByIds() {
do_check_eq(engine._delete.ids, undefined);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_syncFinish_deleteLotsInBatches() {
add_task(async function test_syncFinish_deleteLotsInBatches() {
_("SyncEngine._syncFinish deletes server records in batches of 100 (list of record IDs).");
Service.identity.username = "foo";
@ -1672,7 +1670,7 @@ add_test(function test_syncFinish_deleteLotsInBatches() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
@ -1709,7 +1707,7 @@ add_test(function test_syncFinish_deleteLotsInBatches() {
do_check_eq(engine._delete.ids, undefined);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
@ -1723,7 +1721,7 @@ add_task(async function test_sync_partialUpload() {
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
@ -1789,7 +1787,7 @@ add_task(async function test_sync_partialUpload() {
}
});
add_test(function test_canDecrypt_noCryptoKeys() {
add_task(async function test_canDecrypt_noCryptoKeys() {
_("SyncEngine.canDecrypt returns false if the engine fails to decrypt items on the server, e.g. due to a missing crypto key collection.");
Service.identity.username = "foo";
@ -1805,18 +1803,18 @@ add_test(function test_canDecrypt_noCryptoKeys() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
do_check_false(engine.canDecrypt());
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_canDecrypt_true() {
add_task(async function test_canDecrypt_true() {
_("SyncEngine.canDecrypt returns true if the engine can decrypt the items on the server.");
Service.identity.username = "foo";
@ -1831,19 +1829,19 @@ add_test(function test_canDecrypt_true() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
do_check_true(engine.canDecrypt());
} finally {
cleanAndGo(server);
await cleanAndGo(server);
}
});
add_test(function test_syncapplied_observer() {
add_task(async function test_syncapplied_observer() {
Service.identity.username = "foo";
const NUMBER_OF_RECORDS = 10;
@ -1862,7 +1860,7 @@ add_test(function test_syncapplied_observer() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
@ -1895,7 +1893,7 @@ add_test(function test_syncapplied_observer() {
do_check_true(Service.scheduler.hasIncomingItems);
} finally {
cleanAndGo(server);
await cleanAndGo(server);
Service.scheduler.hasIncomingItems = false;
Svc.Obs.remove("weave:engine:sync:applied", onApplied);
}
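Taken together, the hunks in this file follow one recurring shape: add_test plus run_next_test or server.stop(run_next_test) becomes an add_task whose body awaits the setup and teardown helpers. An illustrative composite (the test name is made up, not a literal hunk from the patch):

add_task(async function test_example_sketch() {
  let server = sync_httpd_setup({ /* collection handlers */ });
  let syncTesting = await SyncTestingInfrastructure(server);
  try {
    // ... drive the engine under test ...
  } finally {
    await cleanAndGo(server); // clean() + await promiseStopServer(server)
  }
});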

View file

@ -58,9 +58,7 @@ function sync_httpd_setup() {
}
async function setUp(server) {
await configureIdentity({username: "johndoe"});
Service.clusterURL = server.baseURI + "/";
await configureIdentity({username: "johndoe"}, server);
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
@ -692,13 +690,18 @@ add_identity_test(this, async function test_no_sync_node() {
let server = sync_httpd_setup();
await setUp(server);
Service.serverURL = server.baseURI + "/";
oldfc = Service._clusterManager._findCluster;
Service._clusterManager._findCluster = () => null;
Service.clusterURL = "";
try {
Service.sync();
do_check_eq(Status.sync, NO_SYNC_NODE_FOUND);
do_check_eq(scheduler.syncTimer.delay, NO_SYNC_NODE_INTERVAL);
Service.sync();
do_check_eq(Status.sync, NO_SYNC_NODE_FOUND);
do_check_eq(scheduler.syncTimer.delay, NO_SYNC_NODE_INTERVAL);
await cleanUpAndGo(server);
await cleanUpAndGo(server);
} finally {
Service._clusterManager._findCluster = oldfc;
}
});
add_identity_test(this, async function test_sync_failed_partial_500s() {

View file

@ -49,7 +49,7 @@ add_test(function test_getOpenURLs() {
run_next_test();
});
add_test(function test_tab_engine_skips_incoming_local_record() {
add_task(async function test_tab_engine_skips_incoming_local_record() {
_("Ensure incoming records that match local client ID are never applied.");
let [engine, store] = getMocks();
let localID = engine.service.clientsEngine.localID;
@ -78,7 +78,7 @@ add_test(function test_tab_engine_skips_incoming_local_record() {
"/1.1/foo/storage/tabs": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
let syncTesting = await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let meta_global = Service.recordManager.set(engine.metaURL,
@ -88,17 +88,20 @@ add_test(function test_tab_engine_skips_incoming_local_record() {
generateNewKeys(Service.collectionKeys);
let syncFinish = engine._syncFinish;
engine._syncFinish = function () {
equal(applied.length, 1, "Remote client record was applied");
equal(applied[0].id, remoteID, "Remote client ID matches");
let promiseFinished = new Promise(resolve => {
let syncFinish = engine._syncFinish;
engine._syncFinish = function () {
equal(applied.length, 1, "Remote client record was applied");
equal(applied[0].id, remoteID, "Remote client ID matches");
syncFinish.call(engine);
run_next_test();
}
syncFinish.call(engine);
resolve();
}
});
_("Start sync");
engine._sync();
await promiseFinished;
});
add_test(function test_reconcile() {

View file

@ -101,7 +101,7 @@ add_task(async function test_processIncoming_error() {
syncID: engine.syncID}}}},
bookmarks: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
try {
// Create a bogus record that when synced down will provoke a
@ -124,7 +124,7 @@ add_task(async function test_processIncoming_error() {
}
ok(!!error);
ok(!!ping);
equal(ping.uid, "0".repeat(32));
equal(ping.uid, "f".repeat(32)); // as setup by SyncTestingInfrastructure
deepEqual(ping.failureReason, {
name: "othererror",
error: "error.engine.reason.record_download_fail"
@ -151,7 +151,7 @@ add_task(async function test_uploading() {
syncID: engine.syncID}}}},
bookmarks: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let parent = PlacesUtils.toolbarFolderId;
let uri = Utils.makeURI("http://getfirefox.com/");
@ -200,7 +200,7 @@ add_task(async function test_upload_failed() {
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
await SyncTestingInfrastructure(server);
let engine = new RotaryEngine(Service);
engine.lastSync = 123; // needs to be non-zero so that tracker is queried
@ -248,7 +248,7 @@ add_task(async function test_sync_partialUpload() {
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = new RotaryEngine(Service);
@ -331,7 +331,7 @@ add_task(async function test_generic_engine_fail() {
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
let e = new Error("generic failure message")
engine._errToThrow = e;
@ -358,7 +358,7 @@ add_task(async function test_engine_fail_ioerror() {
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
// create an IOError to re-throw as part of Sync.
try {
// (Note that fakeservices.js has replaced Utils.jsonMove etc, but for
@ -398,7 +398,7 @@ add_task(async function test_initial_sync_engines() {
conf[e] = {};
}
let server = serverForUsers({"foo": "password"}, conf);
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
try {
let ping = await wait_for_ping(() => Service.sync(), true);
@ -431,7 +431,7 @@ add_task(async function test_nserror() {
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
try {
let ping = await sync_and_validate_telem(true);
@ -461,7 +461,6 @@ add_identity_test(this, async function test_discarding() {
let server;
try {
await configureIdentity({ username: "johndoe" });
let handlers = {
"/1.1/johndoe/info/collections": helper.handler,
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
@ -475,7 +474,7 @@ add_identity_test(this, async function test_discarding() {
}
server = httpd_setup(handlers);
Service.serverURL = server.baseURI;
await configureIdentity({ username: "johndoe" }, server);
telem.submit = () => ok(false, "Submitted telemetry ping when we should not have");
for (let i = 0; i < 5; ++i) {
@ -506,7 +505,7 @@ add_task(async function test_no_foreign_engines_in_error_ping() {
steam: {}
});
engine._errToThrow = new Error("Oh no!");
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
try {
let ping = await sync_and_validate_telem(true);
equal(ping.status.service, SYNC_FAILED_PARTIAL);
@ -527,7 +526,7 @@ add_task(async function test_sql_error() {
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
engine._sync = function() {
// Just grab a DB connection and issue a bogus SQL statement synchronously.
let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
@ -553,7 +552,7 @@ add_task(async function test_no_foreign_engines_in_success_ping() {
steam: {}
});
new SyncTestingInfrastructure(server.server);
await SyncTestingInfrastructure(server);
try {
let ping = await sync_and_validate_telem();
ok(ping.engines.every(e => e.name !== "bogus"));

View file

@ -9,6 +9,8 @@ import time
from copy import deepcopy
import mozversion
from mozprofile import Profile
from mozrunner import Runner, FennecEmulatorRunner
@ -47,7 +49,7 @@ class GeckoInstance(object):
"dom.ipc.cpows.forbid-unsafe-from-browser": False,
}
def __init__(self, host, port, bin, profile=None, addons=None,
def __init__(self, host=None, port=None, bin=None, profile=None, addons=None,
app_args=None, symbols_path=None, gecko_log=None, prefs=None,
workspace=None, verbose=0):
self.runner_class = Runner
@ -133,6 +135,20 @@ class GeckoInstance(object):
profile_name)
self.profile = Profile.clone(**profile_args)
@classmethod
def create(cls, app=None, *args, **kwargs):
try:
if not app:
app_id = mozversion.get_version(binary=kwargs.get('bin'))['application_id']
app = app_ids[app_id]
instance_class = apps[app]
except KeyError:
msg = 'Application "{0}" unknown (should be one of {1})'
raise NotImplementedError(msg.format(app, apps.keys()))
return instance_class(*args, **kwargs)
def start(self):
self._update_profile()
self.runner = self.runner_class(**self._get_runner_args())
@ -318,6 +334,11 @@ class NullOutput(object):
apps = {
'fxdesktop': DesktopInstance,
'fennec': FennecInstance,
'fxdesktop': DesktopInstance,
}
app_ids = {
'{aa3c5121-dab2-40e2-81ca-7ea25febc110}': 'fennec',
'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': 'fxdesktop',
}
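For readers of this patch, the new factory can be exercised roughly as follows. This is a minimal sketch, not part of the change itself: the import path assumes the module is exposed as marionette_driver.geckoinstance, and the binary path is a placeholder.

# Sketch only: let GeckoInstance.create() pick the right subclass for a local build.
from marionette_driver.geckoinstance import GeckoInstance

instance = GeckoInstance.create(
    app=None,                 # None lets mozversion map the application_id to an app name
    host="127.0.0.1",
    port=2828,
    bin="/path/to/firefox",   # placeholder path, not part of the patch
)
instance.start()              # builds the profile and starts the runner as before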

View file

@ -2,7 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import ConfigParser
import base64
import datetime
import json
@ -15,14 +14,13 @@ import warnings
from contextlib import contextmanager
from decorators import do_process_check
from keys import Keys
import errors
import geckoinstance
import transport
from timeout import Timeouts
from .decorators import do_process_check
from .geckoinstance import GeckoInstance
from .keys import Keys
from .timeout import Timeouts
WEBELEMENT_KEY = "ELEMENT"
W3C_WEBELEMENT_KEY = "element-6066-11e4-a52e-4f735466cecf"
@ -584,40 +582,17 @@ class Marionette(object):
startup_timeout = startup_timeout or self.DEFAULT_STARTUP_TIMEOUT
if self.bin:
self.instance = self._create_instance(app, instance_args)
if not Marionette.is_port_available(self.port, host=self.host):
ex_msg = "{0}:{1} is unavailable.".format(self.host, self.port)
raise errors.MarionetteException(message=ex_msg)
self.instance = GeckoInstance.create(
app, host=self.host, port=self.port, bin=self.bin, **instance_args)
self.instance.start()
self.raise_for_port(timeout=startup_timeout)
self.timeout = Timeouts(self)
def _create_instance(self, app, instance_args):
if not Marionette.is_port_available(self.port, host=self.host):
ex_msg = "{0}:{1} is unavailable.".format(self.host, self.port)
raise errors.MarionetteException(message=ex_msg)
if app:
# select instance class for the given app
try:
instance_class = geckoinstance.apps[app]
except KeyError:
msg = 'Application "{0}" unknown (should be one of {1})'
raise NotImplementedError(
msg.format(app, geckoinstance.apps.keys()))
else:
try:
if not isinstance(self.bin, basestring):
raise TypeError("bin must be a string if app is not specified")
config = ConfigParser.RawConfigParser()
config.read(os.path.join(os.path.dirname(self.bin),
'application.ini'))
app = config.get('App', 'Name')
instance_class = geckoinstance.apps[app.lower()]
except (ConfigParser.NoOptionError,
ConfigParser.NoSectionError,
KeyError):
instance_class = geckoinstance.GeckoInstance
return instance_class(host=self.host, port=self.port, bin=self.bin,
**instance_args)
@property
def profile_path(self):
if self.instance and self.instance.profile:
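A hedged sketch of what the simplified path looks like from the client side: constructing Marionette with a local binary now checks port availability up front and delegates instance-class selection to GeckoInstance.create(). The binary path below is a placeholder, and the import assumes the usual marionette_driver entry point.

# Sketch, assuming marionette_driver is installed.
from marionette_driver.marionette import Marionette

client = Marionette(host="127.0.0.1", port=2828,
                    bin="/path/to/firefox")  # __init__ now calls GeckoInstance.create() and start()
client.start_session()
# ... drive the browser via the usual client API ...
client.delete_session()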

View file

@ -1 +1,2 @@
mozrunner >= 6.13
mozversion >= 1.1

View file

@ -2,12 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from argparse import ArgumentParser
from copy import deepcopy
import json
import mozinfo
import moznetwork
import os
import random
import re
@ -17,15 +12,20 @@ import time
import traceback
import unittest
import warnings
import mozprofile
from argparse import ArgumentParser
from copy import deepcopy
import mozinfo
import moznetwork
import mozprofile
import mozversion
from manifestparser import TestManifest
from manifestparser.filters import tags
from marionette_driver.marionette import Marionette
from moztest.adapters.unit import StructuredTestRunner, StructuredTestResult
from moztest.results import TestResultCollection, TestResult, relevant_line
import mozversion
import httpd
@ -506,6 +506,13 @@ class BaseMarionetteTestRunner(object):
socket_timeout=BaseMarionetteArguments.socket_timeout_default,
startup_timeout=None, addons=None, workspace=None,
verbose=0, e10s=True, emulator=False, **kwargs):
self._appinfo = None
self._appName = None
self._capabilities = None
self._filename_pattern = None
self._version_info = {}
self.extra_kwargs = kwargs
self.test_kwargs = deepcopy(kwargs)
self.address = address
@ -522,9 +529,6 @@ class BaseMarionetteTestRunner(object):
self.repeat = repeat
self.symbols_path = symbols_path
self.socket_timeout = socket_timeout
self._capabilities = None
self._appinfo = None
self._appName = None
self.shuffle = shuffle
self.shuffle_seed = shuffle_seed
self.server_root = server_root
@ -543,7 +547,6 @@ class BaseMarionetteTestRunner(object):
self.workspace_path = workspace or os.getcwd()
self.verbose = verbose
self.e10s = e10s
self._filename_pattern = None
def gather_debug(test, status):
rv = {}
@ -676,6 +679,17 @@ class BaseMarionetteTestRunner(object):
self.tests = []
self.cleanup()
@property
def version_info(self):
if not self._version_info:
try:
# TODO: Get version_info in Fennec case
self._version_info = mozversion.get_version(binary=self.bin)
except Exception:
self.logger.warning("Failed to retrieve version information for {}".format(
self.bin))
return self._version_info
def reset_test_stats(self):
self.passed = 0
self.failed = 0
@ -825,15 +839,10 @@ class BaseMarionetteTestRunner(object):
except Exception:
self.logger.warning('Could not get device info.')
# TODO: Get version_info in Fennec case
version_info = None
if self.bin:
version_info = mozversion.get_version(binary=self.bin)
self.logger.info("running with e10s: {}".format(self.e10s))
self.logger.suite_start(self.tests,
version_info=version_info,
version_info=self.version_info,
device_info=device_info)
self._log_skipped_tests()
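The new version_info property above appears to make the mozversion lookup lazy and failure-tolerant, so a lookup error is logged as a warning instead of propagating out of run_tests(). A standalone sketch of the same idea, with a hypothetical helper name:

# Sketch: tolerate mozversion failures the way the runner's property does.
import mozversion

def safe_version_info(binary):
    # hypothetical helper, mirroring BaseMarionetteTestRunner.version_info
    try:
        return mozversion.get_version(binary=binary)
    except Exception:
        return {}  # suite_start() then simply receives an empty version_info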

testing/marionette/harness/marionette/runner/httpd.py Normal file → Executable file
View file

Some files were not shown because too many files have changed in this diff.