Mirror of https://github.com/mozilla/gecko-dev.git
Merge mozilla-central to inbound. a=merge CLOSED TREE
Commit 2680808115
@@ -276,14 +276,6 @@ dependencies = [
 "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "core-foundation-sys"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
 "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "core-foundation-sys"
version = "0.5.1"

@@ -1903,7 +1895,7 @@ version = "0.1.0"
dependencies = [
 "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "core-foundation-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
 "libudev 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -2248,7 +2240,6 @@ dependencies = [
"checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb"
"checksum cookie 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "746858cae4eae40fff37e1998320068df317bc247dc91a67c6cfa053afdc2abb"
"checksum core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "286e0b41c3a20da26536c6000a280585d519fd07b3956b43aed8a79e9edce980"
"checksum core-foundation-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "41115a6aa5d3e1e5ef98148373f25971d1fad53818553f216495f9e67e90a624"
"checksum core-foundation-sys 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "716c271e8613ace48344f723b60b900a93150271e5be206212d052bbc0883efa"
"checksum core-graphics 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb0ed45fdc32f9ab426238fba9407dfead7bacd7900c9b4dd3f396f46eafdae3"
"checksum core-text 9.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2bd581c37283d0c23311d179aefbb891f2324ee0405da58a26e8594ab76e5748"

@@ -464,6 +464,13 @@ HandlerProvider::MarshalAs(REFIID aIid)
  return aIid;
}

HRESULT
HandlerProvider::DisconnectHandlerRemotes()
{
  IUnknown* unk = static_cast<IGeckoBackChannel*>(this);
  return ::CoDisconnectObject(unk, 0);
}

REFIID
HandlerProvider::GetEffectiveOutParamIid(REFIID aCallIid,
                                         ULONG aCallMethod)

@@ -47,6 +47,7 @@ public:
  STDMETHODIMP WriteHandlerPayload(NotNull<mscom::IInterceptor*> aInterceptor,
                                   NotNull<IStream*> aStream) override;
  STDMETHODIMP_(REFIID) MarshalAs(REFIID aIid) override;
  STDMETHODIMP DisconnectHandlerRemotes() override;
  STDMETHODIMP_(REFIID) GetEffectiveOutParamIid(REFIID aCallIid,
                                                ULONG aCallMethod) override;
  STDMETHODIMP NewInstance(REFIID aIid,

@@ -46,6 +46,7 @@
#include "mozilla/ReverseIterator.h"
#include "nsIXULRuntime.h"
#include "mozilla/mscom/AsyncInvoker.h"
#include "mozilla/mscom/Interceptor.h"

#include "oleacc.h"

@@ -103,6 +104,24 @@ AccessibleWrap::Shutdown()
    }
  }

  if (XRE_IsContentProcess()) {
    // Bug 1434822: To improve performance for cross-process COM, we disable COM
    // garbage collection. However, this means we never receive Release calls
    // from clients, so defunct accessibles can never be deleted. Since we
    // know when an accessible is shutting down, we can work around this by
    // forcing COM to disconnect this object from all of its remote clients,
    // which will cause associated references to be released.
    IUnknown* unk = static_cast<IAccessible*>(this);
    mscom::Interceptor::DisconnectRemotesForTarget(unk);
    // If an accessible was retrieved via IAccessibleHypertext::hyperlink*,
    // it will have a different Interceptor that won't be matched by the above
    // call, even though it's the same object. Therefore, call it again with
    // the IAccessibleHyperlink pointer. We can remove this horrible hack once
    // bug 1440267 is fixed.
    unk = static_cast<IAccessibleHyperlink*>(this);
    mscom::Interceptor::DisconnectRemotesForTarget(unk);
  }

  Accessible::Shutdown();
}

@@ -565,7 +565,7 @@ var gSync = {
    const state = UIState.get();
    if (state.status == UIState.STATUS_SIGNED_IN) {
      this.updateSyncStatus({ syncing: true });
      setTimeout(() => Weave.Service.errorHandler.syncAndReportErrors(), 0);
      Services.tm.dispatchToMainThread(() => Weave.Service.sync());
    }
  },


@@ -69,8 +69,7 @@ function mockFunctions() {
    email: "user@mozilla.com"
  });

  // mock service.errorHandler.syncAndReportErrors()
  service.errorHandler.syncAndReportErrors = mocked_syncAndReportErrors;
  service.sync = mocked_syncAndReportErrors;
}

function mocked_syncAndReportErrors() {

@@ -79,10 +78,10 @@ function mocked_syncAndReportErrors() {

function restoreValues() {
  UIState.get = getState;
  service.syncAndReportErrors = originalSync;
  service.sync = originalSync;
}

function storeInitialValues() {
  getState = UIState.get;
  originalSync = service.syncAndReportErrors;
  originalSync = service.sync;
}

@@ -369,12 +369,18 @@ class JSONPoliciesProvider {
  }

  _getConfigurationFile() {
    let configFile = Services.dirsvc.get("XREAppDist", Ci.nsIFile);
    configFile.append(POLICIES_FILENAME);
    let configFile = null;
    try {
      configFile = Services.dirsvc.get("XREAppDist", Ci.nsIFile);
      configFile.append(POLICIES_FILENAME);
    } catch (ex) {
      // Getting the correct directory will fail in xpcshell tests. This should
      // be handled the same way as if the configFile simply does not exist.
    }

    let alternatePath = Services.prefs.getStringPref(PREF_ALTERNATE_PATH, "");

    if (alternatePath && !configFile.exists()) {
    if (alternatePath && (!configFile || !configFile.exists())) {
      // We only want to use the alternate file path if the file on the install
      // folder doesn't exist. Otherwise it'd be possible for a user to override
      // the admin-provided policies by changing the user-controlled prefs.

@@ -190,6 +190,14 @@ var Policies = {
    }
  },

  "DisableSysAddonUpdate": {
    onBeforeAddons(manager, param) {
      if (param) {
        manager.disallowFeature("SysAddonUpdate");
      }
    }
  },

  "DisplayBookmarksToolbar": {
    onBeforeUIStartup(manager, param) {
      if (param) {

@@ -158,6 +158,15 @@
      "enum": [true]
    },

    "DisableSysAddonUpdate": {
      "description": "Prevent the browser from installing and updating system addons.",
      "first_available": "60.0",
      "enterprise_only": true,

      "type": "boolean",
      "enum": [true]
    },

    "DisplayBookmarksToolbar": {
      "description": "Causes the bookmarks toolbar to be displayed by default.",
      "first_available": "60.0",

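For context, the deployment data that satisfies this schema entry is a policies.json whose "policies" block sets the new key to true. The object below is an illustrative sketch only (the file name, its placement in the distribution directory, and the surrounding structure follow the policies provider above, not this patch):

// Illustrative only: what an administrator's policies.json would contain to
// enable the new policy; the schema above restricts the value to `true`.
const examplePoliciesJson = {
  "policies": {
    "DisableSysAddonUpdate": true
  }
};
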
@@ -0,0 +1,52 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.import("resource://gre/modules/osfile.jsm");
ChromeUtils.defineModuleGetter(this, "FileTestUtils",
                               "resource://testing-common/FileTestUtils.jsm");

this.EXPORTED_SYMBOLS = ["EnterprisePolicyTesting"];

this.EnterprisePolicyTesting = {
  // |json| must be an object representing the desired policy configuration, OR a
  // path to the JSON file containing the policy configuration.
  setupPolicyEngineWithJson: async function setupPolicyEngineWithJson(json, customSchema) {
    let filePath;
    if (typeof(json) == "object") {
      filePath = FileTestUtils.getTempFile("policies.json").path;

      // This file gets automatically deleted by FileTestUtils
      // at the end of the test run.
      await OS.File.writeAtomic(filePath, JSON.stringify(json), {
        encoding: "utf-8",
      });
    } else {
      filePath = json;
    }

    Services.prefs.setStringPref("browser.policies.alternatePath", filePath);

    let promise = new Promise(resolve => {
      Services.obs.addObserver(function observer() {
        Services.obs.removeObserver(observer, "EnterprisePolicies:AllPoliciesApplied");
        dump(`bytesized: setupPolicyEngineWithJson resolving`);
        resolve();
      }, "EnterprisePolicies:AllPoliciesApplied");
    });

    // Clear any previously used custom schema
    Cu.unload("resource:///modules/policies/schema.jsm");

    if (customSchema) {
      let schemaModule = ChromeUtils.import("resource:///modules/policies/schema.jsm", {});
      schemaModule.schema = customSchema;
    }

    Services.obs.notifyObservers(null, "EnterprisePolicies:Restart");
    return promise;
  },
};

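A minimal usage sketch of the new testing module follows. This is not part of the patch; it assumes a browser-chrome test context where ChromeUtils and add_task are available, and it uses the DisableSysAddonUpdate policy added elsewhere in this commit as the example payload:

// Hedged sketch: exercising the policy engine from a test via the new module.
const {EnterprisePolicyTesting} =
  ChromeUtils.import("resource://testing-common/EnterprisePolicyTesting.jsm", {});

add_task(async function test_policy_engine_setup() {
  // Passing an object writes a temporary policies.json, points the
  // browser.policies.alternatePath pref at it, and restarts the engine,
  // resolving once "EnterprisePolicies:AllPoliciesApplied" is observed.
  await EnterprisePolicyTesting.setupPolicyEngineWithJson({
    policies: {
      DisableSysAddonUpdate: true,
    },
  });
  // Assertions against the applied policies would follow here.
});
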
@@ -4,43 +4,14 @@

"use strict";

ChromeUtils.defineModuleGetter(this, "FileTestUtils",
                               "resource://testing-common/FileTestUtils.jsm");
const {EnterprisePolicyTesting} = ChromeUtils.import("resource://testing-common/EnterprisePolicyTesting.jsm", {});

async function setupPolicyEngineWithJson(json, customSchema) {
  let filePath;
  if (typeof(json) == "object") {
    filePath = FileTestUtils.getTempFile("policies.json").path;

    // This file gets automatically deleted by FileTestUtils
    // at the end of the test run.
    await OS.File.writeAtomic(filePath, JSON.stringify(json), {
      encoding: "utf-8",
    });
  } else {
    filePath = getTestFilePath(json ? json : "non-existing-file.json");
  if (typeof(json) != "object") {
    let filePath = getTestFilePath(json ? json : "non-existing-file.json");
    return EnterprisePolicyTesting.setupPolicyEngineWithJson(filePath, customSchema);
  }

  Services.prefs.setStringPref("browser.policies.alternatePath", filePath);

  let resolve = null;
  let promise = new Promise((r) => resolve = r);

  Services.obs.addObserver(function observer() {
    Services.obs.removeObserver(observer, "EnterprisePolicies:AllPoliciesApplied");
    resolve();
  }, "EnterprisePolicies:AllPoliciesApplied");

  // Clear any previously used custom schema
  Cu.unload("resource:///modules/policies/schema.jsm");

  if (customSchema) {
    let schemaModule = ChromeUtils.import("resource:///modules/policies/schema.jsm", {});
    schemaModule.schema = customSchema;
  }

  Services.obs.notifyObservers(null, "EnterprisePolicies:Restart");
  return promise;
  return EnterprisePolicyTesting.setupPolicyEngineWithJson(json, customSchema);
}

add_task(async function policies_headjs_startWithCleanSlate() {

@@ -12,3 +12,7 @@ BROWSER_CHROME_MANIFESTS += [
    'browser/disable_app_update/browser.ini',
    'browser/disable_developer_tools/browser.ini',
]

TESTING_JS_MODULES += [
    'EnterprisePolicyTesting.jsm',
]

@@ -124,11 +124,6 @@ if (AppConstants.MOZ_CRASHREPORTER) {
var gPrivacyPane = {
  _pane: null,

  /**
   * Whether the use has selected the auto-start private browsing mode in the UI.
   */
  _autoStartPrivateBrowsing: false,

  /**
   * Whether the prompt to restart Firefox should appear when changing the autostart pref.
   */

@@ -531,8 +526,6 @@ var gPrivacyPane = {
  prefsForKeepingHistory: {
    "places.history.enabled": true, // History is enabled
    "browser.formfill.enable": true, // Form information is saved
    "network.cookie.cookieBehavior": 0, // All cookies are enabled
    "network.cookie.lifetimePolicy": 0, // Cookies use supplied lifetime
    "privacy.sanitize.sanitizeOnShutdown": false, // Private date is NOT cleared on shutdown
  },


@@ -546,8 +539,6 @@ var gPrivacyPane = {
  dependentControls: [
    "rememberHistory",
    "rememberForms",
    "keepUntil",
    "keepCookiesUntil",
    "alwaysClear",
    "clearDataSettings"
  ],

@@ -624,11 +615,6 @@ var gPrivacyPane = {
        // select the remember forms history option
        Preferences.get("browser.formfill.enable").value = true;

        // select the allow cookies option
        Preferences.get("network.cookie.cookieBehavior").value = 0;
        // select the cookie lifetime policy option
        Preferences.get("network.cookie.lifetimePolicy").value = 0;

        // select the clear on close option
        Preferences.get("privacy.sanitize.sanitizeOnShutdown").value = false;
        break;

@@ -641,12 +627,16 @@ var gPrivacyPane = {

  /**
   * Update the privacy micro-management controls based on the
   * value of the private browsing auto-start checkbox.
   * value of the private browsing auto-start preference.
   */
  updatePrivacyMicroControls() {
    // Set "Keep cookies until..." to "I close Nightly" and disable the setting
    // when we're in auto private mode (or reset it back otherwise).
    document.getElementById("keepCookiesUntil").value = this.readKeepCookiesUntil();
    this.readAcceptCookies();

    if (document.getElementById("historyMode").value == "custom") {
      let disabled = this._autoStartPrivateBrowsing =
        Preferences.get("browser.privatebrowsing.autostart").value;
      let disabled = Preferences.get("browser.privatebrowsing.autostart").value;
      this.dependentControls.forEach(function(aElement) {
        let control = document.getElementById(aElement);
        let preferenceId = control.getAttribute("preference");

@@ -662,16 +652,6 @@ var gPrivacyPane = {
        control.disabled = disabled || preference.locked;
      });

      // adjust the cookie controls status
      this.readAcceptCookies();
      let lifetimePolicy = Preferences.get("network.cookie.lifetimePolicy").value;
      if (lifetimePolicy != Ci.nsICookieService.ACCEPT_NORMALLY &&
          lifetimePolicy != Ci.nsICookieService.ACCEPT_SESSION &&
          lifetimePolicy != Ci.nsICookieService.ACCEPT_FOR_N_DAYS) {
        lifetimePolicy = Ci.nsICookieService.ACCEPT_NORMALLY;
      }
      document.getElementById("keepCookiesUntil").value = disabled ? 2 : lifetimePolicy;

      // adjust the checked state of the sanitizeOnShutdown checkbox
      document.getElementById("alwaysClear").checked = disabled ? false :
        Preferences.get("privacy.sanitize.sanitizeOnShutdown").value;

@@ -689,6 +669,61 @@ var gPrivacyPane = {
    }
  },

  // CLEAR PRIVATE DATA

  /*
   * Preferences:
   *
   * privacy.sanitize.sanitizeOnShutdown
   * - true if the user's private data is cleared on startup according to the
   *   Clear Private Data settings, false otherwise
   */

  /**
   * Displays the Clear Private Data settings dialog.
   */
  showClearPrivateDataSettings() {
    gSubDialog.open("chrome://browser/content/preferences/sanitize.xul", "resizable=no");
  },


  /**
   * Displays a dialog from which individual parts of private data may be
   * cleared.
   */
  clearPrivateDataNow(aClearEverything) {
    var ts = Preferences.get("privacy.sanitize.timeSpan");
    var timeSpanOrig = ts.value;

    if (aClearEverything) {
      ts.value = 0;
    }

    gSubDialog.open("chrome://browser/content/sanitize.xul", "resizable=no", null, () => {
      // reset the timeSpan pref
      if (aClearEverything) {
        ts.value = timeSpanOrig;
      }

      Services.obs.notifyObservers(null, "clear-private-data");
    });
  },

  /**
   * Enables or disables the "Settings..." button depending
   * on the privacy.sanitize.sanitizeOnShutdown preference value
   */
  _updateSanitizeSettingsButton() {
    var settingsButton = document.getElementById("clearDataSettings");
    var sanitizeOnShutdownPref = Preferences.get("privacy.sanitize.sanitizeOnShutdown");

    settingsButton.disabled = !sanitizeOnShutdownPref.value;
  },

  toggleDoNotDisturbNotifications(event) {
    AlertsServiceDND.manualDoNotDisturb = event.target.checked;
  },

  // PRIVATE BROWSING

  /**

@@ -774,19 +809,7 @@ var gPrivacyPane = {
                    null, params);
  },

  // HISTORY

  /*
   * Preferences:
   *
   * places.history.enabled
   * - whether history is enabled or not
   * browser.formfill.enable
   * - true if entries in forms and the search bar should be saved, false
   *   otherwise
   */

  // COOKIES
  // COOKIES AND SITE DATA

  /*
   * Preferences:

@@ -804,6 +827,22 @@ var gPrivacyPane = {
   * 2 means keep cookies until the browser is closed
   */

  readKeepCookiesUntil() {
    let privateBrowsing = Preferences.get("browser.privatebrowsing.autostart").value;
    if (privateBrowsing) {
      return "2";
    }

    let lifetimePolicy = Preferences.get("network.cookie.lifetimePolicy").value;
    if (lifetimePolicy != Ci.nsICookieService.ACCEPT_NORMALLY &&
        lifetimePolicy != Ci.nsICookieService.ACCEPT_SESSION &&
        lifetimePolicy != Ci.nsICookieService.ACCEPT_FOR_N_DAYS) {
      return Ci.nsICookieService.ACCEPT_NORMALLY;
    }

    return lifetimePolicy;
  },

  /**
   * Reads the network.cookie.cookieBehavior preference value and
   * enables/disables the rest of the cookie UI accordingly, returning true

@@ -820,7 +859,9 @@ var gPrivacyPane = {
    var acceptCookies = (pref.value != 2);

    acceptThirdPartyLabel.disabled = acceptThirdPartyMenu.disabled = !acceptCookies;
    keepUntil.disabled = menu.disabled = this._autoStartPrivateBrowsing || !acceptCookies;

    let privateBrowsing = Preferences.get("browser.privatebrowsing.autostart").value;
    keepUntil.disabled = menu.disabled = privateBrowsing || !acceptCookies;

    // Our top-level setting is a radiogroup that only sets "enable all"
    // and "disable all", so convert the pref value accordingly.

@@ -893,59 +934,35 @@ var gPrivacyPane = {
                    null, params);
  },

  // CLEAR PRIVATE DATA

  /*
   * Preferences:
   *
   * privacy.sanitize.sanitizeOnShutdown
   * - true if the user's private data is cleared on startup according to the
   *   Clear Private Data settings, false otherwise
   */

  /**
   * Displays the Clear Private Data settings dialog.
   */
  showClearPrivateDataSettings() {
    gSubDialog.open("chrome://browser/content/preferences/sanitize.xul", "resizable=no");
  showSiteDataSettings() {
    gSubDialog.open("chrome://browser/content/preferences/siteDataSettings.xul");
  },

  toggleSiteData(shouldShow) {
    let clearButton = document.getElementById("clearSiteDataButton");
    let settingsButton = document.getElementById("siteDataSettings");
    clearButton.disabled = !shouldShow;
    settingsButton.disabled = !shouldShow;
  },

  /**
   * Displays a dialog from which individual parts of private data may be
   * cleared.
   */
  clearPrivateDataNow(aClearEverything) {
    var ts = Preferences.get("privacy.sanitize.timeSpan");
    var timeSpanOrig = ts.value;
  showSiteDataLoading() {
    let totalSiteDataSizeLabel = document.getElementById("totalSiteDataSize");
    let prefStrBundle = document.getElementById("bundlePreferences");
    totalSiteDataSizeLabel.textContent = prefStrBundle.getString("loadingSiteDataSize1");
  },

    if (aClearEverything) {
      ts.value = 0;
    }

    gSubDialog.open("chrome://browser/content/sanitize.xul", "resizable=no", null, () => {
      // reset the timeSpan pref
      if (aClearEverything) {
        ts.value = timeSpanOrig;
      }

      Services.obs.notifyObservers(null, "clear-private-data");
  updateTotalDataSizeLabel(siteDataUsage) {
    SiteDataManager.getCacheSize().then(function(cacheUsage) {
      let prefStrBundle = document.getElementById("bundlePreferences");
      let totalSiteDataSizeLabel = document.getElementById("totalSiteDataSize");
      let totalUsage = siteDataUsage + cacheUsage;
      let size = DownloadUtils.convertByteUnits(totalUsage);
      totalSiteDataSizeLabel.textContent = prefStrBundle.getFormattedString("totalSiteDataSize2", size);
    });
  },

  /**
   * Enables or disables the "Settings..." button depending
   * on the privacy.sanitize.sanitizeOnShutdown preference value
   */
  _updateSanitizeSettingsButton() {
    var settingsButton = document.getElementById("clearDataSettings");
    var sanitizeOnShutdownPref = Preferences.get("privacy.sanitize.sanitizeOnShutdown");

    settingsButton.disabled = !sanitizeOnShutdownPref.value;
  },

  toggleDoNotDisturbNotifications(event) {
    AlertsServiceDND.manualDoNotDisturb = event.target.checked;
  clearSiteData() {
    gSubDialog.open("chrome://browser/content/preferences/clearSiteData.xul");
  },

  // GEOLOCATION

@@ -1387,37 +1404,6 @@ var gPrivacyPane = {
    gSubDialog.open("chrome://pippki/content/device_manager.xul");
  },

  showSiteDataSettings() {
    gSubDialog.open("chrome://browser/content/preferences/siteDataSettings.xul");
  },

  toggleSiteData(shouldShow) {
    let clearButton = document.getElementById("clearSiteDataButton");
    let settingsButton = document.getElementById("siteDataSettings");
    clearButton.disabled = !shouldShow;
    settingsButton.disabled = !shouldShow;
  },

  showSiteDataLoading() {
    let totalSiteDataSizeLabel = document.getElementById("totalSiteDataSize");
    let prefStrBundle = document.getElementById("bundlePreferences");
    totalSiteDataSizeLabel.textContent = prefStrBundle.getString("loadingSiteDataSize1");
  },

  updateTotalDataSizeLabel(siteDataUsage) {
    SiteDataManager.getCacheSize().then(function(cacheUsage) {
      let prefStrBundle = document.getElementById("bundlePreferences");
      let totalSiteDataSizeLabel = document.getElementById("totalSiteDataSize");
      let totalUsage = siteDataUsage + cacheUsage;
      let size = DownloadUtils.convertByteUnits(totalUsage);
      totalSiteDataSizeLabel.textContent = prefStrBundle.getFormattedString("totalSiteDataSize2", size);
    });
  },

  clearSiteData() {
    gSubDialog.open("chrome://browser/content/preferences/clearSiteData.xul");
  },

  initDataCollection() {
    this._setupLearnMoreLink("toolkit.datacollection.infoURL",
                             "dataCollectionPrivacyNotice");

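As a worked illustration of the new readKeepCookiesUntil() helper above (illustrative only; it assumes the in-content preferences page context where gPrivacyPane, Preferences, and Ci.nsICookieService are available, and the pref mutations are for demonstration):

// With permanent private browsing on, the menu is pinned to "I close the browser".
Preferences.get("browser.privatebrowsing.autostart").value = true;
gPrivacyPane.readKeepCookiesUntil(); // -> "2"

// Otherwise a recognized lifetime policy is passed through unchanged...
Preferences.get("browser.privatebrowsing.autostart").value = false;
Preferences.get("network.cookie.lifetimePolicy").value = Ci.nsICookieService.ACCEPT_SESSION;
gPrivacyPane.readKeepCookiesUntil(); // -> Ci.nsICookieService.ACCEPT_SESSION

// ...and an unrecognized value falls back to ACCEPT_NORMALLY.
Preferences.get("network.cookie.lifetimePolicy").value = 42;
gPrivacyPane.readKeepCookiesUntil(); // -> Ci.nsICookieService.ACCEPT_NORMALLY
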
@@ -186,6 +186,7 @@
          <!-- Please don't remove the wrapping hbox/vbox/box for these elements. It's used to properly compute the search tooltip position. -->
          <hbox>
            <menulist id="keepCookiesUntil"
                      onsyncfrompreference="return gPrivacyPane.readKeepCookiesUntil();"
                      preference="network.cookie.lifetimePolicy">
              <menupopup>
                <menuitem label="&expire.label;" value="0"/>

@@ -116,20 +116,20 @@ function test_dependent_elements(win) {
}

function test_dependent_cookie_elements(win) {
  let controls = [
    win.document.getElementById("acceptThirdPartyLabel"),
    win.document.getElementById("acceptThirdPartyMenu"),
    win.document.getElementById("keepUntil"),
    win.document.getElementById("keepCookiesUntil"),
  ];
  let keepUntil = win.document.getElementById("keepUntil");
  let keepCookiesUntil = win.document.getElementById("keepCookiesUntil");
  let acceptThirdPartyLabel = win.document.getElementById("acceptThirdPartyLabel");
  let acceptThirdPartyMenu = win.document.getElementById("acceptThirdPartyMenu");

  let controls = [acceptThirdPartyLabel, acceptThirdPartyMenu, keepUntil, keepCookiesUntil];
  controls.forEach(function(control) {
    ok(control, "the dependent cookie controls should exist");
  });
  let acceptcookies = win.document.getElementById("acceptCookies");
  ok(acceptcookies, "the accept cookies checkbox should exist");

  function expect_disabled(disabled) {
    controls.forEach(function(control) {
  function expect_disabled(disabled, c = controls) {
    c.forEach(function(control) {
      is(control.disabled, disabled,
         control.getAttribute("id") + " should " + (disabled ? "" : "not ") + "be disabled");
    });

@@ -142,6 +142,19 @@ function test_dependent_cookie_elements(win) {
  acceptcookies.value = "1";
  controlChanged(acceptcookies);
  expect_disabled(false);

  let historymode = win.document.getElementById("historyMode");

  // The History mode setting for "never remember history" should still
  // disable the "keep cookies until..." menu.
  historymode.value = "dontremember";
  controlChanged(historymode);
  expect_disabled(true, [keepUntil, keepCookiesUntil]);
  expect_disabled(false, [acceptThirdPartyLabel, acceptThirdPartyMenu]);

  historymode.value = "remember";
  controlChanged(historymode);
  expect_disabled(false);
}

function test_dependent_clearonclose_elements(win) {

@@ -94,6 +94,7 @@
#elif defined(XP_LINUX)
#include "mozilla/Sandbox.h"
#include "mozilla/SandboxInfo.h"
#include "CubebUtils.h"
#elif defined(XP_MACOSX)
#include "mozilla/Sandbox.h"
#endif

@@ -1671,6 +1672,11 @@ ContentChild::RecvSetProcessSandbox(const MaybeFileDesc& aBroker)
  // On Linux, we have to support systems that can't use any sandboxing.
  if (!SandboxInfo::Get().CanSandboxContent()) {
    sandboxEnabled = false;
  } else {
    // Pre-start audio before sandboxing; see bug 1443612.
    if (!Preferences::GetBool("media.cubeb.sandbox")) {
      Unused << CubebUtils::GetCubebContext();
    }
  }

  if (sandboxEnabled) {

@@ -7,7 +7,7 @@ authors = ["Kyle Machulis <kyle@nonpolynomial.com>", "J.C. Jones <jc@mozilla.com
libudev = "^0.2"

[target.'cfg(target_os = "macos")'.dependencies]
core-foundation-sys = "0.3.1"
core-foundation-sys = "0.5.1"

[target.'cfg(target_os = "windows")'.dependencies]
winapi = "0.2.8"

@@ -202,7 +202,6 @@ impl IOHIDDeviceMatcher {
                string.len() as CFIndex,
                kCFStringEncodingUTF8,
                false as Boolean,
                kCFAllocatorNull,
            )
        }
    }

@@ -1203,7 +1203,7 @@ GeckoChildProcessHost::LaunchAndroidService(const char* type,
    crashAnnotationFd = it->first;
    it++;
  }
  int32_t handle = java::GeckoProcessManager::Start(type, jargs, crashFd, ipcFd, crashAnnotationFd);
  int32_t handle = java::GeckoProcessManager::Start(type, jargs, ipcFd, crashFd, crashAnnotationFd);

  if (process_handle) {
    *process_handle = handle;

@@ -103,7 +103,7 @@ FastMarshaler::GetMarshalFlags(DWORD aDestContext, DWORD aMshlFlags)
    return aMshlFlags;
  }

  if (!IsCallerExternalProcess()) {
  if (IsCallerExternalProcess()) {
    return aMshlFlags;
  }

@@ -23,6 +23,7 @@ struct HandlerProvider
  virtual STDMETHODIMP GetHandlerPayloadSize(NotNull<IInterceptor*> aInterceptor, NotNull<DWORD*> aOutPayloadSize) = 0;
  virtual STDMETHODIMP WriteHandlerPayload(NotNull<IInterceptor*> aInterceptor, NotNull<IStream*> aStream) = 0;
  virtual STDMETHODIMP_(REFIID) MarshalAs(REFIID aIid) = 0;
  virtual STDMETHODIMP DisconnectHandlerRemotes() = 0;
};

struct IHandlerProvider : public IUnknown

@@ -433,6 +433,7 @@ Interceptor::ReleaseMarshalData(IStream* pStm)
HRESULT
Interceptor::DisconnectObject(DWORD dwReserved)
{
  mEventSink->DisconnectHandlerRemotes();
  return mStdMarshal->DisconnectObject(dwReserved);
}

@@ -849,5 +850,30 @@ Interceptor::Release()
  return WeakReferenceSupport::Release();
}

/* static */ HRESULT
Interceptor::DisconnectRemotesForTarget(IUnknown* aTarget)
{
  MOZ_ASSERT(aTarget);

  detail::LiveSetAutoLock lock(GetLiveSet());

  // It is not an error if the interceptor doesn't exist, so we return
  // S_FALSE instead of an error in that case.
  RefPtr<IWeakReference> existingWeak(Move(GetLiveSet().Get(aTarget)));
  if (!existingWeak) {
    return S_FALSE;
  }

  RefPtr<IWeakReferenceSource> existingStrong;
  if (FAILED(existingWeak->ToStrongRef(getter_AddRefs(existingStrong)))) {
    return S_FALSE;
  }
  // Since we now hold a strong ref on the interceptor, we may now release the
  // lock.
  lock.Unlock();

  return ::CoDisconnectObject(existingStrong, 0);
}

} // namespace mscom
} // namespace mozilla

@@ -74,6 +74,26 @@ public:
  static HRESULT Create(STAUniquePtr<IUnknown> aTarget, IInterceptorSink* aSink,
                        REFIID aInitialIid, void** aOutInterface);

  /**
   * Disconnect all remote clients for a given target.
   * Because Interceptors disable COM garbage collection to improve
   * performance, they never receive Release calls from remote clients. If
   * the object can be shut down while clients still hold a reference, this
   * function can be used to force COM to disconnect all remote connections
   * (using CoDisconnectObject) and thus release the associated references to
   * the Interceptor, its target and any objects associated with the
   * HandlerProvider.
   * Note that the specified target must be the same IUnknown pointer used to
   * create the Interceptor. Where there is multiple inheritance, querying for
   * IID_IUnknown and calling this function with that pointer alone will not
   * disconnect remotes for all interfaces. If you expect that the same object
   * may be fetched with different initial interfaces, you should call this
   * function once for each possible IUnknown pointer.
   * @return S_OK if there was an Interceptor for the given target,
   *         S_FALSE if there was not.
   */
  static HRESULT DisconnectRemotesForTarget(IUnknown* aTarget);

  // IUnknown
  STDMETHODIMP QueryInterface(REFIID riid, void** ppv) override;
  STDMETHODIMP_(ULONG) AddRef() override;

@@ -589,6 +589,16 @@ MainThreadHandoff::MarshalAs(REFIID aIid)
  return mHandlerProvider->MarshalAs(aIid);
}

HRESULT
MainThreadHandoff::DisconnectHandlerRemotes()
{
  if (!mHandlerProvider) {
    return E_NOTIMPL;
  }

  return mHandlerProvider->DisconnectHandlerRemotes();
}

HRESULT
MainThreadHandoff::OnWalkInterface(REFIID aIid, PVOID* aInterface,
                                   BOOL aIsInParam, BOOL aIsOutParam)

@@ -66,6 +66,7 @@ public:
  STDMETHODIMP WriteHandlerPayload(NotNull<IInterceptor*> aInterceptor,
                                   NotNull<IStream*> aStream) override;
  STDMETHODIMP_(REFIID) MarshalAs(REFIID aIid) override;
  STDMETHODIMP DisconnectHandlerRemotes() override;

  // ICallFrameWalker
  STDMETHODIMP OnWalkInterface(REFIID aIid, PVOID* aInterface, BOOL aIsInParam,

@@ -233,7 +233,7 @@ public:
      // in-use -> idle -- no one forcing it to remain instantiated
      r_log(LOG_GENERIC,LOG_DEBUG,"Shutting down wrapped SingletonThread %p",
            mThread.get());
      mThread->Shutdown();
      mThread->AsyncShutdown();
      mThread = nullptr;
      // It'd be nice to use a timer instead... But be careful of
      // xpcom-shutdown-threads in that case

@@ -7,3 +7,4 @@
#include mobile.js

pref("privacy.trackingprotection.pbmode.enabled", false);
pref("dom.ipc.processCount", 1);

@@ -969,11 +969,6 @@ public abstract class GeckoApp extends GeckoActivity

        earlyStartJavaSampler(intent);

        // GeckoLoader wants to dig some environment variables out of the
        // incoming intent, so pass it in here. GeckoLoader will do its
        // business later and dispose of the reference.
        GeckoLoader.setLastIntent(intent);

        // Workaround for <http://code.google.com/p/android/issues/detail?id=20915>.
        try {
            Class.forName("android.os.AsyncTask");

@@ -1012,12 +1007,12 @@ public abstract class GeckoApp extends GeckoActivity

        } else {
            final String action = intent.getAction();
            final String args = GeckoApplication.addDefaultGeckoArgs(
                    intent.getStringExtra("args"));
            final String[] args = GeckoApplication.getDefaultGeckoArgs();
            final int flags = ACTION_DEBUG.equals(action) ? GeckoThread.FLAG_DEBUGGING : 0;

            sAlreadyLoaded = true;
            GeckoThread.initMainProcess(/* profile */ null, args, flags);
            GeckoThread.initMainProcess(/* profile */ null, args,
                                        intent.getExtras(), flags);

            // Speculatively pre-fetch the profile in the background.
            ThreadUtils.postToBackgroundThread(new Runnable() {

@@ -17,6 +17,7 @@ import android.os.Environment;
import android.os.Process;
import android.os.SystemClock;
import android.provider.MediaStore;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.text.TextUtils;
import android.util.Base64;

@@ -102,7 +103,7 @@ public class GeckoApplication extends Application
        return sSessionUUID;
    }

    public static String addDefaultGeckoArgs(String args) {
    public static @Nullable String[] getDefaultGeckoArgs() {
        if (!AppConstants.MOZILLA_OFFICIAL) {
            // In un-official builds, we want to load Javascript resources fresh
            // with each build. In official builds, the startup cache is purged by

@@ -110,9 +111,9 @@ public class GeckoApplication extends Application
            // buildid, so we purge here instead.
            Log.w(LOG_TAG, "STARTUP PERFORMANCE WARNING: un-official build: purging the " +
                           "startup (JavaScript) caches.");
            args = (args != null) ? (args + " -purgecaches") : "-purgecaches";
            return new String[] { "-purgecaches" };
        }
        return args;
        return null;
    }

    public static String getDefaultUAString() {

@@ -10,6 +10,7 @@ import android.app.Service;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Handler;
import android.os.Looper;

@@ -115,6 +116,11 @@ public class GeckoService extends Service {

    private static Intent getIntentForAction(final Context context, final String action) {
        final Intent intent = new Intent(action, /* uri */ null, context, GeckoService.class);
        final Bundle extras = GeckoThread.getActiveExtras();
        if (extras != null && extras.size() > 0) {
            intent.replaceExtras(extras);
        }

        final GeckoProfile profile = GeckoThread.getActiveProfile();
        if (profile != null) {
            setIntentProfile(intent, profile.getName(), profile.getDir().getAbsolutePath());

@@ -162,7 +168,7 @@ public class GeckoService extends Service {

        if (!GeckoThread.initMainProcessWithProfile(
                profileName, profileDir != null ? new File(profileDir) : null,
                GeckoApplication.addDefaultGeckoArgs(null))) {
                GeckoApplication.getDefaultGeckoArgs(), intent.getExtras())) {
            Log.w(LOGTAG, "Ignoring due to profile mismatch: " +
                          profileName + " [" + profileDir + ']');


@@ -100,12 +100,9 @@ public class TestRunnerActivity extends Activity {
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        Intent intent = getIntent();
        GeckoLoader.setLastIntent(new SafeIntent(getIntent()));

        final String intentArgs = intent.getStringExtra("args");
        final String args = intentArgs != null ? "-purgecaches " + intentArgs : "-purgecaches";
        GeckoSession.preload(this, args, false /* no multiprocess, see below */);
        final Intent intent = getIntent();
        GeckoSession.preload(this, new String[] { "-purgecaches" },
                             intent.getExtras(), false /* no multiprocess, see below */);

        // We can't use e10s because we get deadlocked when quickly creating and
        // destroying sessions. Bug 1348361.

@@ -6,9 +6,12 @@ package org.mozilla.gecko.process;

import org.mozilla.gecko.process.IProcessManager;

import android.os.Bundle;
import android.os.ParcelFileDescriptor;

interface IChildProcess {
    int getPid();
    boolean start(in IProcessManager procMan, in String[] args, in ParcelFileDescriptor crashReporterPfd, in ParcelFileDescriptor ipcPfd, in ParcelFileDescriptor crashAnnotationPfd);
    boolean start(in IProcessManager procMan, in String[] args, in Bundle extras,
                  in ParcelFileDescriptor ipcPfd, in ParcelFileDescriptor crashReporterPfd,
                  in ParcelFileDescriptor crashAnnotationPfd);
}

@@ -16,11 +16,13 @@ import org.mozilla.gecko.util.ThreadUtils;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.MessageQueue;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import android.util.Log;


@@ -28,6 +30,7 @@ import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Locale;
import java.util.StringTokenizer;


@@ -120,21 +123,20 @@ public class GeckoThread extends Thread {
    @WrapForJNI
    private static int uiThreadId;

    private boolean mInitialized;
    private String[] mArgs;

    // Main process parameters
    public static final int FLAG_DEBUGGING = 1; // Debugging mode.
    public static final int FLAG_PRELOAD_CHILD = 2; // Preload child during main thread start.

    private GeckoProfile mProfile;
    private String mExtraArgs;
    private int mFlags;
    private static final String EXTRA_ARGS = "args";
    private static final String EXTRA_IPC_FD = "ipcFd";
    private static final String EXTRA_CRASH_FD = "crashFd";
    private static final String EXTRA_CRASH_ANNOTATION_FD = "crashAnnotationFd";

    // Child process parameters
    private int mCrashFileDescriptor = -1;
    private int mIPCFileDescriptor = -1;
    private int mCrashAnnotationFileDescriptor = -1;
    private boolean mInitialized;
    private GeckoProfile mProfile;
    private String[] mArgs;
    private Bundle mExtras;
    private int mFlags;

    GeckoThread() {
        setName("Gecko");

@@ -142,12 +144,12 @@ public class GeckoThread extends Thread {

    @WrapForJNI
    private static boolean isChildProcess() {
        return INSTANCE.mIPCFileDescriptor != -1;
        return INSTANCE.mExtras.getInt(EXTRA_IPC_FD, -1) != -1;
    }

    private synchronized boolean init(final GeckoProfile profile, final String[] args,
                                      final String extraArgs, final int flags,
                                      final int crashFd, final int ipcFd,
                                      final Bundle extras, final int flags,
                                      final int ipcFd, final int crashFd,
                                      final int crashAnnotationFd) {
        ThreadUtils.assertOnUiThread();
        uiThreadId = android.os.Process.myTid();

@@ -158,29 +160,29 @@ public class GeckoThread extends Thread {

        mProfile = profile;
        mArgs = args;
        mExtraArgs = extraArgs;
        mFlags = flags;
        mCrashFileDescriptor = crashFd;
        mIPCFileDescriptor = ipcFd;
        mCrashAnnotationFileDescriptor = crashAnnotationFd;

        mExtras = (extras != null) ? new Bundle(extras) : new Bundle(3);
        mExtras.putInt(EXTRA_IPC_FD, ipcFd);
        mExtras.putInt(EXTRA_CRASH_FD, crashFd);
        mExtras.putInt(EXTRA_CRASH_ANNOTATION_FD, crashAnnotationFd);

        mInitialized = true;
        notifyAll();
        return true;
    }

    public static boolean initMainProcess(final GeckoProfile profile, final String extraArgs,
                                          final int flags) {
        return INSTANCE.init(profile, /* args */ null, extraArgs, flags,
                             /* crashFd */ -1, /* ipcFd */ -1,
                             /* crashAnnotationFd */ -1);
    public static boolean initMainProcess(final GeckoProfile profile, final String[] args,
                                          final Bundle extras, final int flags) {
        return INSTANCE.init(profile, args, extras, flags,
                             /* fd */ -1, /* fd */ -1, /* fd */ -1);
    }

    public static boolean initChildProcess(final String[] args, final int crashFd,
                                           final int ipcFd,
    public static boolean initChildProcess(final String[] args, final Bundle extras,
                                           final int ipcFd, final int crashFd,
                                           final int crashAnnotationFd) {
        return INSTANCE.init(/* profile */ null, args, /* extraArgs */ null,
                             /* flags */ 0, crashFd, ipcFd, crashAnnotationFd);
        return INSTANCE.init(/* profile */ null, args, extras, /* flags */ 0,
                             ipcFd, crashFd, crashAnnotationFd);
    }

    private static boolean canUseProfile(final Context context, final GeckoProfile profile,

@@ -215,7 +217,8 @@ public class GeckoThread extends Thread {

    public static boolean initMainProcessWithProfile(final String profileName,
                                                     final File profileDir,
                                                     final String args) {
                                                     final String[] args,
                                                     final Bundle extras) {
        if (profileName == null) {
            throw new IllegalArgumentException("Null profile name");
        }

@@ -234,8 +237,8 @@ public class GeckoThread extends Thread {
        }

        // We haven't initialized yet; okay to initialize now.
        return initMainProcess(GeckoProfile.get(context, profileName, profileDir),
                               args, /* flags */ 0);
        return initMainProcess(GeckoProfile.get(context, profileName, profileDir), args,
                               extras, /* flags */ 0);
    }

    public static boolean launch() {

@@ -280,7 +283,6 @@ public class GeckoThread extends Thread {
        }

        final String resourcePath = context.getPackageResourcePath();
        GeckoLoader.setupGeckoEnvironment(context, context.getFilesDir().getPath());

        try {
            loadGeckoLibs(context, resourcePath);

@@ -323,8 +325,13 @@ public class GeckoThread extends Thread {
            args.add(profile.getName());
        }

        if (mExtraArgs != null) {
            final StringTokenizer st = new StringTokenizer(mExtraArgs);
        if (mArgs != null) {
            args.addAll(Arrays.asList(mArgs));
        }

        final String extraArgs = mExtras.getString(EXTRA_ARGS, null);
        if (extraArgs != null) {
            final StringTokenizer st = new StringTokenizer(extraArgs);
            while (st.hasMoreTokens()) {
                final String token = st.nextToken();
                if ("-P".equals(token) || "-profile".equals(token)) {

@@ -356,11 +363,20 @@ public class GeckoThread extends Thread {
        }
        if (mProfile == null) {
            final Context context = GeckoAppShell.getApplicationContext();
            mProfile = GeckoProfile.initFromArgs(context, mExtraArgs);
            mProfile = GeckoProfile.initFromArgs(context, mExtras.getString(EXTRA_ARGS, null));
        }
        return mProfile;
    }

    public static @Nullable Bundle getActiveExtras() {
        synchronized (INSTANCE) {
            if (!INSTANCE.mInitialized) {
                return null;
            }
            return INSTANCE.mExtras;
        }
    }

    @Override
    public void run() {
        Log.i(LOGTAG, "preparing to run Gecko");

@@ -406,8 +422,6 @@ public class GeckoThread extends Thread {
            }
        }

        final String[] args = isChildProcess() ? mArgs : getMainProcessArgs();

        if ((mFlags & FLAG_DEBUGGING) != 0) {
            try {
                Thread.sleep(5 * 1000 /* 5 seconds */);

@@ -417,12 +431,20 @@ public class GeckoThread extends Thread {

        Log.w(LOGTAG, "zerdatime " + SystemClock.elapsedRealtime() + " - runGecko");

        final Context context = GeckoAppShell.getApplicationContext();
        final String[] args = isChildProcess() ? mArgs : getMainProcessArgs();

        if ((mFlags & FLAG_DEBUGGING) != 0) {
            Log.i(LOGTAG, "RunGecko - args = " + TextUtils.join(" ", args));
        }

        GeckoLoader.setupGeckoEnvironment(context, context.getFilesDir().getPath(), mExtras);

        // And go.
        GeckoLoader.nativeRun(args, mCrashFileDescriptor, mIPCFileDescriptor, mCrashAnnotationFileDescriptor);
        GeckoLoader.nativeRun(args,
                              mExtras.getInt(EXTRA_IPC_FD, -1),
                              mExtras.getInt(EXTRA_CRASH_FD, -1),
                              mExtras.getInt(EXTRA_CRASH_ANNOTATION_FD, -1));

        // And... we're done.
        final boolean restarting = isState(State.RESTARTING);

@@ -15,6 +15,7 @@ import java.util.zip.ZipFile;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import java.util.ArrayList;
import android.util.Log;

@@ -26,7 +27,6 @@ import org.mozilla.geckoview.BuildConfig;
public final class GeckoLoader {
    private static final String LOGTAG = "GeckoLoader";

    private static volatile SafeIntent sIntent;
    private static File sCacheFile;
    private static File sGREDir;


@@ -34,7 +34,6 @@ public final class GeckoLoader {
    private static boolean sSQLiteLibsLoaded;
    private static boolean sNSSLibsLoaded;
    private static boolean sMozGlueLoaded;
    private static String[] sEnvList;

    private GeckoLoader() {
        // prevent instantiation

@@ -91,35 +90,19 @@ public final class GeckoLoader {
        return tmpDir;
    }

    public static void setLastIntent(SafeIntent intent) {
        sIntent = intent;
    }

    public static void addEnvironmentToIntent(Intent intent) {
        if (sEnvList != null) {
            for (int ix = 0; ix < sEnvList.length; ix++) {
                intent.putExtra("env" + ix, sEnvList[ix]);
            }
        }
    }

    public static void setupGeckoEnvironment(Context context, String profilePath) {
    public synchronized static void setupGeckoEnvironment(final Context context,
                                                          final String profilePath,
                                                          final Bundle extras) {
        // if we have an intent (we're being launched by an activity)
        // read in any environmental variables from it here
        final SafeIntent intent = sIntent;
        if (intent != null) {
            final ArrayList<String> envList = new ArrayList<String>();
            String env = intent.getStringExtra("env0");
        if (extras != null) {
            String env = extras.getString("env0");
            Log.d(LOGTAG, "Gecko environment env0: " + env);
            for (int c = 1; env != null; c++) {
                envList.add(env);
                putenv(env);
                env = intent.getStringExtra("env" + c);
                env = extras.getString("env" + c);
                Log.d(LOGTAG, "env" + c + ": " + env);
            }
            if (envList.size() > 0) {
                sEnvList = envList.toArray(new String[envList.size()]);
            }
        }

        try {

@@ -159,27 +142,17 @@ public final class GeckoLoader {
                Log.d(LOGTAG, "Unable to obtain user manager service on a device with SDK version " + Build.VERSION.SDK_INT);
            }
        }
        setupLocaleEnvironment();

        // We don't need this any more.
        sIntent = null;
        putenv("LANG=" + Locale.getDefault().toString());

        // env from extras could have reset out linker flags; set them again.
        loadLibsSetupLocked(context);
    }

    private static void loadLibsSetupLocked(Context context) {
        // The package data lib directory isn't placed in ld.so's
        // search path, so we have to manually load libraries that
        // libxul will depend on. Not ideal.

        File cacheFile = getCacheDir(context);
        putenv("GRE_HOME=" + getGREDir(context).getPath());

        // setup the libs cache
        String linkerCache = System.getenv("MOZ_LINKER_CACHE");
        if (linkerCache == null) {
            linkerCache = cacheFile.getPath();
            putenv("MOZ_LINKER_CACHE=" + linkerCache);
        }

        putenv("GRE_HOME=" + getGREDir(context).getPath());
        putenv("MOZ_LINKER_CACHE=" + getCacheDir(context).getPath());
        putenv("MOZ_LINKER_EXTRACT=1");
    }


@@ -468,10 +441,6 @@ public final class GeckoLoader {
        loadGeckoLibsNative(apkName);
    }

    private static void setupLocaleEnvironment() {
        putenv("LANG=" + Locale.getDefault().toString());
    }

    @SuppressWarnings("serial")
    public static class AbortException extends Exception {
        public AbortException(String msg) {

@@ -494,7 +463,7 @@ public final class GeckoLoader {
    public static native boolean verifyCRCs(String apkName);

    // These methods are implemented in mozglue/android/APKOpen.cpp
    public static native void nativeRun(String[] args, int crashFd, int ipcFd, int crashAnnotationFd);
    public static native void nativeRun(String[] args, int ipcFd, int crashFd, int crashAnnotationFd);
    private static native void loadGeckoLibsNative(String apkName);
    private static native void loadSQLiteLibsNative(String apkName);
    private static native void loadNSSLibsNative(String apkName);

@@ -10,6 +10,7 @@ package org.mozilla.gecko.mozglue;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Log;

import java.util.ArrayList;

@@ -41,6 +42,18 @@ public class SafeIntent {
        }
    }

    public @Nullable Bundle getExtras() {
        try {
            return intent.getExtras();
        } catch (OutOfMemoryError e) {
            Log.w(LOGTAG, "Couldn't get intent extras: OOM. Malformed?");
            return null;
        } catch (RuntimeException e) {
            Log.w(LOGTAG, "Couldn't get intent extras.", e);
            return null;
        }
    }

    public boolean getBooleanExtra(final String name, final boolean defaultValue) {
        try {
            return intent.getBooleanExtra(name, defaultValue);

@@ -5,15 +5,16 @@
package org.mozilla.gecko.process;

import org.mozilla.gecko.GeckoAppShell;
import org.mozilla.gecko.GeckoThread;
import org.mozilla.gecko.IGeckoEditableParent;
import org.mozilla.gecko.annotation.WrapForJNI;
import org.mozilla.gecko.mozglue.GeckoLoader;
import org.mozilla.gecko.util.ThreadUtils;

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
import android.os.Process;

@@ -74,7 +75,6 @@ public final class GeckoProcessManager extends IProcessManager.Stub {
            final Intent intent = new Intent();
            intent.setClassName(context,
                                GeckoServiceChildProcess.class.getName() + '$' + mType);
            GeckoLoader.addEnvironmentToIntent(intent);

            if (context.bindService(intent, this, Context.BIND_AUTO_CREATE)) {
                waitForChildLocked();

@@ -168,13 +168,13 @@ public final class GeckoProcessManager extends IProcessManager.Stub {

    @WrapForJNI
    private static int start(final String type, final String[] args,
                             final int crashFd, final int ipcFd,
                             final int ipcFd, final int crashFd,
                             final int crashAnnotationFd) {
        return INSTANCE.start(type, args, crashFd, ipcFd, crashAnnotationFd, /* retry */ false);
        return INSTANCE.start(type, args, ipcFd, crashFd, crashAnnotationFd, /* retry */ false);
    }

    private int start(final String type, final String[] args, final int crashFd,
                      final int ipcFd, final int crashAnnotationFd,
    private int start(final String type, final String[] args, final int ipcFd,
                      final int crashFd, final int crashAnnotationFd,
                      final boolean retry) {
        final ChildConnection connection = getConnection(type);
        final IChildProcess child = connection.bind();

@@ -182,12 +182,13 @@ public final class GeckoProcessManager extends IProcessManager.Stub {
            return 0;
        }

        final ParcelFileDescriptor crashPfd;
        final Bundle extras = GeckoThread.getActiveExtras();
        final ParcelFileDescriptor ipcPfd;
        final ParcelFileDescriptor crashPfd;
        final ParcelFileDescriptor crashAnnotationPfd;
        try {
            crashPfd = (crashFd >= 0) ? ParcelFileDescriptor.fromFd(crashFd) : null;
            ipcPfd = ParcelFileDescriptor.fromFd(ipcFd);
            crashPfd = (crashFd >= 0) ? ParcelFileDescriptor.fromFd(crashFd) : null;
            crashAnnotationPfd = (crashAnnotationFd >= 0) ? ParcelFileDescriptor.fromFd(crashAnnotationFd) : null;
        } catch (final IOException e) {
            Log.e(LOGTAG, "Cannot create fd for " + type, e);

@@ -196,7 +197,7 @@ public final class GeckoProcessManager extends IProcessManager.Stub {

        boolean started = false;
        try {
            started = child.start(this, args, crashPfd, ipcPfd, crashAnnotationPfd);
            started = child.start(this, args, extras, ipcPfd, crashPfd, crashAnnotationPfd);
        } catch (final RemoteException e) {
        }


@@ -207,10 +208,13 @@ public final class GeckoProcessManager extends IProcessManager.Stub {
            }
            Log.w(LOGTAG, "Attempting to kill running child " + type);
            connection.unbind();
            return start(type, args, crashFd, ipcFd, crashAnnotationFd, /* retry */ true);
            return start(type, args, ipcFd, crashFd, crashAnnotationFd, /* retry */ true);
        }

        try {
            if (crashAnnotationPfd != null) {
                crashAnnotationPfd.close();
            }
            if (crashPfd != null) {
                crashPfd.close();
            }

@@ -16,6 +16,7 @@ import org.mozilla.gecko.util.ThreadUtils;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.Bundle;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
import android.os.Process;

@@ -61,8 +62,9 @@ public class GeckoServiceChildProcess extends Service {
        @Override
        public boolean start(final IProcessManager procMan,
                             final String[] args,
                             final ParcelFileDescriptor crashReporterPfd,
                             final Bundle extras,
                             final ParcelFileDescriptor ipcPfd,
                             final ParcelFileDescriptor crashReporterPfd,
                             final ParcelFileDescriptor crashAnnotationPfd) {
            synchronized (GeckoServiceChildProcess.class) {
                if (sProcessManager != null) {

@@ -72,15 +74,17 @@ public class GeckoServiceChildProcess extends Service {
                sProcessManager = procMan;
            }

            final int ipcFd = ipcPfd.detachFd();
            final int crashReporterFd = crashReporterPfd != null ?
                                        crashReporterPfd.detachFd() : -1;
            final int ipcFd = ipcPfd != null ? ipcPfd.detachFd() : -1;
            final int crashAnnotationFd = crashAnnotationPfd != null ? crashAnnotationPfd.detachFd() : -1;
            final int crashAnnotationFd = crashAnnotationPfd != null ?
                                          crashAnnotationPfd.detachFd() : -1;

            ThreadUtils.postToUiThread(new Runnable() {
                @Override
                public void run() {
                    if (GeckoThread.initChildProcess(args, crashReporterFd, ipcFd, crashAnnotationFd)) {
                    if (GeckoThread.initChildProcess(args, extras, ipcFd, crashReporterFd,
                                                     crashAnnotationFd)) {
                        GeckoThread.launch();
                    }
                }

@@ -91,7 +95,6 @@ public class GeckoServiceChildProcess extends Service {

    @Override
    public IBinder onBind(final Intent intent) {
        GeckoLoader.setLastIntent(new SafeIntent(intent));
        GeckoThread.launch(); // Preload Gecko.
        return mBinder;
    }

@ -31,11 +31,13 @@ import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.IBinder;
import android.os.IInterface;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.support.annotation.NonNull;
import android.util.Log;

@ -551,8 +553,9 @@ public class GeckoSession extends LayerSession
*
* @param context Activity or Application Context for starting GeckoSession.
*/
public static void preload(final Context context) {
preload(context, /* geckoArgs */ null, /* multiprocess */ false);
public static void preload(final @NonNull Context context) {
preload(context, /* geckoArgs */ null,
/* extras */ null, /* multiprocess */ false);
}

/**
@ -563,15 +566,21 @@ public class GeckoSession extends LayerSession
* @param geckoArgs Arguments to be passed to Gecko, if Gecko is not already running.
* @param multiprocess True if child process in multiprocess mode should be preloaded.
*/
public static void preload(final Context context, final String geckoArgs,
public static void preload(final @NonNull Context context,
final @Nullable String[] geckoArgs,
final @Nullable Bundle extras,
final boolean multiprocess) {
final Context appContext = context.getApplicationContext();
if (GeckoAppShell.getApplicationContext() == null) {
if (!appContext.equals(GeckoAppShell.getApplicationContext())) {
GeckoAppShell.setApplicationContext(appContext);
}

if (GeckoThread.isLaunched()) {
return;
}

final int flags = multiprocess ? GeckoThread.FLAG_PRELOAD_CHILD : 0;
if (GeckoThread.initMainProcess(/* profile */ null, geckoArgs, flags)) {
if (GeckoThread.initMainProcess(/* profile */ null, geckoArgs, extras, flags)) {
GeckoThread.launch();
}
}
@ -584,19 +593,23 @@ public class GeckoSession extends LayerSession
return mNativeQueue.isReady();
}

public void openWindow(final Context appContext) {
public void openWindow(final @Nullable Context appContext) {
ThreadUtils.assertOnUiThread();

if (isOpen()) {
throw new IllegalStateException("Session is open");
}

if (!GeckoThread.isLaunched()) {
if (appContext != null) {
final boolean multiprocess =
mSettings.getBoolean(GeckoSessionSettings.USE_MULTIPROCESS);
preload(appContext, /* geckoArgs */ null, multiprocess);
preload(appContext, /* geckoArgs */ null, /* extras */ null, multiprocess);
}

openWindow();
}

private void openWindow() {
final String chromeUri = mSettings.getString(GeckoSessionSettings.CHROME_URI);
final int screenId = mSettings.getInt(GeckoSessionSettings.SCREEN_ID);
final boolean isPrivate = mSettings.getBoolean(GeckoSessionSettings.USE_PRIVATE_MODE);
@ -48,25 +48,18 @@ public class GeckoViewActivity extends Activity {
Log.i(LOGTAG, "zerdatime " + SystemClock.elapsedRealtime() +
" - application start");

String geckoArgs = null;
final String intentArgs = getIntent().getStringExtra("args");
final String[] geckoArgs;

if (BuildConfig.DEBUG) {
// In debug builds, we want to load JavaScript resources fresh with each build.
geckoArgs = "-purgecaches";
}

if (!TextUtils.isEmpty(intentArgs)) {
if (geckoArgs == null) {
geckoArgs = intentArgs;
} else {
geckoArgs += " " + intentArgs;
}
geckoArgs = new String[] { "-purgecaches" };
} else {
geckoArgs = null;
}

final boolean useMultiprocess = getIntent().getBooleanExtra(USE_MULTIPROCESS_EXTRA,
true);
GeckoSession.preload(this, geckoArgs, useMultiprocess);
GeckoSession.preload(this, geckoArgs, getIntent().getExtras(), useMultiprocess);

setContentView(R.layout.geckoview_activity);
@ -392,7 +392,7 @@ FreeArgv(char** argv, int argc)
}

extern "C" APKOPEN_EXPORT void MOZ_JNICALL
Java_org_mozilla_gecko_mozglue_GeckoLoader_nativeRun(JNIEnv *jenv, jclass jc, jobjectArray jargs, int crashFd, int ipcFd, int crashAnnotationFd)
Java_org_mozilla_gecko_mozglue_GeckoLoader_nativeRun(JNIEnv *jenv, jclass jc, jobjectArray jargs, int ipcFd, int crashFd, int crashAnnotationFd)
{
int argc = 0;
char** argv = CreateArgvFromObjectArray(jenv, jargs, &argc);
@ -407,7 +407,7 @@ Java_org_mozilla_gecko_mozglue_GeckoLoader_nativeRun(JNIEnv *jenv, jclass jc, jo
gBootstrap->GeckoStart(jenv, argv, argc, sAppData);
ElfLoader::Singleton.ExpectShutdown(true);
} else {
gBootstrap->XRE_SetAndroidChildFds(jenv, crashFd, ipcFd, crashAnnotationFd);
gBootstrap->XRE_SetAndroidChildFds(jenv, ipcFd, crashFd, crashAnnotationFd);
gBootstrap->XRE_SetProcessType(argv[argc - 1]);

XREChildData childData;
@ -106,7 +106,6 @@ CREDENTIALS_CHANGED: "error.sync.reason.credentials_changed",
ABORT_SYNC_COMMAND: "aborting sync, process commands said so",
NO_SYNC_NODE_FOUND: "error.sync.reason.no_node_found",
OVER_QUOTA: "error.sync.reason.over_quota",
PROLONGED_SYNC_FAILURE: "error.sync.prolonged_failure",
SERVER_MAINTENANCE: "error.sync.reason.serverMaintenance",

RESPONSE_OVER_QUOTA: "14",
@ -57,7 +57,6 @@ SyncScheduler.prototype = {
this.idleInterval = getThrottledIntervalPreference("scheduler.idleInterval");
this.activeInterval = getThrottledIntervalPreference("scheduler.activeInterval");
this.immediateInterval = getThrottledIntervalPreference("scheduler.immediateInterval");
this.eolInterval = getThrottledIntervalPreference("scheduler.eolInterval");

// A user is non-idle on startup by default.
this.idle = false;
@ -406,12 +405,6 @@ SyncScheduler.prototype = {
},

adjustSyncInterval: function adjustSyncInterval() {
if (Status.eol) {
this._log.debug("Server status is EOL; using eolInterval.");
this.syncInterval = this.eolInterval;
return;
}

if (this.numClients <= 1) {
this._log.trace("Adjusting syncInterval to singleDeviceInterval.");
this.syncInterval = this.singleDeviceInterval;
@ -661,21 +654,7 @@ function ErrorHandler(service) {
this.init();
}
ErrorHandler.prototype = {
MINIMUM_ALERT_INTERVAL_MSEC: 604800000, // One week.

/**
* Flag that turns on error reporting for all errors, incl. network errors.
*/
dontIgnoreErrors: false,

/**
* Flag that indicates if we have already reported a prolonged failure.
* Once set, we don't report it again, meaning this error is only reported
* once per run.
*/
didReportProlongedError: false,

init: function init() {
init() {
Svc.Obs.add("weave:engine:sync:applied", this);
Svc.Obs.add("weave:engine:sync:error", this);
Svc.Obs.add("weave:service:login:error", this);
@ -701,7 +680,7 @@ ErrorHandler.prototype = {
this._logManager = new LogManager(Svc.Prefs, logs, "sync");
},

observe: function observe(subject, topic, data) {
observe(subject, topic, data) {
this._log.trace("Handling " + topic);
switch (topic) {
case "weave:engine:sync:applied":
@ -732,14 +711,6 @@ ErrorHandler.prototype = {
case "weave:service:login:error":
this._log.error("Sync encountered a login error");
this.resetFileLog();

if (this.shouldReportError()) {
this.notifyOnNextTick("weave:ui:login:error");
} else {
this.notifyOnNextTick("weave:ui:clear-error");
}

this.dontIgnoreErrors = false;
break;
case "weave:service:sync:error": {
if (Status.sync == CREDENTIALS_CHANGED) {
@ -758,14 +729,6 @@ ErrorHandler.prototype = {
// Not a shutdown related exception...
this._log.error("Sync encountered an error", exception);
this.resetFileLog();

if (this.shouldReportError()) {
this.notifyOnNextTick("weave:ui:sync:error");
} else {
this.notifyOnNextTick("weave:ui:sync:finish");
}

this.dontIgnoreErrors = false;
break;
}
case "weave:service:sync:finish":
@ -784,18 +747,8 @@ ErrorHandler.prototype = {

if (Status.service == SYNC_FAILED_PARTIAL) {
this._log.error("Some engines did not sync correctly.");
this.resetFileLog();

if (this.shouldReportError()) {
this.dontIgnoreErrors = false;
this.notifyOnNextTick("weave:ui:sync:error");
break;
}
} else {
this.resetFileLog();
}
this.dontIgnoreErrors = false;
this.notifyOnNextTick("weave:ui:sync:finish");
this.resetFileLog();
break;
case "weave:service:start-over:finish":
// ensure we capture any logs between the last sync and the reset completing.
@ -804,27 +757,6 @@ ErrorHandler.prototype = {
}
},

notifyOnNextTick: function notifyOnNextTick(topic) {
CommonUtils.nextTick(function() {
this._log.trace("Notifying " + topic +
". Status.login is " + Status.login +
". Status.sync is " + Status.sync);
Svc.Obs.notify(topic);
}, this);
},

/**
* Trigger a sync and don't muffle any errors, particularly network errors.
*/
syncAndReportErrors: function syncAndReportErrors() {
this._log.debug("Beginning user-triggered sync.");

this.dontIgnoreErrors = true;
CommonUtils.nextTick(() => {
this.service.sync({why: "user"});
}, this);
},

async _dumpAddons() {
// Just dump the items that sync may be concerned with. Specifically,
// active extensions that are not hidden.
@ -846,170 +778,16 @@ ErrorHandler.prototype = {
* Generate a log file for the sync that just completed
* and refresh the input & output streams.
*/
resetFileLog: function resetFileLog() {
let onComplete = logType => {
Svc.Obs.notify("weave:service:reset-file-log");
this._log.trace("Notified: " + Date.now());
if (logType == this._logManager.ERROR_LOG_WRITTEN) {
Cu.reportError("Sync encountered an error - see about:sync-log for the log file.");
}
};

async resetFileLog() {
// If we're writing an error log, dump extensions that may be causing problems.
let beforeResetLog;
if (this._logManager.sawError) {
beforeResetLog = this._dumpAddons();
} else {
beforeResetLog = Promise.resolve();
await this._dumpAddons();
}
// Note we do not return the promise here - the caller doesn't need to wait
// for this to complete.
beforeResetLog
.then(() => this._logManager.resetFileLog())
.then(onComplete, onComplete);
},

/**
* Translates server error codes to meaningful strings.
*
* @param code
* server error code as an integer
*/
errorStr: function errorStr(code) {
switch (code.toString()) {
case "1":
return "illegal-method";
case "2":
return "invalid-captcha";
case "3":
return "invalid-username";
case "4":
return "cannot-overwrite-resource";
case "5":
return "userid-mismatch";
case "6":
return "json-parse-failure";
case "7":
return "invalid-password";
case "8":
return "invalid-record";
case "9":
return "weak-password";
default:
return "generic-server-error";
}
},
// A function to indicate if Sync errors should be "reported" - which in this
// context really means "should be notify observers of an error" - but note
// that since bug 1180587, no one is going to surface an error to the user.
shouldReportError: function shouldReportError() {
if (Status.login == MASTER_PASSWORD_LOCKED) {
this._log.trace("shouldReportError: false (master password locked).");
return false;
}

if (this.dontIgnoreErrors) {
return true;
}

if (Status.login == LOGIN_FAILED_LOGIN_REJECTED) {
// An explicit LOGIN_REJECTED state is always reported (bug 1081158)
this._log.trace("shouldReportError: true (login was rejected)");
return true;
}

let lastSync = Svc.Prefs.get("lastSync");
if (lastSync && ((Date.now() - Date.parse(lastSync)) >
Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 1000)) {
Status.sync = PROLONGED_SYNC_FAILURE;
if (this.didReportProlongedError) {
this._log.trace("shouldReportError: false (prolonged sync failure, but" +
" we've already reported it).");
return false;
}
this._log.trace("shouldReportError: true (first prolonged sync failure).");
this.didReportProlongedError = true;
return true;
}

// We got a 401 mid-sync. Wait for the next sync before actually handling
// an error. This assumes that we'll get a 401 again on a login fetch in
// order to report the error.
if (!this.service.clusterURL) {
this._log.trace("shouldReportError: false (no cluster URL; " +
"possible node reassignment).");
return false;
}

let result = (![Status.login, Status.sync].includes(SERVER_MAINTENANCE) &&
![Status.login, Status.sync].includes(LOGIN_FAILED_NETWORK_ERROR));
this._log.trace("shouldReportError: ${result} due to login=${login}, sync=${sync}",
{result, login: Status.login, sync: Status.sync});
return result;
},

get currentAlertMode() {
return Svc.Prefs.get("errorhandler.alert.mode");
},

set currentAlertMode(str) {
return Svc.Prefs.set("errorhandler.alert.mode", str);
},

get earliestNextAlert() {
return Svc.Prefs.get("errorhandler.alert.earliestNext", 0) * 1000;
},

set earliestNextAlert(msec) {
return Svc.Prefs.set("errorhandler.alert.earliestNext", msec / 1000);
},

clearServerAlerts() {
// If we have any outstanding alerts, apparently they're no longer relevant.
Svc.Prefs.resetBranch("errorhandler.alert");
},

/**
* X-Weave-Alert headers can include a JSON object:
*
* {
*   "code":    // One of "hard-eol", "soft-eol".
*   "url":     // For "Learn more" link.
*   "message": // Logged in Sync logs.
* }
*/
handleServerAlert(xwa) {
if (!xwa.code) {
this._log.warn("Got structured X-Weave-Alert, but no alert code.");
return;
}

switch (xwa.code) {
// Gently and occasionally notify the user that this service will be
// shutting down.
case "soft-eol":
// Fall through.

// Tell the user that this service has shut down, and drop our syncing
// frequency dramatically.
case "hard-eol":
// Note that both of these alerts should be subservient to future "sign
// in with your Firefox Account" storage alerts.
if ((this.currentAlertMode != xwa.code) ||
(this.earliestNextAlert < Date.now())) {
CommonUtils.nextTick(function() {
Svc.Obs.notify("weave:eol", xwa);
}, this);
this._log.error("X-Weave-Alert: " + xwa.code + ": " + xwa.message);
this.earliestNextAlert = Date.now() + this.MINIMUM_ALERT_INTERVAL_MSEC;
this.currentAlertMode = xwa.code;
}
break;
default:
this._log.debug("Got unexpected X-Weave-Alert code: " + xwa.code);
const logType = await this._logManager.resetFileLog();
if (logType == this._logManager.ERROR_LOG_WRITTEN) {
Cu.reportError("Sync encountered an error - see about:sync-log for the log file.");
}
Svc.Obs.notify("weave:service:reset-file-log");
},

/**
@ -1021,27 +799,6 @@ ErrorHandler.prototype = {
checkServerError(resp) {
// In this case we were passed a resolved value of Resource#_doRequest.
switch (resp.status) {
case 200:
case 404:
case 513:
let xwa = resp.headers["x-weave-alert"];

// Only process machine-readable alerts.
if (!xwa || !xwa.startsWith("{")) {
this.clearServerAlerts();
return;
}

try {
xwa = JSON.parse(xwa);
} catch (ex) {
this._log.warn("Malformed X-Weave-Alert from server: " + xwa);
return;
}

this.handleServerAlert(xwa);
break;

case 400:
if (resp == RESPONSE_OVER_QUOTA) {
Status.sync = OVER_QUOTA;
@ -520,7 +520,7 @@ Sync11Service.prototype = {
throw ex;
}

// Always check for errors; this is also where we look for X-Weave-Alert.
// Always check for errors.
this.errorHandler.checkServerError(info);
if (!info.success) {
this._log.error("Aborting sync: failed to get collections.");
@ -59,15 +59,6 @@ var Status = {
this.service = code == SYNC_SUCCEEDED ? STATUS_OK : SYNC_FAILED;
},

get eol() {
let modePref = PREFS_BRANCH + "errorhandler.alert.mode";
try {
return Services.prefs.getCharPref(modePref) == "hard-eol";
} catch (ex) {
return false;
}
},

get engines() {
return this._engines;
},
@ -7,7 +7,6 @@
pref("services.sync.lastversion", "firstrun");
pref("services.sync.sendVersionInfo", true);

pref("services.sync.scheduler.eolInterval", 604800); // 1 week
pref("services.sync.scheduler.idleInterval", 3600); // 1 hour
pref("services.sync.scheduler.activeInterval", 600); // 10 minutes
pref("services.sync.scheduler.immediateInterval", 90); // 1.5 minutes
@ -15,8 +14,6 @@ pref("services.sync.scheduler.idleTime", 300); // 5 minutes

pref("services.sync.scheduler.fxa.singleDeviceInterval", 3600); // 1 hour

pref("services.sync.errorhandler.networkFailureReportTimeout", 1209600); // 2 weeks

// Note that new engines are typically added with a default of disabled, so
// when an existing sync user gets the Firefox upgrade that supports the engine
// it starts as disabled until the user has explicitly opted in.
@ -12,8 +12,9 @@ ChromeUtils.import("resource://services-sync/util.js");
ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");

var fakeServer = new SyncServer();
const fakeServer = new SyncServer();
fakeServer.start();
const fakeServerUrl = "http://localhost:" + fakeServer.port;

registerCleanupFunction(function() {
return promiseStopServer(fakeServer).finally(() => {
@ -21,25 +22,7 @@ registerCleanupFunction(function() {
});
});

var fakeServerUrl = "http://localhost:" + fakeServer.port;

const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);

const PROLONGED_ERROR_DURATION =
(Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;

const NON_PROLONGED_ERROR_DURATION =
(Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;

function setLastSync(lastSyncValue) {
Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
}

// This relies on Service/ErrorHandler being a singleton. Fixing this will take
// a lot of work.
let errorHandler = Service.errorHandler;
let engine;

add_task(async function setup() {
await Service.engineManager.clear();
await Service.engineManager.register(EHTestsCommon.CatapultEngine);
@ -52,7 +35,6 @@ async function clean() {
await promiseLogReset;
Status.resetSync();
Status.resetBackoff();
errorHandler.didReportProlongedError = false;
// Move log levels back to trace (startOver will have reversed this), since
syncTestLogging();
}
@ -130,259 +112,27 @@ add_task(async function test_credentials_changed_logout() {
|
|||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(function test_no_lastSync_pref() {
|
||||
syncTestLogging();
|
||||
// Test reported error.
|
||||
Status.resetSync();
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = CREDENTIALS_CHANGED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test unreported error.
|
||||
Status.resetSync();
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.login = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
});
|
||||
|
||||
add_task(function test_shouldReportError() {
|
||||
Status.login = MASTER_PASSWORD_LOCKED;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
|
||||
// Give ourselves a clusterURL so that the temporary 401 no-error situation
|
||||
// doesn't come into play.
|
||||
Service.clusterURL = fakeServerUrl;
|
||||
|
||||
// Test dontIgnoreErrors, non-network, non-prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = CREDENTIALS_CHANGED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test dontIgnoreErrors, non-network, prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = CREDENTIALS_CHANGED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test dontIgnoreErrors, network, non-prolonged, login error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.login = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test dontIgnoreErrors, network, non-prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test dontIgnoreErrors, network, prolonged, login error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.login = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test dontIgnoreErrors, network, prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
|
||||
// Test non-network, prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
errorHandler.didReportProlongedError = false;
|
||||
Status.sync = CREDENTIALS_CHANGED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
errorHandler.didReportProlongedError = false;
|
||||
|
||||
// Test network, prolonged, login error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.login = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
errorHandler.didReportProlongedError = false;
|
||||
|
||||
// Test network, prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.sync = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
errorHandler.didReportProlongedError = false;
|
||||
|
||||
// Test non-network, non-prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.sync = CREDENTIALS_CHANGED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test network, non-prolonged, login error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.login = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test network, non-prolonged, sync error reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.sync = LOGIN_FAILED_NETWORK_ERROR;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test server maintenance, sync errors are not reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.sync = SERVER_MAINTENANCE;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test server maintenance, login errors are not reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.login = SERVER_MAINTENANCE;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test prolonged, server maintenance, sync errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.sync = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
errorHandler.didReportProlongedError = false;
|
||||
|
||||
// Test prolonged, server maintenance, login errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = false;
|
||||
Status.login = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
errorHandler.didReportProlongedError = false;
|
||||
|
||||
// Test dontIgnoreErrors, server maintenance, sync errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
// dontIgnoreErrors means we don't set didReportProlongedError
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test dontIgnoreErrors, server maintenance, login errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.login = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test dontIgnoreErrors, prolonged, server maintenance,
|
||||
// sync errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.sync = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
// Test dontIgnoreErrors, prolonged, server maintenance,
|
||||
// login errors are reported
|
||||
Status.resetSync();
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.dontIgnoreErrors = true;
|
||||
Status.login = SERVER_MAINTENANCE;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
});
|
||||
|
||||
add_task(async function test_shouldReportError_master_password() {
|
||||
_("Test error ignored due to locked master password");
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
// Monkey patch Service.verifyLogin to imitate
|
||||
// master password being locked.
|
||||
Service._verifyLogin = Service.verifyLogin;
|
||||
Service.verifyLogin = async function() {
|
||||
Status.login = MASTER_PASSWORD_LOCKED;
|
||||
return false;
|
||||
};
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
|
||||
// Clean up.
|
||||
Service.verifyLogin = Service._verifyLogin;
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
// Test that even if we don't have a cluster URL, a login failure due to
|
||||
// authentication errors is always reported.
|
||||
add_task(function test_shouldReportLoginFailureWithNoCluster() {
|
||||
// Ensure no clusterURL - any error not specific to login should not be reported.
|
||||
Service.clusterURL = "";
|
||||
|
||||
// Test explicit "login rejected" state.
|
||||
Status.resetSync();
|
||||
// If we have a LOGIN_REJECTED state, we always report the error.
|
||||
Status.login = LOGIN_FAILED_LOGIN_REJECTED;
|
||||
Assert.ok(errorHandler.shouldReportError());
|
||||
// But any other status with a missing clusterURL is treated as a mid-sync
|
||||
// 401 (ie, should be treated as a node reassignment)
|
||||
Status.login = LOGIN_SUCCEEDED;
|
||||
Assert.ok(!errorHandler.shouldReportError());
|
||||
});
|
||||
|
||||
add_task(async function test_login_syncAndReportErrors_non_network_error() {
|
||||
add_task(async function test_login_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test non-network errors are reported
|
||||
// when calling syncAndReportErrors
|
||||
// when calling sync
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
Service.identity._syncKeyBundle = null;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseObserved;
|
||||
await Service.sync();
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_non_network_error() {
|
||||
add_task(async function test_sync_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test non-network errors are reported
|
||||
// when calling syncAndReportErrors
|
||||
// when calling sync
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
|
@ -393,16 +143,12 @@ add_task(async function test_sync_syncAndReportErrors_non_network_error() {
|
|||
|
||||
await EHTestsCommon.generateCredentialsChangedFailure();
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
|
||||
let ping = await sync_and_validate_telem(true);
|
||||
equal(ping.status.sync, CREDENTIALS_CHANGED);
|
||||
deepEqual(ping.failureReason, {
|
||||
name: "unexpectederror",
|
||||
error: "Error: Aborting sync, remote setup failed"
|
||||
});
|
||||
await promiseObserved;
|
||||
|
||||
Assert.equal(Status.sync, CREDENTIALS_CHANGED);
|
||||
// If we clean this tick, telemetry won't get the right error
|
||||
|
@ -411,219 +157,33 @@ add_task(async function test_sync_syncAndReportErrors_non_network_error() {
|
|||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_login_syncAndReportErrors_prolonged_non_network_error() {
|
||||
add_task(async function test_login_sync_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, non-network errors are
|
||||
// reported when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
Service.identity._syncKeyBundle = null;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_prolonged_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, non-network errors are
|
||||
// reported when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
// By calling sync, we ensure we're logged in.
|
||||
await Service.sync();
|
||||
Assert.equal(Status.sync, SYNC_SUCCEEDED);
|
||||
Assert.ok(Service.isLoggedIn);
|
||||
|
||||
await EHTestsCommon.generateCredentialsChangedFailure();
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
|
||||
equal(ping.status.sync, CREDENTIALS_CHANGED);
|
||||
deepEqual(ping.failureReason, {
|
||||
name: "unexpectederror",
|
||||
error: "Error: Aborting sync, remote setup failed"
|
||||
});
|
||||
await promiseObserved;
|
||||
|
||||
Assert.equal(Status.sync, CREDENTIALS_CHANGED);
|
||||
// If we clean this tick, telemetry won't get the right error
|
||||
await Async.promiseYield();
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_login_syncAndReportErrors_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test network errors are reported when calling syncAndReportErrors.
|
||||
// Test network errors are reported when calling sync.
|
||||
await configureIdentity({username: "broken.wipe"});
|
||||
Service.clusterURL = fakeServerUrl;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseObserved;
|
||||
|
||||
await Service.sync();
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR);
|
||||
|
||||
await clean();
|
||||
});
|
||||
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_network_error() {
|
||||
add_task(async function test_sync_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test network errors are reported when calling syncAndReportErrors.
|
||||
// Test network errors are reported when calling sync.
|
||||
Services.io.offline = true;
|
||||
|
||||
let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseUISyncError;
|
||||
await Service.sync();
|
||||
Assert.equal(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
|
||||
|
||||
Services.io.offline = false;
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_login_syncAndReportErrors_prolonged_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, network errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
await configureIdentity({username: "johndoe"});
|
||||
|
||||
Service.clusterURL = fakeServerUrl;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR);
|
||||
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_prolonged_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, network errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
Services.io.offline = true;
|
||||
|
||||
let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promiseUISyncError;
|
||||
Assert.equal(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
|
||||
|
||||
Services.io.offline = false;
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_login_prolonged_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, non-network errors are reported
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
Service.identity._syncKeyBundle = null;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_sync_prolonged_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, non-network errors are reported
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
// By calling sync, we ensure we're logged in.
|
||||
await Service.sync();
|
||||
Assert.equal(Status.sync, SYNC_SUCCEEDED);
|
||||
Assert.ok(Service.isLoggedIn);
|
||||
|
||||
await EHTestsCommon.generateCredentialsChangedFailure();
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
|
||||
let ping = await sync_and_validate_telem(true);
|
||||
equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
|
||||
deepEqual(ping.failureReason, {
|
||||
name: "unexpectederror",
|
||||
error: "Error: Aborting sync, remote setup failed"
|
||||
});
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_login_prolonged_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, network errors are reported
|
||||
await configureIdentity({username: "johndoe"});
|
||||
Service.clusterURL = fakeServerUrl;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_sync_prolonged_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged, network errors are reported
|
||||
Services.io.offline = true;
|
||||
|
||||
let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseUISyncError;
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
Services.io.offline = false;
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_login_non_network_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
|
@ -632,13 +192,8 @@ add_task(async function test_login_non_network_error() {
|
|||
await EHTestsCommon.setUp(server);
|
||||
Service.identity._syncKeyBundle = null;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:login:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -658,13 +213,8 @@ add_task(async function test_sync_non_network_error() {
|
|||
|
||||
await EHTestsCommon.generateCredentialsChangedFailure();
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.sync, CREDENTIALS_CHANGED);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -676,14 +226,10 @@ add_task(async function test_login_network_error() {
|
|||
await configureIdentity({username: "johndoe"});
|
||||
Service.clusterURL = fakeServerUrl;
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:clear-error");
|
||||
// Test network errors are not reported.
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
Services.io.offline = false;
|
||||
await clean();
|
||||
|
@ -695,13 +241,8 @@ add_task(async function test_sync_network_error() {
|
|||
// Test network errors are not reported.
|
||||
Services.io.offline = true;
|
||||
|
||||
let promiseSyncFinished = promiseOneObserver("weave:ui:sync:finish");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseSyncFinished;
|
||||
Assert.equal(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
Services.io.offline = false;
|
||||
await clean();
|
||||
|
@ -719,24 +260,14 @@ add_task(async function test_sync_server_maintenance_error() {
|
|||
engine.exception = {status: 503,
|
||||
headers: {"retry-after": BACKOFF}};
|
||||
|
||||
function onSyncError() {
|
||||
do_throw("Shouldn't get here!");
|
||||
}
|
||||
Svc.Obs.add("weave:ui:sync:error", onSyncError);
|
||||
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:finish");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
let ping = await sync_and_validate_telem(true);
|
||||
equal(ping.status.sync, SERVER_MAINTENANCE);
|
||||
deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 });
|
||||
|
||||
await promiseObserved;
|
||||
Assert.equal(Status.service, SYNC_FAILED_PARTIAL);
|
||||
Assert.equal(Status.sync, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -757,27 +288,16 @@ add_task(async function test_info_collections_login_server_maintenance_error() {
|
|||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
function onUIUpdate() {
|
||||
do_throw("Shouldn't experience UI update!");
|
||||
}
|
||||
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:clear-error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
@ -797,27 +317,16 @@ add_task(async function test_meta_global_login_server_maintenance_error() {
|
|||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
function onUIUpdate() {
|
||||
do_throw("Shouldn't get here!");
|
||||
}
|
||||
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:clear-error");
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
|
|
@ -12,15 +12,16 @@ ChromeUtils.import("resource://services-sync/util.js");
|
|||
ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
|
||||
ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");
|
||||
|
||||
var fakeServer = new SyncServer();
|
||||
const fakeServer = new SyncServer();
|
||||
fakeServer.start();
|
||||
const fakeServerUrl = "http://localhost:" + fakeServer.port;
|
||||
|
||||
registerCleanupFunction(function() {
|
||||
return promiseStopServer(fakeServer);
|
||||
return promiseStopServer(fakeServer).finally(() => {
|
||||
Svc.Prefs.resetBranch("");
|
||||
});
|
||||
});
|
||||
|
||||
var fakeServerUrl = "http://localhost:" + fakeServer.port;
|
||||
|
||||
const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
|
||||
|
||||
function removeLogFiles() {
|
||||
|
@ -40,38 +41,7 @@ function getLogFiles() {
|
|||
return result;
|
||||
}
|
||||
|
||||
const PROLONGED_ERROR_DURATION =
|
||||
(Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;
|
||||
|
||||
const NON_PROLONGED_ERROR_DURATION =
|
||||
(Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;
|
||||
|
||||
function setLastSync(lastSyncValue) {
|
||||
Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
|
||||
}
|
||||
|
||||
// This relies on Service/ErrorHandler being a singleton. Fixing this will take
|
||||
// a lot of work.
|
||||
var errorHandler = Service.errorHandler;
|
||||
let engine;
|
||||
|
||||
async function syncAndWait(topic) {
|
||||
let promise1 = promiseOneObserver(topic);
|
||||
// also wait for the log file to be written
|
||||
let promise2 = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promise1;
|
||||
await promise2;
|
||||
}
|
||||
|
||||
async function syncAndReportErrorsAndWait(topic) {
|
||||
let promise1 = promiseOneObserver(topic);
|
||||
// also wait for the log file to be written
|
||||
let promise2 = promiseOneObserver("weave:service:reset-file-log");
|
||||
errorHandler.syncAndReportErrors();
|
||||
await promise1;
|
||||
await promise2;
|
||||
}
|
||||
add_task(async function setup() {
|
||||
await Service.engineManager.clear();
|
||||
await Service.engineManager.register(EHTestsCommon.CatapultEngine);
|
||||
|
@ -84,7 +54,6 @@ async function clean() {
|
|||
await promiseLogReset;
|
||||
Status.resetSync();
|
||||
Status.resetBackoff();
|
||||
errorHandler.didReportProlongedError = false;
|
||||
removeLogFiles();
|
||||
// Move log levels back to trace (startOver will have reversed this), sicne
|
||||
syncTestLogging();
|
||||
|
@ -109,24 +78,18 @@ add_task(async function test_crypto_keys_login_server_maintenance_error() {
|
|||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
function onUIUpdate() {
|
||||
do_throw("Shouldn't get here!");
|
||||
}
|
||||
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:clear-error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
@ -141,7 +104,9 @@ add_task(async function test_lastSync_not_updated_on_complete_failure() {
|
|||
await configureIdentity({username: "johndoe"}, server);
|
||||
|
||||
// Do an initial sync that we expect to be successful.
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await sync_and_validate_telem(false);
|
||||
await promiseObserved;
|
||||
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
Assert.equal(Status.sync, SYNC_SUCCEEDED);
|
||||
|
@ -154,7 +119,9 @@ add_task(async function test_lastSync_not_updated_on_complete_failure() {
|
|||
server.registerPathHandler("/1.1/johndoe/info/collections",
|
||||
EHTestsCommon.service_unavailable);
|
||||
|
||||
promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await sync_and_validate_telem(true);
|
||||
await promiseObserved;
|
||||
|
||||
Assert.equal(Status.sync, SERVER_MAINTENANCE);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
|
@ -166,204 +133,6 @@ add_task(async function test_lastSync_not_updated_on_complete_failure() {
|
|||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_info_collections_login_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test info/collections prolonged server maintenance errors are reported.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.info"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_meta_global_login_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test meta/global prolonged server maintenance errors are reported.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.meta"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_download_crypto_keys_login_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test crypto/keys prolonged server maintenance errors are reported.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.keys"}, server);
|
||||
// Force re-download of keys
|
||||
Service.collectionKeys.clear();
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:login:error");
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_upload_crypto_keys_login_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test crypto/keys prolonged server maintenance errors are reported.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
|
||||
// Start off with an empty account, do not upload a key.
|
||||
await configureIdentity({username: "broken.keys"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_wipeServer_login_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test that we report prolonged server maintenance errors that occur whilst
|
||||
// wiping the server.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
|
||||
// Start off with an empty account, do not upload a key.
|
||||
await configureIdentity({username: "broken.wipe"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_wipeRemote_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test that we report prolonged server maintenance errors that occur whilst
|
||||
// wiping all remote devices.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
|
||||
server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
|
||||
await configureIdentity({username: "broken.wipe"}, server);
|
||||
await EHTestsCommon.generateAndUploadKeys();
|
||||
|
||||
engine.exception = null;
|
||||
engine.enabled = true;
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:ui:sync:error");
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
Svc.Prefs.set("firstSync", "wipeRemote");
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
let ping = await sync_and_validate_telem(true);
|
||||
deepEqual(ping.failureReason, { name: "httperror", code: 503 });
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, PROLONGED_SYNC_FAILURE);
|
||||
Assert.equal(Svc.Prefs.get("firstSync"), "wipeRemote");
|
||||
Assert.ok(errorHandler.didReportProlongedError);
|
||||
await promiseStopServer(server);
|
||||
await clean();
|
||||
});
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
|
@ -379,12 +148,12 @@ add_task(async function test_sync_syncAndReportErrors_server_maintenance_error()
|
|||
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:sync:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.equal(Status.service, SYNC_FAILED_PARTIAL);
|
||||
Assert.equal(Status.sync, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -409,14 +178,14 @@ add_task(async function test_info_collections_login_syncAndReportErrors_server_m
|
|||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -441,14 +210,14 @@ add_task(async function test_meta_global_login_syncAndReportErrors_server_mainte
|
|||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -475,14 +244,14 @@ add_task(async function test_download_crypto_keys_login_syncAndReportErrors_serv
|
|||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -507,14 +276,14 @@ add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_server
|
|||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -539,14 +308,14 @@ add_task(async function test_wipeServer_login_syncAndReportErrors_server_mainten
|
|||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -575,215 +344,16 @@ add_task(async function test_wipeRemote_syncAndReportErrors_server_maintenance_e
|
|||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
Svc.Prefs.set("firstSync", "wipeRemote");
|
||||
setLastSync(NON_PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:sync:error");
|
||||
|
||||
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
|
||||
await Service.sync();
|
||||
await promiseObserved;
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, SYNC_FAILED);
|
||||
Assert.equal(Status.sync, SERVER_MAINTENANCE);
|
||||
Assert.equal(Svc.Prefs.get("firstSync"), "wipeRemote");
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test prolonged server maintenance errors are
|
||||
// reported when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
const BACKOFF = 42;
|
||||
engine.enabled = true;
|
||||
engine.exception = {status: 503,
|
||||
headers: {"retry-after": BACKOFF}};
|
||||
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:sync:error");
|
||||
|
||||
Assert.equal(Status.service, SYNC_FAILED_PARTIAL);
|
||||
Assert.equal(Status.sync, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test info/collections server maintenance errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.info"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test meta/global server maintenance errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.meta"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test crypto/keys server maintenance errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
await EHTestsCommon.setUp(server);
|
||||
|
||||
await configureIdentity({username: "broken.keys"}, server);
|
||||
// Force re-download of keys
|
||||
Service.collectionKeys.clear();
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test crypto/keys server maintenance errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
|
||||
// Start off with an empty account, do not upload a key.
|
||||
await configureIdentity({username: "broken.keys"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
});
|
||||
|
||||
add_task(async function test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
|
||||
enableValidationPrefs();
|
||||
|
||||
// Test crypto/keys server maintenance errors are reported
|
||||
// when calling syncAndReportErrors.
|
||||
let server = await EHTestsCommon.sync_httpd_setup();
|
||||
|
||||
// Start off with an empty account, do not upload a key.
|
||||
await configureIdentity({username: "broken.wipe"}, server);
|
||||
|
||||
let backoffInterval;
|
||||
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
|
||||
Svc.Obs.remove("weave:service:backoff:interval", observe);
|
||||
backoffInterval = subject;
|
||||
});
|
||||
|
||||
Assert.ok(!Status.enforceBackoff);
|
||||
Assert.equal(Status.service, STATUS_OK);
|
||||
|
||||
setLastSync(PROLONGED_ERROR_DURATION);
|
||||
await syncAndReportErrorsAndWait("weave:ui:login:error");
|
||||
|
||||
Assert.ok(Status.enforceBackoff);
|
||||
Assert.equal(backoffInterval, 42);
|
||||
Assert.equal(Status.service, LOGIN_FAILED);
|
||||
Assert.equal(Status.login, SERVER_MAINTENANCE);
|
||||
// syncAndReportErrors means dontIgnoreErrors, which means
|
||||
// didReportProlongedError not touched.
|
||||
Assert.ok(!errorHandler.didReportProlongedError);
|
||||
|
||||
await clean();
|
||||
await promiseStopServer(server);
|
||||
|
@ -845,7 +415,7 @@ add_task(async function test_sync_engine_generic_fail() {
await promiseStopServer(server);
});

add_task(async function test_logs_on_sync_error_despite_shouldReportError() {
add_task(async function test_logs_on_sync_error() {
enableValidationPrefs();

_("Ensure that an error is still logged when weave:service:sync:error " +
@ -857,7 +427,6 @@ add_task(async function test_logs_on_sync_error_despite_shouldReportError() {

// Ensure that we report no error.
Status.login = MASTER_PASSWORD_LOCKED;
Assert.ok(!errorHandler.shouldReportError());

let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
Svc.Obs.notify("weave:service:sync:error", {});
@ -871,7 +440,7 @@ add_task(async function test_logs_on_sync_error_despite_shouldReportError() {
await clean();
});

add_task(async function test_logs_on_login_error_despite_shouldReportError() {
add_task(async function test_logs_on_login_error() {
enableValidationPrefs();

_("Ensure that an error is still logged when weave:service:login:error " +
@ -883,7 +452,6 @@ add_task(async function test_logs_on_login_error_despite_shouldReportError() {

// Ensure that we report no error.
Status.login = MASTER_PASSWORD_LOCKED;
Assert.ok(!errorHandler.shouldReportError());

let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
Svc.Obs.notify("weave:service:login:error", {});

@ -1,121 +0,0 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */

ChromeUtils.import("resource://services-sync/service.js");
ChromeUtils.import("resource://services-sync/status.js");
ChromeUtils.import("resource://services-sync/util.js");

ChromeUtils.import("resource://testing-common/services/sync/fakeservices.js");

function baseHandler(eolCode, request, response, statusCode, status, body) {
let alertBody = {
code: eolCode,
message: "Service is EOLed.",
url: "http://getfirefox.com",
};
response.setHeader("X-Weave-Timestamp", "" + new_timestamp(), false);
response.setHeader("X-Weave-Alert", "" + JSON.stringify(alertBody), false);
response.setStatusLine(request.httpVersion, statusCode, status);
response.bodyOutputStream.write(body, body.length);
}

function handler513(request, response) {
let statusCode = 513;
let status = "Upgrade Required";
let body = "{}";
baseHandler("hard-eol", request, response, statusCode, status, body);
}

function handler200(eolCode) {
return function(request, response) {
let statusCode = 200;
let status = "OK";
let body = "{\"meta\": 123456789010}";
baseHandler(eolCode, request, response, statusCode, status, body);
};
}

function sync_httpd_setup(infoHandler) {
let handlers = {
"/1.1/johndoe/info/collections": infoHandler,
};
return httpd_setup(handlers);
}

async function setUp(server) {
await configureIdentity({username: "johndoe"}, server);
new FakeCryptoService();
}

function do_check_soft_eol(eh, start) {
// We subtract 1000 because the stored value is in second precision.
Assert.ok(eh.earliestNextAlert >= (start + eh.MINIMUM_ALERT_INTERVAL_MSEC - 1000));
Assert.equal("soft-eol", eh.currentAlertMode);
}
function do_check_hard_eol(eh, start) {
// We subtract 1000 because the stored value is in second precision.
Assert.ok(eh.earliestNextAlert >= (start + eh.MINIMUM_ALERT_INTERVAL_MSEC - 1000));
Assert.equal("hard-eol", eh.currentAlertMode);
Assert.ok(Status.eol);
}

add_task(async function test_200_hard() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler200("hard-eol"));
await setUp(server);

let promiseObserved = promiseOneObserver("weave:eol");

await Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.

let { subject } = await promiseObserved;
Assert.equal("hard-eol", subject.code);
do_check_hard_eol(eh, start);
Assert.equal(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
eh.clearServerAlerts();
await promiseStopServer(server);
});

add_task(async function test_513_hard() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler513);
await setUp(server);

let promiseObserved = promiseOneObserver("weave:eol");

try {
await Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
} catch (ex) {
// Because fetchInfo will fail on a 513.
}
let { subject } = await promiseObserved;
Assert.equal("hard-eol", subject.code);
do_check_hard_eol(eh, start);
Assert.equal(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
eh.clearServerAlerts();

await promiseStopServer(server);
});

add_task(async function test_200_soft() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler200("soft-eol"));
await setUp(server);

let promiseObserved = promiseOneObserver("weave:eol");

await Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
let { subject } = await promiseObserved;
Assert.equal("soft-eol", subject.code);
do_check_soft_eol(eh, start);
Assert.equal(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
eh.clearServerAlerts();

await promiseStopServer(server);
});

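// The deleted EOL tests above drive Service._fetchInfo() against handlers that
// attach an X-Weave-Alert header. A hedged sketch of how such an alert payload
// could be decoded (parseWeaveAlert is hypothetical and not part of the sync code;
// the { code, message, url } shape matches the handlers above):
function parseWeaveAlert(headerValue) {
  if (!headerValue) {
    return null; // no alert header present
  }
  let alert = JSON.parse(headerValue); // e.g. {"code": "hard-eol", "message": "...", "url": "..."}
  return alert.code; // "soft-eol" or "hard-eol"
}
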
@ -18,15 +18,8 @@ const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
const CLEANUP_DELAY = 2000;
const DELAY_BUFFER = 500; // Buffer for timers on different OS platforms.

const PROLONGED_ERROR_DURATION =
(Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;

var errorHandler = Service.errorHandler;

function setLastSync(lastSyncValue) {
Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
}

function run_test() {
validate_all_future_pings();
run_next_test();
@ -142,8 +135,7 @@ add_test(function test_sync_error_logOnError_false() {
run_next_test();
});

// Fake an unsuccessful sync due to prolonged failure.
setLastSync(PROLONGED_ERROR_DURATION);
// Fake an unsuccessful sync.
Svc.Obs.notify("weave:service:sync:error");
});

@ -183,8 +175,7 @@ add_test(function test_sync_error_logOnError_true() {
});
});

// Fake an unsuccessful sync due to prolonged failure.
setLastSync(PROLONGED_ERROR_DURATION);
// Fake an unsuccessful sync.
Svc.Obs.notify("weave:service:sync:error");
});

@ -203,8 +194,7 @@ add_test(function test_login_error_logOnError_false() {
run_next_test();
});

// Fake an unsuccessful login due to prolonged failure.
setLastSync(PROLONGED_ERROR_DURATION);
// Fake an unsuccessful login.
Svc.Obs.notify("weave:service:login:error");
});

@ -244,8 +234,7 @@ add_test(function test_login_error_logOnError_true() {
});
});

// Fake an unsuccessful login due to prolonged failure.
setLastSync(PROLONGED_ERROR_DURATION);
// Fake an unsuccessful login.
Svc.Obs.notify("weave:service:login:error");
});

@ -353,8 +342,7 @@ add_test(function test_errorLog_dumpAddons() {
});
});

// Fake an unsuccessful sync due to prolonged failure.
setLastSync(PROLONGED_ERROR_DURATION);
// Fake an unsuccessful sync.
Svc.Obs.notify("weave:service:sync:error");
});

@ -25,13 +25,6 @@ add_task(async function setup() {

// Setup the FxA identity manager and cluster manager.
Status.__authManager = Service.identity = new BrowserIDManager();

// None of the failures in this file should result in a UI error.
function onUIError() {
do_throw("Errors should not be presented in the UI.");
}
Svc.Obs.add("weave:ui:login:error", onUIError);
Svc.Obs.add("weave:ui:sync:error", onUIError);
});

@ -16,13 +16,6 @@ ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");

add_task(async function setup() {
validate_all_future_pings();

// None of the failures in this file should result in a UI error.
function onUIError() {
do_throw("Errors should not be presented in the UI.");
}
Svc.Obs.add("weave:ui:login:error", onUIError);
Svc.Obs.add("weave:ui:sync:error", onUIError);
});

/**

@ -220,11 +220,8 @@ add_task(async function test_disabledLocally_wipe503() {
Service._ignorePrefObserver = false;
engine.enabled = false;

let promiseObserved = promiseOneObserver("weave:ui:sync:error");

_("Sync.");
Service.errorHandler.syncAndReportErrors();
await promiseObserved;
await Service.sync();
Assert.equal(Service.status.sync, SERVER_MAINTENANCE);

await Service.startOver();

@ -104,7 +104,6 @@ skip-if = os == "android"
[test_errorhandler_sync_checkServerError.js]
# Bug 676978: test hangs on Android (see also testing/xpcshell/xpcshell.ini)
skip-if = os == "android"
[test_errorhandler_eol.js]
[test_hmac_error.js]
[test_interval_triggers.js]
[test_node_reassignment.js]

@ -268,7 +268,6 @@ var TPS = {
* directly called via TPS.Sync()!
*/
delayAutoSync: function TPS_delayAutoSync() {
Weave.Svc.Prefs.set("scheduler.eolInterval", 7200);
Weave.Svc.Prefs.set("scheduler.immediateInterval", 7200);
Weave.Svc.Prefs.set("scheduler.idleInterval", 7200);
Weave.Svc.Prefs.set("scheduler.activeInterval", 7200);

@ -3109,11 +3109,3 @@ impl ISizeAndMarginsComputer for InlineFlexItem {
fragment.margin.inline_end)
}
}

/// A stacking context, a pseudo-stacking context, or a non-stacking context.
#[derive(Clone, Copy, PartialEq)]
pub enum BlockStackingContextType {
NonstackingContext,
PseudoStackingContext,
StackingContext,
}

@ -11,7 +11,7 @@
#![deny(unsafe_code)]

use app_units::{Au, AU_PER_PX};
use block::{BlockFlow, BlockStackingContextType};
use block::BlockFlow;
use canvas_traits::canvas::{CanvasMsg, FromLayoutMsg};
use context::LayoutContext;
use display_list::ToLayout;
@ -73,11 +73,10 @@ use style_traits::CSSPixel;
use style_traits::ToCss;
use style_traits::cursor::CursorKind;
use table_cell::CollapsedBordersForCell;
use webrender_api::{self, BorderRadius, BorderSide, BoxShadowClipMode, ClipMode, ColorF};
use webrender_api::{ComplexClipRegion, ExternalScrollId, FilterOp, GlyphInstance};
use webrender_api::{ImageRendering, LayoutRect, LayoutSize, LayoutTransform, LayoutVector2D};
use webrender_api::{LineStyle, LocalClip, NormalBorder, ScrollPolicy};
use webrender_api::{ScrollSensitivity, StickyOffsetBounds};
use webrender_api::{self, BorderRadius, BorderSide, BoxShadowClipMode, ColorF, ExternalScrollId};
use webrender_api::{FilterOp, GlyphInstance, ImageRendering, LayoutRect, LayoutSize};
use webrender_api::{LayoutTransform, LayoutVector2D, LineStyle, LocalClip, NormalBorder};
use webrender_api::{ScrollPolicy, ScrollSensitivity, StickyOffsetBounds};

fn establishes_containing_block_for_absolute(
flags: StackingContextCollectionFlags,
@ -162,13 +161,15 @@ pub struct InlineNodeBorderInfo {
struct StackingContextInfo {
children: Vec<StackingContext>,
clip_scroll_nodes: Vec<ClipScrollNodeIndex>,
real_stacking_context_id: StackingContextId,
}

impl StackingContextInfo {
fn new() -> StackingContextInfo {
fn new(real_stacking_context_id: StackingContextId) -> StackingContextInfo {
StackingContextInfo {
children: Vec::new(),
clip_scroll_nodes: Vec::new(),
real_stacking_context_id,
}
}

@ -236,10 +237,16 @@ impl StackingContextCollectionState {
),
};

let mut stacking_context_info = FnvHashMap::default();
stacking_context_info.insert(
StackingContextId::root(),
StackingContextInfo::new(StackingContextId::root())
);

StackingContextCollectionState {
pipeline_id: pipeline_id,
root_stacking_context: StackingContext::root(),
stacking_context_info: FnvHashMap::default(),
stacking_context_info,
clip_scroll_nodes: vec![root_node],
current_stacking_context_id: StackingContextId::root(),
current_real_stacking_context_id: StackingContextId::root(),
@ -252,9 +259,25 @@ impl StackingContextCollectionState {
|
|||
}
|
||||
}
|
||||
|
||||
fn generate_stacking_context_id(&mut self) -> StackingContextId {
|
||||
fn allocate_stacking_context_info(
|
||||
&mut self,
|
||||
stacking_context_type: StackingContextType
|
||||
) -> StackingContextId {
|
||||
let next_stacking_context_id = self.next_stacking_context_id.next();
|
||||
mem::replace(&mut self.next_stacking_context_id, next_stacking_context_id)
|
||||
let allocated_id =
|
||||
mem::replace(&mut self.next_stacking_context_id, next_stacking_context_id);
|
||||
|
||||
let real_stacking_context_id = match stacking_context_type {
|
||||
StackingContextType::Real => allocated_id,
|
||||
_ => self.current_real_stacking_context_id,
|
||||
};
|
||||
|
||||
self.stacking_context_info.insert(
|
||||
allocated_id,
|
||||
StackingContextInfo::new(real_stacking_context_id)
|
||||
);
|
||||
|
||||
allocated_id
|
||||
}
|
||||
|
||||
fn add_stacking_context(
|
||||
|
@ -262,10 +285,7 @@ impl StackingContextCollectionState {
|
|||
parent_id: StackingContextId,
|
||||
stacking_context: StackingContext,
|
||||
) {
|
||||
let info = self.stacking_context_info
|
||||
.entry(parent_id)
|
||||
.or_insert(StackingContextInfo::new());
|
||||
info.children.push(stacking_context);
|
||||
self.stacking_context_info.get_mut(&parent_id).unwrap().children.push(stacking_context);
|
||||
}
|
||||
|
||||
fn add_clip_scroll_node(&mut self, clip_scroll_node: ClipScrollNode) -> ClipScrollNodeIndex {
|
||||
|
@ -275,10 +295,11 @@ impl StackingContextCollectionState {
|
|||
// the scroll root before it is defined.
|
||||
self.clip_scroll_nodes.push(clip_scroll_node);
|
||||
let index = ClipScrollNodeIndex(self.clip_scroll_nodes.len() - 1);
|
||||
let info = self.stacking_context_info
|
||||
.entry(self.current_real_stacking_context_id)
|
||||
.or_insert(StackingContextInfo::new());
|
||||
info.clip_scroll_nodes.push(index);
|
||||
self.stacking_context_info
|
||||
.get_mut(&self.current_real_stacking_context_id)
|
||||
.unwrap()
|
||||
.clip_scroll_nodes
|
||||
.push(index);
|
||||
index
|
||||
}
|
||||
}
|
||||
|
@ -390,6 +411,34 @@ impl<'a> DisplayListBuildState<'a> {
|
|||
)
|
||||
}
|
||||
|
||||
fn add_late_clip_node(&mut self, rect: LayoutRect, radii: BorderRadius) -> ClipScrollNodeIndex {
|
||||
let mut clip = ClippingRegion::from_rect(rect);
|
||||
clip.intersect_with_rounded_rect(rect, radii);
|
||||
|
||||
let node = ClipScrollNode {
|
||||
parent_index: self.current_clipping_and_scrolling.scrolling,
|
||||
clip,
|
||||
content_rect: LayoutRect::zero(), // content_rect isn't important for clips.
|
||||
node_type: ClipScrollNodeType::Clip,
|
||||
};
|
||||
|
||||
// We want the scroll root to be defined before any possible item that could use it,
|
||||
// so we make sure that it is added to the beginning of the parent "real" (non-pseudo)
|
||||
// stacking context. This ensures that item reordering will not result in an item using
|
||||
// the scroll root before it is defined.
|
||||
self.clip_scroll_nodes.push(node);
|
||||
let index = ClipScrollNodeIndex(self.clip_scroll_nodes.len() - 1);
|
||||
let real_stacking_context_id =
|
||||
self.stacking_context_info[&self.current_stacking_context_id].real_stacking_context_id;
|
||||
self.stacking_context_info
|
||||
.get_mut(&real_stacking_context_id)
|
||||
.unwrap()
|
||||
.clip_scroll_nodes
|
||||
.push(index);
|
||||
|
||||
index
|
||||
}
|
||||
|
||||
pub fn to_display_list(mut self) -> DisplayList {
|
||||
let mut list = Vec::new();
|
||||
let root_context = mem::replace(&mut self.root_stacking_context, StackingContext::root());
|
||||
|
@ -413,9 +462,7 @@ impl<'a> DisplayListBuildState<'a> {
|
|||
child_items.sort_by(|a, b| a.base().section.cmp(&b.base().section));
|
||||
child_items.reverse();
|
||||
|
||||
let mut info = self.stacking_context_info
|
||||
.remove(&stacking_context.id)
|
||||
.unwrap_or_else(StackingContextInfo::new);
|
||||
let mut info = self.stacking_context_info.remove(&stacking_context.id).unwrap();
|
||||
|
||||
info.children.sort();
|
||||
|
||||
|
@ -801,18 +848,15 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
},
|
||||
}
|
||||
|
||||
let clip = if !border_radii.is_zero() {
|
||||
LocalClip::RoundedRect(
|
||||
bounds.to_layout(),
|
||||
ComplexClipRegion::new(bounds.to_layout(), border_radii, ClipMode::Clip),
|
||||
)
|
||||
} else {
|
||||
LocalClip::Rect(bounds.to_layout())
|
||||
};
|
||||
let previous_clipping_and_scrolling = state.current_clipping_and_scrolling;
|
||||
if !border_radii.is_zero() {
|
||||
let clip_id = state.add_late_clip_node(bounds.to_layout(), border_radii);
|
||||
state.current_clipping_and_scrolling = ClippingAndScrolling::simple(clip_id);
|
||||
}
|
||||
|
||||
let base = state.create_base_display_item(
|
||||
&bounds,
|
||||
clip,
|
||||
LocalClip::Rect(bounds.to_layout()),
|
||||
self.node,
|
||||
style.get_cursor(CursorKind::Default),
|
||||
display_list_section,
|
||||
|
@ -822,6 +866,8 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
color: background_color.to_layout(),
|
||||
})));
|
||||
|
||||
state.current_clipping_and_scrolling = previous_clipping_and_scrolling;
|
||||
|
||||
// The background image is painted on top of the background color.
|
||||
// Implements background image, per spec:
|
||||
// http://www.w3.org/TR/CSS21/colors.html#background
|
||||
|
@ -1608,25 +1654,31 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
stacking_relative_border_box: &Rect<Au>,
|
||||
clip: &Rect<Au>,
|
||||
) {
|
||||
let previous_clipping_and_scrolling = state.current_clipping_and_scrolling;
|
||||
|
||||
// Compute the context box position relative to the parent stacking context.
|
||||
let stacking_relative_content_box =
|
||||
self.stacking_relative_content_box(stacking_relative_border_box);
|
||||
|
||||
// Adjust the clipping region as necessary to account for `border-radius`.
|
||||
let build_local_clip = |style: &ComputedValues| {
|
||||
let radii = build_border_radius_for_inner_rect(&stacking_relative_border_box, style);
|
||||
let create_base_display_item = |state: &mut DisplayListBuildState| {
|
||||
let layout_rect = stacking_relative_border_box.to_layout();
|
||||
|
||||
// Adjust the clipping region as necessary to account for `border-radius`.
|
||||
let radii =
|
||||
build_border_radius_for_inner_rect(&stacking_relative_border_box, &self.style);
|
||||
|
||||
if !radii.is_zero() {
|
||||
LocalClip::RoundedRect(
|
||||
stacking_relative_border_box.to_layout(),
|
||||
ComplexClipRegion::new(
|
||||
stacking_relative_content_box.to_layout(),
|
||||
radii,
|
||||
ClipMode::Clip,
|
||||
),
|
||||
)
|
||||
} else {
|
||||
LocalClip::Rect(stacking_relative_border_box.to_layout())
|
||||
let clip_id = state.add_late_clip_node(layout_rect, radii);
|
||||
state.current_clipping_and_scrolling = ClippingAndScrolling::simple(clip_id);
|
||||
}
|
||||
|
||||
state.create_base_display_item(
|
||||
&stacking_relative_content_box,
|
||||
LocalClip::Rect(layout_rect),
|
||||
self.node,
|
||||
self.style.get_cursor(CursorKind::Default),
|
||||
DisplayListSection::Content,
|
||||
)
|
||||
};
|
||||
|
||||
match self.specific {
|
||||
|
@ -1707,15 +1759,9 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
None => return warn!("No pipeline id for iframe {}.", browsing_context_id),
|
||||
};
|
||||
|
||||
let base = state.create_base_display_item(
|
||||
&stacking_relative_content_box,
|
||||
build_local_clip(&self.style),
|
||||
self.node,
|
||||
self.style.get_cursor(CursorKind::Default),
|
||||
DisplayListSection::Content,
|
||||
);
|
||||
let base = create_base_display_item(state);
|
||||
let item = DisplayItem::Iframe(Box::new(IframeDisplayItem {
|
||||
base: base,
|
||||
base,
|
||||
iframe: pipeline_id,
|
||||
}));
|
||||
|
||||
|
@ -1730,15 +1776,9 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
SpecificFragmentInfo::Image(ref image_fragment) => {
|
||||
// Place the image into the display list.
|
||||
if let Some(ref image) = image_fragment.image {
|
||||
let base = state.create_base_display_item(
|
||||
&stacking_relative_content_box,
|
||||
build_local_clip(&self.style),
|
||||
self.node,
|
||||
self.style.get_cursor(CursorKind::Default),
|
||||
DisplayListSection::Content,
|
||||
);
|
||||
let base = create_base_display_item(state);
|
||||
state.add_display_item(DisplayItem::Image(Box::new(ImageDisplayItem {
|
||||
base: base,
|
||||
base,
|
||||
webrender_image: WebRenderImageInfo::from_image(image),
|
||||
stretch_size: stacking_relative_content_box.size.to_layout(),
|
||||
tile_spacing: LayoutSize::zero(),
|
||||
|
@ -1765,15 +1805,9 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
},
|
||||
};
|
||||
|
||||
let base = state.create_base_display_item(
|
||||
&stacking_relative_content_box,
|
||||
build_local_clip(&self.style),
|
||||
self.node,
|
||||
self.style.get_cursor(CursorKind::Default),
|
||||
DisplayListSection::Content,
|
||||
);
|
||||
let base = create_base_display_item(state);
|
||||
let display_item = DisplayItem::Image(Box::new(ImageDisplayItem {
|
||||
base: base,
|
||||
base,
|
||||
webrender_image: WebRenderImageInfo {
|
||||
width: computed_width as u32,
|
||||
height: computed_height as u32,
|
||||
|
@ -1794,6 +1828,8 @@ impl FragmentDisplayListBuilding for Fragment {
|
|||
panic!("Shouldn't see table column fragments here.")
|
||||
},
|
||||
}
|
||||
|
||||
state.current_clipping_and_scrolling = previous_clipping_and_scrolling;
|
||||
}
|
||||
|
||||
fn create_stacking_context(
|
||||
|
@ -2061,7 +2097,7 @@ pub trait BlockFlowDisplayListBuilding {
|
|||
&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
preserved_state: &mut SavedStackingContextCollectionState,
|
||||
stacking_context_type: BlockStackingContextType,
|
||||
stacking_context_type: Option<StackingContextType>,
|
||||
flags: StackingContextCollectionFlags,
|
||||
) -> ClippingAndScrolling;
|
||||
fn setup_clip_scroll_node_for_position(
|
||||
|
@ -2082,6 +2118,7 @@ pub trait BlockFlowDisplayListBuilding {
|
|||
);
|
||||
fn create_pseudo_stacking_context_for_block(
|
||||
&mut self,
|
||||
stacking_context_type: StackingContextType,
|
||||
parent_stacking_context_id: StackingContextId,
|
||||
parent_clip_and_scroll_info: ClippingAndScrolling,
|
||||
state: &mut StackingContextCollectionState,
|
||||
|
@ -2108,10 +2145,10 @@ pub trait BlockFlowDisplayListBuilding {
|
|||
background: &style_structs::Background,
|
||||
background_color: RGBA);
|
||||
|
||||
fn block_stacking_context_type(
|
||||
fn stacking_context_type(
|
||||
&self,
|
||||
flags: StackingContextCollectionFlags,
|
||||
) -> BlockStackingContextType;
|
||||
) -> Option<StackingContextType>;
|
||||
}
|
||||
|
||||
/// This structure manages ensuring that modification to StackingContextCollectionState is
|
||||
|
@ -2267,15 +2304,14 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
) {
|
||||
let mut preserved_state = SavedStackingContextCollectionState::new(state);
|
||||
|
||||
let block_stacking_context_type = self.block_stacking_context_type(flags);
|
||||
self.base.stacking_context_id = match block_stacking_context_type {
|
||||
BlockStackingContextType::NonstackingContext => state.current_stacking_context_id,
|
||||
BlockStackingContextType::PseudoStackingContext |
|
||||
BlockStackingContextType::StackingContext => state.generate_stacking_context_id(),
|
||||
let stacking_context_type = self.stacking_context_type(flags);
|
||||
self.base.stacking_context_id = match stacking_context_type {
|
||||
None => state.current_stacking_context_id,
|
||||
Some(sc_type) => state.allocate_stacking_context_info(sc_type),
|
||||
};
|
||||
state.current_stacking_context_id = self.base.stacking_context_id;
|
||||
|
||||
if block_stacking_context_type == BlockStackingContextType::StackingContext {
|
||||
if stacking_context_type == Some(StackingContextType::Real) {
|
||||
state.current_real_stacking_context_id = self.base.stacking_context_id;
|
||||
}
|
||||
|
||||
|
@ -2286,7 +2322,7 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
let containing_clipping_and_scrolling = self.setup_clipping_for_block(
|
||||
state,
|
||||
&mut preserved_state,
|
||||
block_stacking_context_type,
|
||||
stacking_context_type,
|
||||
flags,
|
||||
);
|
||||
|
||||
|
@ -2294,19 +2330,18 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
state.containing_block_clipping_and_scrolling = state.current_clipping_and_scrolling;
|
||||
}
|
||||
|
||||
match block_stacking_context_type {
|
||||
BlockStackingContextType::NonstackingContext => {
|
||||
self.base.collect_stacking_contexts_for_children(state);
|
||||
},
|
||||
BlockStackingContextType::PseudoStackingContext => {
|
||||
self.create_pseudo_stacking_context_for_block(
|
||||
match stacking_context_type {
|
||||
None => self.base.collect_stacking_contexts_for_children(state),
|
||||
Some(StackingContextType::Real) => {
|
||||
self.create_real_stacking_context_for_block(
|
||||
preserved_state.stacking_context_id,
|
||||
containing_clipping_and_scrolling,
|
||||
state,
|
||||
);
|
||||
},
|
||||
BlockStackingContextType::StackingContext => {
|
||||
self.create_real_stacking_context_for_block(
|
||||
Some(stacking_context_type) => {
|
||||
self.create_pseudo_stacking_context_for_block(
|
||||
stacking_context_type,
|
||||
preserved_state.stacking_context_id,
|
||||
containing_clipping_and_scrolling,
|
||||
state,
|
||||
|
@ -2321,7 +2356,7 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
&mut self,
|
||||
state: &mut StackingContextCollectionState,
|
||||
preserved_state: &mut SavedStackingContextCollectionState,
|
||||
stacking_context_type: BlockStackingContextType,
|
||||
stacking_context_type: Option<StackingContextType>,
|
||||
flags: StackingContextCollectionFlags,
|
||||
) -> ClippingAndScrolling {
|
||||
// If this block is absolutely positioned, we should be clipped and positioned by
|
||||
|
@ -2347,7 +2382,7 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
self.stacking_relative_border_box(CoordinateSystem::Parent)
|
||||
};
|
||||
|
||||
if stacking_context_type == BlockStackingContextType::StackingContext {
|
||||
if stacking_context_type == Some(StackingContextType::Real) {
|
||||
self.transform_clip_to_coordinate_space(state, preserved_state);
|
||||
}
|
||||
|
||||
|
@ -2578,26 +2613,16 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
|
||||
fn create_pseudo_stacking_context_for_block(
|
||||
&mut self,
|
||||
stacking_context_type: StackingContextType,
|
||||
parent_stacking_context_id: StackingContextId,
|
||||
parent_clipping_and_scrolling: ClippingAndScrolling,
|
||||
state: &mut StackingContextCollectionState,
|
||||
) {
|
||||
let creation_mode = if self.base
|
||||
.flags
|
||||
.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) ||
|
||||
self.fragment.style.get_box().position != StylePosition::Static
|
||||
{
|
||||
StackingContextType::PseudoPositioned
|
||||
} else {
|
||||
assert!(self.base.flags.is_float());
|
||||
StackingContextType::PseudoFloat
|
||||
};
|
||||
|
||||
let new_context = self.fragment.create_stacking_context(
|
||||
self.base.stacking_context_id,
|
||||
&self.base,
|
||||
ScrollPolicy::Scrollable,
|
||||
creation_mode,
|
||||
stacking_context_type,
|
||||
parent_clipping_and_scrolling,
|
||||
);
|
||||
state.add_stacking_context(parent_stacking_context_id, new_context);
|
||||
|
@ -2694,34 +2719,31 @@ impl BlockFlowDisplayListBuilding for BlockFlow {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
fn block_stacking_context_type(
|
||||
fn stacking_context_type(
|
||||
&self,
|
||||
flags: StackingContextCollectionFlags,
|
||||
) -> BlockStackingContextType {
|
||||
) -> Option<StackingContextType>{
|
||||
if flags.contains(StackingContextCollectionFlags::NEVER_CREATES_STACKING_CONTEXT) {
|
||||
return BlockStackingContextType::NonstackingContext;
|
||||
return None;
|
||||
}
|
||||
|
||||
if self.fragment.establishes_stacking_context() {
|
||||
return BlockStackingContextType::StackingContext;
|
||||
return Some(StackingContextType::Real);
|
||||
}
|
||||
|
||||
if self.base
|
||||
.flags
|
||||
.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
|
||||
{
|
||||
return BlockStackingContextType::PseudoStackingContext;
|
||||
if self.base.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) {
|
||||
return Some(StackingContextType::PseudoPositioned);
|
||||
}
|
||||
|
||||
if self.fragment.style.get_box().position != StylePosition::Static {
|
||||
return BlockStackingContextType::PseudoStackingContext;
|
||||
return Some(StackingContextType::PseudoPositioned);
|
||||
}
|
||||
|
||||
if self.base.flags.is_float() {
|
||||
return BlockStackingContextType::PseudoStackingContext;
|
||||
return Some(StackingContextType::PseudoFloat);
|
||||
}
|
||||
|
||||
BlockStackingContextType::NonstackingContext
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2757,7 +2779,8 @@ impl InlineFlowDisplayListBuilding for InlineFlow {
|
|||
|
||||
if !fragment.collect_stacking_contexts_for_blocklike_fragment(state) {
|
||||
if fragment.establishes_stacking_context() {
|
||||
fragment.stacking_context_id = state.generate_stacking_context_id();
|
||||
fragment.stacking_context_id =
|
||||
state.allocate_stacking_context_info(StackingContextType::Real);
|
||||
|
||||
let current_stacking_context_id = state.current_stacking_context_id;
|
||||
let stacking_context = fragment.create_stacking_context(
|
||||
|
|
|
@ -40,6 +40,7 @@ WPT_MANIFEST_PATH = wpt_path("include.ini")
config = {
"skip-check-length": False,
"skip-check-licenses": False,
"check-alphabetical-order": True,
"check-ordered-json-keys": [],
"lint-scripts": [],
"blocked-packages": {},
@ -506,6 +507,7 @@ def check_rust(file_name, lines):
indent = 0
prev_indent = 0

check_alphabetical_order = config["check-alphabetical-order"]
decl_message = "{} is not in alphabetical order"
decl_expected = "\n\t\033[93mexpected: {}\033[0m"
decl_found = "\n\t\033[91mfound: {}\033[0m"
@ -670,7 +672,7 @@ def check_rust(file_name, lines):
crate_name = line[13:-1]
if indent not in prev_crate:
prev_crate[indent] = ""
if prev_crate[indent] > crate_name:
if prev_crate[indent] > crate_name and check_alphabetical_order:
yield(idx + 1, decl_message.format("extern crate declaration")
+ decl_expected.format(prev_crate[indent])
+ decl_found.format(crate_name))
@ -687,12 +689,12 @@ def check_rust(file_name, lines):
if match:
features = map(lambda w: w.strip(), match.group(1).split(','))
sorted_features = sorted(features)
if sorted_features != features:
if sorted_features != features and check_alphabetical_order:
yield(idx + 1, decl_message.format("feature attribute")
+ decl_expected.format(tuple(sorted_features))
+ decl_found.format(tuple(features)))

if prev_feature_name > sorted_features[0]:
if prev_feature_name > sorted_features[0] and check_alphabetical_order:
yield(idx + 1, decl_message.format("feature attribute")
+ decl_expected.format(prev_feature_name + " after " + sorted_features[0])
+ decl_found.format(prev_feature_name + " before " + sorted_features[0]))
@ -717,7 +719,7 @@ def check_rust(file_name, lines):
if prev_use:
current_use_cut = current_use.replace("{self,", ".").replace("{", ".")
prev_use_cut = prev_use.replace("{self,", ".").replace("{", ".")
if indent == current_indent and current_use_cut < prev_use_cut:
if indent == current_indent and current_use_cut < prev_use_cut and check_alphabetical_order:
yield(idx + 1, decl_message.format("use statement")
+ decl_expected.format(prev_use)
+ decl_found.format(current_use))
@ -743,7 +745,7 @@ def check_rust(file_name, lines):
prev_mod[indent] = ""
if match == -1 and not line.endswith(";"):
yield (idx + 1, "mod declaration spans multiple lines")
if prev_mod[indent] and mod < prev_mod[indent]:
if prev_mod[indent] and mod < prev_mod[indent] and check_alphabetical_order:
yield(idx + 1, decl_message.format("mod declaration")
+ decl_expected.format(prev_mod[indent])
+ decl_found.format(mod))
@ -760,7 +762,7 @@ def check_rust(file_name, lines):
derives = map(lambda w: w.strip(), match.group(1).split(','))
# sort, compare and report
sorted_derives = sorted(derives)
if sorted_derives != derives:
if sorted_derives != derives and check_alphabetical_order:
yield(idx + 1, decl_message.format("derivable traits list")
+ decl_expected.format(", ".join(sorted_derives))
+ decl_found.format(", ".join(derives)))

@ -1,6 +1,7 @@
[configs]
skip-check-length = false
skip-check-licenses = false
check-alphabetical-order = true
check-ordered-json-keys = [
"./resources/prefs.json",
]

@ -1 +0,0 @@
|
|||
{"files":{"Cargo.toml":"4516face4b018e46fc36cfe24eff335671684032a75435305d62fd5f280f05a7","build.rs":"4dbf3e5a423e5eba48fc7f11e4f7638c054e53a3750c888059a96d96f8923ef2","src/array.rs":"d648ed8cf0ccb72c3ca0d9e018a3db804edad9685739eba13f8f515e04f3708b","src/base.rs":"c995d91c9e5aed99ea28fc75561cfd573f5e8ff806ef512194e7b616c35c308f","src/bundle.rs":"ff5f5253f331b7fa054414a3f256d74760e3ce805b720cdb735a2e46cc66dce6","src/data.rs":"21e968951fe56e080d33474f4438de2dfb7e0c8af426a6dfb100efdd6c530eec","src/date.rs":"f6cdcb94658fafc5bacb83cfbd20ad97502b8ddf6bd1c0c0d6a2545a4f7b7420","src/dictionary.rs":"97c40c1afc719b970968179112ad76c3c89b6b4eb4ea18f7ac3f059d98cce736","src/error.rs":"61bc31a401ec6c8495668175eade9284e257da056fc666af74a5555af5daf33f","src/lib.rs":"8bdbc6ed8fcbbc5b69d7634031ff44d50b9ac789279eb89b80c280ea156c98b3","src/messageport.rs":"59ba92ca90bb9b3162b6df44188fac18cd979250f33a52361144c902e86529bd","src/number.rs":"8881c7cd1b510c654c445485de898f83abda91557fd3e6f9daccf2d1b9c4c57e","src/propertylist.rs":"cc2b27f8f8ebc80c03871b7b1ad50ee348539b016078ce721c86b8cd5f9d75bd","src/runloop.rs":"7feab3bbb9913c3b40285bc37b920f9fe4d937d1db08d8ae69a2ec9597713598","src/set.rs":"51e978fc81d4c55013dfc6df4e76c58daaf5deedf9aafda0f6a9e04e7575272c","src/string.rs":"27b92e8e5d3fc95a521dc6447ccfefd9eb28ec5f42bd8571defd124d950b133f","src/url.rs":"8a3f4137ca26e379a24666b4caa7a08f9726f838e53e9dbb92ba474036309669"},"package":"41115a6aa5d3e1e5ef98148373f25971d1fad53818553f216495f9e67e90a624"}
|
|
@ -1,12 +0,0 @@
|
|||
[package]
|
||||
name = "core-foundation-sys"
|
||||
description = "Bindings to Core Foundation for OS X"
|
||||
homepage = "https://github.com/servo/core-foundation-rs"
|
||||
repository = "https://github.com/servo/core-foundation-rs"
|
||||
version = "0.3.1"
|
||||
authors = ["The Servo Project Developers"]
|
||||
license = "MIT / Apache-2.0"
|
||||
build = "build.rs"
|
||||
|
||||
[dependencies]
|
||||
libc = "0.2"
|
|
@ -1,12 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rustc-link-lib=framework=CoreFoundation");
|
||||
}
|
|
@ -1,61 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFRange, CFIndex, CFAllocatorRef, CFTypeID};
|
||||
|
||||
/// FIXME(pcwalton): This is wrong.
|
||||
pub type CFArrayRetainCallBack = *const u8;
|
||||
|
||||
/// FIXME(pcwalton): This is wrong.
|
||||
pub type CFArrayReleaseCallBack = *const u8;
|
||||
|
||||
/// FIXME(pcwalton): This is wrong.
|
||||
pub type CFArrayCopyDescriptionCallBack = *const u8;
|
||||
|
||||
/// FIXME(pcwalton): This is wrong.
|
||||
pub type CFArrayEqualCallBack = *const u8;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct CFArrayCallBacks {
|
||||
pub version: CFIndex,
|
||||
pub retain: CFArrayRetainCallBack,
|
||||
pub release: CFArrayReleaseCallBack,
|
||||
pub copyDescription: CFArrayCopyDescriptionCallBack,
|
||||
pub equal: CFArrayEqualCallBack,
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFArray(c_void);
|
||||
|
||||
pub type CFArrayRef = *const __CFArray;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFArray.h
|
||||
*/
|
||||
pub static kCFTypeArrayCallBacks: CFArrayCallBacks;
|
||||
|
||||
pub fn CFArrayCreate(allocator: CFAllocatorRef, values: *const *const c_void,
|
||||
numValues: CFIndex, callBacks: *const CFArrayCallBacks) -> CFArrayRef;
|
||||
pub fn CFArrayCreateCopy(allocator: CFAllocatorRef , theArray: CFArrayRef) -> CFArrayRef;
|
||||
|
||||
// CFArrayBSearchValues
|
||||
// CFArrayContainsValue
|
||||
pub fn CFArrayGetCount(theArray: CFArrayRef) -> CFIndex;
|
||||
// CFArrayGetCountOfValue
|
||||
// CFArrayGetFirstIndexOfValue
|
||||
// CFArrayGetLastIndexOfValue
|
||||
pub fn CFArrayGetValues(theArray: CFArrayRef, range: CFRange, values: *mut *const c_void);
|
||||
pub fn CFArrayGetValueAtIndex(theArray: CFArrayRef, idx: CFIndex) -> *const c_void;
|
||||
// CFArrayApplyFunction
|
||||
pub fn CFArrayGetTypeID() -> CFTypeID;
|
||||
}
|
|
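A minimal usage sketch for the CFArray bindings above (macOS only). It assumes the `core_foundation_sys` crate keeps the `array`/`base`/`string` module layout shown in these files — the 0.5.1 replacement preserves it — and that `libc` is a dependency; the string contents are just a placeholder.

```rust
use core_foundation_sys::array::{
    kCFTypeArrayCallBacks, CFArrayCreate, CFArrayGetCount, CFArrayGetValueAtIndex,
};
use core_foundation_sys::base::{kCFAllocatorDefault, CFRelease, CFTypeRef};
use core_foundation_sys::string::{kCFStringEncodingUTF8, CFStringCreateWithCString};
use libc::{c_char, c_void};

fn main() {
    unsafe {
        let s = CFStringCreateWithCString(
            kCFAllocatorDefault,
            b"hello\0".as_ptr() as *const c_char,
            kCFStringEncodingUTF8,
        );
        let values = [s as *const c_void];
        // kCFTypeArrayCallBacks tells the array to retain/release its elements
        // as CF objects and to compare them with CFEqual.
        let array = CFArrayCreate(kCFAllocatorDefault, values.as_ptr(), 1,
                                  &kCFTypeArrayCallBacks);
        assert_eq!(CFArrayGetCount(array), 1);
        assert_eq!(CFArrayGetValueAtIndex(array, 0), s as *const c_void);
        // Releasing the array drops its retain on the string; then drop our own reference.
        CFRelease(array as CFTypeRef);
        CFRelease(s as CFTypeRef);
    }
}
```

Passing `kCFTypeArrayCallBacks` is what makes the array participate in CF reference counting; with null callbacks it would store the raw pointers verbatim.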
@ -1,76 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::{c_uint, c_long, c_ulong, c_void, c_int};
|
||||
|
||||
pub type Boolean = u8;
|
||||
pub type CFIndex = c_long;
|
||||
pub type mach_port_t = c_uint;
|
||||
pub type CFAllocatorRef = *const c_void;
|
||||
pub type CFNullRef = *const c_void;
|
||||
pub type CFHashCode = c_ulong;
|
||||
pub type CFTypeID = c_ulong;
|
||||
pub type CFTypeRef = *const c_void;
|
||||
pub type CFOptionFlags = u32;
|
||||
pub type OSStatus = i32;
|
||||
pub type SInt32 = c_int;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct CFRange {
|
||||
pub location: CFIndex,
|
||||
pub length: CFIndex
|
||||
}
|
||||
|
||||
// for back-compat
|
||||
impl CFRange {
|
||||
pub fn init(location: CFIndex, length: CFIndex) -> CFRange {
|
||||
CFRange {
|
||||
location: location,
|
||||
length: length,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFBase.h
|
||||
*/
|
||||
|
||||
/* CFAllocator Reference */
|
||||
// N.B. Many CFAllocator functions and constants are omitted here.
|
||||
pub static kCFAllocatorDefault: CFAllocatorRef;
|
||||
pub static kCFAllocatorSystemDefault: CFAllocatorRef;
|
||||
pub static kCFAllocatorMalloc: CFAllocatorRef;
|
||||
pub static kCFAllocatorMallocZone: CFAllocatorRef;
|
||||
pub static kCFAllocatorNull: CFAllocatorRef;
|
||||
pub static kCFAllocatorUseContext: CFAllocatorRef;
|
||||
|
||||
/* CFNull Reference */
|
||||
|
||||
pub static kCFNull: CFNullRef;
|
||||
|
||||
/* CFType Reference */
|
||||
|
||||
//fn CFCopyDescription
|
||||
//fn CFCopyTypeIDDescription
|
||||
//fn CFEqual
|
||||
//fn CFGetAllocator
|
||||
pub fn CFEqual(cf1: CFTypeRef, cf2: CFTypeRef) -> Boolean;
|
||||
pub fn CFGetRetainCount(cf: CFTypeRef) -> CFIndex;
|
||||
pub fn CFGetTypeID(cf: CFTypeRef) -> CFTypeID;
|
||||
pub fn CFHash(cf: CFTypeRef) -> CFHashCode;
|
||||
//fn CFMakeCollectable
|
||||
pub fn CFRelease(cf: CFTypeRef);
|
||||
pub fn CFRetain(cf: CFTypeRef) -> CFTypeRef;
|
||||
pub fn CFShow(obj: CFTypeRef);
|
||||
|
||||
/* Base Utilities Reference */
|
||||
// N.B. Some things missing here.
|
||||
}
|
|
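A tiny sketch of the ownership and identity helpers declared here, exercised on the `kCFNull` singleton; same assumptions as the CFArray sketch.

```rust
use core_foundation_sys::base::{
    kCFNull, CFEqual, CFGetTypeID, CFHash, CFRelease, CFRetain, CFShow, CFTypeRef,
};

fn main() {
    unsafe {
        let null_ref: CFTypeRef = kCFNull;
        // Every CF object carries a runtime type id; CFNull has its own.
        println!("CFNull type id: {}", CFGetTypeID(null_ref));
        // CFEqual returns a Boolean (u8); non-zero means "equal".
        assert_ne!(CFEqual(null_ref, null_ref), 0);
        println!("CFNull hash: {}", CFHash(null_ref));
        // Retains and releases must balance; for constant singletons like kCFNull
        // they are effectively no-ops in practice, but the discipline is the same.
        CFRelease(CFRetain(null_ref));
        CFShow(null_ref); // writes a textual description to stderr
    }
}
```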
@ -1,31 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::CFTypeID;
|
||||
use dictionary::CFDictionaryRef;
|
||||
use string::CFStringRef;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFBundle(c_void);
|
||||
|
||||
pub type CFBundleRef = *const __CFBundle;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFBundle.h
|
||||
*/
|
||||
pub fn CFBundleGetBundleWithIdentifier(bundleID: CFStringRef) -> CFBundleRef;
|
||||
pub fn CFBundleGetFunctionPointerForName(bundle: CFBundleRef, function_name: CFStringRef) -> *const c_void;
|
||||
pub fn CFBundleGetMainBundle() -> CFBundleRef;
|
||||
pub fn CFBundleGetInfoDictionary(bundle: CFBundleRef) -> CFDictionaryRef;
|
||||
|
||||
pub fn CFBundleGetTypeID() -> CFTypeID;
|
||||
}
|
|
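The CFBundle bindings are mostly used for runtime symbol lookup; a hedged sketch that resolves a CoreFoundation function by name and calls it (same assumptions; the transmuted signature has to match the real symbol).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFRelease, CFTypeRef};
use core_foundation_sys::bundle::{
    CFBundleGetBundleWithIdentifier, CFBundleGetFunctionPointerForName,
};
use core_foundation_sys::string::{kCFStringEncodingUTF8, CFStringCreateWithCString};
use libc::c_char;
use std::mem;

fn main() {
    unsafe {
        let ident = CFStringCreateWithCString(
            kCFAllocatorDefault,
            b"com.apple.CoreFoundation\0".as_ptr() as *const c_char,
            kCFStringEncodingUTF8,
        );
        // Get rule: the bundle is borrowed, so it is not released below.
        let bundle = CFBundleGetBundleWithIdentifier(ident);
        assert!(!bundle.is_null());

        let name = CFStringCreateWithCString(
            kCFAllocatorDefault,
            b"CFAbsoluteTimeGetCurrent\0".as_ptr() as *const c_char,
            kCFStringEncodingUTF8,
        );
        let sym = CFBundleGetFunctionPointerForName(bundle, name);
        assert!(!sym.is_null());
        // Reinterpret the raw symbol as its real C signature and call it.
        let get_time: extern "C" fn() -> f64 = mem::transmute(sym);
        println!("CFAbsoluteTimeGetCurrent() = {}", get_time());

        CFRelease(ident as CFTypeRef);
        CFRelease(name as CFTypeRef);
    }
}
```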
@ -1,22 +0,0 @@
|
|||
use libc::c_void;
|
||||
|
||||
use base::{CFAllocatorRef, CFTypeID, CFIndex};
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFData(c_void);
|
||||
|
||||
pub type CFDataRef = *const __CFData;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFData.h
|
||||
*/
|
||||
|
||||
pub fn CFDataCreate(allocator: CFAllocatorRef,
|
||||
bytes: *const u8, length: CFIndex) -> CFDataRef;
|
||||
//fn CFDataFind
|
||||
pub fn CFDataGetBytePtr(theData: CFDataRef) -> *const u8;
|
||||
pub fn CFDataGetLength(theData: CFDataRef) -> CFIndex;
|
||||
|
||||
pub fn CFDataGetTypeID() -> CFTypeID;
|
||||
}
|
|
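A small CFData round trip, under the same crate-layout assumptions as above.

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFIndex, CFRelease, CFTypeRef};
use core_foundation_sys::data::{CFDataCreate, CFDataGetBytePtr, CFDataGetLength};
use std::slice;

fn main() {
    unsafe {
        let bytes = b"payload";
        // CFDataCreate copies the buffer, so `bytes` only has to outlive the call.
        let data = CFDataCreate(kCFAllocatorDefault, bytes.as_ptr(), bytes.len() as CFIndex);
        assert_eq!(CFDataGetLength(data), bytes.len() as CFIndex);

        // Read the copy back through the raw pointer CF hands out.
        let view = slice::from_raw_parts(CFDataGetBytePtr(data),
                                         CFDataGetLength(data) as usize);
        assert_eq!(view, &bytes[..]);

        CFRelease(data as CFTypeRef);
    }
}
```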
@ -1,15 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub type CFTimeInterval = f64;
|
||||
pub type CFAbsoluteTime = CFTimeInterval;
|
||||
|
||||
extern {
|
||||
pub fn CFAbsoluteTimeGetCurrent() -> CFAbsoluteTime;
|
||||
}
|
|
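CFAbsoluteTime is plain seconds (an `f64`) since the Core Foundation epoch of 2001-01-01 UTC; a short interval-measuring sketch under the same assumptions.

```rust
use core_foundation_sys::date::CFAbsoluteTimeGetCurrent;
use std::{thread, time::Duration};

fn main() {
    unsafe {
        let start = CFAbsoluteTimeGetCurrent();
        thread::sleep(Duration::from_millis(10));
        // Differences of CFAbsoluteTime values are ordinary durations in seconds.
        let elapsed = CFAbsoluteTimeGetCurrent() - start;
        assert!(elapsed > 0.0);
        println!("elapsed: {:.4} s", elapsed);
    }
}
```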
@ -1,79 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::{c_void};
|
||||
|
||||
use base::{CFAllocatorRef, CFIndex, CFTypeID, Boolean};
|
||||
|
||||
pub type CFDictionaryApplierFunction = extern "C" fn (key: *const c_void,
|
||||
value: *const c_void,
|
||||
context: *mut c_void);
|
||||
pub type CFDictionaryCopyDescriptionCallBack = *const u8;
|
||||
pub type CFDictionaryEqualCallBack = *const u8;
|
||||
pub type CFDictionaryHashCallBack = *const u8;
|
||||
pub type CFDictionaryReleaseCallBack = *const u8;
|
||||
pub type CFDictionaryRetainCallBack = *const u8;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct CFDictionaryKeyCallBacks {
|
||||
pub version: CFIndex,
|
||||
pub retain: CFDictionaryRetainCallBack,
|
||||
pub release: CFDictionaryReleaseCallBack,
|
||||
pub copyDescription: CFDictionaryCopyDescriptionCallBack,
|
||||
pub equal: CFDictionaryEqualCallBack,
|
||||
pub hash: CFDictionaryHashCallBack
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct CFDictionaryValueCallBacks {
|
||||
pub version: CFIndex,
|
||||
pub retain: CFDictionaryRetainCallBack,
|
||||
pub release: CFDictionaryReleaseCallBack,
|
||||
pub copyDescription: CFDictionaryCopyDescriptionCallBack,
|
||||
pub equal: CFDictionaryEqualCallBack
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFDictionary(c_void);
|
||||
|
||||
pub type CFDictionaryRef = *const __CFDictionary;
|
||||
pub type CFMutableDictionaryRef = *const __CFDictionary;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFDictionary.h
|
||||
*/
|
||||
|
||||
pub static kCFTypeDictionaryKeyCallBacks: CFDictionaryKeyCallBacks;
|
||||
pub static kCFTypeDictionaryValueCallBacks: CFDictionaryValueCallBacks;
|
||||
|
||||
pub fn CFDictionaryContainsKey(theDict: CFDictionaryRef, key: *const c_void) -> Boolean;
|
||||
pub fn CFDictionaryCreate(allocator: CFAllocatorRef, keys: *const *const c_void, values: *const *const c_void,
|
||||
numValues: CFIndex, keyCallBacks: *const CFDictionaryKeyCallBacks,
|
||||
valueCallBacks: *const CFDictionaryValueCallBacks)
|
||||
-> CFDictionaryRef;
|
||||
pub fn CFDictionaryGetCount(theDict: CFDictionaryRef) -> CFIndex;
|
||||
pub fn CFDictionaryGetTypeID() -> CFTypeID;
|
||||
pub fn CFDictionaryGetValueIfPresent(theDict: CFDictionaryRef, key: *const c_void, value: *mut *const c_void)
|
||||
-> Boolean;
|
||||
pub fn CFDictionaryApplyFunction(theDict: CFDictionaryRef,
|
||||
applier: CFDictionaryApplierFunction,
|
||||
context: *mut c_void);
|
||||
pub fn CFDictionarySetValue(theDict: CFMutableDictionaryRef,
|
||||
key: *const c_void,
|
||||
value: *const c_void);
|
||||
pub fn CFDictionaryGetKeysAndValues(theDict: CFDictionaryRef,
|
||||
keys: *mut *const c_void,
|
||||
values: *mut *const c_void);
|
||||
|
||||
}
|
|
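A sketch that builds a one-entry dictionary with the kCFType callbacks and then looks the value up through an equal, but not identical, key (same assumptions; `name`/`gecko` are placeholder strings).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFRelease, CFTypeRef};
use core_foundation_sys::dictionary::{
    kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks,
    CFDictionaryCreate, CFDictionaryGetCount, CFDictionaryGetValueIfPresent,
};
use core_foundation_sys::string::{kCFStringEncodingUTF8, CFStringCreateWithCString, CFStringRef};
use libc::{c_char, c_void};
use std::ptr;

unsafe fn cfstr(nul_terminated: &[u8]) -> CFStringRef {
    CFStringCreateWithCString(kCFAllocatorDefault,
                              nul_terminated.as_ptr() as *const c_char,
                              kCFStringEncodingUTF8)
}

fn main() {
    unsafe {
        let (key, value) = (cfstr(b"name\0"), cfstr(b"gecko\0"));

        // The kCFType callbacks make the dictionary retain its keys/values and
        // compare keys with CFEqual rather than by pointer identity.
        let keys = [key as *const c_void];
        let values = [value as *const c_void];
        let dict = CFDictionaryCreate(kCFAllocatorDefault,
                                      keys.as_ptr(), values.as_ptr(), 1,
                                      &kCFTypeDictionaryKeyCallBacks,
                                      &kCFTypeDictionaryValueCallBacks);
        assert_eq!(CFDictionaryGetCount(dict), 1);

        // Look up with a *different* CFString that merely has equal contents.
        let probe = cfstr(b"name\0");
        let mut found: *const c_void = ptr::null();
        assert_ne!(CFDictionaryGetValueIfPresent(dict, probe as *const c_void, &mut found), 0);
        assert_eq!(found, value as *const c_void);

        for obj in [dict as CFTypeRef, probe as CFTypeRef, key as CFTypeRef, value as CFTypeRef] {
            CFRelease(obj);
        }
    }
}
```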
@ -1,32 +0,0 @@
|
|||
// Copyright 2016 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFTypeID, CFIndex};
|
||||
use string::CFStringRef;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFError(c_void);
|
||||
|
||||
pub type CFErrorRef = *mut __CFError;
|
||||
|
||||
extern "C" {
|
||||
pub fn CFErrorGetTypeID() -> CFTypeID;
|
||||
|
||||
pub static kCFErrorDomainPOSIX: CFStringRef;
|
||||
pub static kCFErrorDomainOSStatus: CFStringRef;
|
||||
pub static kCFErrorDomainMach: CFStringRef;
|
||||
pub static kCFErrorDomainCocoa: CFStringRef;
|
||||
|
||||
pub fn CFErrorGetDomain(err: CFErrorRef) -> CFStringRef;
|
||||
pub fn CFErrorGetCode(err: CFErrorRef) -> CFIndex;
|
||||
|
||||
pub fn CFErrorCopyDescription(err: CFErrorRef) -> CFStringRef;
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, improper_ctypes)]
|
||||
|
||||
extern crate libc;
|
||||
|
||||
pub mod array;
|
||||
pub mod base;
|
||||
pub mod bundle;
|
||||
pub mod data;
|
||||
pub mod date;
|
||||
pub mod dictionary;
|
||||
pub mod error;
|
||||
pub mod messageport;
|
||||
pub mod number;
|
||||
pub mod propertylist;
|
||||
pub mod runloop;
|
||||
pub mod set;
|
||||
pub mod string;
|
||||
pub mod url;
|
|
@ -1,79 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFAllocatorRef, CFIndex, CFTypeID, Boolean};
|
||||
use data::CFDataRef;
|
||||
use date::CFTimeInterval;
|
||||
use runloop::CFRunLoopSourceRef;
|
||||
use string::CFStringRef;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Debug)]
|
||||
pub struct CFMessagePortContext {
|
||||
pub version: CFIndex,
|
||||
pub info: *mut c_void,
|
||||
pub retain: Option<unsafe extern fn(info: *const c_void) -> *const c_void>,
|
||||
pub release: Option<unsafe extern fn(info: *const c_void)>,
|
||||
pub copyDescription: Option<unsafe extern fn(info: *const c_void)
|
||||
-> CFStringRef>,
|
||||
}
|
||||
|
||||
pub type CFMessagePortCallBack = Option<
|
||||
unsafe extern fn(local: CFMessagePortRef,
|
||||
msgid: i32,
|
||||
data: CFDataRef,
|
||||
info: *mut c_void) -> CFDataRef>;
|
||||
|
||||
pub type CFMessagePortInvalidationCallBack = Option<
|
||||
unsafe extern "C" fn(ms: CFMessagePortRef, info: *mut c_void)>;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFMessagePort(c_void);
|
||||
pub type CFMessagePortRef = *const __CFMessagePort;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFMessagePort.h
|
||||
*/
|
||||
pub fn CFMessagePortGetTypeID() -> CFTypeID;
|
||||
pub fn CFMessagePortCreateLocal(allocator: CFAllocatorRef,
|
||||
name: CFStringRef,
|
||||
callout: CFMessagePortCallBack,
|
||||
context: *const CFMessagePortContext,
|
||||
shouldFreeInfo: *mut Boolean)
|
||||
-> CFMessagePortRef;
|
||||
pub fn CFMessagePortCreateRemote(allocator: CFAllocatorRef,
|
||||
name: CFStringRef) -> CFMessagePortRef;
|
||||
pub fn CFMessagePortIsRemote(ms: CFMessagePortRef) -> Boolean;
|
||||
pub fn CFMessagePortGetName(ms: CFMessagePortRef) -> CFStringRef;
|
||||
pub fn CFMessagePortSetName(ms: CFMessagePortRef, newName: CFStringRef)
|
||||
-> Boolean;
|
||||
pub fn CFMessagePortGetContext(ms: CFMessagePortRef,
|
||||
context: *mut CFMessagePortContext);
|
||||
pub fn CFMessagePortInvalidate(ms: CFMessagePortRef);
|
||||
pub fn CFMessagePortIsValid(ms: CFMessagePortRef) -> Boolean;
|
||||
pub fn CFMessagePortGetInvalidationCallBack(ms: CFMessagePortRef)
|
||||
-> CFMessagePortInvalidationCallBack;
|
||||
pub fn CFMessagePortSetInvalidationCallBack(ms: CFMessagePortRef,
|
||||
callout: CFMessagePortInvalidationCallBack);
|
||||
pub fn CFMessagePortSendRequest(remote: CFMessagePortRef, msgid: i32,
|
||||
data: CFDataRef,
|
||||
sendTimeout: CFTimeInterval,
|
||||
rcvTimeout: CFTimeInterval,
|
||||
replyMode: CFStringRef,
|
||||
returnData: *mut CFDataRef) -> i32;
|
||||
pub fn CFMessagePortCreateRunLoopSource(allocator: CFAllocatorRef,
|
||||
local: CFMessagePortRef,
|
||||
order: CFIndex)
|
||||
-> CFRunLoopSourceRef;
|
||||
// CFMessagePortSetDispatchQueue
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFAllocatorRef, CFTypeID};
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFBoolean(c_void);
|
||||
|
||||
pub type CFBooleanRef = *const __CFBoolean;
|
||||
|
||||
pub type CFNumberType = u32;
|
||||
|
||||
// members of enum CFNumberType
|
||||
// static kCFNumberSInt8Type: CFNumberType = 1;
|
||||
// static kCFNumberSInt16Type: CFNumberType = 2;
|
||||
pub static kCFNumberSInt32Type: CFNumberType = 3;
|
||||
pub static kCFNumberSInt64Type: CFNumberType = 4;
|
||||
// static kCFNumberFloat32Type: CFNumberType = 5;
|
||||
pub static kCFNumberFloat64Type: CFNumberType = 6;
|
||||
// static kCFNumberCharType: CFNumberType = 7;
|
||||
// static kCFNumberShortType: CFNumberType = 8;
|
||||
// static kCFNumberIntType: CFNumberType = 9;
|
||||
// static kCFNumberLongType: CFNumberType = 10;
|
||||
// static kCFNumberLongLongType: CFNumberType = 11;
|
||||
// static kCFNumberFloatType: CFNumberType = 12;
|
||||
// static kCFNumberDoubleType: CFNumberType = 13;
|
||||
// static kCFNumberCFIndexType: CFNumberType = 14;
|
||||
// static kCFNumberNSIntegerType: CFNumberType = 15;
|
||||
// static kCFNumberCGFloatType: CFNumberType = 16;
|
||||
// static kCFNumberMaxType: CFNumberType = 16;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFNumber;
|
||||
|
||||
pub type CFNumberRef = *const __CFNumber;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFNumber.h
|
||||
*/
|
||||
pub static kCFBooleanTrue: CFBooleanRef;
|
||||
pub static kCFBooleanFalse: CFBooleanRef;
|
||||
|
||||
pub fn CFBooleanGetTypeID() -> CFTypeID;
|
||||
pub fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void)
|
||||
-> CFNumberRef;
|
||||
//fn CFNumberGetByteSize
|
||||
pub fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
|
||||
//fn CFNumberCompare
|
||||
pub fn CFNumberGetTypeID() -> CFTypeID;
|
||||
}
|
|
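A CFNumber round trip with the 64-bit type tag, reading the value back through the out-parameter (same assumptions).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFGetTypeID, CFRelease, CFTypeRef};
use core_foundation_sys::number::{
    kCFNumberSInt64Type, CFNumberCreate, CFNumberGetTypeID, CFNumberGetValue,
};
use libc::c_void;

fn main() {
    unsafe {
        let original: i64 = 42;
        // CFNumberCreate copies the value out of `original` immediately.
        let number = CFNumberCreate(kCFAllocatorDefault,
                                    kCFNumberSInt64Type,
                                    &original as *const i64 as *const c_void);
        assert_eq!(CFGetTypeID(number as CFTypeRef), CFNumberGetTypeID());

        // Read it back through the out-parameter with the same type tag.
        let mut round_trip: i64 = 0;
        CFNumberGetValue(number, kCFNumberSInt64Type,
                         &mut round_trip as *mut i64 as *mut c_void);
        assert_eq!(round_trip, original);

        CFRelease(number as CFTypeRef);
    }
}
```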
@ -1,37 +0,0 @@
|
|||
use base::{CFAllocatorRef, CFIndex, CFOptionFlags, CFTypeRef};
|
||||
use data::CFDataRef;
|
||||
use error::CFErrorRef;
|
||||
|
||||
pub type CFPropertyListRef = CFTypeRef;
|
||||
|
||||
pub type CFPropertyListFormat = CFIndex;
|
||||
pub const kCFPropertyListOpenStepFormat: CFPropertyListFormat = 1;
|
||||
pub const kCFPropertyListXMLFormat_v1_0: CFPropertyListFormat = 100;
|
||||
pub const kCFPropertyListBinaryFormat_v1_0: CFPropertyListFormat = 200;
|
||||
|
||||
pub type CFPropertyListMutabilityOptions = CFOptionFlags;
|
||||
pub const kCFPropertyListImmutable: CFPropertyListMutabilityOptions = 0;
|
||||
pub const kCFPropertyListMutableContainers: CFPropertyListMutabilityOptions = 1;
|
||||
pub const kCFPropertyListMutableContainersAndLeaves: CFPropertyListMutabilityOptions = 2;
|
||||
|
||||
extern "C" {
|
||||
// CFPropertyList.h
|
||||
//
|
||||
|
||||
// fn CFPropertyListCreateDeepCopy
|
||||
// fn CFPropertyListIsValid
|
||||
pub fn CFPropertyListCreateWithData(allocator: CFAllocatorRef,
|
||||
data: CFDataRef,
|
||||
options: CFPropertyListMutabilityOptions,
|
||||
format: *mut CFPropertyListFormat,
|
||||
error: *mut CFErrorRef)
|
||||
-> CFPropertyListRef;
|
||||
// fn CFPropertyListCreateWithStream
|
||||
// fn CFPropertyListWrite
|
||||
pub fn CFPropertyListCreateData(allocator: CFAllocatorRef,
|
||||
propertyList: CFPropertyListRef,
|
||||
format: CFPropertyListFormat,
|
||||
options: CFOptionFlags,
|
||||
error: *mut CFErrorRef)
|
||||
-> CFDataRef;
|
||||
}
|
|
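A sketch that feeds a tiny inline XML plist through CFPropertyListCreateWithData and checks the type of the parsed top-level object (same assumptions; the plist body is made up for the example).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFGetTypeID, CFIndex, CFRelease, CFTypeRef};
use core_foundation_sys::data::CFDataCreate;
use core_foundation_sys::error::CFErrorRef;
use core_foundation_sys::propertylist::{
    kCFPropertyListImmutable, CFPropertyListCreateWithData, CFPropertyListFormat,
};
use core_foundation_sys::string::CFStringGetTypeID;
use std::ptr;

fn main() {
    unsafe {
        let xml: &[u8] = br#"<?xml version="1.0" encoding="UTF-8"?>
<plist version="1.0"><string>hello plist</string></plist>"#;
        let data = CFDataCreate(kCFAllocatorDefault, xml.as_ptr(), xml.len() as CFIndex);

        let mut format: CFPropertyListFormat = 0;
        let mut error: CFErrorRef = ptr::null_mut();
        let plist = CFPropertyListCreateWithData(kCFAllocatorDefault, data,
                                                 kCFPropertyListImmutable,
                                                 &mut format, &mut error);
        assert!(!plist.is_null(), "the inline plist should parse");
        // The document's top-level element is a <string>, so it comes back as a CFString.
        assert_eq!(CFGetTypeID(plist), CFStringGetTypeID());
        println!("detected format: {}", format);

        CFRelease(plist);             // Create rule: we own the parsed object
        CFRelease(data as CFTypeRef);
    }
}
```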
@ -1,164 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use array::CFArrayRef;
|
||||
use base::{Boolean, CFIndex, CFTypeID, CFAllocatorRef, CFOptionFlags, CFHashCode, mach_port_t};
|
||||
use date::{CFAbsoluteTime, CFTimeInterval};
|
||||
use string::CFStringRef;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFRunLoop(c_void);
|
||||
|
||||
pub type CFRunLoopRef = *const __CFRunLoop;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFRunLoopSource(c_void);
|
||||
|
||||
pub type CFRunLoopSourceRef = *const __CFRunLoopSource;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFRunLoopObserver(c_void);
|
||||
|
||||
pub type CFRunLoopObserverRef = *const __CFRunLoopObserver;
|
||||
|
||||
// Reasons for CFRunLoopRunInMode() to Return
|
||||
pub const kCFRunLoopRunFinished: i32 = 1;
|
||||
pub const kCFRunLoopRunStopped: i32 = 2;
|
||||
pub const kCFRunLoopRunTimedOut: i32 = 3;
|
||||
pub const kCFRunLoopRunHandledSource: i32 = 4;
|
||||
|
||||
// Run Loop Observer Activities
|
||||
//typedef CF_OPTIONS(CFOptionFlags, CFRunLoopActivity) {
|
||||
pub type CFRunLoopActivity = CFOptionFlags;
|
||||
pub const kCFRunLoopEntry: CFOptionFlags = 1 << 0;
|
||||
pub const kCFRunLoopBeforeTimers: CFOptionFlags = 1 << 1;
|
||||
pub const kCFRunLoopBeforeSources: CFOptionFlags = 1 << 2;
|
||||
pub const kCFRunLoopBeforeWaiting: CFOptionFlags = 1 << 5;
|
||||
pub const kCFRunLoopAfterWaiting: CFOptionFlags = 1 << 6;
|
||||
pub const kCFRunLoopExit: CFOptionFlags = 1 << 7;
|
||||
pub const kCFRunLoopAllActivities: CFOptionFlags = 0x0FFFFFFF;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct CFRunLoopSourceContext {
|
||||
pub version: CFIndex,
|
||||
pub info: *mut c_void,
|
||||
pub retain: extern "C" fn (info: *const c_void) -> *const c_void,
|
||||
pub release: extern "C" fn (info: *const c_void),
|
||||
pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef,
|
||||
pub equal: extern "C" fn (info1: *const c_void, info2: *const c_void) -> Boolean,
|
||||
pub hash: extern "C" fn (info: *const c_void) -> CFHashCode,
|
||||
pub schedule: extern "C" fn (info: *const c_void, rl: CFRunLoopRef, mode: CFStringRef),
|
||||
pub cancel: extern "C" fn (info: *const c_void, rl: CFRunLoopRef, mode: CFStringRef),
|
||||
pub perform: extern "C" fn (info: *const c_void),
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct CFRunLoopSourceContext1 {
|
||||
pub version: CFIndex,
|
||||
pub info: *mut c_void,
|
||||
pub retain: extern "C" fn (info: *const c_void) -> *const c_void,
|
||||
pub release: extern "C" fn (info: *const c_void),
|
||||
pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef,
|
||||
pub equal: extern "C" fn (info1: *const c_void, info2: *const c_void) -> Boolean,
|
||||
pub hash: extern "C" fn (info: *const c_void) -> CFHashCode,
|
||||
// note that the following two fields are platform dependent in the C header, the ones here are for OS X
|
||||
pub getPort: extern "C" fn (info: *mut c_void) -> mach_port_t,
|
||||
pub perform: extern "C" fn (msg: *mut c_void, size: CFIndex, allocator: CFAllocatorRef, info: *mut c_void) -> *mut c_void,
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct CFRunLoopObserverContext {
|
||||
pub version: CFIndex,
|
||||
pub info: *mut c_void,
|
||||
pub retain: extern "C" fn (info: *const c_void) -> *const c_void,
|
||||
pub release: extern "C" fn (info: *const c_void),
|
||||
pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef,
|
||||
}
|
||||
|
||||
pub type CFRunLoopObserverCallBack = extern "C" fn (observer: CFRunLoopObserverRef, activity: CFRunLoopActivity, info: *mut c_void);
|
||||
|
||||
#[repr(C)]
|
||||
pub struct CFRunLoopTimerContext {
|
||||
pub version: CFIndex,
|
||||
pub info: *mut c_void,
|
||||
pub retain: extern "C" fn (info: *const c_void) -> *const c_void,
|
||||
pub release: extern "C" fn (info: *const c_void),
|
||||
pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef,
|
||||
}
|
||||
|
||||
pub type CFRunLoopTimerCallBack = extern "C" fn (timer: CFRunLoopTimerRef, info: *mut c_void);
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFRunLoopTimer;
|
||||
|
||||
pub type CFRunLoopTimerRef = *const __CFRunLoopTimer;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFRunLoop.h
|
||||
*/
|
||||
pub static kCFRunLoopDefaultMode: CFStringRef;
|
||||
pub static kCFRunLoopCommonModes: CFStringRef;
|
||||
pub fn CFRunLoopGetTypeID() -> CFTypeID;
|
||||
pub fn CFRunLoopGetCurrent() -> CFRunLoopRef;
|
||||
pub fn CFRunLoopGetMain() -> CFRunLoopRef;
|
||||
pub fn CFRunLoopCopyCurrentMode(rl: CFRunLoopRef) -> CFStringRef;
|
||||
pub fn CFRunLoopCopyAllModes(rl: CFRunLoopRef) -> CFArrayRef;
|
||||
pub fn CFRunLoopAddCommonMode(rl: CFRunLoopRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopGetNextTimerFireDate(rl: CFRunLoopRef, mode: CFStringRef) -> CFAbsoluteTime;
|
||||
pub fn CFRunLoopRun();
|
||||
pub fn CFRunLoopRunInMode(mode: CFStringRef, seconds: CFTimeInterval, returnAfterSourceHandled: Boolean) -> i32;
|
||||
pub fn CFRunLoopIsWaiting(rl: CFRunLoopRef) -> Boolean;
|
||||
pub fn CFRunLoopWakeUp(rl: CFRunLoopRef);
|
||||
pub fn CFRunLoopStop(rl: CFRunLoopRef);
|
||||
// fn CFRunLoopPerformBlock(rl: CFRunLoopRef, mode: CFTypeRef, block: void (^)(void));
|
||||
pub fn CFRunLoopContainsSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef) -> Boolean;
|
||||
pub fn CFRunLoopAddSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopRemoveSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopContainsObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef) -> Boolean;
|
||||
pub fn CFRunLoopAddObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopRemoveObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopContainsTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef) -> Boolean;
|
||||
pub fn CFRunLoopAddTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef);
|
||||
pub fn CFRunLoopRemoveTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef);
|
||||
|
||||
pub fn CFRunLoopSourceGetTypeID() -> CFTypeID;
|
||||
pub fn CFRunLoopSourceCreate(allocator: CFAllocatorRef, order: CFIndex, context: *mut CFRunLoopSourceContext) -> CFRunLoopSourceRef;
|
||||
pub fn CFRunLoopSourceGetOrder(source: CFRunLoopSourceRef) -> CFIndex;
|
||||
pub fn CFRunLoopSourceInvalidate(source: CFRunLoopSourceRef);
|
||||
pub fn CFRunLoopSourceIsValid(source: CFRunLoopSourceRef) -> Boolean;
|
||||
pub fn CFRunLoopSourceGetContext(source: CFRunLoopSourceRef, context: *mut CFRunLoopSourceContext);
|
||||
pub fn CFRunLoopSourceSignal(source: CFRunLoopSourceRef);
|
||||
|
||||
pub fn CFRunLoopObserverGetTypeID() -> CFTypeID;
|
||||
pub fn CFRunLoopObserverCreate(allocator: CFAllocatorRef, activities: CFOptionFlags, repeats: Boolean, order: CFIndex, callout: CFRunLoopObserverCallBack, context: *mut CFRunLoopObserverContext) -> CFRunLoopObserverRef;
|
||||
// fn CFRunLoopObserverCreateWithHandler(allocator: CFAllocatorRef, activities: CFOptionFlags, repeats: Boolean, order: CFIndex, block: void (^) (CFRunLoopObserverRef observer, CFRunLoopActivity activity)) -> CFRunLoopObserverRef;
|
||||
pub fn CFRunLoopObserverGetActivities(observer: CFRunLoopObserverRef) -> CFOptionFlags;
|
||||
pub fn CFRunLoopObserverDoesRepeat(observer: CFRunLoopObserverRef) -> Boolean;
|
||||
pub fn CFRunLoopObserverGetOrder(observer: CFRunLoopObserverRef) -> CFIndex;
|
||||
pub fn CFRunLoopObserverInvalidate(observer: CFRunLoopObserverRef);
|
||||
pub fn CFRunLoopObserverIsValid(observer: CFRunLoopObserverRef) -> Boolean;
|
||||
pub fn CFRunLoopObserverGetContext(observer: CFRunLoopObserverRef, context: *mut CFRunLoopObserverContext);
|
||||
|
||||
pub fn CFRunLoopTimerGetTypeID() -> CFTypeID;
|
||||
pub fn CFRunLoopTimerCreate(allocator: CFAllocatorRef, fireDate: CFAbsoluteTime, interval: CFTimeInterval, flags: CFOptionFlags, order: CFIndex, callout: CFRunLoopTimerCallBack, context: *mut CFRunLoopTimerContext) -> CFRunLoopTimerRef;
|
||||
// fn CFRunLoopTimerCreateWithHandler(allocator: CFAllocatorRef, fireDate: CFAbsoluteTime, interval: CFTimeInterval, flags: CFOptionFlags, order: CFIndex, block: void (^) (CFRunLoopTimerRef timer)) -> CFRunLoopTimerRef;
|
||||
pub fn CFRunLoopTimerGetNextFireDate(timer: CFRunLoopTimerRef) -> CFAbsoluteTime;
|
||||
pub fn CFRunLoopTimerSetNextFireDate(timer: CFRunLoopTimerRef, fireDate: CFAbsoluteTime);
|
||||
pub fn CFRunLoopTimerGetInterval(timer: CFRunLoopTimerRef) -> CFTimeInterval;
|
||||
pub fn CFRunLoopTimerDoesRepeat(timer: CFRunLoopTimerRef) -> Boolean;
|
||||
pub fn CFRunLoopTimerGetOrder(timer: CFRunLoopTimerRef) -> CFIndex;
|
||||
pub fn CFRunLoopTimerInvalidate(timer: CFRunLoopTimerRef);
|
||||
pub fn CFRunLoopTimerIsValid(timer: CFRunLoopTimerRef) -> Boolean;
|
||||
pub fn CFRunLoopTimerGetContext(timer: CFRunLoopTimerRef, context: *mut CFRunLoopTimerContext);
|
||||
pub fn CFRunLoopTimerGetTolerance(timer: CFRunLoopTimerRef) -> CFTimeInterval;
|
||||
pub fn CFRunLoopTimerSetTolerance(timer: CFRunLoopTimerRef, tolerance: CFTimeInterval);
|
||||
}
|
|
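The run-loop bindings are easiest to exercise with a one-shot timer whose callback stops the loop; a hedged sketch under the same assumptions, with an arbitrary 50 ms delay and 1 s cap.

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFRelease, CFTypeRef};
use core_foundation_sys::date::CFAbsoluteTimeGetCurrent;
use core_foundation_sys::runloop::{
    kCFRunLoopDefaultMode, CFRunLoopAddTimer, CFRunLoopGetCurrent, CFRunLoopRunInMode,
    CFRunLoopStop, CFRunLoopTimerCreate, CFRunLoopTimerRef,
};
use libc::c_void;
use std::ptr;

extern "C" fn on_timer(_timer: CFRunLoopTimerRef, _info: *mut c_void) {
    println!("timer fired");
    unsafe { CFRunLoopStop(CFRunLoopGetCurrent()) };
}

fn main() {
    unsafe {
        // One-shot timer: fires ~50 ms from now; interval 0.0 means "do not repeat".
        let timer = CFRunLoopTimerCreate(kCFAllocatorDefault,
                                         CFAbsoluteTimeGetCurrent() + 0.05,
                                         0.0, 0, 0,
                                         on_timer,
                                         ptr::null_mut());
        CFRunLoopAddTimer(CFRunLoopGetCurrent(), timer, kCFRunLoopDefaultMode);

        // Run the default mode for at most one second; the callback stops it earlier.
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 1.0, 0);

        CFRelease(timer as CFTypeRef);
    }
}
```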
@ -1,52 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFAllocatorRef, CFIndex, CFTypeID};
|
||||
|
||||
pub type CFSetRetainCallBack = *const u8;
|
||||
pub type CFSetReleaseCallBack = *const u8;
|
||||
pub type CFSetCopyDescriptionCallBack = *const u8;
|
||||
pub type CFSetEqualCallBack = *const u8;
|
||||
pub type CFSetHashCallBack = *const u8;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct CFSetCallBacks {
|
||||
pub version: CFIndex,
|
||||
pub retain: CFSetRetainCallBack,
|
||||
pub release: CFSetReleaseCallBack,
|
||||
pub copyDescription: CFSetCopyDescriptionCallBack,
|
||||
pub equal: CFSetEqualCallBack,
|
||||
pub hash: CFSetHashCallBack,
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFSet(c_void);
|
||||
|
||||
pub type CFSetRef = *const __CFSet;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFSet.h
|
||||
*/
|
||||
|
||||
pub static kCFTypeSetCallBacks: CFSetCallBacks;
|
||||
|
||||
/* Creating Sets */
|
||||
pub fn CFSetCreate(allocator: CFAllocatorRef, values: *const *const c_void, numValues: CFIndex,
|
||||
callBacks: *const CFSetCallBacks) -> CFSetRef;
|
||||
|
||||
/* Applying a Function to Set Members */
|
||||
//fn CFSetApplyFunction
|
||||
|
||||
pub fn CFSetGetTypeID() -> CFTypeID;
|
||||
}
|
||||
|
|
@ -1,320 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use libc::{c_char, c_ushort, c_void};
|
||||
|
||||
use base::{Boolean, CFOptionFlags, CFIndex, CFAllocatorRef, CFRange, CFTypeID};
|
||||
|
||||
pub type UniChar = c_ushort;
|
||||
|
||||
// CFString.h
|
||||
|
||||
pub type CFStringCompareFlags = CFOptionFlags;
|
||||
//static kCFCompareCaseInsensitive: CFStringCompareFlags = 1;
|
||||
//static kCFCompareBackwards: CFStringCompareFlags = 4;
|
||||
//static kCFCompareAnchored: CFStringCompareFlags = 8;
|
||||
//static kCFCompareNonliteral: CFStringCompareFlags = 16;
|
||||
//static kCFCompareLocalized: CFStringCompareFlags = 32;
|
||||
//static kCFCompareNumerically: CFStringCompareFlags = 64;
|
||||
//static kCFCompareDiacriticInsensitive: CFStringCompareFlags = 128;
|
||||
//static kCFCompareWidthInsensitive: CFStringCompareFlags = 256;
|
||||
//static kCFCompareForcedOrdering: CFStringCompareFlags = 512;
|
||||
|
||||
pub type CFStringEncoding = u32;
|
||||
|
||||
// OS X built-in encodings.
|
||||
|
||||
//static kCFStringEncodingMacRoman: CFStringEncoding = 0;
|
||||
//static kCFStringEncodingWindowsLatin1: CFStringEncoding = 0x0500;
|
||||
//static kCFStringEncodingISOLatin1: CFStringEncoding = 0x0201;
|
||||
//static kCFStringEncodingNextStepLatin: CFStringEncoding = 0x0B01;
|
||||
//static kCFStringEncodingASCII: CFStringEncoding = 0x0600;
|
||||
//static kCFStringEncodingUnicode: CFStringEncoding = 0x0100;
|
||||
pub static kCFStringEncodingUTF8: CFStringEncoding = 0x08000100;
|
||||
//static kCFStringEncodingNonLossyASCII: CFStringEncoding = 0x0BFF;
|
||||
|
||||
//static kCFStringEncodingUTF16: CFStringEncoding = 0x0100;
|
||||
//static kCFStringEncodingUTF16BE: CFStringEncoding = 0x10000100;
|
||||
//static kCFStringEncodingUTF16LE: CFStringEncoding = 0x14000100;
|
||||
//static kCFStringEncodingUTF32: CFStringEncoding = 0x0c000100;
|
||||
//static kCFStringEncodingUTF32BE: CFStringEncoding = 0x18000100;
|
||||
//static kCFStringEncodingUTF32LE: CFStringEncoding = 0x1c000100;
|
||||
|
||||
|
||||
// CFStringEncodingExt.h
|
||||
|
||||
pub type CFStringEncodings = CFIndex;
|
||||
|
||||
// External encodings, except those defined above.
|
||||
// Defined above: kCFStringEncodingMacRoman = 0
|
||||
//static kCFStringEncodingMacJapanese: CFStringEncoding = 1;
|
||||
//static kCFStringEncodingMacChineseTrad: CFStringEncoding = 2;
|
||||
//static kCFStringEncodingMacKorean: CFStringEncoding = 3;
|
||||
//static kCFStringEncodingMacArabic: CFStringEncoding = 4;
|
||||
//static kCFStringEncodingMacHebrew: CFStringEncoding = 5;
|
||||
//static kCFStringEncodingMacGreek: CFStringEncoding = 6;
|
||||
//static kCFStringEncodingMacCyrillic: CFStringEncoding = 7;
|
||||
//static kCFStringEncodingMacDevanagari: CFStringEncoding = 9;
|
||||
//static kCFStringEncodingMacGurmukhi: CFStringEncoding = 10;
|
||||
//static kCFStringEncodingMacGujarati: CFStringEncoding = 11;
|
||||
//static kCFStringEncodingMacOriya: CFStringEncoding = 12;
|
||||
//static kCFStringEncodingMacBengali: CFStringEncoding = 13;
|
||||
//static kCFStringEncodingMacTamil: CFStringEncoding = 14;
|
||||
//static kCFStringEncodingMacTelugu: CFStringEncoding = 15;
|
||||
//static kCFStringEncodingMacKannada: CFStringEncoding = 16;
|
||||
//static kCFStringEncodingMacMalayalam: CFStringEncoding = 17;
|
||||
//static kCFStringEncodingMacSinhalese: CFStringEncoding = 18;
|
||||
//static kCFStringEncodingMacBurmese: CFStringEncoding = 19;
|
||||
//static kCFStringEncodingMacKhmer: CFStringEncoding = 20;
|
||||
//static kCFStringEncodingMacThai: CFStringEncoding = 21;
|
||||
//static kCFStringEncodingMacLaotian: CFStringEncoding = 22;
|
||||
//static kCFStringEncodingMacGeorgian: CFStringEncoding = 23;
|
||||
//static kCFStringEncodingMacArmenian: CFStringEncoding = 24;
|
||||
//static kCFStringEncodingMacChineseSimp: CFStringEncoding = 25;
|
||||
//static kCFStringEncodingMacTibetan: CFStringEncoding = 26;
|
||||
//static kCFStringEncodingMacMongolian: CFStringEncoding = 27;
|
||||
//static kCFStringEncodingMacEthiopic: CFStringEncoding = 28;
|
||||
//static kCFStringEncodingMacCentralEurRoman: CFStringEncoding = 29;
|
||||
//static kCFStringEncodingMacVietnamese: CFStringEncoding = 30;
|
||||
//static kCFStringEncodingMacExtArabic: CFStringEncoding = 31;
|
||||
//static kCFStringEncodingMacSymbol: CFStringEncoding = 33;
|
||||
//static kCFStringEncodingMacDingbats: CFStringEncoding = 34;
|
||||
//static kCFStringEncodingMacTurkish: CFStringEncoding = 35;
|
||||
//static kCFStringEncodingMacCroatian: CFStringEncoding = 36;
|
||||
//static kCFStringEncodingMacIcelandic: CFStringEncoding = 37;
|
||||
//static kCFStringEncodingMacRomanian: CFStringEncoding = 38;
|
||||
//static kCFStringEncodingMacCeltic: CFStringEncoding = 39;
|
||||
//static kCFStringEncodingMacGaelic: CFStringEncoding = 40;
|
||||
//static kCFStringEncodingMacFarsi: CFStringEncoding = 0x8C;
|
||||
//static kCFStringEncodingMacUkrainian: CFStringEncoding = 0x98;
|
||||
//static kCFStringEncodingMacInuit: CFStringEncoding = 0xEC;
|
||||
//static kCFStringEncodingMacVT100: CFStringEncoding = 0xFC;
|
||||
//static kCFStringEncodingMacHFS: CFStringEncoding = 0xFF;
|
||||
// Defined above: kCFStringEncodingISOLatin1 = 0x0201
|
||||
//static kCFStringEncodingISOLatin2: CFStringEncoding = 0x0202;
|
||||
//static kCFStringEncodingISOLatin3: CFStringEncoding = 0x0203;
|
||||
//static kCFStringEncodingISOLatin4: CFStringEncoding = 0x0204;
|
||||
//static kCFStringEncodingISOLatinCyrillic: CFStringEncoding = 0x0205;
|
||||
//static kCFStringEncodingISOLatinArabic: CFStringEncoding = 0x0206;
|
||||
//static kCFStringEncodingISOLatinGreek: CFStringEncoding = 0x0207;
|
||||
//static kCFStringEncodingISOLatinHebrew: CFStringEncoding = 0x0208;
|
||||
//static kCFStringEncodingISOLatin5: CFStringEncoding = 0x0209;
|
||||
//static kCFStringEncodingISOLatin6: CFStringEncoding = 0x020A;
|
||||
//static kCFStringEncodingISOLatinThai: CFStringEncoding = 0x020B;
|
||||
//static kCFStringEncodingISOLatin7: CFStringEncoding = 0x020D;
|
||||
//static kCFStringEncodingISOLatin8: CFStringEncoding = 0x020E;
|
||||
//static kCFStringEncodingISOLatin9: CFStringEncoding = 0x020F;
|
||||
//static kCFStringEncodingISOLatin10: CFStringEncoding = 0x0210;
|
||||
//static kCFStringEncodingDOSLatinUS: CFStringEncoding = 0x0400;
|
||||
//static kCFStringEncodingDOSGreek: CFStringEncoding = 0x0405;
|
||||
//static kCFStringEncodingDOSBalticRim: CFStringEncoding = 0x0406;
|
||||
//static kCFStringEncodingDOSLatin1: CFStringEncoding = 0x0410;
|
||||
//static kCFStringEncodingDOSGreek1: CFStringEncoding = 0x0411;
|
||||
//static kCFStringEncodingDOSLatin2: CFStringEncoding = 0x0412;
|
||||
//static kCFStringEncodingDOSCyrillic: CFStringEncoding = 0x0413;
|
||||
//static kCFStringEncodingDOSTurkish: CFStringEncoding = 0x0414;
|
||||
//static kCFStringEncodingDOSPortuguese: CFStringEncoding = 0x0415;
|
||||
//static kCFStringEncodingDOSIcelandic: CFStringEncoding = 0x0416;
|
||||
//static kCFStringEncodingDOSHebrew: CFStringEncoding = 0x0417;
|
||||
//static kCFStringEncodingDOSCanadianFrench: CFStringEncoding = 0x0418;
|
||||
//static kCFStringEncodingDOSArabic: CFStringEncoding = 0x0419;
|
||||
//static kCFStringEncodingDOSNordic: CFStringEncoding = 0x041A;
|
||||
//static kCFStringEncodingDOSRussian: CFStringEncoding = 0x041B;
|
||||
//static kCFStringEncodingDOSGreek2: CFStringEncoding = 0x041C;
|
||||
//static kCFStringEncodingDOSThai: CFStringEncoding = 0x041D;
|
||||
//static kCFStringEncodingDOSJapanese: CFStringEncoding = 0x0420;
|
||||
//static kCFStringEncodingDOSChineseSimplif: CFStringEncoding = 0x0421;
|
||||
//static kCFStringEncodingDOSKorean: CFStringEncoding = 0x0422;
|
||||
//static kCFStringEncodingDOSChineseTrad: CFStringEncoding = 0x0423;
|
||||
// Defined above: kCFStringEncodingWindowsLatin1 = 0x0500
|
||||
//static kCFStringEncodingWindowsLatin2: CFStringEncoding = 0x0501;
|
||||
//static kCFStringEncodingWindowsCyrillic: CFStringEncoding = 0x0502;
|
||||
//static kCFStringEncodingWindowsGreek: CFStringEncoding = 0x0503;
|
||||
//static kCFStringEncodingWindowsLatin5: CFStringEncoding = 0x0504;
|
||||
//static kCFStringEncodingWindowsHebrew: CFStringEncoding = 0x0505;
|
||||
//static kCFStringEncodingWindowsArabic: CFStringEncoding = 0x0506;
|
||||
//static kCFStringEncodingWindowsBalticRim: CFStringEncoding = 0x0507;
|
||||
//static kCFStringEncodingWindowsVietnamese: CFStringEncoding = 0x0508;
|
||||
//static kCFStringEncodingWindowsKoreanJohab: CFStringEncoding = 0x0510;
|
||||
// Defined above: kCFStringEncodingASCII = 0x0600
|
||||
//static kCFStringEncodingANSEL: CFStringEncoding = 0x0601;
|
||||
//static kCFStringEncodingJIS_X0201_76: CFStringEncoding = 0x0620;
|
||||
//static kCFStringEncodingJIS_X0208_83: CFStringEncoding = 0x0621;
|
||||
//static kCFStringEncodingJIS_X0208_90: CFStringEncoding = 0x0622;
|
||||
//static kCFStringEncodingJIS_X0212_90: CFStringEncoding = 0x0623;
|
||||
//static kCFStringEncodingJIS_C6226_78: CFStringEncoding = 0x0624;
|
||||
//static kCFStringEncodingShiftJIS_X0213: CFStringEncoding = 0x0628;
|
||||
//static kCFStringEncodingShiftJIS_X0213_MenKuTen: CFStringEncoding = 0x0629;
|
||||
//static kCFStringEncodingGB_2312_80: CFStringEncoding = 0x0630;
|
||||
//static kCFStringEncodingGBK_95: CFStringEncoding = 0x0631;
|
||||
//static kCFStringEncodingGB_18030_2000: CFStringEncoding = 0x0632;
|
||||
//static kCFStringEncodingKSC_5601_87: CFStringEncoding = 0x0640;
|
||||
//static kCFStringEncodingKSC_5601_92_Johab: CFStringEncoding = 0x0641;
|
||||
//static kCFStringEncodingCNS_11643_92_P1: CFStringEncoding = 0x0651;
|
||||
//static kCFStringEncodingCNS_11643_92_P2: CFStringEncoding = 0x0652;
|
||||
//static kCFStringEncodingCNS_11643_92_P3: CFStringEncoding = 0x0653;
|
||||
//static kCFStringEncodingISO_2022_JP: CFStringEncoding = 0x0820;
|
||||
//static kCFStringEncodingISO_2022_JP_2: CFStringEncoding = 0x0821;
|
||||
//static kCFStringEncodingISO_2022_JP_1: CFStringEncoding = 0x0822;
|
||||
//static kCFStringEncodingISO_2022_JP_3: CFStringEncoding = 0x0823;
|
||||
//static kCFStringEncodingISO_2022_CN: CFStringEncoding = 0x0830;
|
||||
//static kCFStringEncodingISO_2022_CN_EXT: CFStringEncoding = 0x0831;
|
||||
//static kCFStringEncodingISO_2022_KR: CFStringEncoding = 0x0840;
|
||||
//static kCFStringEncodingEUC_JP: CFStringEncoding = 0x0920;
|
||||
//static kCFStringEncodingEUC_CN: CFStringEncoding = 0x0930;
|
||||
//static kCFStringEncodingEUC_TW: CFStringEncoding = 0x0931;
|
||||
//static kCFStringEncodingEUC_KR: CFStringEncoding = 0x0940;
|
||||
//static kCFStringEncodingShiftJIS: CFStringEncoding = 0x0A01;
|
||||
//static kCFStringEncodingKOI8_R: CFStringEncoding = 0x0A02;
|
||||
//static kCFStringEncodingBig5: CFStringEncoding = 0x0A03;
|
||||
//static kCFStringEncodingMacRomanLatin1: CFStringEncoding = 0x0A04;
|
||||
//static kCFStringEncodingHZ_GB_2312: CFStringEncoding = 0x0A05;
|
||||
//static kCFStringEncodingBig5_HKSCS_1999: CFStringEncoding = 0x0A06;
|
||||
//static kCFStringEncodingVISCII: CFStringEncoding = 0x0A07;
|
||||
//static kCFStringEncodingKOI8_U: CFStringEncoding = 0x0A08;
|
||||
//static kCFStringEncodingBig5_E: CFStringEncoding = 0x0A09;
|
||||
// Defined above: kCFStringEncodingNextStepLatin = 0x0B01
|
||||
//static kCFStringEncodingNextStepJapanese: CFStringEncoding = 0x0B02;
|
||||
//static kCFStringEncodingEBCDIC_US: CFStringEncoding = 0x0C01;
|
||||
//static kCFStringEncodingEBCDIC_CP037: CFStringEncoding = 0x0C02;
|
||||
//static kCFStringEncodingUTF7: CFStringEncoding = 0x04000100;
|
||||
//static kCFStringEncodingUTF7_IMAP: CFStringEncoding = 0x0A10;
|
||||
//static kCFStringEncodingShiftJIS_X0213_00: CFStringEncoding = 0x0628; /* Deprecated */
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFString(c_void);
|
||||
|
||||
pub type CFStringRef = *const __CFString;
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFString.h
|
||||
*/
|
||||
|
||||
// N.B. organized according to "Functions by task" in docs
|
||||
|
||||
/* Creating a CFString */
|
||||
//fn CFSTR
|
||||
//fn CFStringCreateArrayBySeparatingStrings
|
||||
//fn CFStringCreateByCombiningStrings
|
||||
//fn CFStringCreateCopy
|
||||
//fn CFStringCreateFromExternalRepresentation
|
||||
pub fn CFStringCreateWithBytes(alloc: CFAllocatorRef,
|
||||
bytes: *const u8,
|
||||
numBytes: CFIndex,
|
||||
encoding: CFStringEncoding,
|
||||
isExternalRepresentation: Boolean,
|
||||
contentsDeallocator: CFAllocatorRef)
|
||||
-> CFStringRef;
|
||||
pub fn CFStringCreateWithBytesNoCopy(alloc: CFAllocatorRef,
|
||||
bytes: *const u8,
|
||||
numBytes: CFIndex,
|
||||
encoding: CFStringEncoding,
|
||||
isExternalRepresentation: Boolean,
|
||||
contentsDeallocator: CFAllocatorRef)
|
||||
-> CFStringRef;
|
||||
//fn CFStringCreateWithCharacters
|
||||
//fn CFStringCreateWithCharactersNoCopy
|
||||
pub fn CFStringCreateWithCString(alloc: CFAllocatorRef,
|
||||
cStr: *const c_char,
|
||||
encoding: CFStringEncoding)
|
||||
-> CFStringRef;
|
||||
//fn CFStringCreateWithCStringNoCopy
|
||||
//fn CFStringCreateWithFormat
|
||||
//fn CFStringCreateWithFormatAndArguments
|
||||
//fn CFStringCreateWithPascalString
|
||||
//fn CFStringCreateWithPascalStringNoCopy
|
||||
//fn CFStringCreateWithSubstring
|
||||
|
||||
/* Searching Strings */
|
||||
//fn CFStringCreateArrayWithFindResults
|
||||
//fn CFStringFind
|
||||
//fn CFStringFindCharacterFromSet
|
||||
//fn CFStringFindWithOptions
|
||||
//fn CFStringFindWithOptionsAndLocale
|
||||
//fn CFStringGetLineBounds
|
||||
|
||||
/* Comparing Strings */
|
||||
//fn CFStringCompare
|
||||
//fn CFStringCompareWithOptions
|
||||
//fn CFStringCompareWithOptionsAndLocale
|
||||
//fn CFStringHasPrefix
|
||||
//fn CFStringHasSuffix
|
||||
|
||||
/* Accessing Characters */
|
||||
//fn CFStringCreateExternalRepresentation
|
||||
pub fn CFStringGetBytes(theString: CFStringRef,
|
||||
range: CFRange,
|
||||
encoding: CFStringEncoding,
|
||||
lossByte: u8,
|
||||
isExternalRepresentation: Boolean,
|
||||
buffer: *mut u8,
|
||||
maxBufLen: CFIndex,
|
||||
usedBufLen: *mut CFIndex)
|
||||
-> CFIndex;
|
||||
//fn CFStringGetCharacterAtIndex
|
||||
//fn CFStringGetCharacters
|
||||
//fn CFStringGetCharactersPtr
|
||||
//fn CFStringGetCharacterFromInlineBuffer
|
||||
pub fn CFStringGetCString(theString: CFStringRef,
|
||||
buffer: *mut c_char,
|
||||
bufferSize: CFIndex,
|
||||
encoding: CFStringEncoding)
|
||||
-> Boolean;
|
||||
pub fn CFStringGetCStringPtr(theString: CFStringRef,
|
||||
encoding: CFStringEncoding)
|
||||
-> *const c_char;
|
||||
pub fn CFStringGetLength(theString: CFStringRef) -> CFIndex;
|
||||
//fn CFStringGetPascalString
|
||||
//fn CFStringGetPascalStringPtr
|
||||
//fn CFStringGetRangeOfComposedCharactersAtIndex
|
||||
//fn CFStringInitInlineBuffer
|
||||
|
||||
/* Working With Hyphenation */
|
||||
//fn CFStringGetHyphenationLocationBeforeIndex
|
||||
//fn CFStringIsHyphenationAvailableForLocale
|
||||
|
||||
/* Working With Encodings */
|
||||
//fn CFStringConvertEncodingToIANACharSetName
|
||||
//fn CFStringConvertEncodingToNSStringEncoding
|
||||
//fn CFStringConvertEncodingToWindowsCodepage
|
||||
//fn CFStringConvertIANACharSetNameToEncoding
|
||||
//fn CFStringConvertNSStringEncodingToEncoding
|
||||
//fn CFStringConvertWindowsCodepageToEncoding
|
||||
//fn CFStringGetFastestEncoding
|
||||
//fn CFStringGetListOfAvailableEncodings
|
||||
//fn CFStringGetMaximumSizeForEncoding
|
||||
//fn CFStringGetMostCompatibleMacStringEncoding
|
||||
//fn CFStringGetNameOfEncoding
|
||||
//fn CFStringGetSmallestEncoding
|
||||
//fn CFStringGetSystemEncoding
|
||||
//fn CFStringIsEncodingAvailable
|
||||
|
||||
/* Getting Numeric Values */
|
||||
//fn CFStringGetDoubleValue
|
||||
//fn CFStringGetIntValue
|
||||
|
||||
/* Getting String Properties */
|
||||
//fn CFShowStr
|
||||
pub fn CFStringGetTypeID() -> CFTypeID;
|
||||
|
||||
/* String File System Representations */
|
||||
//fn CFStringCreateWithFileSystemRepresentation
|
||||
//fn CFStringGetFileSystemRepresentation
|
||||
//fn CFStringGetMaximumSizeOfFileSystemRepresentation
|
||||
|
||||
/* Getting Paragraph Bounds */
|
||||
//fn CFStringGetParagraphBounds
|
||||
|
||||
/* Managing Surrogates */
|
||||
//fn CFStringGetLongCharacterForSurrogatePair
|
||||
//fn CFStringGetSurrogatePairForLongCharacter
|
||||
//fn CFStringIsSurrogateHighCharacter
|
||||
//fn CFStringIsSurrogateLowCharacter
|
||||
}
|
|
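A sketch of the UTF-8 round trip these string bindings usually serve: create a CFString from a C string, check its UTF-16 length, and copy it back into a fixed buffer (same assumptions; the sample text is arbitrary).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFIndex, CFRelease, CFTypeRef};
use core_foundation_sys::string::{
    kCFStringEncodingUTF8, CFStringCreateWithCString, CFStringGetCString, CFStringGetLength,
};
use libc::c_char;
use std::ffi::CStr;

fn main() {
    unsafe {
        // The input must be NUL-terminated; the \0 is part of the literal.
        let s = CFStringCreateWithCString(kCFAllocatorDefault,
                                          "Grüße\0".as_ptr() as *const c_char,
                                          kCFStringEncodingUTF8);
        // CFStringGetLength counts UTF-16 code units, not bytes: 5 here, not 7.
        assert_eq!(CFStringGetLength(s), 5);

        // Copy the contents back out as UTF-8 into a fixed C buffer.
        let mut buf = [0 as c_char; 64];
        let ok = CFStringGetCString(s, buf.as_mut_ptr(), buf.len() as CFIndex,
                                    kCFStringEncodingUTF8);
        assert_ne!(ok, 0, "64 bytes is plenty for this string");
        assert_eq!(CStr::from_ptr(buf.as_ptr()).to_str().unwrap(), "Grüße");

        CFRelease(s as CFTypeRef);
    }
}
```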
@ -1,156 +0,0 @@
|
|||
// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
use libc::c_void;
|
||||
|
||||
use base::{CFOptionFlags, CFIndex, CFAllocatorRef, Boolean, CFTypeID, SInt32};
|
||||
use string::CFStringRef;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct __CFURL(c_void);
|
||||
|
||||
pub type CFURLRef = *const __CFURL;
|
||||
|
||||
pub type CFURLBookmarkCreationOptions = CFOptionFlags;
|
||||
|
||||
pub type CFURLPathStyle = CFIndex;
|
||||
|
||||
/* typedef CF_ENUM(CFIndex, CFURLPathStyle) */
|
||||
pub const kCFURLPOSIXPathStyle: CFURLPathStyle = 0;
|
||||
pub const kCFURLHFSPathStyle: CFURLPathStyle = 1;
|
||||
pub const kCFURLWindowsPathStyle: CFURLPathStyle = 2;
|
||||
|
||||
// static kCFURLBookmarkCreationPreferFileIDResolutionMask: CFURLBookmarkCreationOptions =
|
||||
// (1 << 8) as u32;
|
||||
// static kCFURLBookmarkCreationMinimalBookmarkMask: CFURLBookmarkCreationOptions =
|
||||
// (1 << 9) as u32;
|
||||
// static kCFURLBookmarkCreationSuitableForBookmarkFile: CFURLBookmarkCreationOptions =
|
||||
// (1 << 10) as u32;
|
||||
// static kCFURLBookmarkCreationWithSecurityScope: CFURLBookmarkCreationOptions =
|
||||
// (1 << 11) as u32;
|
||||
// static kCFURLBookmarkCreationSecurityScopeAllowOnlyReadAccess: CFURLBookmarkCreationOptions =
|
||||
// (1 << 12) as u32;
|
||||
|
||||
// TODO: there are a lot of missing keys and constants. Add if you are bored or need them.
|
||||
|
||||
extern {
|
||||
/*
|
||||
* CFURL.h
|
||||
*/
|
||||
|
||||
/* Common File System Resource Keys */
|
||||
// static kCFURLAttributeModificationDateKey: CFStringRef;
|
||||
// static kCFURLContentAccessDateKey: CFStringRef;
|
||||
// static kCFURLContentModificationDateKey: CFStringRef;
|
||||
// static kCFURLCreationDateKey: CFStringRef;
|
||||
// static kCFURLCustomIconKey: CFStringRef;
|
||||
// static kCFURLEffectiveIconKey: CFStringRef;
|
||||
// static kCFURLFileResourceIdentifierKey: CFStringRef;
|
||||
// static kCFURLFileSecurityKey: CFStringRef;
|
||||
// static kCFURLHasHiddenExtensionKey: CFStringRef;
|
||||
// static kCFURLIsDirectoryKey: CFStringRef;
|
||||
// static kCFURLIsExecutableKey: CFStringRef;
|
||||
// static kCFURLIsHiddenKey: CFStringRef;
|
||||
// static kCFURLIsPackageKey: CFStringRef;
|
||||
// static kCFURLIsReadableKey: CFStringRef;
|
||||
// static kCFURLIsRegularFileKey: CFStringRef;
|
||||
// static kCFURLIsSymbolicLinkKey: CFStringRef;
|
||||
// static kCFURLIsSystemImmutableKey: CFStringRef;
|
||||
// static kCFURLIsUserImmutableKey: CFStringRef;
|
||||
// static kCFURLIsVolumeKey: CFStringRef;
|
||||
// static kCFURLIsWritableKey: CFStringRef;
|
||||
// static kCFURLLabelColorKey: CFStringRef;
|
||||
// static kCFURLLabelNumberKey: CFStringRef;
|
||||
// static kCFURLLinkCountKey: CFStringRef;
|
||||
// static kCFURLLocalizedLabelKey: CFStringRef;
|
||||
// static kCFURLLocalizedNameKey: CFStringRef;
|
||||
// static kCFURLLocalizedTypeDescriptionKey: CFStringRef;
|
||||
// static kCFURLNameKey: CFStringRef;
|
||||
// static kCFURLParentDirectoryURLKey: CFStringRef;
|
||||
// static kCFURLPreferredIOBlockSizeKey: CFStringRef;
|
||||
// static kCFURLTypeIdentifierKey: CFStringRef;
|
||||
// static kCFURLVolumeIdentifierKey: CFStringRef;
|
||||
// static kCFURLVolumeURLKey: CFStringRef;
|
||||
// static kCFURLIsExcludedFromBackupKey: CFStringRef;
|
||||
// static kCFURLFileResourceTypeKey: CFStringRef;
|
||||
|
||||
/* Creating a CFURL */
|
||||
//fn CFURLCopyAbsoluteURL
|
||||
//fn CFURLCreateAbsoluteURLWithBytes
|
||||
//fn CFURLCreateByResolvingBookmarkData
|
||||
//fn CFURLCreateCopyAppendingPathComponent
|
||||
//fn CFURLCreateCopyAppendingPathExtension
|
||||
//fn CFURLCreateCopyDeletingLastPathComponent
|
||||
//fn CFURLCreateCopyDeletingPathExtension
|
||||
//fn CFURLCreateFilePathURL
|
||||
//fn CFURLCreateFileReferenceURL
|
||||
//fn CFURLCreateFromFileSystemRepresentation
|
||||
//fn CFURLCreateFromFileSystemRepresentationRelativeToBase
|
||||
//fn CFURLCreateFromFSRef
|
||||
//fn CFURLCreateWithBytes
|
||||
//fn CFURLCreateWithFileSystemPath
|
||||
pub fn CFURLCreateWithFileSystemPath(allocator: CFAllocatorRef, filePath: CFStringRef, pathStyle: CFURLPathStyle, isDirectory: Boolean) -> CFURLRef;
|
||||
//fn CFURLCreateWithFileSystemPathRelativeToBase
|
||||
//fn CFURLCreateWithString(allocator: CFAllocatorRef, urlString: CFStringRef,
|
||||
// baseURL: CFURLRef) -> CFURLRef;
|
||||
|
||||
/* Accessing the Parts of a URL */
|
||||
pub fn CFURLCanBeDecomposed(anURL: CFURLRef) -> Boolean;
|
||||
pub fn CFURLCopyFileSystemPath(anURL: CFURLRef, pathStyle: CFURLPathStyle) -> CFStringRef;
|
||||
pub fn CFURLCopyFragment(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
|
||||
pub fn CFURLCopyHostName(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyLastPathComponent(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyNetLocation(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyParameterString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
|
||||
pub fn CFURLCopyPassword(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyPath(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyPathExtension(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyQueryString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef;
|
||||
pub fn CFURLCopyResourceSpecifier(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyScheme(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLCopyStrictPath(anURL: CFURLRef, isAbsolute: *mut Boolean) -> CFStringRef;
|
||||
pub fn CFURLCopyUserName(anURL: CFURLRef) -> CFStringRef;
|
||||
pub fn CFURLGetPortNumber(anURL: CFURLRef) -> SInt32;
|
||||
pub fn CFURLHasDirectoryPath(anURL: CFURLRef) -> Boolean;
|
||||
|
||||
/* Converting URLs to Other Representations */
|
||||
//fn CFURLCreateData(allocator: CFAllocatorRef, url: CFURLRef,
|
||||
// encoding: CFStringEncoding, escapeWhitespace: bool) -> CFDataRef;
|
||||
//fn CFURLCreateStringByAddingPercentEscapes
|
||||
//fn CFURLCreateStringByReplacingPercentEscapes
|
||||
//fn CFURLCreateStringByReplacingPercentEscapesUsingEncoding
|
||||
//fn CFURLGetFileSystemRepresentation
|
||||
//fn CFURLGetFSRef
|
||||
pub fn CFURLGetString(anURL: CFURLRef) -> CFStringRef;
|
||||
|
||||
/* Getting URL Properties */
|
||||
//fn CFURLGetBaseURL(anURL: CFURLRef) -> CFURLRef;
|
||||
//fn CFURLGetBytes
|
||||
//fn CFURLGetByteRangeForComponent
|
||||
pub fn CFURLGetTypeID() -> CFTypeID;
|
||||
//fn CFURLResourceIsReachable
|
||||
|
||||
/* Getting and Setting File System Resource Properties */
|
||||
//fn CFURLClearResourcePropertyCache
|
||||
//fn CFURLClearResourcePropertyCacheForKey
|
||||
//fn CFURLCopyResourcePropertiesForKeys
|
||||
//fn CFURLCopyResourcePropertyForKey
|
||||
//fn CFURLCreateResourcePropertiesForKeysFromBookmarkData
|
||||
//fn CFURLCreateResourcePropertyForKeyFromBookmarkData
|
||||
//fn CFURLSetResourcePropertiesForKeys
|
||||
//fn CFURLSetResourcePropertyForKey
|
||||
//fn CFURLSetTemporaryResourcePropertyForKey
|
||||
|
||||
/* Working with Bookmark Data */
|
||||
//fn CFURLCreateBookmarkData
|
||||
//fn CFURLCreateBookmarkDataFromAliasRecord
|
||||
//fn CFURLCreateBookmarkDataFromFile
|
||||
//fn CFURLWriteBookmarkDataToFile
|
||||
//fn CFURLStartAccessingSecurityScopedResource
|
||||
//fn CFURLStopAccessingSecurityScopedResource
|
||||
}
|
|
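Finally, a sketch for the CFURL path constructors: build a directory file: URL for /tmp and read its string form back through the CFString helpers (same assumptions; /tmp is just a convenient example path).

```rust
use core_foundation_sys::base::{kCFAllocatorDefault, CFIndex, CFRelease, CFTypeRef};
use core_foundation_sys::string::{
    kCFStringEncodingUTF8, CFStringCreateWithCString, CFStringGetCString,
};
use core_foundation_sys::url::{
    kCFURLPOSIXPathStyle, CFURLCreateWithFileSystemPath, CFURLGetString, CFURLHasDirectoryPath,
};
use libc::c_char;
use std::ffi::CStr;

fn main() {
    unsafe {
        let path = CFStringCreateWithCString(kCFAllocatorDefault,
                                             b"/tmp\0".as_ptr() as *const c_char,
                                             kCFStringEncodingUTF8);
        // isDirectory = 1 so the resulting URL gets a trailing slash.
        let url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, path,
                                                kCFURLPOSIXPathStyle, 1);
        assert_ne!(CFURLHasDirectoryPath(url), 0);

        // Get rule: CFURLGetString borrows, so the returned string is not released here.
        let spec = CFURLGetString(url);
        let mut buf = [0 as c_char; 128];
        if CFStringGetCString(spec, buf.as_mut_ptr(), buf.len() as CFIndex,
                              kCFStringEncodingUTF8) != 0 {
            // Prints something like "file:///tmp/".
            println!("{}", CStr::from_ptr(buf.as_ptr()).to_string_lossy());
        }

        CFRelease(url as CFTypeRef);
        CFRelease(path as CFTypeRef);
    }
}
```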
@ -177,8 +177,10 @@ class SyncedBookmarksMirror {
|
|||
}
|
||||
}
|
||||
await db.execute(`PRAGMA foreign_keys = ON`);
|
||||
await migrateMirrorSchema(db);
|
||||
await initializeTempMirrorEntities(db);
|
||||
await db.executeTransaction(async function() {
|
||||
await migrateMirrorSchema(db);
|
||||
await initializeTempMirrorEntities(db);
|
||||
});
|
||||
} catch (ex) {
|
||||
options.recordTelemetryEvent("mirror", "open", "error",
|
||||
{ why: "initialize" });
|
||||
|
@ -1494,7 +1496,7 @@ class SyncedBookmarksMirror {
|
|||
// tracked "weakly": if the upload is interrupted or fails, we won't
|
||||
// reupload the record on the next sync.
|
||||
await this.db.execute(`
|
||||
INSERT INTO itemsToWeaklyReupload(id)
|
||||
INSERT OR IGNORE INTO itemsToWeaklyReupload(id)
|
||||
SELECT b.id FROM moz_bookmarks b
|
||||
JOIN mergeStates r ON r.mergedGuid = b.guid
|
||||
JOIN items v ON v.guid = r.mergedGuid
|
||||
|
@ -1626,6 +1628,23 @@ class SyncedBookmarksMirror {
|
|||
*/
|
||||
async fetchLocalChangeRecords() {
|
||||
let changeRecords = {};
|
||||
let childRecordIdsByLocalParentId = new Map();
|
||||
|
||||
let childGuidRows = await this.db.execute(`
|
||||
SELECT parentId, guid FROM structureToUpload
|
||||
ORDER BY parentId, position`);
|
||||
|
||||
for (let row of childGuidRows) {
|
||||
let localParentId = row.getResultByName("parentId");
|
||||
let childRecordId = PlacesSyncUtils.bookmarks.guidToRecordId(
|
||||
row.getResultByName("guid"));
|
||||
if (childRecordIdsByLocalParentId.has(localParentId)) {
|
||||
let childRecordIds = childRecordIdsByLocalParentId.get(localParentId);
|
||||
childRecordIds.push(childRecordId);
|
||||
} else {
|
||||
childRecordIdsByLocalParentId.set(localParentId, [childRecordId]);
|
||||
}
|
||||
}
|
||||
|
||||
let itemRows = await this.db.execute(`
|
||||
SELECT id, syncChangeCounter, guid, isDeleted, type, isQuery,
|
||||
|

@ -1763,15 +1782,9 @@ class SyncedBookmarksMirror {
        if (description) {
          folderCleartext.description = description;
        }
        let childGuidRows = await this.db.executeCached(`
          SELECT guid FROM structureToUpload
          WHERE parentId = :id
          ORDER BY position`,
          { id: row.getResultByName("id") });
        folderCleartext.children = childGuidRows.map(row => {
          let childGuid = row.getResultByName("guid");
          return PlacesSyncUtils.bookmarks.guidToRecordId(childGuid);
        });
        let localId = row.getResultByName("id");
        let childRecordIds = childRecordIdsByLocalParentId.get(localId);
        folderCleartext.children = childRecordIds || [];
        changeRecords[recordId] = new BookmarkChangeRecord(
          syncChangeCounter, folderCleartext);
        continue;
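
This hunk and the previous one replace a per-folder `structureToUpload` query with a single query ordered by `(parentId, position)`, grouped into a Map keyed by the local parent id; each folder then does a plain lookup. A minimal, hypothetical Python/sqlite3 sketch of the same grouping strategy:

# Hypothetical sketch: one query ordered by (parentId, position), grouped into
# a dict, instead of one query per folder.
import sqlite3
from collections import defaultdict

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE structureToUpload(guid TEXT, parentId INTEGER, position INTEGER)")
db.executemany("INSERT INTO structureToUpload VALUES (?, ?, ?)", [
    ("bookmarkAAAA", 1, 0),
    ("bookmarkBBBB", 1, 1),
    ("bookmarkCCCC", 2, 0),
])

children_by_parent = defaultdict(list)
for parent_id, guid in db.execute(
        "SELECT parentId, guid FROM structureToUpload ORDER BY parentId, position"):
    children_by_parent[parent_id].append(guid)

# Later, per folder, a dictionary lookup replaces a per-folder query:
print(children_by_parent.get(1, []))  # ['bookmarkAAAA', 'bookmarkBBBB']
print(children_by_parent.get(3, []))  # [] for folders with nothing to upload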

@ -1854,17 +1867,15 @@ function isDatabaseCorrupt(error) {
 * @param {Sqlite.OpenedConnection} db
 *        The mirror database connection.
 */
function migrateMirrorSchema(db) {
  return db.executeTransaction(async function() {
    let currentSchemaVersion = await db.getSchemaVersion("mirror");
    if (currentSchemaVersion < 1) {
      await initializeMirrorDatabase(db);
    }
    // Downgrading from a newer profile to an older profile rolls back the
    // schema version, but leaves all new columns in place. We'll run the
    // migration logic again on the next upgrade.
    await db.setSchemaVersion(MIRROR_SCHEMA_VERSION, "mirror");
  });
async function migrateMirrorSchema(db) {
  let currentSchemaVersion = await db.getSchemaVersion("mirror");
  if (currentSchemaVersion < 1) {
    await initializeMirrorDatabase(db);
  }
  // Downgrading from a newer profile to an older profile rolls back the
  // schema version, but leaves all new columns in place. We'll run the
  // migration logic again on the next upgrade.
  await db.setSchemaVersion(MIRROR_SCHEMA_VERSION, "mirror");
}

/**
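
Together with the earlier `open()` hunk, `migrateMirrorSchema` stops opening its own transaction: the caller now wraps migration and temp-entity setup in one `executeTransaction` call, and the schema version still decides whether the migration body runs. A rough, hypothetical sqlite3 sketch of that shape (table and function names are placeholders, not the mirror's real schema):

# Hypothetical sketch: the migration assumes the caller holds the transaction,
# and the caller wraps migration plus temp-table setup in a single transaction.
import sqlite3

MIRROR_SCHEMA_VERSION = 1  # stand-in for the real constant

def migrate_mirror_schema(db):
    # Assumes the caller already started the transaction.
    (current_version,) = db.execute("PRAGMA user_version").fetchone()
    if current_version < 1:
        db.execute("CREATE TABLE items(guid TEXT PRIMARY KEY, needsMerge BOOLEAN)")
    # A downgrade rolls the version back but keeps new columns; rerunning the
    # migration on the next upgrade is expected to be harmless.
    db.execute(f"PRAGMA user_version = {MIRROR_SCHEMA_VERSION}")

def initialize_temp_mirror_entities(db):
    db.execute("CREATE TEMP TABLE itemsToWeaklyReupload(id INTEGER PRIMARY KEY)")

db = sqlite3.connect(":memory:", isolation_level=None)  # manage transactions manually
db.execute("BEGIN")
try:
    migrate_mirror_schema(db)
    initialize_temp_mirror_entities(db)
    db.execute("COMMIT")
except Exception:
    db.execute("ROLLBACK")
    raise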

@ -587,13 +587,6 @@ add_task(async function test_nonexistent_on_one_side() {
  let buf = await openMirror("nonexistent_on_one_side");

  info("Set up empty mirror");
  // Previous tests change the menu's date added time; reset it to a predictable
  // value.
  let menuDateAdded = new Date();
  await PlacesUtils.bookmarks.update({
    guid: PlacesUtils.bookmarks.menuGuid,
    dateAdded: menuDateAdded,
  });
  await PlacesTestUtils.markBookmarksAsSynced();

  // A doesn't exist in the mirror.

@ -622,6 +615,9 @@ add_task(async function test_nonexistent_on_one_side() {
  deepEqual(await buf.fetchUnmergedGuids(), ["bookmarkBBBB"],
            "Should leave B unmerged");

  let menuInfo = await PlacesUtils.bookmarks.fetch(
    PlacesUtils.bookmarks.menuGuid);

  // We should still upload a record for the menu, since we changed its
  // children when we added then removed A.
  deepEqual(changesToUpload, {

@ -635,7 +631,7 @@ add_task(async function test_nonexistent_on_one_side() {
      parentid: "places",
      hasDupe: true,
      parentName: "",
      dateAdded: menuDateAdded.getTime(),
      dateAdded: menuInfo.dateAdded.getTime(),
      title: BookmarksMenuTitle,
      children: [],
    },

@ -37,3 +37,73 @@ add_task(async function test_explicit_weakupload() {
  await PlacesUtils.bookmarks.eraseEverything();
  await PlacesSyncUtils.bookmarks.reset();
});

add_task(async function test_explicit_weakupload_with_dateAdded() {
  let buf = await openMirror("explicit_weakupload_with_dateAdded");

  info("Set up mirror");
  let dateAdded = new Date();
  await PlacesUtils.bookmarks.insertTree({
    guid: PlacesUtils.bookmarks.menuGuid,
    children: [{
      guid: "mozBmk______",
      url: "https://mozilla.org",
      title: "Mozilla",
      dateAdded,
    }],
  });
  await buf.store(shuffle([{
    id: "menu",
    type: "folder",
    children: ["mozBmk______"],
  }, {
    id: "mozBmk______",
    type: "bookmark",
    title: "Mozilla",
    bmkUri: "https://mozilla.org",
    dateAdded: dateAdded.getTime(),
  }]), { needsMerge: false });
  await PlacesTestUtils.markBookmarksAsSynced();
info("Make remote change with older date added");
|
||||
await buf.store([{
|
||||
id: "mozBmk______",
|
||||
type: "bookmark",
|
||||
title: "Firefox",
|
||||
bmkUri: "http://getfirefox.com/",
|
||||
dateAdded: dateAdded.getTime() + 5000,
|
||||
}]);
|
||||
|
||||
info("Explicitly request changed item for weak upload");
|
||||
let changesToUpload = await buf.apply({
|
||||
weakUpload: ["mozBmk______"]
|
||||
});
|
||||
deepEqual(changesToUpload, {
|
||||
mozBmk______: {
|
||||
tombstone: false,
|
||||
counter: 0,
|
||||
synced: false,
|
||||
cleartext: {
|
||||
id: "mozBmk______",
|
||||
type: "bookmark",
|
||||
title: "Firefox",
|
||||
bmkUri: "http://getfirefox.com/",
|
||||
parentid: "menu",
|
||||
hasDupe: true,
|
||||
parentName: "menu",
|
||||
dateAdded: dateAdded.getTime(),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let localInfo = await PlacesUtils.bookmarks.fetch("mozBmk______");
|
||||
equal(localInfo.title, "Firefox", "Should take new title from mirror");
|
||||
equal(localInfo.url.href, "http://getfirefox.com/",
|
||||
"Should take new URL from mirror");
|
||||
equal(localInfo.dateAdded.getTime(), dateAdded.getTime(),
|
||||
"Should keep older local date added");
|
||||
|
||||
await buf.finalize();
|
||||
await PlacesUtils.bookmarks.eraseEverything();
|
||||
await PlacesSyncUtils.bookmarks.reset();
|
||||
});
|
||||
|
|
|
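
The assertions at the end of this test pin down the expected merge result: the newer remote title and URL win, but the merged record keeps the older local dateAdded. A hypothetical sketch of that reconciliation rule (names are illustrative, not the mirror's real API):

# Hypothetical sketch of what the assertions above describe: content fields come
# from the incoming remote record, but the merged dateAdded keeps the earlier of
# the two timestamps.
def merge_bookmark(local, remote):
    merged = dict(remote)  # newer remote title/URL win
    merged["dateAdded"] = min(local["dateAdded"], remote["dateAdded"])
    return merged

local = {"title": "Mozilla", "bmkUri": "https://mozilla.org", "dateAdded": 1000}
remote = {"title": "Firefox", "bmkUri": "http://getfirefox.com/", "dateAdded": 6000}

print(merge_bookmark(local, remote))
# {'title': 'Firefox', 'bmkUri': 'http://getfirefox.com/', 'dateAdded': 1000}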

@ -1325,6 +1325,18 @@ var AddonManagerInternal = {
    });
  },

  // Returns true if System Addons should be updated
  systemUpdateEnabled() {
    if (!Services.prefs.getBoolPref(PREF_APP_UPDATE_ENABLED) ||
        !Services.prefs.getBoolPref(PREF_APP_UPDATE_AUTO)) {
      return false;
    }
    if (Services.policies && !Services.policies.isAllowed("SysAddonUpdate")) {
      return false;
    }
    return true;
  },

  /**
   * Performs a background update check by starting an update for all add-ons
   * that can be updated.
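
The new `systemUpdateEnabled()` helper centralizes the gate for system add-on updates: both app-update prefs must be on, and the enterprise policy engine must not forbid `SysAddonUpdate`. A simplified, hypothetical model of that gate (the Policies class and pref names are illustrative stand-ins for the real Services.policies and PREF_APP_UPDATE_ENABLED / PREF_APP_UPDATE_AUTO):

# Hypothetical, simplified model of the gate above: system add-on updates run
# only if both update prefs are on AND no enterprise policy forbids them.
class Policies:
    def __init__(self, blocked=()):
        self._blocked = set(blocked)

    def is_allowed(self, feature):
        return feature not in self._blocked

def system_update_enabled(prefs, policies=None):
    if not prefs.get("app.update.enabled") or not prefs.get("app.update.auto"):
        return False
    if policies is not None and not policies.is_allowed("SysAddonUpdate"):
        return False
    return True

prefs = {"app.update.enabled": True, "app.update.auto": True}
print(system_update_enabled(prefs))                                       # True
print(system_update_enabled(prefs, Policies(blocked=["SysAddonUpdate"])))  # False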

@ -1337,9 +1349,6 @@ var AddonManagerInternal = {
                                             Cr.NS_ERROR_NOT_INITIALIZED);

    let buPromise = (async () => {
      let appUpdateEnabled = Services.prefs.getBoolPref(PREF_APP_UPDATE_ENABLED) &&
                             Services.prefs.getBoolPref(PREF_APP_UPDATE_AUTO);

      logger.debug("Background update check beginning");

      Services.obs.notifyObservers(null, "addons-background-update-start");

@ -1386,7 +1395,7 @@ var AddonManagerInternal = {
        await Promise.all(updates);
      }

      if (appUpdateEnabled) {
      if (AddonManagerInternal.systemUpdateEnabled()) {
        try {
          await AddonManagerInternal._getProviderByName("XPIProvider").updateSystemAddons();
        } catch (e) {

@ -3004,14 +3013,6 @@ var AddonManagerPrivate = {
  },

  backgroundUpdateTimerHandler() {
    // Don't call through to the real update check if no checks are enabled.
    let appUpdateEnabled = Services.prefs.getBoolPref(PREF_APP_UPDATE_ENABLED) &&
                           Services.prefs.getBoolPref(PREF_APP_UPDATE_AUTO);

    if (!AddonManagerInternal.updateEnabled && !appUpdateEnabled) {
      logger.info("Skipping background update check");
      return;
    }
    // Don't return the promise here, since the caller doesn't care.
    AddonManagerInternal.backgroundUpdateCheck();
  },

@ -1286,6 +1286,7 @@ var AddonTestUtils = {
  async overrideBuiltIns(data) {
    // We need to set this in order to load the URL preloader service, which
    // is only possible when running in automation.
    let prevPrefVal = Services.prefs.getBoolPref(PREF_DISABLE_SECURITY, false);
    Services.prefs.setBoolPref(PREF_DISABLE_SECURITY, true);
    aomStartup.initializeURLPreloader();

@ -1297,7 +1298,7 @@ var AddonTestUtils = {
      ["override", "chrome://browser/content/built_in_addons.json",
       Services.io.newFileURI(file).spec],
    ]);
    Services.prefs.setBoolPref(PREF_DISABLE_SECURITY, false);
    Services.prefs.setBoolPref(PREF_DISABLE_SECURITY, prevPrefVal);
  }
};
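
The `overrideBuiltIns` change stops hard-coding the pref back to false and instead restores whatever value the caller had before the override. A hypothetical sketch of the same save-and-restore pattern (the pref name and dict-based prefs store are illustrative only):

# Hypothetical sketch of save-and-restore: remember the previous value before
# overriding a setting, and put that value back afterwards.
from contextlib import contextmanager

@contextmanager
def override_pref(prefs, name, value, default=False):
    prev = prefs.get(name, default)   # like getBoolPref(PREF_DISABLE_SECURITY, false)
    prefs[name] = value               # like setBoolPref(PREF_DISABLE_SECURITY, true)
    try:
        yield
    finally:
        prefs[name] = prev            # restore the caller's original value, not a constant

prefs = {"example.disable-security-checks": True}
with override_pref(prefs, "example.disable-security-checks", False):
    pass  # code that needs the override
print(prefs["example.disable-security-checks"])  # True (restored)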

@ -0,0 +1,78 @@
/* Any copyright is dedicated to the Public Domain.
 * http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";

// This test verifies that system addon updates are correctly blocked by the
// DisableSysAddonUpdate enterprise policy.

ChromeUtils.import("resource://testing-common/httpd.js");
ChromeUtils.import("resource://testing-common/EnterprisePolicyTesting.jsm");

// Setting PREF_DISABLE_SECURITY tells the policy engine that we are in testing
// mode and enables restarting the policy engine without restarting the browser.
Services.prefs.setBoolPref(PREF_DISABLE_SECURITY, true);
Services.prefs.setBoolPref("browser.policies.enabled", true);
registerCleanupFunction(() => {
  Services.prefs.clearUserPref(PREF_DISABLE_SECURITY);
  Services.prefs.clearUserPref("browser.policies.enabled");
});

Services.policies; // Load policy engine

BootstrapMonitor.init();

createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "2");

var testserver = new HttpServer();
testserver.registerDirectory("/data/", do_get_file("data/system_addons"));
testserver.start();
var root = testserver.identity.primaryScheme + "://" +
           testserver.identity.primaryHost + ":" +
           testserver.identity.primaryPort + "/data/";
Services.prefs.setCharPref(PREF_SYSTEM_ADDON_UPDATE_URL, root + "update.xml");

let distroDir = FileUtils.getDir("ProfD", ["sysfeatures", "empty"], true);
registerDirectory("XREAppFeat", distroDir);
initSystemAddonDirs();

/**
 * Defines the set of initial conditions to run the test against.
 *
 * setup: A task to setup the profile into the initial state.
 * initialState: The initial expected system add-on state after setup has run.
 *
 * These conditions run tests with no updated or default system add-ons
 * initially installed
 */
const TEST_CONDITIONS = {
  setup() {
    clearSystemAddonUpdatesDir();
    distroDir.leafName = "empty";
  },
  initialState: [
    { isUpgrade: false, version: null},
    { isUpgrade: false, version: null},
    { isUpgrade: false, version: null},
    { isUpgrade: false, version: null},
    { isUpgrade: false, version: null}
  ],
};

add_task(async function test_update_disabled_by_policy() {
  await setupSystemAddonConditions(TEST_CONDITIONS, distroDir);

  await EnterprisePolicyTesting.setupPolicyEngineWithJson({
    "policies": {
      "DisableSysAddonUpdate": true
    }
  });

  await updateAllSystemAddons(await buildSystemAddonUpdates([
    { id: "system2@tests.mozilla.org", version: "2.0", path: "system2_2.xpi" },
    { id: "system3@tests.mozilla.org", version: "2.0", path: "system3_2.xpi" }
  ], root), testserver);

  await verifySystemAddonState(TEST_CONDITIONS.initialState, undefined, false, distroDir);

  await promiseShutdownManager();
});
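
The test drives the policy engine with `DisableSysAddonUpdate: true` and then expects the system add-on state to stay unchanged after an attempted update. A hypothetical model of the wiring it relies on, where that policy makes `isAllowed("SysAddonUpdate")` return false (illustration only, not the real policy engine):

# Hypothetical model of the policy wiring this test relies on.
POLICY_TO_BLOCKED_FEATURE = {"DisableSysAddonUpdate": "SysAddonUpdate"}

class PolicyEngine:
    def __init__(self, policy_json):
        policies = policy_json.get("policies", {})
        self._blocked = {
            feature
            for policy, feature in POLICY_TO_BLOCKED_FEATURE.items()
            if policies.get(policy) is True
        }

    def is_allowed(self, feature):
        return feature not in self._blocked

engine = PolicyEngine({"policies": {"DisableSysAddonUpdate": True}})
print(engine.is_allowed("SysAddonUpdate"))  # False -> system add-on updates stay blocked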

@ -35,6 +35,7 @@ fail-if = os == 'win' && ccov
[test_system_update_custom.js]
[test_system_update_empty.js]
skip-if = true # Failing intermittently due to a race condition in the test, see bug 1348981
[test_system_update_enterprisepolicy.js]
[test_system_update_fail.js]
[test_system_update_newset.js]
[test_system_update_overlapping.js]

@ -78,8 +78,8 @@ public:
    ::GeckoStart(aEnv, argv, argc, aAppData);
  }

  virtual void XRE_SetAndroidChildFds(JNIEnv* aEnv, int aCrashFd, int aIPCFd, int aCrashAnnotationFd) override {
    ::XRE_SetAndroidChildFds(aEnv, aCrashFd, aIPCFd, aCrashAnnotationFd);
  virtual void XRE_SetAndroidChildFds(JNIEnv* aEnv, int aIPCFd, int aCrashFd, int aCrashAnnotationFd) override {
    ::XRE_SetAndroidChildFds(aEnv, aIPCFd, aCrashFd, aCrashAnnotationFd);
  }
#endif

@ -113,7 +113,7 @@ public:
#ifdef MOZ_WIDGET_ANDROID
  virtual void GeckoStart(JNIEnv* aEnv, char** argv, int argc, const StaticXREAppData& aAppData) = 0;

  virtual void XRE_SetAndroidChildFds(JNIEnv* aEnv, int aCrashFd, int aIPCFd, int aCrashAnnotationFd) = 0;
  virtual void XRE_SetAndroidChildFds(JNIEnv* aEnv, int aIPCFd, int aCrashFd, int aCrashAnnotationFd) = 0;
#endif

#ifdef LIBFUZZER

@ -243,12 +243,12 @@ GeckoProcessType sChildProcessType = GeckoProcessType_Default;

#if defined(MOZ_WIDGET_ANDROID)
void
XRE_SetAndroidChildFds (JNIEnv* env, int crashFd, int ipcFd, int crashAnnotationFd)
XRE_SetAndroidChildFds (JNIEnv* env, int ipcFd, int crashFd, int crashAnnotationFd)
{
  mozilla::jni::SetGeckoThreadEnv(env);
  IPC::Channel::SetClientChannelFd(ipcFd);
  CrashReporter::SetNotificationPipeForChild(crashFd);
  CrashReporter::SetCrashAnnotationPipeForChild(crashAnnotationFd);
  IPC::Channel::SetClientChannelFd(ipcFd);
}
#endif // defined(MOZ_WIDGET_ANDROID)

@ -29,7 +29,7 @@
"collection_repair.js": ["getRepairRequestor", "getAllRepairRequestors", "CollectionRepairRequestor", "getRepairResponder", "CollectionRepairResponder"],
"collection_validator.js": ["CollectionValidator", "CollectionProblemData"],
"Console.jsm": ["console", "ConsoleAPI"],
"constants.js": ["WEAVE_VERSION", "SYNC_API_VERSION", "STORAGE_VERSION", "PREFS_BRANCH", "DEFAULT_KEYBUNDLE_NAME", "SYNC_KEY_ENCODED_LENGTH", "SYNC_KEY_DECODED_LENGTH", "NO_SYNC_NODE_INTERVAL", "MAX_ERROR_COUNT_BEFORE_BACKOFF", "MINIMUM_BACKOFF_INTERVAL", "MAXIMUM_BACKOFF_INTERVAL", "HMAC_EVENT_INTERVAL", "MASTER_PASSWORD_LOCKED_RETRY_INTERVAL", "DEFAULT_GUID_FETCH_BATCH_SIZE", "DEFAULT_DOWNLOAD_BATCH_SIZE", "SINGLE_USER_THRESHOLD", "MULTI_DEVICE_THRESHOLD", "SCORE_INCREMENT_SMALL", "SCORE_INCREMENT_MEDIUM", "SCORE_INCREMENT_XLARGE", "SCORE_UPDATE_DELAY", "IDLE_OBSERVER_BACK_DELAY", "URI_LENGTH_MAX", "MAX_HISTORY_UPLOAD", "MAX_HISTORY_DOWNLOAD", "STATUS_OK", "SYNC_FAILED", "LOGIN_FAILED", "SYNC_FAILED_PARTIAL", "CLIENT_NOT_CONFIGURED", "STATUS_DISABLED", "MASTER_PASSWORD_LOCKED", "LOGIN_SUCCEEDED", "SYNC_SUCCEEDED", "ENGINE_SUCCEEDED", "LOGIN_FAILED_NO_USERNAME", "LOGIN_FAILED_NO_PASSPHRASE", "LOGIN_FAILED_NETWORK_ERROR", "LOGIN_FAILED_SERVER_ERROR", "LOGIN_FAILED_INVALID_PASSPHRASE", "LOGIN_FAILED_LOGIN_REJECTED", "METARECORD_DOWNLOAD_FAIL", "VERSION_OUT_OF_DATE", "CREDENTIALS_CHANGED", "ABORT_SYNC_COMMAND", "NO_SYNC_NODE_FOUND", "OVER_QUOTA", "PROLONGED_SYNC_FAILURE", "SERVER_MAINTENANCE", "RESPONSE_OVER_QUOTA", "ENGINE_UPLOAD_FAIL", "ENGINE_DOWNLOAD_FAIL", "ENGINE_UNKNOWN_FAIL", "ENGINE_APPLY_FAIL", "ENGINE_BATCH_INTERRUPTED", "kSyncMasterPasswordLocked", "kSyncWeaveDisabled", "kSyncNetworkOffline", "kSyncBackoffNotMet", "kFirstSyncChoiceNotMade", "kSyncNotConfigured", "kFirefoxShuttingDown", "DEVICE_TYPE_DESKTOP", "DEVICE_TYPE_MOBILE", "SQLITE_MAX_VARIABLE_NUMBER"],
"constants.js": ["WEAVE_VERSION", "SYNC_API_VERSION", "STORAGE_VERSION", "PREFS_BRANCH", "DEFAULT_KEYBUNDLE_NAME", "SYNC_KEY_ENCODED_LENGTH", "SYNC_KEY_DECODED_LENGTH", "NO_SYNC_NODE_INTERVAL", "MAX_ERROR_COUNT_BEFORE_BACKOFF", "MINIMUM_BACKOFF_INTERVAL", "MAXIMUM_BACKOFF_INTERVAL", "HMAC_EVENT_INTERVAL", "MASTER_PASSWORD_LOCKED_RETRY_INTERVAL", "DEFAULT_GUID_FETCH_BATCH_SIZE", "DEFAULT_DOWNLOAD_BATCH_SIZE", "SINGLE_USER_THRESHOLD", "MULTI_DEVICE_THRESHOLD", "SCORE_INCREMENT_SMALL", "SCORE_INCREMENT_MEDIUM", "SCORE_INCREMENT_XLARGE", "SCORE_UPDATE_DELAY", "IDLE_OBSERVER_BACK_DELAY", "URI_LENGTH_MAX", "MAX_HISTORY_UPLOAD", "MAX_HISTORY_DOWNLOAD", "STATUS_OK", "SYNC_FAILED", "LOGIN_FAILED", "SYNC_FAILED_PARTIAL", "CLIENT_NOT_CONFIGURED", "STATUS_DISABLED", "MASTER_PASSWORD_LOCKED", "LOGIN_SUCCEEDED", "SYNC_SUCCEEDED", "ENGINE_SUCCEEDED", "LOGIN_FAILED_NO_USERNAME", "LOGIN_FAILED_NO_PASSPHRASE", "LOGIN_FAILED_NETWORK_ERROR", "LOGIN_FAILED_SERVER_ERROR", "LOGIN_FAILED_INVALID_PASSPHRASE", "LOGIN_FAILED_LOGIN_REJECTED", "METARECORD_DOWNLOAD_FAIL", "VERSION_OUT_OF_DATE", "CREDENTIALS_CHANGED", "ABORT_SYNC_COMMAND", "NO_SYNC_NODE_FOUND", "OVER_QUOTA", "SERVER_MAINTENANCE", "RESPONSE_OVER_QUOTA", "ENGINE_UPLOAD_FAIL", "ENGINE_DOWNLOAD_FAIL", "ENGINE_UNKNOWN_FAIL", "ENGINE_APPLY_FAIL", "ENGINE_BATCH_INTERRUPTED", "kSyncMasterPasswordLocked", "kSyncWeaveDisabled", "kSyncNetworkOffline", "kSyncBackoffNotMet", "kFirstSyncChoiceNotMade", "kSyncNotConfigured", "kFirefoxShuttingDown", "DEVICE_TYPE_DESKTOP", "DEVICE_TYPE_MOBILE", "SQLITE_MAX_VARIABLE_NUMBER"],
"Constants.jsm": ["Roles", "Events", "Relations", "Filters", "States", "Prefilters"],
"ContactDB.jsm": ["ContactDB", "DB_NAME", "STORE_NAME", "SAVED_GETALL_STORE_NAME", "REVISION_STORE", "DB_VERSION"],
"content-server.jsm": ["init"],

@ -398,7 +398,7 @@ XRE_API(const char*,

#if defined(MOZ_WIDGET_ANDROID)
XRE_API(void,
        XRE_SetAndroidChildFds, (JNIEnv* env, int crashFd, int ipcFd, int crashAnnotationFd))
        XRE_SetAndroidChildFds, (JNIEnv* env, int ipcFd, int crashFd, int crashAnnotationFd))
#endif // defined(MOZ_WIDGET_ANDROID)

XRE_API(void,
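
These hunks swap the order of the adjacent `ipcFd` and `crashFd` int parameters consistently across the declaration, the definition, and every caller. As an aside, a hypothetical sketch of one way to make that kind of positional mix-up harder (not what the patch does, which keeps plain ints) is to pass the descriptors as named fields:

# Hypothetical, illustrative sketch: group related descriptors so call sites
# cannot swap them silently by position.
from dataclasses import dataclass

@dataclass
class ChildFds:
    ipc_fd: int
    crash_fd: int
    crash_annotation_fd: int

def set_android_child_fds(fds: ChildFds):
    # Consumers read named fields, so argument order no longer matters.
    print("IPC channel fd:", fds.ipc_fd)
    print("crash reporter pipe fd:", fds.crash_fd)
    print("crash annotation pipe fd:", fds.crash_annotation_fd)

set_android_child_fds(ChildFds(ipc_fd=3, crash_fd=4, crash_annotation_fd=5))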

@ -102,6 +102,38 @@ def enum(*names):
    return Foo()


# List with constant time index() and contains() methods.
class IndexedList(object):
    def __init__(self, iterable):
        self._list = []
        self._index_map = {}
        for i in iterable:
            self.append(i)

    def sort(self):
        self._list.sort()
        self._index_map = {val: i for i, val in enumerate(self._list)}

    def append(self, val):
        self._index_map[val] = len(self._list)
        self._list.append(val)

    def index(self, what):
        return self._index_map[what]

    def __contains__(self, what):
        return what in self._index_map

    def __iter__(self):
        return iter(self._list)

    def __getitem__(self, index):
        return self._list[index]

    def __len__(self):
        return len(self._list)


# Descriptor types as described in the spec
class Type(object):
    """
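
A small usage sketch (not part of the patch) showing what IndexedList buys the typelib code: membership tests and index() become dictionary lookups instead of linear scans, while iteration and indexing still behave like a plain list. The interface names are made up for illustration.

# Usage sketch for the IndexedList class defined above.
interfaces = IndexedList(["nsISupports", "nsIFile", "nsIURI"])

print("nsIFile" in interfaces)        # True, O(1) via the internal dict
print(interfaces.index("nsIURI"))     # 2, also O(1)
print(list(interfaces))               # ['nsISupports', 'nsIFile', 'nsIURI']

interfaces.sort()                     # re-sorts and rebuilds the index map
print(interfaces.index("nsIFile"))    # 0 once the list is sorted alphabetically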

@ -1150,7 +1182,7 @@ class Typelib(object):

        """
        self.version = version
        self.interfaces = list(interfaces)
        self.interfaces = IndexedList(interfaces)
        self.annotations = list(annotations)
        self.filename = None