зеркало из https://github.com/mozilla/gecko-dev.git
merge autoland to mozilla-central a=merge
This commit is contained in:
Коммит
2b59ae9457
|
@ -38,10 +38,11 @@ add_task(function* () {
|
|||
}
|
||||
|
||||
for (let url of aboutURLs) {
|
||||
info("Loading about:" + url);
|
||||
let tab = gBrowser.addTab("about:"+url, {userContextId: 1});
|
||||
yield BrowserTestUtils.browserLoaded(tab.linkedBrowser);
|
||||
|
||||
ok(true);
|
||||
ok(true, "Done loading about:" + url);
|
||||
|
||||
yield BrowserTestUtils.removeTab(tab);
|
||||
}
|
||||
|
|
|
@ -128,6 +128,8 @@ skip-if = os == "mac" # Bug 1245996 : click on scrollbar not working on OSX
|
|||
[browser_rules_edit-selector_07.js]
|
||||
[browser_rules_edit-selector_08.js]
|
||||
[browser_rules_edit-selector_09.js]
|
||||
[browser_rules_edit-selector_10.js]
|
||||
[browser_rules_edit-selector_11.js]
|
||||
[browser_rules_edit-value-after-name_01.js]
|
||||
[browser_rules_edit-value-after-name_02.js]
|
||||
[browser_rules_edit-value-after-name_03.js]
|
||||
|
|
|
@ -0,0 +1,64 @@
|
|||
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
"use strict";
|
||||
|
||||
// Regression test for bug 1293616: make sure that editing a selector
|
||||
// keeps the rule in the proper position.
|
||||
|
||||
const TEST_URI = `
|
||||
<style type="text/css">
|
||||
#testid span, #testid p {
|
||||
background: aqua;
|
||||
}
|
||||
span {
|
||||
background: fuchsia;
|
||||
}
|
||||
</style>
|
||||
<div id="testid">
|
||||
<span class="pickme">
|
||||
Styled Node
|
||||
</span>
|
||||
</div>
|
||||
`;
|
||||
|
||||
add_task(function* () {
|
||||
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
|
||||
let {inspector, view} = yield openRuleView();
|
||||
yield selectNode(".pickme", inspector);
|
||||
yield testEditSelector(view);
|
||||
});
|
||||
|
||||
function* testEditSelector(view) {
|
||||
let ruleEditor = getRuleViewRuleEditor(view, 1);
|
||||
let editor = yield focusEditableField(view, ruleEditor.selectorText);
|
||||
|
||||
editor.input.value = "#testid span";
|
||||
let onRuleViewChanged = once(view, "ruleview-changed");
|
||||
EventUtils.synthesizeKey("VK_RETURN", {});
|
||||
yield onRuleViewChanged;
|
||||
|
||||
// Escape the new property editor after editing the selector
|
||||
let onBlur = once(view.styleDocument.activeElement, "blur");
|
||||
EventUtils.synthesizeKey("VK_ESCAPE", {}, view.styleWindow);
|
||||
yield onBlur;
|
||||
|
||||
// Get the new rule editor that replaced the original
|
||||
ruleEditor = getRuleViewRuleEditor(view, 1);
|
||||
|
||||
info("Check that the correct rules are visible");
|
||||
is(view._elementStyle.rules.length, 3, "Should have 3 rules.");
|
||||
is(ruleEditor.element.getAttribute("unmatched"), "false", "Rule editor is matched.");
|
||||
|
||||
let props = ruleEditor.rule.textProps;
|
||||
is(props.length, 1, "Rule has correct number of properties");
|
||||
is(props[0].name, "background", "Found background property");
|
||||
ok(!props[0].overridden, "Background property is not overridden");
|
||||
|
||||
ruleEditor = getRuleViewRuleEditor(view, 2);
|
||||
props = ruleEditor.rule.textProps;
|
||||
is(props.length, 1, "Rule has correct number of properties");
|
||||
is(props[0].name, "background", "Found background property");
|
||||
ok(props[0].overridden, "Background property is overridden");
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
"use strict";
|
||||
|
||||
// Regression test for bug 1293616, where editing a selector should
|
||||
// change the relative priority of the rule.
|
||||
|
||||
const TEST_URI = `
|
||||
<style type="text/css">
|
||||
#testid {
|
||||
background: aqua;
|
||||
}
|
||||
.pickme {
|
||||
background: seagreen;
|
||||
}
|
||||
span {
|
||||
background: fuchsia;
|
||||
}
|
||||
</style>
|
||||
<div>
|
||||
<span id="testid" class="pickme">
|
||||
Styled Node
|
||||
</span>
|
||||
</div>
|
||||
`;
|
||||
|
||||
add_task(function* () {
|
||||
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
|
||||
let {inspector, view} = yield openRuleView();
|
||||
yield selectNode(".pickme", inspector);
|
||||
yield testEditSelector(view);
|
||||
});
|
||||
|
||||
function* testEditSelector(view) {
|
||||
let ruleEditor = getRuleViewRuleEditor(view, 1);
|
||||
let editor = yield focusEditableField(view, ruleEditor.selectorText);
|
||||
|
||||
editor.input.value = ".pickme";
|
||||
let onRuleViewChanged = once(view, "ruleview-changed");
|
||||
EventUtils.synthesizeKey("VK_RETURN", {});
|
||||
yield onRuleViewChanged;
|
||||
|
||||
// Escape the new property editor after editing the selector
|
||||
let onBlur = once(view.styleDocument.activeElement, "blur");
|
||||
EventUtils.synthesizeKey("VK_ESCAPE", {}, view.styleWindow);
|
||||
yield onBlur;
|
||||
|
||||
// Get the new rule editor that replaced the original
|
||||
ruleEditor = getRuleViewRuleEditor(view, 1);
|
||||
|
||||
info("Check that the correct rules are visible");
|
||||
is(view._elementStyle.rules.length, 4, "Should have 4 rules.");
|
||||
is(ruleEditor.element.getAttribute("unmatched"), "false", "Rule editor is matched.");
|
||||
|
||||
let props = ruleEditor.rule.textProps;
|
||||
is(props.length, 1, "Rule has correct number of properties");
|
||||
is(props[0].name, "background", "Found background property");
|
||||
is(props[0].value, "aqua", "Background property is aqua");
|
||||
ok(props[0].overridden, "Background property is overridden");
|
||||
|
||||
ruleEditor = getRuleViewRuleEditor(view, 2);
|
||||
props = ruleEditor.rule.textProps;
|
||||
is(props.length, 1, "Rule has correct number of properties");
|
||||
is(props[0].name, "background", "Found background property");
|
||||
is(props[0].value, "seagreen", "Background property is seagreen");
|
||||
ok(!props[0].overridden, "Background property is not overridden");
|
||||
}
|
|
@ -27,6 +27,7 @@ const {
|
|||
const promise = require("promise");
|
||||
const Services = require("Services");
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
const {Task} = require("devtools/shared/task");
|
||||
|
||||
const STYLE_INSPECTOR_PROPERTIES = "devtools-shared/locale/styleinspector.properties";
|
||||
const {LocalizationHelper} = require("devtools/shared/l10n");
|
||||
|
@ -517,7 +518,7 @@ RuleEditor.prototype = {
|
|||
* @param {Number} direction
|
||||
* The move focus direction number.
|
||||
*/
|
||||
_onSelectorDone: function (value, commit, direction) {
|
||||
_onSelectorDone: Task.async(function* (value, commit, direction) {
|
||||
if (!commit || this.isEditing || value === "" ||
|
||||
value === this.rule.selectorText) {
|
||||
return;
|
||||
|
@ -531,16 +532,28 @@ RuleEditor.prototype = {
|
|||
|
||||
this.isEditing = true;
|
||||
|
||||
this.rule.domRule.modifySelector(element, value).then(response => {
|
||||
this.isEditing = false;
|
||||
try {
|
||||
let response = yield this.rule.domRule.modifySelector(element, value);
|
||||
|
||||
if (!supportsUnmatchedRules) {
|
||||
this.isEditing = false;
|
||||
|
||||
if (response) {
|
||||
this.ruleView.refreshPanel();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// We recompute the list of applied styles, because editing a
|
||||
// selector might cause this rule's position to change.
|
||||
let applied = yield elementStyle.pageStyle.getApplied(element, {
|
||||
inherited: true,
|
||||
matchedSelectors: true,
|
||||
filter: elementStyle.showUserAgentStyles ? "ua" : undefined
|
||||
});
|
||||
|
||||
this.isEditing = false;
|
||||
|
||||
let {ruleProps, isMatching} = response;
|
||||
if (!ruleProps) {
|
||||
// Notify for changes, even when nothing changes,
|
||||
|
@ -554,11 +567,25 @@ RuleEditor.prototype = {
|
|||
let editor = new RuleEditor(ruleView, newRule);
|
||||
let rules = elementStyle.rules;
|
||||
|
||||
rules.splice(rules.indexOf(this.rule), 1);
|
||||
rules.push(newRule);
|
||||
let newRuleIndex = applied.findIndex((r) => r.rule == ruleProps.rule);
|
||||
let oldIndex = rules.indexOf(this.rule);
|
||||
|
||||
// If the selector no longer matches, then we leave the rule in
|
||||
// the same relative position.
|
||||
if (newRuleIndex === -1) {
|
||||
newRuleIndex = oldIndex;
|
||||
}
|
||||
|
||||
// Remove the old rule and insert the new rule.
|
||||
rules.splice(oldIndex, 1);
|
||||
rules.splice(newRuleIndex, 0, newRule);
|
||||
elementStyle._changed();
|
||||
elementStyle.markOverriddenAll();
|
||||
|
||||
// We install the new editor in place of the old -- you might
|
||||
// think we would replicate the list-modification logic above,
|
||||
// but that is complicated due to the way the UI installs
|
||||
// pseudo-element rules and the like.
|
||||
this.element.parentNode.replaceChild(editor.element, this.element);
|
||||
|
||||
// Remove highlight for modified selector
|
||||
|
@ -568,11 +595,11 @@ RuleEditor.prototype = {
|
|||
}
|
||||
|
||||
editor._moveSelectorFocus(direction);
|
||||
}).then(null, err => {
|
||||
} catch (err) {
|
||||
this.isEditing = false;
|
||||
promiseWarn(err);
|
||||
});
|
||||
},
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Handle moving the focus change after a tab or return keypress in the
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
"use strict";
|
||||
|
||||
// The panel's window global is an EventEmitter firing the following events:
|
||||
const EVENTS = {
|
||||
// When the monitored target begins and finishes navigating.
|
||||
TARGET_WILL_NAVIGATE: "NetMonitor:TargetWillNavigate",
|
||||
TARGET_DID_NAVIGATE: "NetMonitor:TargetNavigate",
|
||||
|
||||
// When a network or timeline event is received.
|
||||
// See https://developer.mozilla.org/docs/Tools/Web_Console/remoting for
|
||||
// more information about what each packet is supposed to deliver.
|
||||
NETWORK_EVENT: "NetMonitor:NetworkEvent",
|
||||
TIMELINE_EVENT: "NetMonitor:TimelineEvent",
|
||||
|
||||
// When a network event is added to the view
|
||||
REQUEST_ADDED: "NetMonitor:RequestAdded",
|
||||
|
||||
// When request headers begin and finish receiving.
|
||||
UPDATING_REQUEST_HEADERS: "NetMonitor:NetworkEventUpdating:RequestHeaders",
|
||||
RECEIVED_REQUEST_HEADERS: "NetMonitor:NetworkEventUpdated:RequestHeaders",
|
||||
|
||||
// When request cookies begin and finish receiving.
|
||||
UPDATING_REQUEST_COOKIES: "NetMonitor:NetworkEventUpdating:RequestCookies",
|
||||
RECEIVED_REQUEST_COOKIES: "NetMonitor:NetworkEventUpdated:RequestCookies",
|
||||
|
||||
// When request post data begins and finishes receiving.
|
||||
UPDATING_REQUEST_POST_DATA: "NetMonitor:NetworkEventUpdating:RequestPostData",
|
||||
RECEIVED_REQUEST_POST_DATA: "NetMonitor:NetworkEventUpdated:RequestPostData",
|
||||
|
||||
// When security information begins and finishes receiving.
|
||||
UPDATING_SECURITY_INFO: "NetMonitor::NetworkEventUpdating:SecurityInfo",
|
||||
RECEIVED_SECURITY_INFO: "NetMonitor::NetworkEventUpdated:SecurityInfo",
|
||||
|
||||
// When response headers begin and finish receiving.
|
||||
UPDATING_RESPONSE_HEADERS: "NetMonitor:NetworkEventUpdating:ResponseHeaders",
|
||||
RECEIVED_RESPONSE_HEADERS: "NetMonitor:NetworkEventUpdated:ResponseHeaders",
|
||||
|
||||
// When response cookies begin and finish receiving.
|
||||
UPDATING_RESPONSE_COOKIES: "NetMonitor:NetworkEventUpdating:ResponseCookies",
|
||||
RECEIVED_RESPONSE_COOKIES: "NetMonitor:NetworkEventUpdated:ResponseCookies",
|
||||
|
||||
// When event timings begin and finish receiving.
|
||||
UPDATING_EVENT_TIMINGS: "NetMonitor:NetworkEventUpdating:EventTimings",
|
||||
RECEIVED_EVENT_TIMINGS: "NetMonitor:NetworkEventUpdated:EventTimings",
|
||||
|
||||
// When response content begins, updates and finishes receiving.
|
||||
STARTED_RECEIVING_RESPONSE: "NetMonitor:NetworkEventUpdating:ResponseStart",
|
||||
UPDATING_RESPONSE_CONTENT: "NetMonitor:NetworkEventUpdating:ResponseContent",
|
||||
RECEIVED_RESPONSE_CONTENT: "NetMonitor:NetworkEventUpdated:ResponseContent",
|
||||
|
||||
// When the request post params are displayed in the UI.
|
||||
REQUEST_POST_PARAMS_DISPLAYED: "NetMonitor:RequestPostParamsAvailable",
|
||||
|
||||
// When the response body is displayed in the UI.
|
||||
RESPONSE_BODY_DISPLAYED: "NetMonitor:ResponseBodyAvailable",
|
||||
|
||||
// When the html response preview is displayed in the UI.
|
||||
RESPONSE_HTML_PREVIEW_DISPLAYED: "NetMonitor:ResponseHtmlPreviewAvailable",
|
||||
|
||||
// When the image response thumbnail is displayed in the UI.
|
||||
RESPONSE_IMAGE_THUMBNAIL_DISPLAYED:
|
||||
"NetMonitor:ResponseImageThumbnailAvailable",
|
||||
|
||||
// When a tab is selected in the NetworkDetailsView and subsequently rendered.
|
||||
TAB_UPDATED: "NetMonitor:TabUpdated",
|
||||
|
||||
// Fired when Sidebar has finished being populated.
|
||||
SIDEBAR_POPULATED: "NetMonitor:SidebarPopulated",
|
||||
|
||||
// Fired when NetworkDetailsView has finished being populated.
|
||||
NETWORKDETAILSVIEW_POPULATED: "NetMonitor:NetworkDetailsViewPopulated",
|
||||
|
||||
// Fired when CustomRequestView has finished being populated.
|
||||
CUSTOMREQUESTVIEW_POPULATED: "NetMonitor:CustomRequestViewPopulated",
|
||||
|
||||
// Fired when charts have been displayed in the PerformanceStatisticsView.
|
||||
PLACEHOLDER_CHARTS_DISPLAYED: "NetMonitor:PlaceholderChartsDisplayed",
|
||||
PRIMED_CACHE_CHART_DISPLAYED: "NetMonitor:PrimedChartsDisplayed",
|
||||
EMPTY_CACHE_CHART_DISPLAYED: "NetMonitor:EmptyChartsDisplayed",
|
||||
|
||||
// Fired once the NetMonitorController establishes a connection to the debug
|
||||
// target.
|
||||
CONNECTED: "connected",
|
||||
};
|
||||
|
||||
exports.EVENTS = EVENTS;
|
|
@ -8,12 +8,14 @@ DIRS += [
|
|||
]
|
||||
|
||||
DevToolsModules(
|
||||
'events.js',
|
||||
'filter-predicates.js',
|
||||
'l10n.js',
|
||||
'panel.js',
|
||||
'prefs.js',
|
||||
'request-utils.js',
|
||||
'requests-menu-view.js',
|
||||
'sort-predicates.js',
|
||||
)
|
||||
|
||||
BROWSER_CHROME_MANIFESTS += ['test/browser.ini']
|
||||
|
|
|
@ -9,89 +9,6 @@
|
|||
|
||||
var { utils: Cu } = Components;
|
||||
|
||||
// The panel's window global is an EventEmitter firing the following events:
|
||||
const EVENTS = {
|
||||
// When the monitored target begins and finishes navigating.
|
||||
TARGET_WILL_NAVIGATE: "NetMonitor:TargetWillNavigate",
|
||||
TARGET_DID_NAVIGATE: "NetMonitor:TargetNavigate",
|
||||
|
||||
// When a network or timeline event is received.
|
||||
// See https://developer.mozilla.org/docs/Tools/Web_Console/remoting for
|
||||
// more information about what each packet is supposed to deliver.
|
||||
NETWORK_EVENT: "NetMonitor:NetworkEvent",
|
||||
TIMELINE_EVENT: "NetMonitor:TimelineEvent",
|
||||
|
||||
// When a network event is added to the view
|
||||
REQUEST_ADDED: "NetMonitor:RequestAdded",
|
||||
|
||||
// When request headers begin and finish receiving.
|
||||
UPDATING_REQUEST_HEADERS: "NetMonitor:NetworkEventUpdating:RequestHeaders",
|
||||
RECEIVED_REQUEST_HEADERS: "NetMonitor:NetworkEventUpdated:RequestHeaders",
|
||||
|
||||
// When request cookies begin and finish receiving.
|
||||
UPDATING_REQUEST_COOKIES: "NetMonitor:NetworkEventUpdating:RequestCookies",
|
||||
RECEIVED_REQUEST_COOKIES: "NetMonitor:NetworkEventUpdated:RequestCookies",
|
||||
|
||||
// When request post data begins and finishes receiving.
|
||||
UPDATING_REQUEST_POST_DATA: "NetMonitor:NetworkEventUpdating:RequestPostData",
|
||||
RECEIVED_REQUEST_POST_DATA: "NetMonitor:NetworkEventUpdated:RequestPostData",
|
||||
|
||||
// When security information begins and finishes receiving.
|
||||
UPDATING_SECURITY_INFO: "NetMonitor::NetworkEventUpdating:SecurityInfo",
|
||||
RECEIVED_SECURITY_INFO: "NetMonitor::NetworkEventUpdated:SecurityInfo",
|
||||
|
||||
// When response headers begin and finish receiving.
|
||||
UPDATING_RESPONSE_HEADERS: "NetMonitor:NetworkEventUpdating:ResponseHeaders",
|
||||
RECEIVED_RESPONSE_HEADERS: "NetMonitor:NetworkEventUpdated:ResponseHeaders",
|
||||
|
||||
// When response cookies begin and finish receiving.
|
||||
UPDATING_RESPONSE_COOKIES: "NetMonitor:NetworkEventUpdating:ResponseCookies",
|
||||
RECEIVED_RESPONSE_COOKIES: "NetMonitor:NetworkEventUpdated:ResponseCookies",
|
||||
|
||||
// When event timings begin and finish receiving.
|
||||
UPDATING_EVENT_TIMINGS: "NetMonitor:NetworkEventUpdating:EventTimings",
|
||||
RECEIVED_EVENT_TIMINGS: "NetMonitor:NetworkEventUpdated:EventTimings",
|
||||
|
||||
// When response content begins, updates and finishes receiving.
|
||||
STARTED_RECEIVING_RESPONSE: "NetMonitor:NetworkEventUpdating:ResponseStart",
|
||||
UPDATING_RESPONSE_CONTENT: "NetMonitor:NetworkEventUpdating:ResponseContent",
|
||||
RECEIVED_RESPONSE_CONTENT: "NetMonitor:NetworkEventUpdated:ResponseContent",
|
||||
|
||||
// When the request post params are displayed in the UI.
|
||||
REQUEST_POST_PARAMS_DISPLAYED: "NetMonitor:RequestPostParamsAvailable",
|
||||
|
||||
// When the response body is displayed in the UI.
|
||||
RESPONSE_BODY_DISPLAYED: "NetMonitor:ResponseBodyAvailable",
|
||||
|
||||
// When the html response preview is displayed in the UI.
|
||||
RESPONSE_HTML_PREVIEW_DISPLAYED: "NetMonitor:ResponseHtmlPreviewAvailable",
|
||||
|
||||
// When the image response thumbnail is displayed in the UI.
|
||||
RESPONSE_IMAGE_THUMBNAIL_DISPLAYED:
|
||||
"NetMonitor:ResponseImageThumbnailAvailable",
|
||||
|
||||
// When a tab is selected in the NetworkDetailsView and subsequently rendered.
|
||||
TAB_UPDATED: "NetMonitor:TabUpdated",
|
||||
|
||||
// Fired when Sidebar has finished being populated.
|
||||
SIDEBAR_POPULATED: "NetMonitor:SidebarPopulated",
|
||||
|
||||
// Fired when NetworkDetailsView has finished being populated.
|
||||
NETWORKDETAILSVIEW_POPULATED: "NetMonitor:NetworkDetailsViewPopulated",
|
||||
|
||||
// Fired when CustomRequestView has finished being populated.
|
||||
CUSTOMREQUESTVIEW_POPULATED: "NetMonitor:CustomRequestViewPopulated",
|
||||
|
||||
// Fired when charts have been displayed in the PerformanceStatisticsView.
|
||||
PLACEHOLDER_CHARTS_DISPLAYED: "NetMonitor:PlaceholderChartsDisplayed",
|
||||
PRIMED_CACHE_CHART_DISPLAYED: "NetMonitor:PrimedChartsDisplayed",
|
||||
EMPTY_CACHE_CHART_DISPLAYED: "NetMonitor:EmptyChartsDisplayed",
|
||||
|
||||
// Fired once the NetMonitorController establishes a connection to the debug
|
||||
// target.
|
||||
CONNECTED: "connected",
|
||||
};
|
||||
|
||||
// Descriptions for what this frontend is currently doing.
|
||||
const ACTIVITY_TYPE = {
|
||||
// Standing by and handling requests normally.
|
||||
|
@ -124,6 +41,7 @@ const Editor = require("devtools/client/sourceeditor/editor");
|
|||
const {TimelineFront} = require("devtools/shared/fronts/timeline");
|
||||
const {Task} = require("devtools/shared/task");
|
||||
const {Prefs} = require("./prefs");
|
||||
const {EVENTS} = require("./events");
|
||||
|
||||
XPCOMUtils.defineConstant(this, "EVENTS", EVENTS);
|
||||
XPCOMUtils.defineConstant(this, "ACTIVITY_TYPE", ACTIVITY_TYPE);
|
||||
|
|
|
@ -19,7 +19,11 @@ const { testing: isTesting } = require("devtools/shared/flags");
|
|||
const {ViewHelpers, Heritage} = require("devtools/client/shared/widgets/view-helpers");
|
||||
const {PluralForm} = require("devtools/shared/plural-form");
|
||||
const {Filters} = require("./filter-predicates");
|
||||
const {getFormDataSections, formDataURI, writeHeaderText, getKeyWithEvent} = require("./request-utils");
|
||||
const {getFormDataSections,
|
||||
formDataURI,
|
||||
writeHeaderText,
|
||||
getKeyWithEvent,
|
||||
getUriHostPort} = require("./request-utils");
|
||||
const {L10N} = require("./l10n");
|
||||
const {RequestsMenuView} = require("./requests-menu-view");
|
||||
|
||||
|
@ -1315,7 +1319,7 @@ NetworkDetailsView.prototype = {
|
|||
setValue("#security-ciphersuite-value", securityInfo.cipherSuite);
|
||||
|
||||
// Host header
|
||||
let domain = NetMonitorView.RequestsMenu._getUriHostPort(url);
|
||||
let domain = getUriHostPort(url);
|
||||
let hostHeader = L10N.getFormatStr("netmonitor.security.hostHeader",
|
||||
domain);
|
||||
setValue("#security-info-host-header", hostHeader);
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
const { Ci } = require("chrome");
|
||||
const { KeyCodes } = require("devtools/client/shared/keycodes");
|
||||
const { Task } = require("devtools/shared/task");
|
||||
const NetworkHelper = require("devtools/shared/webconsole/network-helper");
|
||||
|
||||
/**
|
||||
* Helper method to get a wrapped function which can be bound to as
|
||||
|
@ -108,6 +109,48 @@ exports.writeHeaderText = function (headers) {
|
|||
return headers.map(({name, value}) => name + ": " + value).join("\n");
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper for getting an abbreviated string for a mime type.
|
||||
*
|
||||
* @param string mimeType
|
||||
* @return string
|
||||
*/
|
||||
exports.getAbbreviatedMimeType = function (mimeType) {
|
||||
if (!mimeType) {
|
||||
return "";
|
||||
}
|
||||
return (mimeType.split(";")[0].split("/")[1] || "").split("+")[0];
|
||||
};
|
||||
|
||||
/**
|
||||
* Helpers for getting details about an nsIURL.
|
||||
*
|
||||
* @param nsIURL | string url
|
||||
* @return string
|
||||
*/
|
||||
exports.getUriNameWithQuery = function (url) {
|
||||
if (!(url instanceof Ci.nsIURL)) {
|
||||
url = NetworkHelper.nsIURL(url);
|
||||
}
|
||||
|
||||
let name = NetworkHelper.convertToUnicode(
|
||||
unescape(url.fileName || url.filePath || "/"));
|
||||
let query = NetworkHelper.convertToUnicode(unescape(url.query));
|
||||
|
||||
return name + (query ? "?" + query : "");
|
||||
};
|
||||
|
||||
exports.getUriHostPort = function (url) {
|
||||
if (!(url instanceof Ci.nsIURL)) {
|
||||
url = NetworkHelper.nsIURL(url);
|
||||
}
|
||||
return NetworkHelper.convertToUnicode(unescape(url.hostPort));
|
||||
};
|
||||
|
||||
exports.getUriHost = function (url) {
|
||||
return exports.getUriHostPort(url).replace(/:\d+$/, "");
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert a nsIContentPolicy constant to a display string
|
||||
*/
|
||||
|
|
|
@ -16,9 +16,17 @@ const {gDevTools} = require("devtools/client/framework/devtools");
|
|||
const {Curl, CurlUtils} = require("devtools/client/shared/curl");
|
||||
const {PluralForm} = require("devtools/shared/plural-form");
|
||||
const {Filters, isFreetextMatch} = require("./filter-predicates");
|
||||
const {getFormDataSections, formDataURI, writeHeaderText, getKeyWithEvent,
|
||||
loadCauseString} = require("./request-utils");
|
||||
const {Sorters} = require("./sort-predicates");
|
||||
const {L10N, WEBCONSOLE_L10N} = require("./l10n");
|
||||
const {getFormDataSections,
|
||||
formDataURI,
|
||||
writeHeaderText,
|
||||
getKeyWithEvent,
|
||||
getAbbreviatedMimeType,
|
||||
getUriNameWithQuery,
|
||||
getUriHostPort,
|
||||
getUriHost,
|
||||
loadCauseString} = require("./request-utils");
|
||||
|
||||
loader.lazyServiceGetter(this, "clipboardHelper",
|
||||
"@mozilla.org/widget/clipboardhelper;1", "nsIClipboardHelper");
|
||||
|
@ -92,9 +100,6 @@ function RequestsMenuView() {
|
|||
this._onSwap = this._onSwap.bind(this);
|
||||
this._onResize = this._onResize.bind(this);
|
||||
this._onScroll = this._onScroll.bind(this);
|
||||
this._byFile = this._byFile.bind(this);
|
||||
this._byDomain = this._byDomain.bind(this);
|
||||
this._byType = this._byType.bind(this);
|
||||
this._onSecurityIconClick = this._onSecurityIconClick.bind(this);
|
||||
}
|
||||
|
||||
|
@ -122,7 +127,7 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
$("#requests-menu-contents").addEventListener("scroll", this._onScroll, true);
|
||||
|
||||
Prefs.filters.forEach(type => this.filterOn(type));
|
||||
this.sortContents(this._byTiming);
|
||||
this.sortContents((a, b) => Sorters.waterfall(a.attachment, b.attachment));
|
||||
|
||||
this.allowFocusOnRightClick = true;
|
||||
this.maintainSelectionVisible = true;
|
||||
|
@ -789,65 +794,65 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
switch (type) {
|
||||
case "status":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byStatus);
|
||||
this.sortContents((a, b) => Sorters.status(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byStatus(a, b));
|
||||
this.sortContents((a, b) => -Sorters.status(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "method":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byMethod);
|
||||
this.sortContents((a, b) => Sorters.method(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byMethod(a, b));
|
||||
this.sortContents((a, b) => -Sorters.method(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "file":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byFile);
|
||||
this.sortContents((a, b) => Sorters.file(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byFile(a, b));
|
||||
this.sortContents((a, b) => -Sorters.file(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "domain":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byDomain);
|
||||
this.sortContents((a, b) => Sorters.domain(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byDomain(a, b));
|
||||
this.sortContents((a, b) => -Sorters.domain(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "cause":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byCause);
|
||||
this.sortContents((a, b) => Sorters.cause(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byCause(a, b));
|
||||
this.sortContents((a, b) => -Sorters.cause(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "type":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byType);
|
||||
this.sortContents((a, b) => Sorters.type(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byType(a, b));
|
||||
this.sortContents((a, b) => -Sorters.type(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "transferred":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byTransferred);
|
||||
this.sortContents((a, b) => Sorters.transferred(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byTransferred(a, b));
|
||||
this.sortContents((a, b) => -Sorters.transferred(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "size":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._bySize);
|
||||
this.sortContents((a, b) => Sorters.size(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._bySize(a, b));
|
||||
this.sortContents((a, b) => -Sorters.size(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
case "waterfall":
|
||||
if (direction == "ascending") {
|
||||
this.sortContents(this._byTiming);
|
||||
this.sortContents((a, b) => Sorters.waterfall(a.attachment, b.attachment));
|
||||
} else {
|
||||
this.sortContents((a, b) => !this._byTiming(a, b));
|
||||
this.sortContents((a, b) => -Sorters.waterfall(a.attachment, b.attachment));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -870,76 +875,6 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
this.refreshSummary();
|
||||
},
|
||||
|
||||
/**
|
||||
* Predicates used when sorting items.
|
||||
*
|
||||
* @param object aFirst
|
||||
* The first item used in the comparison.
|
||||
* @param object aSecond
|
||||
* The second item used in the comparison.
|
||||
* @return number
|
||||
* -1 to sort aFirst to a lower index than aSecond
|
||||
* 0 to leave aFirst and aSecond unchanged with respect to each other
|
||||
* 1 to sort aSecond to a lower index than aFirst
|
||||
*/
|
||||
_byTiming: function ({ attachment: first }, { attachment: second }) {
|
||||
return first.startedMillis > second.startedMillis;
|
||||
},
|
||||
|
||||
_byStatus: function ({ attachment: first }, { attachment: second }) {
|
||||
return first.status == second.status
|
||||
? first.startedMillis > second.startedMillis
|
||||
: first.status > second.status;
|
||||
},
|
||||
|
||||
_byMethod: function ({ attachment: first }, { attachment: second }) {
|
||||
return first.method == second.method
|
||||
? first.startedMillis > second.startedMillis
|
||||
: first.method > second.method;
|
||||
},
|
||||
|
||||
_byFile: function ({ attachment: first }, { attachment: second }) {
|
||||
let firstUrl = this._getUriNameWithQuery(first.url).toLowerCase();
|
||||
let secondUrl = this._getUriNameWithQuery(second.url).toLowerCase();
|
||||
return firstUrl == secondUrl
|
||||
? first.startedMillis > second.startedMillis
|
||||
: firstUrl > secondUrl;
|
||||
},
|
||||
|
||||
_byDomain: function ({ attachment: first }, { attachment: second }) {
|
||||
let firstDomain = this._getUriHostPort(first.url).toLowerCase();
|
||||
let secondDomain = this._getUriHostPort(second.url).toLowerCase();
|
||||
return firstDomain == secondDomain
|
||||
? first.startedMillis > second.startedMillis
|
||||
: firstDomain > secondDomain;
|
||||
},
|
||||
|
||||
_byCause: function ({ attachment: first }, { attachment: second }) {
|
||||
let firstCause = loadCauseString(first.cause.type);
|
||||
let secondCause = loadCauseString(second.cause.type);
|
||||
|
||||
return firstCause == secondCause
|
||||
? first.startedMillis > second.startedMillis
|
||||
: firstCause > secondCause;
|
||||
},
|
||||
|
||||
_byType: function ({ attachment: first }, { attachment: second }) {
|
||||
let firstType = this._getAbbreviatedMimeType(first.mimeType).toLowerCase();
|
||||
let secondType = this._getAbbreviatedMimeType(second.mimeType).toLowerCase();
|
||||
|
||||
return firstType == secondType
|
||||
? first.startedMillis > second.startedMillis
|
||||
: firstType > secondType;
|
||||
},
|
||||
|
||||
_byTransferred: function ({ attachment: first }, { attachment: second }) {
|
||||
return first.transferredSize > second.transferredSize;
|
||||
},
|
||||
|
||||
_bySize: function ({ attachment: first }, { attachment: second }) {
|
||||
return first.contentSize > second.contentSize;
|
||||
},
|
||||
|
||||
/**
|
||||
* Refreshes the status displayed in this container's footer, providing
|
||||
* concise information about all requests.
|
||||
|
@ -1345,9 +1280,9 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
// User input may not make a well-formed url yet.
|
||||
break;
|
||||
}
|
||||
let nameWithQuery = this._getUriNameWithQuery(uri);
|
||||
let hostPort = this._getUriHostPort(uri);
|
||||
let host = this._getUriHost(uri);
|
||||
let nameWithQuery = getUriNameWithQuery(uri);
|
||||
let hostPort = getUriHostPort(uri);
|
||||
let host = getUriHost(uri);
|
||||
let unicodeUrl = NetworkHelper.convertToUnicode(unescape(uri.spec));
|
||||
|
||||
let file = $(".requests-menu-file", target);
|
||||
|
@ -1464,7 +1399,7 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
break;
|
||||
}
|
||||
case "mimeType": {
|
||||
let type = this._getAbbreviatedMimeType(value);
|
||||
let type = getAbbreviatedMimeType(value);
|
||||
let node = $(".requests-menu-type", target);
|
||||
let text = CONTENT_MIME_TYPE_ABBREVIATIONS[type] || type;
|
||||
node.setAttribute("value", text);
|
||||
|
@ -1993,48 +1928,6 @@ RequestsMenuView.prototype = Heritage.extend(WidgetMethods, {
|
|||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Helpers for getting details about an nsIURL.
|
||||
*
|
||||
* @param nsIURL | string url
|
||||
* @return string
|
||||
*/
|
||||
_getUriNameWithQuery: function (url) {
|
||||
if (!(url instanceof Ci.nsIURL)) {
|
||||
url = NetworkHelper.nsIURL(url);
|
||||
}
|
||||
|
||||
let name = NetworkHelper.convertToUnicode(
|
||||
unescape(url.fileName || url.filePath || "/"));
|
||||
let query = NetworkHelper.convertToUnicode(unescape(url.query));
|
||||
|
||||
return name + (query ? "?" + query : "");
|
||||
},
|
||||
|
||||
_getUriHostPort: function (url) {
|
||||
if (!(url instanceof Ci.nsIURL)) {
|
||||
url = NetworkHelper.nsIURL(url);
|
||||
}
|
||||
return NetworkHelper.convertToUnicode(unescape(url.hostPort));
|
||||
},
|
||||
|
||||
_getUriHost: function (url) {
|
||||
return this._getUriHostPort(url).replace(/:\d+$/, "");
|
||||
},
|
||||
|
||||
/**
|
||||
* Helper for getting an abbreviated string for a mime type.
|
||||
*
|
||||
* @param string mimeType
|
||||
* @return string
|
||||
*/
|
||||
_getAbbreviatedMimeType: function (mimeType) {
|
||||
if (!mimeType) {
|
||||
return "";
|
||||
}
|
||||
return (mimeType.split(";")[0].split("/")[1] || "").split("+")[0];
|
||||
},
|
||||
|
||||
/**
|
||||
* Gets the total number of bytes representing the cumulated content size of
|
||||
* a set of requests. Returns 0 for an empty set.
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
"use strict";
|
||||
|
||||
const { getAbbreviatedMimeType,
|
||||
getUriNameWithQuery,
|
||||
getUriHostPort,
|
||||
loadCauseString } = require("./request-utils");
|
||||
|
||||
/**
|
||||
* Predicates used when sorting items.
|
||||
*
|
||||
* @param object first
|
||||
* The first item used in the comparison.
|
||||
* @param object second
|
||||
* The second item used in the comparison.
|
||||
* @return number
|
||||
* <0 to sort first to a lower index than second
|
||||
* =0 to leave first and second unchanged with respect to each other
|
||||
* >0 to sort second to a lower index than first
|
||||
*/
|
||||
|
||||
function waterfall(first, second) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
|
||||
function status(first, second) {
|
||||
return first.status == second.status
|
||||
? first.startedMillis - second.startedMillis
|
||||
: first.status - second.status;
|
||||
}
|
||||
|
||||
function method(first, second) {
|
||||
if (first.method == second.method) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
return first.method > second.method ? 1 : -1;
|
||||
}
|
||||
|
||||
function file(first, second) {
|
||||
let firstUrl = getUriNameWithQuery(first.url).toLowerCase();
|
||||
let secondUrl = getUriNameWithQuery(second.url).toLowerCase();
|
||||
if (firstUrl == secondUrl) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
return firstUrl > secondUrl ? 1 : -1;
|
||||
}
|
||||
|
||||
function domain(first, second) {
|
||||
let firstDomain = getUriHostPort(first.url).toLowerCase();
|
||||
let secondDomain = getUriHostPort(second.url).toLowerCase();
|
||||
if (firstDomain == secondDomain) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
return firstDomain > secondDomain ? 1 : -1;
|
||||
}
|
||||
|
||||
function cause(first, second) {
|
||||
let firstCause = loadCauseString(first.cause.type);
|
||||
let secondCause = loadCauseString(second.cause.type);
|
||||
if (firstCause == secondCause) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
return firstCause > secondCause ? 1 : -1;
|
||||
}
|
||||
|
||||
function type(first, second) {
|
||||
let firstType = getAbbreviatedMimeType(first.mimeType).toLowerCase();
|
||||
let secondType = getAbbreviatedMimeType(second.mimeType).toLowerCase();
|
||||
if (firstType == secondType) {
|
||||
return first.startedMillis - second.startedMillis;
|
||||
}
|
||||
return firstType > secondType ? 1 : -1;
|
||||
}
|
||||
|
||||
function transferred(first, second) {
|
||||
return first.transferredSize - second.transferredSize;
|
||||
}
|
||||
|
||||
function size(first, second) {
|
||||
return first.contentSize - second.contentSize;
|
||||
}
|
||||
|
||||
exports.Sorters = {
|
||||
status,
|
||||
method,
|
||||
file,
|
||||
domain,
|
||||
cause,
|
||||
type,
|
||||
transferred,
|
||||
size,
|
||||
waterfall,
|
||||
};
|
|
@ -16,6 +16,7 @@ DevToolsModules(
|
|||
'h-split-box.js',
|
||||
'notification-box.css',
|
||||
'notification-box.js',
|
||||
'search-box.js',
|
||||
'sidebar-toggle.css',
|
||||
'sidebar-toggle.js',
|
||||
'stack-trace.js',
|
||||
|
|
|
@ -0,0 +1,107 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
/* global window */
|
||||
|
||||
"use strict";
|
||||
|
||||
const { DOM: dom, createClass, PropTypes } = require("devtools/client/shared/vendor/react");
|
||||
const {KeyShortcuts} = require("devtools/client/shared/key-shortcuts");
|
||||
|
||||
/**
|
||||
* A generic search box component for use across devtools
|
||||
*/
|
||||
module.exports = createClass({
|
||||
displayName: "SearchBox",
|
||||
|
||||
propTypes: {
|
||||
delay: PropTypes.number,
|
||||
keyShortcut: PropTypes.string,
|
||||
onChange: PropTypes.func,
|
||||
placeholder: PropTypes.string,
|
||||
type: PropTypes.string
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
return {
|
||||
value: ""
|
||||
};
|
||||
},
|
||||
|
||||
componentDidMount() {
|
||||
if (!this.props.keyShortcut) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.shortcuts = new KeyShortcuts({
|
||||
window
|
||||
});
|
||||
this.shortcuts.on(this.props.keyShortcut, (name, event) => {
|
||||
event.preventDefault();
|
||||
this.refs.input.focus();
|
||||
});
|
||||
},
|
||||
|
||||
componentWillUnmount() {
|
||||
this.shortcuts.destroy();
|
||||
// Clean up an existing timeout.
|
||||
if (this.searchTimeout) {
|
||||
clearTimeout(this.searchTimeout);
|
||||
}
|
||||
},
|
||||
|
||||
onChange() {
|
||||
if (this.state.value !== this.refs.input.value) {
|
||||
this.setState({ value: this.refs.input.value });
|
||||
}
|
||||
|
||||
if (!this.props.delay) {
|
||||
this.props.onChange(this.state.value);
|
||||
return;
|
||||
}
|
||||
|
||||
// Clean up an existing timeout before creating a new one.
|
||||
if (this.searchTimeout) {
|
||||
clearTimeout(this.searchTimeout);
|
||||
}
|
||||
|
||||
// Execute the search after a timeout. It makes the UX
|
||||
// smoother if the user is typing quickly.
|
||||
this.searchTimeout = setTimeout(() => {
|
||||
this.searchTimeout = null;
|
||||
this.props.onChange(this.state.value);
|
||||
}, this.props.delay);
|
||||
},
|
||||
|
||||
onClearButtonClick() {
|
||||
this.refs.input.value = "";
|
||||
this.onChange();
|
||||
},
|
||||
|
||||
render() {
|
||||
let { type = "search", placeholder } = this.props;
|
||||
let { value } = this.state;
|
||||
let divClassList = ["devtools-searchbox", "has-clear-btn"];
|
||||
let inputClassList = [`devtools-${type}input`];
|
||||
|
||||
if (value !== "") {
|
||||
inputClassList.push("filled");
|
||||
}
|
||||
return dom.div(
|
||||
{ className: divClassList.join(" ") },
|
||||
dom.input({
|
||||
className: inputClassList.join(" "),
|
||||
onChange: this.onChange,
|
||||
placeholder,
|
||||
ref: "input",
|
||||
value
|
||||
}),
|
||||
dom.button({
|
||||
className: "devtools-searchinput-clear",
|
||||
hidden: value == "",
|
||||
onClick: this.onClearButtonClick
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
|
@ -72,34 +72,40 @@ NewConsoleOutputWrapper.prototype = {
|
|||
|
||||
this.body = ReactDOM.render(provider, this.parentNode);
|
||||
},
|
||||
dispatchMessageAdd: function(message, waitForResponse) {
|
||||
let action = actions.messageAdd(message);
|
||||
let messageId = action.message.get("id");
|
||||
batchedMessageAdd(action);
|
||||
|
||||
// Wait for the message to render to resolve with the DOM node.
|
||||
// This is just for backwards compatibility with old tests, and should
|
||||
// be removed once it's not needed anymore.
|
||||
if (waitForResponse) {
|
||||
return new Promise(resolve => {
|
||||
let jsterm = this.jsterm;
|
||||
jsterm.hud.on("new-messages", function onThisMessage(e, messages) {
|
||||
for (let m of messages) {
|
||||
if (m.messageId == messageId) {
|
||||
resolve(m.node);
|
||||
jsterm.hud.off("new-messages", onThisMessage);
|
||||
return;
|
||||
}
|
||||
dispatchMessageAdd: function (message, waitForResponse) {
|
||||
let action = actions.messageAdd(message);
|
||||
batchedMessageAdd(action);
|
||||
|
||||
// Wait for the message to render to resolve with the DOM node.
|
||||
// This is just for backwards compatibility with old tests, and should
|
||||
// be removed once it's not needed anymore.
|
||||
// Can only wait for response if the action contains a valid message.
|
||||
if (waitForResponse && action.message) {
|
||||
let messageId = action.message.get("id");
|
||||
return new Promise(resolve => {
|
||||
let jsterm = this.jsterm;
|
||||
jsterm.hud.on("new-messages", function onThisMessage(e, messages) {
|
||||
for (let m of messages) {
|
||||
if (m.messageId == messageId) {
|
||||
resolve(m.node);
|
||||
jsterm.hud.off("new-messages", onThisMessage);
|
||||
return;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return Promise.resolve();
|
||||
},
|
||||
dispatchMessagesAdd: function(messages) {
|
||||
|
||||
dispatchMessagesAdd: function (messages) {
|
||||
const batchedActions = messages.map(message => actions.messageAdd(message));
|
||||
store.dispatch(actions.batchActions(batchedActions));
|
||||
},
|
||||
dispatchMessagesClear: function() {
|
||||
|
||||
dispatchMessagesClear: function () {
|
||||
store.dispatch(actions.messagesClear());
|
||||
},
|
||||
};
|
||||
|
|
|
@ -11,15 +11,17 @@
|
|||
var {require} = Components.utils.import("resource://devtools/shared/Loader.jsm", {});
|
||||
var {generateCssProperties} = require("devtools/server/actors/css-properties");
|
||||
|
||||
// xpcshell can output extra information, so place some delimiter text between
|
||||
// the output of the css properties database.
|
||||
dump("DEVTOOLS_CSS_DB_DELIMITER");
|
||||
|
||||
// Output JSON
|
||||
dump(JSON.stringify({
|
||||
cssProperties: cssProperties(),
|
||||
pseudoElements: pseudoElements()
|
||||
}));
|
||||
// In a debug build, xpcshell might print extra debugging information,
|
||||
// so we emit a trailing newline and then arrange to just read a
|
||||
// single (long) line of JSON from the output.
|
||||
dump("\n");
|
||||
|
||||
dump("DEVTOOLS_CSS_DB_DELIMITER");
|
||||
|
||||
/*
|
||||
* A list of CSS Properties and their various characteristics. This is used on the
|
||||
|
|
|
@ -3,11 +3,9 @@
|
|||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
"""
|
||||
This script implements the `mach devtools-css-db` command. It runs the C preprocessor
|
||||
on the CSS properties header file to get the list of preferences associated with
|
||||
a specific property, and it runs an xpcshell script that uses inIDOMUtils to query
|
||||
the CSS properties used by the browser. This information is used to generate the
|
||||
properties-db.js file.
|
||||
This script implements the `mach devtools-css-db` command. It runs an xpcshell script
|
||||
that uses inIDOMUtils to query the CSS properties used by the browser. This information
|
||||
is used to generate the properties-db.js file.
|
||||
"""
|
||||
|
||||
import json
|
||||
|
@ -15,7 +13,6 @@ import os
|
|||
import sys
|
||||
import string
|
||||
import subprocess
|
||||
from mozbuild import shellutil
|
||||
from mozbuild.base import (
|
||||
MozbuildObject,
|
||||
MachCommandBase,
|
||||
|
@ -42,46 +39,12 @@ class MachCommands(MachCommandBase):
|
|||
"""Generate the static css properties database for devtools and write it to file."""
|
||||
|
||||
print("Re-generating the css properties database...")
|
||||
preferences = self.get_preferences()
|
||||
db = self.get_properties_db_from_xpcshell()
|
||||
|
||||
self.output_template({
|
||||
'preferences': stringify(preferences),
|
||||
'cssProperties': stringify(db['cssProperties']),
|
||||
'pseudoElements': stringify(db['pseudoElements'])})
|
||||
|
||||
def get_preferences(self):
|
||||
"""Get all of the preferences associated with enabling and disabling a property."""
|
||||
# Build the command to run the preprocessor on PythonCSSProps.h
|
||||
headerPath = resolve_path(self.topsrcdir, 'layout/style/PythonCSSProps.h')
|
||||
|
||||
cpp = self.substs['CPP']
|
||||
|
||||
if not cpp:
|
||||
print("Unable to find the cpp program. Please do a full, non-artifact")
|
||||
print("build and try this again.")
|
||||
sys.exit(1)
|
||||
|
||||
if type(cpp) is list:
|
||||
cmd = cpp
|
||||
else:
|
||||
cmd = shellutil.split(cpp)
|
||||
cmd += shellutil.split(self.substs['ACDEFINES'])
|
||||
cmd.append(headerPath)
|
||||
|
||||
# The preprocessed list takes the following form:
|
||||
# [ (name, prop, id, flags, pref, proptype), ... ]
|
||||
preprocessed = eval(subprocess.check_output(cmd))
|
||||
|
||||
# Map this list
|
||||
# (name, prop, id, flags, pref, proptype) => (name, pref)
|
||||
preferences = [
|
||||
(name, pref)
|
||||
for name, prop, id, flags, pref, proptype in preprocessed
|
||||
if 'CSS_PROPERTY_INTERNAL' not in flags]
|
||||
|
||||
return preferences
|
||||
|
||||
def get_properties_db_from_xpcshell(self):
|
||||
"""Generate the static css properties db for devtools from an xpcshell script."""
|
||||
build = MozbuildObject.from_environment()
|
||||
|
@ -103,9 +66,9 @@ class MachCommands(MachCommandBase):
|
|||
contents = subprocess.check_output([xpcshell_path, '-g', gre_path,
|
||||
'-a', browser_path, script_path],
|
||||
env = sub_env)
|
||||
# Extract just the first line of output, since a debug-build
|
||||
# xpcshell might emit extra output that we don't want.
|
||||
contents = contents.split('\n')[0]
|
||||
# Extract just the output between the delimiters as the xpcshell output can
|
||||
# have extra output that we don't want.
|
||||
contents = contents.split('DEVTOOLS_CSS_DB_DELIMITER')[1]
|
||||
|
||||
return json.loads(contents)
|
||||
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -18,9 +18,3 @@ exports.CSS_PROPERTIES = ${cssProperties};
|
|||
* A list of the pseudo elements.
|
||||
*/
|
||||
exports.PSEUDO_ELEMENTS = ${pseudoElements};
|
||||
|
||||
/**
|
||||
* A list of the preferences keys for whether a CSS property is enabled or not. This is
|
||||
* exposed for testing purposes.
|
||||
*/
|
||||
exports.PREFERENCES = ${preferences};
|
||||
|
|
|
@ -10,7 +10,20 @@
|
|||
* above each list indicates how it should be updated.
|
||||
*/
|
||||
|
||||
const { CSS_PROPERTIES, PSEUDO_ELEMENTS } = require("devtools/shared/css/generated/properties-db");
|
||||
let db;
|
||||
|
||||
// Allow this require to fail in case it's been deleted in the process of running
|
||||
// `mach devtools-css-db` to regenerate the database.
|
||||
try {
|
||||
db = require("devtools/shared/css/generated/properties-db");
|
||||
} catch (error) {
|
||||
console.error(`If this error is being displayed and "mach devtools-css-db" is not ` +
|
||||
`being run, then it needs to be fixed.`, error);
|
||||
db = {
|
||||
CSS_PROPERTIES: {},
|
||||
PSEUDO_ELEMENTS: []
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* All CSS types that properties can support. This list can be manually edited.
|
||||
|
@ -57,7 +70,7 @@ exports.ANGLE_TAKING_FUNCTIONS = ["linear-gradient", "-moz-linear-gradient",
|
|||
*
|
||||
* This list can be updated with `mach devtools-css-db`.
|
||||
*/
|
||||
exports.PSEUDO_ELEMENTS = PSEUDO_ELEMENTS;
|
||||
exports.PSEUDO_ELEMENTS = db.PSEUDO_ELEMENTS;
|
||||
|
||||
/**
|
||||
* A list of CSS Properties and their various characteristics. This is used on the
|
||||
|
@ -69,9 +82,9 @@ exports.PSEUDO_ELEMENTS = PSEUDO_ELEMENTS;
|
|||
* "supports": [ 7, 9, 10 ]
|
||||
* }
|
||||
*/
|
||||
exports.CSS_PROPERTIES = CSS_PROPERTIES;
|
||||
exports.CSS_PROPERTIES = db.CSS_PROPERTIES;
|
||||
|
||||
exports.CSS_PROPERTIES_DB = {
|
||||
properties: CSS_PROPERTIES,
|
||||
pseudoElements: PSEUDO_ELEMENTS
|
||||
properties: db.CSS_PROPERTIES,
|
||||
pseudoElements: db.PSEUDO_ELEMENTS
|
||||
};
|
||||
|
|
|
@ -3,11 +3,18 @@
|
|||
|
||||
/**
|
||||
* Test that the devtool's client-side CSS properties database is in sync with the values
|
||||
* on the platform. If they are not, then `mach devtools-css-db` needs to be run to
|
||||
* make everything up to date. Nightly, aurora, beta, and release may have different
|
||||
* preferences for what CSS values are enabled. The static CSS properties database can
|
||||
* be slightly different from the target platform as long as there is a preference that
|
||||
* exists that turns off that CSS property.
|
||||
* on the platform (in Nightly only). If they are not, then `mach devtools-css-db` needs
|
||||
* to be run to make everything up to date. Nightly, aurora, beta, and release may have
|
||||
* different CSS properties and values. These are based on preferences and compiler flags.
|
||||
*
|
||||
* This test broke uplifts as the database needed to be regenerated every uplift. The
|
||||
* combination of compiler flags and preferences means that it's too difficult to
|
||||
* statically determine which properties are enabled between Firefox releases.
|
||||
*
|
||||
* Because of these difficulties, the database only needs to be up to date with Nightly.
|
||||
* It is a fallback that is only used if the remote debugging protocol doesn't support
|
||||
* providing a CSS database, so it's ok if the provided properties don't exactly match
|
||||
* the inspected target in this particular case.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
@ -15,9 +22,8 @@
|
|||
const DOMUtils = Components.classes["@mozilla.org/inspector/dom-utils;1"]
|
||||
.getService(Components.interfaces.inIDOMUtils);
|
||||
|
||||
const {PSEUDO_ELEMENTS, CSS_PROPERTIES, PREFERENCES} = require("devtools/shared/css/generated/properties-db");
|
||||
const {PSEUDO_ELEMENTS, CSS_PROPERTIES} = require("devtools/shared/css/generated/properties-db");
|
||||
const {generateCssProperties} = require("devtools/server/actors/css-properties");
|
||||
const { Preferences } = require("resource://gre/modules/Preferences.jsm");
|
||||
|
||||
function run_test() {
|
||||
const propertiesErrorMessage = "If this assertion fails, then the client side CSS " +
|
||||
|
@ -32,9 +38,7 @@ function run_test() {
|
|||
|
||||
/**
|
||||
* Check that the platform and client match for the details on their CSS properties.
|
||||
* Enumerate each property to aid in debugging. Sometimes these properties don't
|
||||
* completely agree due to differences in preferences. Check the currently set
|
||||
* preference for that property to see if it's enabled.
|
||||
* Enumerate each property to aid in debugging.
|
||||
*/
|
||||
const platformProperties = generateCssProperties();
|
||||
|
||||
|
@ -71,14 +75,8 @@ function run_test() {
|
|||
}
|
||||
|
||||
mismatches.forEach(propertyName => {
|
||||
if (getPreference(propertyName) === false) {
|
||||
ok(true, `The static database and platform do not agree on the property ` +
|
||||
`"${propertyName}" This is ok because it is currently disabled through ` +
|
||||
`a preference.`);
|
||||
} else {
|
||||
ok(false, `The static database and platform do not agree on the property ` +
|
||||
`"${propertyName}" ${propertiesErrorMessage}`);
|
||||
}
|
||||
ok(false, `The static database and platform do not agree on the property ` +
|
||||
`"${propertyName}" ${propertiesErrorMessage}`);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -119,24 +117,6 @@ function isJsonDeepEqual(a, b) {
|
|||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the preference value of whether this property is enabled. Returns an empty string
|
||||
* if no preference exists.
|
||||
*
|
||||
* @param {String} propertyName
|
||||
* @return {Boolean|undefined}
|
||||
*/
|
||||
function getPreference(propertyName) {
|
||||
const preference = PREFERENCES.find(([prefPropertyName, preferenceKey]) => {
|
||||
return prefPropertyName === propertyName && !!preferenceKey;
|
||||
});
|
||||
|
||||
if (preference) {
|
||||
return Preferences.get(preference[1]);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the keys of two objects, and return the ones that don't match.
|
||||
*
|
||||
|
|
|
@ -59,13 +59,6 @@ if (!BrowserElementIsReady) {
|
|||
}
|
||||
|
||||
Services.scriptloader.loadSubScript("chrome://global/content/BrowserElementChildPreload.js");
|
||||
} else {
|
||||
if (Services.prefs.getIntPref("dom.w3c_touch_events.enabled") != 0) {
|
||||
if (docShell.asyncPanZoomEnabled === false) {
|
||||
ContentPanningAPZDisabled.init();
|
||||
}
|
||||
ContentPanning.init();
|
||||
}
|
||||
}
|
||||
|
||||
function onDestroy() {
|
||||
|
@ -74,12 +67,6 @@ if (!BrowserElementIsReady) {
|
|||
if (api) {
|
||||
api.destroy();
|
||||
}
|
||||
if ("ContentPanning" in this) {
|
||||
ContentPanning.destroy();
|
||||
}
|
||||
if ("ContentPanningAPZDisabled" in this) {
|
||||
ContentPanningAPZDisabled.destroy();
|
||||
}
|
||||
if ("CopyPasteAssistent" in this) {
|
||||
CopyPasteAssistent.destroy();
|
||||
}
|
||||
|
|
|
@ -72,18 +72,20 @@ using namespace mozilla::media;
|
|||
#undef SFMT
|
||||
#undef SLOG
|
||||
#undef SWARN
|
||||
#undef SDUMP
|
||||
|
||||
#define FMT(x, ...) "Decoder=%p " x, mDecoderID, ##__VA_ARGS__
|
||||
#define DECODER_LOG(...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(__VA_ARGS__)))
|
||||
#define VERBOSE_LOG(...) MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(__VA_ARGS__)))
|
||||
#define SAMPLE_LOG(...) MOZ_LOG(gMediaSampleLog, LogLevel::Debug, (FMT(__VA_ARGS__)))
|
||||
#define DECODER_WARN(...) NS_WARNING(nsPrintfCString(FMT(__VA_ARGS__)).get())
|
||||
#define DUMP_LOG(...) NS_DebugBreak(NS_DEBUG_WARNING, nsPrintfCString(FMT(__VA_ARGS__)).get(), nullptr, nullptr, -1)
|
||||
#define DECODER_LOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(x, ##__VA_ARGS__)))
|
||||
#define VERBOSE_LOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(x, ##__VA_ARGS__)))
|
||||
#define SAMPLE_LOG(x, ...) MOZ_LOG(gMediaSampleLog, LogLevel::Debug, (FMT(x, ##__VA_ARGS__)))
|
||||
#define DECODER_WARN(x, ...) NS_WARNING(nsPrintfCString(FMT(x, ##__VA_ARGS__)).get())
|
||||
#define DUMP_LOG(x, ...) NS_DebugBreak(NS_DEBUG_WARNING, nsPrintfCString(FMT(x, ##__VA_ARGS__)).get(), nullptr, nullptr, -1)
|
||||
|
||||
// Used by StateObject and its sub-classes
|
||||
#define SFMT(x, ...) "Decoder=%p state=%s " x, mMaster->mDecoderID, ToStateStr(GetState()), ##__VA_ARGS__
|
||||
#define SLOG(...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (SFMT(__VA_ARGS__)))
|
||||
#define SWARN(...) NS_WARNING(nsPrintfCString(SFMT(__VA_ARGS__)).get())
|
||||
#define SLOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (SFMT(x, ##__VA_ARGS__)))
|
||||
#define SWARN(x, ...) NS_WARNING(nsPrintfCString(SFMT(x, ##__VA_ARGS__)).get())
|
||||
#define SDUMP(x, ...) NS_DebugBreak(NS_DEBUG_WARNING, nsPrintfCString(SFMT(x, ##__VA_ARGS__)).get(), nullptr, nullptr, -1)
|
||||
|
||||
// Certain constants get stored as member variables and then adjusted by various
|
||||
// scale factors on a per-decoder basis. We want to make sure to avoid using these
|
||||
|
@ -221,11 +223,13 @@ public:
|
|||
|
||||
virtual bool HandleEndOfStream() { return false; }
|
||||
|
||||
virtual RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget)
|
||||
{
|
||||
MOZ_ASSERT(false, "Can't seek in this state");
|
||||
return nullptr;
|
||||
}
|
||||
virtual bool HandleWaitingForData() { return false; }
|
||||
|
||||
virtual RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) = 0;
|
||||
|
||||
virtual bool HandleAudioCaptured() { return false; }
|
||||
|
||||
virtual void DumpDebugInfo() {}
|
||||
|
||||
protected:
|
||||
using Master = MediaDecoderStateMachine;
|
||||
|
@ -286,6 +290,12 @@ public:
|
|||
return true;
|
||||
}
|
||||
|
||||
RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
|
||||
{
|
||||
MOZ_DIAGNOSTIC_ASSERT(false, "Can't seek while decoding metadata.");
|
||||
return MediaDecoder::SeekPromise::CreateAndReject(true, __func__);
|
||||
}
|
||||
|
||||
private:
|
||||
void OnMetadataRead(MetadataHolder* aMetadata)
|
||||
{
|
||||
|
@ -561,8 +571,7 @@ public:
|
|||
|
||||
mDecodeStartTime = TimeStamp::Now();
|
||||
|
||||
mMaster->mIsPrerolling = true;
|
||||
mMaster->MaybeStopPrerolling();
|
||||
MaybeStopPrerolling();
|
||||
|
||||
// Ensure that we've got tasks enqueued to decode data if we need to.
|
||||
mMaster->DispatchDecodeTasksIfNeeded();
|
||||
|
@ -576,7 +585,6 @@ public:
|
|||
TimeDuration decodeDuration = TimeStamp::Now() - mDecodeStartTime;
|
||||
SLOG("Exiting DECODING, decoded for %.3lfs", decodeDuration.ToSeconds());
|
||||
}
|
||||
mMaster->mIsPrerolling = false;
|
||||
}
|
||||
|
||||
void Step() override
|
||||
|
@@ -589,7 +597,9 @@ public:
}

// Start playback if necessary so that the clock can be properly queried.
mMaster->MaybeStartPlayback();
if (!mIsPrerolling) {
mMaster->MaybeStartPlayback();
}

mMaster->UpdatePlaybackPositionPeriodically();
@@ -608,14 +618,14 @@ public:
bool HandleAudioDecoded(MediaData* aAudio) override
{
mMaster->Push(aAudio, MediaData::AUDIO_DATA);
mMaster->MaybeStopPrerolling();
MaybeStopPrerolling();
return true;
}

bool HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) override
{
mMaster->Push(aVideo, MediaData::VIDEO_DATA);
mMaster->MaybeStopPrerolling();
MaybeStopPrerolling();
CheckSlowDecoding(aDecodeStart);
return true;
}
@@ -631,6 +641,35 @@ public:
return p.forget();
}

bool HandleEndOfStream() override
{
if (mMaster->CheckIfDecodeComplete()) {
SetState(DECODER_STATE_COMPLETED);
} else {
MaybeStopPrerolling();
}
return true;
}

bool HandleWaitingForData() override
{
MaybeStopPrerolling();
return true;
}

bool HandleAudioCaptured() override
{
MaybeStopPrerolling();
// MediaSink is changed. Schedule Step() to check if we can start playback.
mMaster->ScheduleStateMachine();
return true;
}

void DumpDebugInfo() override
{
SDUMP("mIsPrerolling=%d", mIsPrerolling);
}

private:
void CheckSlowDecoding(TimeStamp aDecodeStart)
{
@@ -663,16 +702,42 @@ private:
}
}

bool HandleEndOfStream() override
bool DonePrerollingAudio()
{
if (mMaster->CheckIfDecodeComplete()) {
SetState(DECODER_STATE_COMPLETED);
return !mMaster->IsAudioDecoding() ||
mMaster->GetDecodedAudioDuration() >=
mMaster->AudioPrerollUsecs() * mMaster->mPlaybackRate;
}

bool DonePrerollingVideo()
{
return !mMaster->IsVideoDecoding() ||
static_cast<uint32_t>(mMaster->VideoQueue().GetSize()) >=
mMaster->VideoPrerollFrames() * mMaster->mPlaybackRate + 1;
}

void MaybeStopPrerolling()
{
if (mIsPrerolling &&
(DonePrerollingAudio() || Reader()->IsWaitingAudioData()) &&
(DonePrerollingVideo() || Reader()->IsWaitingVideoData())) {
mIsPrerolling = false;
// Check if we can start playback.
mMaster->ScheduleStateMachine();
}
return true;
}

// Time at which we started decoding.
TimeStamp mDecodeStartTime;

// When we start decoding (either for the first time, or after a pause)
// we may be low on decoded data. We don't want our "low data" logic to
// kick in and decide that we're low on decoded data because the download
// can't keep up with the decode, and cause us to pause playback. So we
// have a "preroll" stage, where we ignore the results of our "low data"
// logic during the first few frames of our decode. This occurs during
// playback.
bool mIsPrerolling = true;
};

class MediaDecoderStateMachine::SeekingState
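The hunk above moves the preroll bookkeeping into DecodingState: the state owns mIsPrerolling, gets its own DonePrerollingAudio/DonePrerollingVideo helpers, and clears the flag once each track is either sufficiently buffered or stalled waiting for data. A rough standalone sketch of that gating logic, with simplified members standing in for the real mMaster/Reader() accessors (the threshold constants below are illustrative placeholders, not Gecko's values):

#include <cstdint>
#include <cstdio>

// Simplified stand-in for the DecodingState preroll check; it only loosely
// mirrors the patch (the real helpers also consult IsAudioDecoding() and
// IsVideoDecoding() on the state machine).
struct DecodingStateSketch {
  bool mIsPrerolling = true;

  // Inputs the real state reads from mMaster and the reader.
  int64_t decodedAudioUsecs = 0;
  uint32_t decodedVideoFrames = 0;
  bool waitingAudio = false;
  bool waitingVideo = false;
  double playbackRate = 1.0;

  bool DonePrerollingAudio() const {
    constexpr int64_t kAudioPrerollUsecs = 2000000;  // 2s, illustrative only
    return decodedAudioUsecs >= kAudioPrerollUsecs * playbackRate;
  }
  bool DonePrerollingVideo() const {
    constexpr uint32_t kVideoPrerollFrames = 10;     // illustrative only
    return decodedVideoFrames >= kVideoPrerollFrames * playbackRate + 1;
  }

  // Mirrors MaybeStopPrerolling(): stop prerolling once each track is either
  // buffered enough or blocked waiting for data, then let the state machine
  // re-run Step() so MaybeStartPlayback() can fire.
  bool MaybeStopPrerolling() {
    if (mIsPrerolling &&
        (DonePrerollingAudio() || waitingAudio) &&
        (DonePrerollingVideo() || waitingVideo)) {
      mIsPrerolling = false;
      return true;  // caller would ScheduleStateMachine() here
    }
    return false;
  }
};

int main() {
  DecodingStateSketch state;
  state.decodedAudioUsecs = 3000000;
  state.decodedVideoFrames = 12;
  std::printf("stopped prerolling: %d\n", state.MaybeStopPrerolling());
  return 0;
}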
@@ -1109,6 +1174,13 @@ public:
return p.forget();
}

bool HandleAudioCaptured() override
{
// MediaSink is changed. Schedule Step() to check if we can start playback.
mMaster->ScheduleStateMachine();
return true;
}

private:
bool mSentPlaybackEndedEvent = false;
};
@@ -1138,6 +1210,12 @@ public:
{
return true;
}

RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
{
MOZ_DIAGNOSTIC_ASSERT(false, "Can't seek in shutdown state.");
return MediaDecoder::SeekPromise::CreateAndReject(true, __func__);
}
};

#define INIT_WATCHABLE(name, val) \
@@ -1542,7 +1620,7 @@ MediaDecoderStateMachine::OnNotDecoded(MediaData::Type aType,
MOZ_ASSERT(mReader->IsWaitForDataSupported(),
"Readers that send WAITING_FOR_DATA need to implement WaitForData");
mReader->WaitForData(aType);
MaybeStopPrerolling();
mStateObj->HandleWaitingForData();
return;
}
@@ -1569,8 +1647,6 @@ MediaDecoderStateMachine::OnNotDecoded(MediaData::Type aType,
VideoQueue().Finish();
}

MaybeStopPrerolling();

mStateObj->HandleEndOfStream();
}
@@ -1719,19 +1795,6 @@ void MediaDecoderStateMachine::StopPlayback()
DispatchDecodeTasksIfNeeded();
}

void
MediaDecoderStateMachine::MaybeStopPrerolling()
{
MOZ_ASSERT(OnTaskQueue());
if (mIsPrerolling &&
(DonePrerollingAudio() || mReader->IsWaitingAudioData()) &&
(DonePrerollingVideo() || mReader->IsWaitingVideoData())) {
mIsPrerolling = false;
// Check if we can start playback.
ScheduleStateMachine();
}
}

void MediaDecoderStateMachine::MaybeStartPlayback()
{
MOZ_ASSERT(OnTaskQueue());
@@ -1746,10 +1809,10 @@ void MediaDecoderStateMachine::MaybeStartPlayback()
}

bool playStatePermits = mPlayState == MediaDecoder::PLAY_STATE_PLAYING;
if (!playStatePermits || mIsPrerolling || mAudioOffloading) {
if (!playStatePermits || mAudioOffloading) {
DECODER_LOG("Not starting playback [playStatePermits: %d, "
"mIsPrerolling: %d, mAudioOffloading: %d]",
playStatePermits, mIsPrerolling, mAudioOffloading);
"mAudioOffloading: %d]",
playStatePermits, mAudioOffloading);
return;
}
@@ -2987,12 +3050,7 @@ MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
// Restore playback parameters.
mMediaSink->SetPlaybackParams(params);

// We don't need to call StartMediaSink() here because IsPlaying() is now
// always in sync with the playing state of MediaSink. It will be started in
// MaybeStartPlayback() in the next cycle if necessary.

mAudioCaptured = aCaptured;
ScheduleStateMachine();

// Don't buffer as much when audio is captured because we don't need to worry
// about high latency audio devices.
@@ -3000,7 +3058,7 @@ MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
detail::AMPLE_AUDIO_USECS / 2 :
detail::AMPLE_AUDIO_USECS;

MaybeStopPrerolling();
mStateObj->HandleAudioCaptured();
}

uint32_t MediaDecoderStateMachine::GetAmpleVideoFrames() const
@@ -3020,15 +3078,16 @@ MediaDecoderStateMachine::DumpDebugInfo()
// this function before shutdown begins.
nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([this] () {
mMediaSink->DumpDebugInfo();
mStateObj->DumpDebugInfo();
DUMP_LOG(
"GetMediaTime=%lld GetClock=%lld mMediaSink=%p "
"mState=%s mPlayState=%d mSentFirstFrameLoadedEvent=%d IsPlaying=%d "
"mAudioStatus=%s mVideoStatus=%s mDecodedAudioEndTime=%lld mDecodedVideoEndTime=%lld "
"mIsPrerolling=%d mAudioCompleted=%d mVideoCompleted=%d",
"mAudioCompleted=%d mVideoCompleted=%d",
GetMediaTime(), mMediaSink->IsStarted() ? GetClock() : -1, mMediaSink.get(),
ToStateStr(), mPlayState.Ref(), mSentFirstFrameLoadedEvent, IsPlaying(),
AudioRequestStatus(), VideoRequestStatus(), mDecodedAudioEndTime, mDecodedVideoEndTime,
mIsPrerolling, mAudioCompleted.Ref(), mVideoCompleted.Ref());
mAudioCompleted.Ref(), mVideoCompleted.Ref());
});

OwnerThread()->DispatchStateChange(r.forget());
@@ -677,32 +677,6 @@ private:
return GetAmpleVideoFrames() / 2;
}

bool DonePrerollingAudio()
{
MOZ_ASSERT(OnTaskQueue());
return !IsAudioDecoding() ||
GetDecodedAudioDuration() >= AudioPrerollUsecs() * mPlaybackRate;
}

bool DonePrerollingVideo()
{
MOZ_ASSERT(OnTaskQueue());
return !IsVideoDecoding() ||
static_cast<uint32_t>(VideoQueue().GetSize()) >=
VideoPrerollFrames() * mPlaybackRate + 1;
}

void MaybeStopPrerolling();

// When we start decoding (either for the first time, or after a pause)
// we may be low on decoded data. We don't want our "low data" logic to
// kick in and decide that we're low on decoded data because the download
// can't keep up with the decode, and cause us to pause playback. So we
// have a "preroll" stage, where we ignore the results of our "low data"
// logic during the first few frames of our decode. This occurs during
// playback.
bool mIsPrerolling = false;

// Only one of a given pair of ({Audio,Video}DataPromise, WaitForDataPromise)
// should exist at any given moment.
@@ -1513,18 +1513,13 @@ public:
}
if (errorMsg) {
LOG(("%s %d", errorMsg, rv));
switch (rv) {
case NS_ERROR_NOT_AVAILABLE: {
MOZ_ASSERT(badConstraint);
Fail(NS_LITERAL_STRING("OverconstrainedError"),
NS_LITERAL_STRING(""),
NS_ConvertUTF8toUTF16(badConstraint));
break;
}
default:
Fail(NS_LITERAL_STRING("NotReadableError"),
NS_ConvertUTF8toUTF16(errorMsg));
break;
if (badConstraint) {
Fail(NS_LITERAL_STRING("OverconstrainedError"),
NS_LITERAL_STRING(""),
NS_ConvertUTF8toUTF16(badConstraint));
} else {
Fail(NS_LITERAL_STRING("NotReadableError"),
NS_ConvertUTF8toUTF16(errorMsg));
}
return NS_OK;
}
@@ -3520,7 +3515,7 @@ GetUserMediaCallbackMediaStreamListener::ApplyConstraintsToTrack(
} else {
auto* window = nsGlobalWindow::GetInnerWindowWithId(windowId);
if (window) {
if (rv == NS_ERROR_NOT_AVAILABLE) {
if (badConstraint) {
nsString constraint;
constraint.AssignASCII(badConstraint);
RefPtr<MediaStreamError> error =
@@ -16,9 +16,9 @@ extern LazyLogModule gMediaDecoderLog;
#undef DUMP_LOG

#define FMT(x, ...) "VideoSink=%p " x, this, ##__VA_ARGS__
#define VSINK_LOG(...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(__VA_ARGS__)))
#define VSINK_LOG_V(...) MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(__VA_ARGS__)))
#define DUMP_LOG(...) NS_DebugBreak(NS_DEBUG_WARNING, nsPrintfCString(FMT(__VA_ARGS__)).get(), nullptr, nullptr, -1)
#define VSINK_LOG(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(x, ##__VA_ARGS__)))
#define VSINK_LOG_V(x, ...) MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(x, ##__VA_ARGS__)))
#define DUMP_LOG(x, ...) NS_DebugBreak(NS_DEBUG_WARNING, nsPrintfCString(FMT(x, ##__VA_ARGS__)).get(), nullptr, nullptr, -1)

using namespace mozilla::layers;
@@ -66,7 +66,8 @@ AudioStreamAnalyser.prototype = {
*/
enableDebugCanvas: function() {
var cvs = this.debugCanvas = document.createElement("canvas");
document.getElementById("content").appendChild(cvs);
const content = document.getElementById("content");
content.insertBefore(cvs, content.children[0]);

// Easy: 1px per bin
cvs.width = this.analyser.frequencyBinCount;
@@ -2562,7 +2562,7 @@ WorkerPrivateParent<Derived>::Freeze(nsPIDOMWindowInner* aWindow)
if ((IsSharedWorker() || IsServiceWorker()) && !mSharedWorkers.IsEmpty()) {
AssertIsOnMainThread();

bool allFrozen = false;
bool allFrozen = true;

for (uint32_t i = 0; i < mSharedWorkers.Length(); ++i) {
if (aWindow && mSharedWorkers[i]->GetOwner() == aWindow) {
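The one-line fix above corrects an accumulator initialized to the wrong value: allFrozen can only be cleared inside the loop over mSharedWorkers, so starting it at false made the "everything is frozen" outcome unreachable. A small self-contained sketch of that all-of pattern (the Worker struct below is a stand-in, not the Gecko class):

#include <cstdio>
#include <vector>

// Minimal stand-in for the shared-worker check: "are all relevant workers
// frozen?" The accumulator must start at true because each iteration can
// only clear it; starting at false (the old code) always reports false.
struct Worker {
  bool frozen;
};

static bool AllFrozen(const std::vector<Worker>& workers) {
  bool allFrozen = true;
  for (const Worker& w : workers) {
    if (!w.frozen) {
      allFrozen = false;
      // The real loop keeps iterating because it also freezes the matching
      // workers; a pure query could break out here instead.
    }
  }
  return allFrozen;
}

int main() {
  std::vector<Worker> workers = {{true}, {true}};
  std::printf("all frozen: %d\n", AllFrozen(workers));
  return 0;
}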
@@ -597,7 +597,7 @@ JS_FRIEND_API(bool)
js::ZoneGlobalsAreAllGray(JS::Zone* zone)
{
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
JSObject* obj = comp->maybeGlobal();
JSObject* obj = comp->unsafeUnbarrieredMaybeGlobal();
if (!obj || !JS::ObjectIsMarkedGray(obj))
return false;
}
@@ -2781,11 +2781,15 @@ CrossAxisPositionTracker::
// XXX strip of the <overflow-position> bit until we implement that
mAlignContent &= ~NS_STYLE_ALIGN_FLAG_BITS;

if (!aFirstLine->getNext()) {
const bool isSingleLine =
NS_STYLE_FLEX_WRAP_NOWRAP == aReflowInput.mStylePosition->mFlexWrap;
if (isSingleLine) {
MOZ_ASSERT(!aFirstLine->getNext(),
"If we're styled as single-line, we should only have 1 line");
// "If the flex container is single-line and has a definite cross size, the
// cross size of the flex line is the flex container's inner cross size."
//
// SOURCE: http://dev.w3.org/csswg/css-flexbox/#algo-line-break
// SOURCE: https://drafts.csswg.org/css-flexbox/#algo-cross-line
// NOTE: This means (by definition) that there's no packing space, which
// means we don't need to be concerned with "align-conent" at all and we
// can return early. This is handy, because this is the usual case (for
@ -15,18 +15,18 @@ body {
|
|||
|
||||
@font-face {
|
||||
font-family: test-regular;
|
||||
src: local("Helvetica Neue"), local("Bitstream Vera Sans"), local("Bitstream Vera Sans Roman"), local("FreeSans"), local("Free Sans"), local("SwissA"), local("DejaVu Sans"), local("Arial");
|
||||
src: local("Helvetica Neue"), local("Bitstream Vera Sans"), local("Bitstream Vera Sans Roman"), local("DejaVu Sans"), local("FreeSans"), local("Free Sans"), local("SwissA"), local("Arial");
|
||||
}
|
||||
|
||||
/* use Helvetica on the Mac, since Futura has no bold face on 10.4, 10.5 */
|
||||
@font-face {
|
||||
font-family: test-bold;
|
||||
src: local("Helvetica Neue Bold"), local("Bitstream Vera Sans Bold"), local("FreeSans Bold"), local("Free Sans Bold"), local("SwissA Bold"), local("DejaVu Sans Bold"), local("Arial Bold");
|
||||
src: local("Helvetica Neue Bold"), local("Bitstream Vera Sans Bold"), local("DejaVu Sans Bold"), local("FreeSans Bold"), local("Free Sans Bold"), local("SwissA Bold"), local("Arial Bold");
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: test-italic;
|
||||
src: local("Helvetica Neue Italic"), local("Bitstream Vera Sans Oblique"), local("FreeSans Oblique"), local("Free Sans Oblique"), local("SwissA Italic"), local("DejaVu Sans Oblique"), local("Arial Italic");
|
||||
src: local("Helvetica Neue Italic"), local("Bitstream Vera Sans Oblique"), local("DejaVu Sans Oblique"), local("FreeSans Oblique"), local("Free Sans Oblique"), local("SwissA Italic"), local("Arial Italic");
|
||||
}
|
||||
|
||||
.regular { font-family: test-regular, serif; }
|
||||
|
|
|
@ -11,9 +11,10 @@ body {
|
|||
font-size: 24pt;
|
||||
}
|
||||
|
||||
/* use full names */
|
||||
/* Bitstream Vera must be adjacent to DejaVu to because the latter is picked
|
||||
up by fontconfig aliases for the former. */
|
||||
|
||||
p { font-family: Helvetica Neue, Bitstream Vera Sans, FreeSans, SwissA, DejaVu Sans, Arial, serif; }
|
||||
p { font-family: Helvetica Neue, Bitstream Vera Sans, DejaVu Sans, FreeSans, SwissA, Arial, serif; }
|
||||
|
||||
.regular { }
|
||||
.bold { font-weight: bold; }
|
||||
|
|
|
@ -15,18 +15,18 @@ body {
|
|||
|
||||
@font-face {
|
||||
font-family: test-regular;
|
||||
src: local(Helvetica Neue), local(Bitstream Vera Sans), local(Bitstream Vera Sans Roman), local(FreeSans), local(Free Sans), local(SwissA), local(DejaVu Sans), local(Arial);
|
||||
src: local(Helvetica Neue), local(Bitstream Vera Sans), local(Bitstream Vera Sans Roman), local(DejaVu Sans), local(FreeSans), local(Free Sans), local(SwissA), local(Arial);
|
||||
}
|
||||
|
||||
/* use Helvetica on the Mac, since Futura has no bold face on 10.4, 10.5 */
|
||||
@font-face {
|
||||
font-family: test-bold;
|
||||
src: local(Helvetica Neue Bold), local(Bitstream Vera Sans Bold), local(FreeSans Bold), local(Free Sans Bold), local(SwissA Bold), local(DejaVu Sans Bold), local(Arial Bold);
|
||||
src: local(Helvetica Neue Bold), local(Bitstream Vera Sans Bold), local(DejaVu Sans Bold), local(FreeSans Bold), local(Free Sans Bold), local(SwissA Bold), local(Arial Bold);
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: test-italic;
|
||||
src: local(Helvetica Neue Italic), local(Bitstream Vera Sans Oblique), local(FreeSans Oblique), local(Free Sans Oblique), local(SwissA Italic), local(DejaVu Sans Oblique), local(Arial Italic);
|
||||
src: local(Helvetica Neue Italic), local(Bitstream Vera Sans Oblique), local(DejaVu Sans Oblique), local(FreeSans Oblique), local(Free Sans Oblique), local(SwissA Italic), local(Arial Italic);
|
||||
}
|
||||
|
||||
.regular { font-family: test-regular, serif; }
|
||||
|
|
|
@ -71,7 +71,7 @@
|
|||
|
||||
<!-- flex-end -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-top: 190px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-top: 160px"/>
|
||||
|
@ -85,7 +85,7 @@
|
|||
|
||||
<!-- center -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-top: 95px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-top: 80px"/>
|
||||
|
@ -113,7 +113,7 @@
|
|||
|
||||
<!-- space-around -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-top: 95px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-top: 40px"/>
|
||||
|
|
|
@ -74,7 +74,7 @@
|
|||
|
||||
<!-- flex-end -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-left: 190px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-left: 160px"/>
|
||||
|
@ -88,7 +88,7 @@
|
|||
|
||||
<!-- center -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-left: 95px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-left: 80px"/>
|
||||
|
@ -116,7 +116,7 @@
|
|||
|
||||
<!-- space-around -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-left: 95px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-left: 40px"/>
|
||||
|
@ -144,7 +144,7 @@
|
|||
|
||||
<!-- right -->
|
||||
<div class="flexbox">
|
||||
<div class="a"/>
|
||||
<div class="a" style="margin-left: 190px"/>
|
||||
</div>
|
||||
<div class="flexbox">
|
||||
<div class="a" style="margin-left: 160px"/>
|
||||
|
|
|
@@ -1455,13 +1455,11 @@ pref("network.http.sendRefererHeader", 2);
pref("network.http.referer.spoofSource", false);
// 0=full URI, 1=scheme+host+port+path, 2=scheme+host+port
pref("network.http.referer.trimmingPolicy", 0);
// 0=full URI, 1=scheme+host+port+path, 2=scheme+host+port
pref("network.http.referer.XOriginTrimmingPolicy", 0);
// 0=always send, 1=send iff base domains match, 2=send iff hosts match
pref("network.http.referer.XOriginPolicy", 0);

// Controls whether we send HTTPS referres to other HTTPS sites.
// By default this is enabled for compatibility (see bug 141641)
pref("network.http.sendSecureXSiteReferrer", true);

// Controls whether referrer attributes in <a>, <img>, <area>, <iframe>, and <link> are honoured
pref("network.http.enablePerElementReferrer", true);
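The two trimming prefs above share the same 0/1/2 scale (full URI, strip the query, origin only), and the cross-origin variant is only allowed to make trimming stricter: later in this patch HttpBaseChannel takes the larger of the two values for cross-origin loads. A toy illustration of that combination rule, with a hypothetical TrimReferrer helper rather than Necko's real URI handling:

#include <algorithm>
#include <cstdio>
#include <string>

// 0 = send the full URI, 1 = strip the query, 2 = origin only. This mirrors
// the pref scale; the string slicing below is a toy, not Necko's URI code.
static std::string TrimReferrer(const std::string& referrer, int policy) {
  if (policy >= 2) {
    // Keep scheme://host:port only.
    size_t schemeEnd = referrer.find("://");
    size_t pathStart = referrer.find(
        '/', schemeEnd == std::string::npos ? 0 : schemeEnd + 3);
    return pathStart == std::string::npos ? referrer
                                          : referrer.substr(0, pathStart) + "/";
  }
  if (policy == 1) {
    size_t query = referrer.find('?');
    return query == std::string::npos ? referrer : referrer.substr(0, query);
  }
  return referrer;
}

int main() {
  int trimmingPolicy = 1;         // network.http.referer.trimmingPolicy
  int xOriginTrimmingPolicy = 2;  // network.http.referer.XOriginTrimmingPolicy
  bool isCrossOrigin = true;

  // Cross-origin loads use whichever policy trims more, as in the patch.
  int effective = isCrossOrigin
                      ? std::max(trimmingPolicy, xOriginTrimmingPolicy)
                      : trimmingPolicy;
  std::printf("%s\n",
              TrimReferrer("http://example.com/path?q=1", effective).c_str());
  return 0;
}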
@ -1408,22 +1408,6 @@ HttpBaseChannel::SetReferrerWithPolicy(nsIURI *referrer,
|
|||
|
||||
// in other referrer policies, https->http is not allowed...
|
||||
if (!match) return NS_OK;
|
||||
|
||||
// ...and https->https is possibly only allowed if the hosts match.
|
||||
if (!gHttpHandler->SendSecureXSiteReferrer()) {
|
||||
nsAutoCString referrerHost;
|
||||
nsAutoCString host;
|
||||
|
||||
rv = referrer->GetAsciiHost(referrerHost);
|
||||
if (NS_FAILED(rv)) return rv;
|
||||
|
||||
rv = mURI->GetAsciiHost(host);
|
||||
if (NS_FAILED(rv)) return rv;
|
||||
|
||||
// GetAsciiHost returns lowercase hostname.
|
||||
if (!referrerHost.Equals(host))
|
||||
return NS_OK;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1517,6 +1501,15 @@ HttpBaseChannel::SetReferrerWithPolicy(nsIURI *referrer,
|
|||
|
||||
nsAutoCString spec;
|
||||
|
||||
// Apply the user cross-origin trimming policy if it's more
|
||||
// restrictive than the general one.
|
||||
if (isCrossOrigin) {
|
||||
int userReferrerXOriginTrimmingPolicy =
|
||||
gHttpHandler->ReferrerXOriginTrimmingPolicy();
|
||||
userReferrerTrimmingPolicy =
|
||||
std::max(userReferrerTrimmingPolicy, userReferrerXOriginTrimmingPolicy);
|
||||
}
|
||||
|
||||
// site-specified referrer trimming may affect the trim level
|
||||
// "unsafe-url" behaves like "origin" (send referrer in the same situations) but
|
||||
// "unsafe-url" sends the whole referrer and origin removes the path.
|
||||
|
|
|
@ -166,6 +166,7 @@ nsHttpHandler::nsHttpHandler()
|
|||
, mReferrerLevel(0xff) // by default we always send a referrer
|
||||
, mSpoofReferrerSource(false)
|
||||
, mReferrerTrimmingPolicy(0)
|
||||
, mReferrerXOriginTrimmingPolicy(0)
|
||||
, mReferrerXOriginPolicy(0)
|
||||
, mFastFallbackToIPv4(false)
|
||||
, mProxyPipelining(true)
|
||||
|
@ -202,7 +203,6 @@ nsHttpHandler::nsHttpHandler()
|
|||
, mCompatFirefoxEnabled(false)
|
||||
, mUserAgentIsDirty(true)
|
||||
, mPromptTempRedirect(true)
|
||||
, mSendSecureXSiteReferrer(true)
|
||||
, mEnablePersistentHttpsCaching(false)
|
||||
, mDoNotTrackEnabled(false)
|
||||
, mSafeHintEnabled(false)
|
||||
|
@ -1085,6 +1085,12 @@ nsHttpHandler::PrefsChanged(nsIPrefBranch *prefs, const char *pref)
|
|||
mReferrerTrimmingPolicy = (uint8_t) clamped(val, 0, 2);
|
||||
}
|
||||
|
||||
if (PREF_CHANGED(HTTP_PREF("referer.XOriginTrimmingPolicy"))) {
|
||||
rv = prefs->GetIntPref(HTTP_PREF("referer.XOriginTrimmingPolicy"), &val);
|
||||
if (NS_SUCCEEDED(rv))
|
||||
mReferrerXOriginTrimmingPolicy = (uint8_t) clamped(val, 0, 2);
|
||||
}
|
||||
|
||||
if (PREF_CHANGED(HTTP_PREF("referer.XOriginPolicy"))) {
|
||||
rv = prefs->GetIntPref(HTTP_PREF("referer.XOriginPolicy"), &val);
|
||||
if (NS_SUCCEEDED(rv))
|
||||
|
@ -1230,12 +1236,6 @@ nsHttpHandler::PrefsChanged(nsIPrefBranch *prefs, const char *pref)
|
|||
mQoSBits = (uint8_t) clamped(val, 0, 0xff);
|
||||
}
|
||||
|
||||
if (PREF_CHANGED(HTTP_PREF("sendSecureXSiteReferrer"))) {
|
||||
rv = prefs->GetBoolPref(HTTP_PREF("sendSecureXSiteReferrer"), &cVar);
|
||||
if (NS_SUCCEEDED(rv))
|
||||
mSendSecureXSiteReferrer = cVar;
|
||||
}
|
||||
|
||||
if (PREF_CHANGED(HTTP_PREF("accept.default"))) {
|
||||
nsXPIDLCString accept;
|
||||
rv = prefs->GetCharPref(HTTP_PREF("accept.default"),
|
||||
|
|
|
@ -82,8 +82,10 @@ public:
|
|||
uint8_t ReferrerLevel() { return mReferrerLevel; }
|
||||
bool SpoofReferrerSource() { return mSpoofReferrerSource; }
|
||||
uint8_t ReferrerTrimmingPolicy() { return mReferrerTrimmingPolicy; }
|
||||
uint8_t ReferrerXOriginTrimmingPolicy() {
|
||||
return mReferrerXOriginTrimmingPolicy;
|
||||
}
|
||||
uint8_t ReferrerXOriginPolicy() { return mReferrerXOriginPolicy; }
|
||||
bool SendSecureXSiteReferrer() { return mSendSecureXSiteReferrer; }
|
||||
bool PackagedAppsEnabled() { return mPackagedAppsEnabled; }
|
||||
uint8_t RedirectionLimit() { return mRedirectionLimit; }
|
||||
PRIntervalTime IdleTimeout() { return mIdleTimeout; }
|
||||
|
@ -417,6 +419,7 @@ private:
|
|||
uint8_t mReferrerLevel;
|
||||
uint8_t mSpoofReferrerSource;
|
||||
uint8_t mReferrerTrimmingPolicy;
|
||||
uint8_t mReferrerXOriginTrimmingPolicy;
|
||||
uint8_t mReferrerXOriginPolicy;
|
||||
|
||||
bool mFastFallbackToIPv4;
|
||||
|
@ -492,9 +495,6 @@ private:
|
|||
|
||||
|
||||
bool mPromptTempRedirect;
|
||||
// mSendSecureXSiteReferrer: default is false,
|
||||
// if true allow referrer headers between secure non-matching hosts
|
||||
bool mSendSecureXSiteReferrer;
|
||||
|
||||
// Persistent HTTPS caching flag
|
||||
bool mEnablePersistentHttpsCaching;
|
||||
|
|
|
@ -1,14 +1,19 @@
|
|||
Cu.import("resource://gre/modules/NetUtil.jsm");
|
||||
Cu.import("resource://gre/modules/Services.jsm");
|
||||
|
||||
function getTestReferrer(server_uri, referer_uri) {
|
||||
var uri = NetUtil.newURI(server_uri, "", null)
|
||||
let referrer = NetUtil.newURI(referer_uri, null, null);
|
||||
let triggeringPrincipal = Services.scriptSecurityManager.createCodebasePrincipal(referrer, {});
|
||||
var chan = NetUtil.newChannel({
|
||||
uri: uri,
|
||||
loadUsingSystemPrincipal: true
|
||||
loadingPrincipal: Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
triggeringPrincipal: triggeringPrincipal,
|
||||
contentPolicyType: Ci.nsIContentPolicy.TYPE_OTHER
|
||||
});
|
||||
|
||||
chan.QueryInterface(Components.interfaces.nsIHttpChannel);
|
||||
chan.referrer = NetUtil.newURI(referer_uri, null, null);
|
||||
chan.referrer = referrer;
|
||||
var header = null;
|
||||
try {
|
||||
header = chan.getRequestHeader("Referer");
|
||||
|
@ -31,6 +36,7 @@ function run_test() {
|
|||
// for https tests
|
||||
var server_uri_https = "https://bar.example.com/anotherpath";
|
||||
var referer_uri_https = "https://bar.example.com/path3?q=blah";
|
||||
var referer_uri_2_https = "https://bar.examplesite.com/path3?q=blah";
|
||||
|
||||
// tests for sendRefererHeader
|
||||
prefs.setIntPref("network.http.sendRefererHeader", 0);
|
||||
|
@ -72,6 +78,26 @@ function run_test() {
|
|||
// test that anchor is lopped off in ordinary case
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2_anchor), referer_uri_2);
|
||||
|
||||
// tests for referer.XOriginTrimmingPolicy
|
||||
prefs.setIntPref("network.http.referer.XOriginTrimmingPolicy", 1);
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri), "http://foo.example.com/path");
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_idn), "http://sub1.xn--lt-uia.example/path");
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2), "http://bar.examplesite.com/path3?q=blah");
|
||||
prefs.setIntPref("network.http.referer.trimmingPolicy", 1);
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2), "http://bar.examplesite.com/path3");
|
||||
prefs.setIntPref("network.http.referer.XOriginTrimmingPolicy", 2);
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri), "http://foo.example.com/");
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_idn), "http://sub1.xn--lt-uia.example/");
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2), "http://bar.examplesite.com/path3");
|
||||
prefs.setIntPref("network.http.referer.trimmingPolicy", 0);
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2), "http://bar.examplesite.com/path3?q=blah");
|
||||
// https tests
|
||||
do_check_eq(getTestReferrer(server_uri_https, referer_uri_https), "https://bar.example.com/path3?q=blah");
|
||||
do_check_eq(getTestReferrer(server_uri_https, referer_uri_2_https), "https://bar.examplesite.com/");
|
||||
prefs.setIntPref("network.http.referer.XOriginTrimmingPolicy", 0);
|
||||
// test that anchor is lopped off in ordinary case
|
||||
do_check_eq(getTestReferrer(server_uri, referer_uri_2_anchor), referer_uri_2);
|
||||
|
||||
// combination test: send spoofed path-only when hosts match
|
||||
var combo_referer_uri = "http://blah.foo.com/path?q=hot";
|
||||
var dest_uri = "http://blah.foo.com:9999/spoofedpath?q=bad";
|
||||
|
|
|
@ -23,10 +23,6 @@ var { Services } = Components.utils.import("resource://gre/modules/Services.jsm"
|
|||
|
||||
var key;
|
||||
|
||||
/**
|
||||
* List of certs currently selected in the active tab.
|
||||
* @type nsIX509Cert[]
|
||||
*/
|
||||
var selected_certs = [];
|
||||
var selected_tree_items = [];
|
||||
var selected_index = [];
|
||||
|
@ -333,8 +329,8 @@ function editCerts()
|
|||
getSelectedCerts();
|
||||
|
||||
for (let cert of selected_certs) {
|
||||
window.openDialog("chrome://pippki/content/editcacert.xul", "",
|
||||
"chrome,centerscreen,modal", cert);
|
||||
window.openDialog("chrome://pippki/content/editcacert.xul", cert.dbKey,
|
||||
"chrome,centerscreen,modal");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
/* import-globals-from pippki.js */
|
||||
"use strict";
|
||||
|
||||
const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;
|
||||
|
||||
var gCertDB = Cc["@mozilla.org/security/x509certdb;1"]
|
||||
.getService(Ci.nsIX509CertDB);
|
||||
/**
|
||||
* Cert to edit the trust of.
|
||||
* @type nsIX509Cert
|
||||
*/
|
||||
var gCert;
|
||||
|
||||
/**
|
||||
* onload() handler.
|
||||
*/
|
||||
function onLoad() {
|
||||
gCert = window.arguments[0];
|
||||
|
||||
let bundle = document.getElementById("pippki_bundle");
|
||||
setText("certmsg",
|
||||
bundle.getFormattedString("editTrustCA", [gCert.commonName]));
|
||||
|
||||
let sslCheckbox = document.getElementById("trustSSL");
|
||||
sslCheckbox.checked = gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_SSL);
|
||||
|
||||
let emailCheckbox = document.getElementById("trustEmail");
|
||||
emailCheckbox.checked = gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_EMAIL);
|
||||
|
||||
let objSignCheckbox = document.getElementById("trustObjSign");
|
||||
objSignCheckbox.checked =
|
||||
gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_OBJSIGN);
|
||||
}
|
||||
|
||||
/**
|
||||
* ondialogaccept() handler.
|
||||
*
|
||||
* @returns {Boolean} true to make the dialog close, false otherwise.
|
||||
*/
|
||||
function onDialogAccept() {
|
||||
let sslCheckbox = document.getElementById("trustSSL");
|
||||
let emailCheckbox = document.getElementById("trustEmail");
|
||||
let objSignCheckbox = document.getElementById("trustObjSign");
|
||||
let trustSSL = sslCheckbox.checked ? Ci.nsIX509CertDB.TRUSTED_SSL : 0;
|
||||
let trustEmail = emailCheckbox.checked ? Ci.nsIX509CertDB.TRUSTED_EMAIL : 0;
|
||||
let trustObjSign = objSignCheckbox.checked ? Ci.nsIX509CertDB.TRUSTED_OBJSIGN
|
||||
: 0;
|
||||
|
||||
gCertDB.setCertTrust(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
trustSSL | trustEmail | trustObjSign);
|
||||
return true;
|
||||
}
|
|
@ -7,19 +7,18 @@
|
|||
|
||||
<!DOCTYPE dialog SYSTEM "chrome://pippki/locale/certManager.dtd">
|
||||
|
||||
<dialog id="editCaCert"
|
||||
<dialog id="editCaCert"
|
||||
title="&certmgr.editcacert.title;"
|
||||
xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
|
||||
buttons="accept,cancel"
|
||||
ondialogaccept="return onDialogAccept();"
|
||||
onload="onLoad();"
|
||||
ondialogaccept="return doOK();"
|
||||
onload="setWindowName();"
|
||||
>
|
||||
|
||||
<stringbundle id="pippki_bundle" src="chrome://pippki/locale/pippki.properties"/>
|
||||
|
||||
<script type="application/javascript" src="chrome://pippki/content/pippki.js"/>
|
||||
<script type="application/javascript"
|
||||
src="chrome://pippki/content/editcacert.js"/>
|
||||
<script type="application/javascript" src="chrome://pippki/content/editcerts.js"/>
|
||||
|
||||
<description id="certmsg"/>
|
||||
<separator/>
|
||||
|
|
|
@ -0,0 +1,71 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
/* import-globals-from pippki.js */
|
||||
"use strict";
|
||||
|
||||
const nsIX509Cert = Components.interfaces.nsIX509Cert;
|
||||
const nsX509CertDB = "@mozilla.org/security/x509certdb;1";
|
||||
const nsIX509CertDB = Components.interfaces.nsIX509CertDB;
|
||||
|
||||
var certdb;
|
||||
var cert;
|
||||
|
||||
function doPrompt(msg)
|
||||
{
|
||||
let prompts = Components.classes["@mozilla.org/embedcomp/prompt-service;1"].
|
||||
getService(Components.interfaces.nsIPromptService);
|
||||
prompts.alert(window, null, msg);
|
||||
}
|
||||
|
||||
function setWindowName()
|
||||
{
|
||||
var dbkey = self.name;
|
||||
|
||||
// Get the cert from the cert database
|
||||
certdb = Components.classes[nsX509CertDB].getService(nsIX509CertDB);
|
||||
cert = certdb.findCertByDBKey(dbkey);
|
||||
|
||||
var bundle = document.getElementById("pippki_bundle");
|
||||
|
||||
var message1 = bundle.getFormattedString("editTrustCA", [cert.commonName]);
|
||||
setText("certmsg", message1);
|
||||
|
||||
var ssl = document.getElementById("trustSSL");
|
||||
if (certdb.isCertTrusted(cert, nsIX509Cert.CA_CERT,
|
||||
nsIX509CertDB.TRUSTED_SSL)) {
|
||||
ssl.setAttribute("checked", "true");
|
||||
} else {
|
||||
ssl.setAttribute("checked", "false");
|
||||
}
|
||||
var email = document.getElementById("trustEmail");
|
||||
if (certdb.isCertTrusted(cert, nsIX509Cert.CA_CERT,
|
||||
nsIX509CertDB.TRUSTED_EMAIL)) {
|
||||
email.setAttribute("checked", "true");
|
||||
} else {
|
||||
email.setAttribute("checked", "false");
|
||||
}
|
||||
var objsign = document.getElementById("trustObjSign");
|
||||
if (certdb.isCertTrusted(cert, nsIX509Cert.CA_CERT,
|
||||
nsIX509CertDB.TRUSTED_OBJSIGN)) {
|
||||
objsign.setAttribute("checked", "true");
|
||||
} else {
|
||||
objsign.setAttribute("checked", "false");
|
||||
}
|
||||
}
|
||||
|
||||
function doOK()
|
||||
{
|
||||
var ssl = document.getElementById("trustSSL");
|
||||
var email = document.getElementById("trustEmail");
|
||||
var objsign = document.getElementById("trustObjSign");
|
||||
var trustssl = (ssl.checked) ? nsIX509CertDB.TRUSTED_SSL : 0;
|
||||
var trustemail = (email.checked) ? nsIX509CertDB.TRUSTED_EMAIL : 0;
|
||||
var trustobjsign = (objsign.checked) ? nsIX509CertDB.TRUSTED_OBJSIGN : 0;
|
||||
//
|
||||
// Set the cert trust
|
||||
//
|
||||
certdb.setCertTrust(cert, nsIX509Cert.CA_CERT,
|
||||
trustssl | trustemail | trustobjsign);
|
||||
return true;
|
||||
}
|
|
@ -19,7 +19,7 @@ pippki.jar:
|
|||
content/pippki/OrphanOverlay.xul (content/OrphanOverlay.xul)
|
||||
content/pippki/viewCertDetails.xul (content/viewCertDetails.xul)
|
||||
content/pippki/editcacert.xul (content/editcacert.xul)
|
||||
content/pippki/editcacert.js (content/editcacert.js)
|
||||
content/pippki/editcerts.js (content/editcerts.js)
|
||||
* content/pippki/exceptionDialog.xul (content/exceptionDialog.xul)
|
||||
content/pippki/exceptionDialog.js (content/exceptionDialog.js)
|
||||
content/pippki/deletecert.xul (content/deletecert.xul)
|
||||
|
|
|
@ -10,4 +10,3 @@ support-files =
|
|||
[browser_clientAuth_connection.js]
|
||||
[browser_clientAuth_ui.js]
|
||||
[browser_deleteCert_ui.js]
|
||||
[browser_editCACertTrust.js]
|
||||
|
|
|
@ -10,43 +10,53 @@
|
|||
|
||||
var { OS } = Cu.import("resource://gre/modules/osfile.jsm", {});
|
||||
|
||||
var certificates = [];
|
||||
|
||||
registerCleanupFunction(function() {
|
||||
let certdb = Cc["@mozilla.org/security/x509certdb;1"]
|
||||
.getService(Ci.nsIX509CertDB);
|
||||
certificates.forEach(cert => {
|
||||
certdb.deleteCertificate(cert);
|
||||
});
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("ca.pem", "CTu,CTu,CTu");
|
||||
let cert = yield readCertificate("ca.pem", "CTu,CTu,CTu", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkUsages(win, ["SSL Certificate Authority"]);
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("ssl-ee.pem", ",,");
|
||||
let cert = yield readCertificate("ssl-ee.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkUsages(win, ["SSL Server Certificate", "SSL Client Certificate"]);
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("email-ee.pem", ",,");
|
||||
let cert = yield readCertificate("email-ee.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkUsages(win, ["Email Recipient Certificate", "Email Signer Certificate"]);
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("code-ee.pem", ",,");
|
||||
let cert = yield readCertificate("code-ee.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkUsages(win, ["Object Signer"]);
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("expired-ca.pem", ",,");
|
||||
let cert = yield readCertificate("expired-ca.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win, "Could not verify this certificate because it has expired.");
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("ee-from-expired-ca.pem", ",,");
|
||||
let cert = yield readCertificate("ee-from-expired-ca.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because the CA certificate " +
|
||||
|
@ -55,7 +65,7 @@ add_task(function* () {
|
|||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("unknown-issuer.pem", ",,");
|
||||
let cert = yield readCertificate("unknown-issuer.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because the issuer is " +
|
||||
|
@ -64,7 +74,7 @@ add_task(function* () {
|
|||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("md5-ee.pem", ",,");
|
||||
let cert = yield readCertificate("md5-ee.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because it was signed using " +
|
||||
|
@ -74,7 +84,7 @@ add_task(function* () {
|
|||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("untrusted-ca.pem", "p,p,p");
|
||||
let cert = yield readCertificate("untrusted-ca.pem", "p,p,p", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because it is not trusted.");
|
||||
|
@ -82,7 +92,8 @@ add_task(function* () {
|
|||
});
|
||||
|
||||
add_task(function* () {
|
||||
let cert = yield readCertificate("ee-from-untrusted-ca.pem", ",,");
|
||||
let cert = yield readCertificate("ee-from-untrusted-ca.pem", ",,",
|
||||
certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because the issuer is not " +
|
||||
|
@ -99,7 +110,7 @@ add_task(function* () {
|
|||
certBlocklist.revokeCertBySubjectAndPubKey(
|
||||
"MBIxEDAOBgNVBAMMB3Jldm9rZWQ=", // CN=revoked
|
||||
"VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8="); // hash of the shared key
|
||||
let cert = yield readCertificate("revoked.pem", ",,");
|
||||
let cert = yield readCertificate("revoked.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win,
|
||||
"Could not verify this certificate because it has been revoked.");
|
||||
|
@ -111,7 +122,7 @@ add_task(function* () {
|
|||
// keyCertSign, but it doesn't have a basicConstraints extension. This
|
||||
// shouldn't be valid for any usage. Sadly, we give a pretty lame error
|
||||
// message in this case.
|
||||
let cert = yield readCertificate("invalid.pem", ",,");
|
||||
let cert = yield readCertificate("invalid.pem", ",,", certificates);
|
||||
let win = yield displayCertificate(cert);
|
||||
checkError(win, "Could not verify this certificate for unknown reasons.");
|
||||
yield BrowserTestUtils.closeWindow(win);
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
* @type nsIMutableArray<nsICertTreeItem>
|
||||
*/
|
||||
var gCertArray = Cc["@mozilla.org/array;1"].createInstance(Ci.nsIMutableArray);
|
||||
var gImportedCerts = [];
|
||||
|
||||
const FAKE_HOST_PORT = "Fake host and port";
|
||||
|
||||
|
@ -74,11 +75,19 @@ function openDeleteCertConfirmDialog(tabID) {
|
|||
});
|
||||
}
|
||||
|
||||
registerCleanupFunction(() => {
|
||||
let certdb = Cc["@mozilla.org/security/x509certdb;1"]
|
||||
.getService(Ci.nsIX509CertDB);
|
||||
for (let cert of gImportedCerts) {
|
||||
certdb.deleteCertificate(cert);
|
||||
}
|
||||
});
|
||||
|
||||
add_task(function* setup() {
|
||||
for (let testCase of TEST_CASES) {
|
||||
let cert = null;
|
||||
if (testCase.certFilename) {
|
||||
cert = yield readCertificate(testCase.certFilename, ",,");
|
||||
cert = yield readCertificate(testCase.certFilename, ",,", gImportedCerts);
|
||||
}
|
||||
let certTreeItem = {
|
||||
hostPort: FAKE_HOST_PORT,
|
||||
|
|
|
@ -1,119 +0,0 @@
|
|||
// Any copyright is dedicated to the Public Domain.
|
||||
// http://creativecommons.org/publicdomain/zero/1.0/
|
||||
"use strict";
|
||||
|
||||
// Tests that the UI for editing the trust of a CA certificate correctly
|
||||
// reflects trust in the cert DB, and correctly updates trust in the cert DB
|
||||
// when requested.
|
||||
|
||||
var gCertDB = Cc["@mozilla.org/security/x509certdb;1"]
|
||||
.getService(Ci.nsIX509CertDB);
|
||||
|
||||
/**
|
||||
* The cert we're editing the trust of.
|
||||
* @type nsIX509Cert
|
||||
*/
|
||||
var gCert;
|
||||
|
||||
/**
|
||||
* Opens the cert trust editing dialog.
|
||||
*
|
||||
* @returns {Promise}
|
||||
* A promise that resolves when the dialog has finished loading with
|
||||
* the window of the opened dialog.
|
||||
*/
|
||||
function openEditCertTrustDialog() {
|
||||
let win = window.openDialog("chrome://pippki/content/editcacert.xul", "", "",
|
||||
gCert);
|
||||
return new Promise((resolve, reject) => {
|
||||
win.addEventListener("load", function onLoad() {
|
||||
win.removeEventListener("load", onLoad);
|
||||
resolve(win);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
add_task(function* setup() {
|
||||
// Initially trust ca.pem for SSL, but not e-mail or object signing.
|
||||
gCert = yield readCertificate("ca.pem", "CT,,");
|
||||
Assert.ok(gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_SSL),
|
||||
"Sanity check: ca.pem should be trusted for SSL");
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_EMAIL),
|
||||
"Sanity check: ca.pem should not be trusted for e-mail");
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_OBJSIGN),
|
||||
"Sanity check: ca.pem should not be trusted for object signing");
|
||||
});
|
||||
|
||||
// Tests the following:
|
||||
// 1. The checkboxes correctly reflect the trust set in setup().
|
||||
// 2. Accepting the dialog after flipping some of the checkboxes results in the
|
||||
// correct trust being set in the cert DB.
|
||||
add_task(function* testAcceptDialog() {
|
||||
let win = yield openEditCertTrustDialog();
|
||||
|
||||
let sslCheckbox = win.document.getElementById("trustSSL");
|
||||
let emailCheckbox = win.document.getElementById("trustEmail");
|
||||
let objSignCheckbox = win.document.getElementById("trustObjSign");
|
||||
Assert.ok(sslCheckbox.checked,
|
||||
"Cert should be trusted for SSL in UI");
|
||||
Assert.ok(!emailCheckbox.checked,
|
||||
"Cert should not be trusted for e-mail in UI");
|
||||
Assert.ok(!objSignCheckbox.checked,
|
||||
"Cert should not be trusted for object signing in UI");
|
||||
|
||||
sslCheckbox.checked = false;
|
||||
emailCheckbox.checked = true;
|
||||
|
||||
info("Accepting dialog");
|
||||
win.document.getElementById("editCaCert").acceptDialog();
|
||||
yield BrowserTestUtils.windowClosed(win);
|
||||
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_SSL),
|
||||
"Cert should no longer be trusted for SSL");
|
||||
Assert.ok(gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_EMAIL),
|
||||
"Cert should now be trusted for e-mail");
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_OBJSIGN),
|
||||
"Cert should still not be trusted for object signing");
|
||||
});
|
||||
|
||||
// Tests the following:
|
||||
// 1. The checkboxes correctly reflect the trust set in testAcceptDialog().
|
||||
// 2. Canceling the dialog even after flipping the checkboxes doesn't result in
|
||||
// a change of trust in the cert DB.
|
||||
add_task(function* testCancelDialog() {
|
||||
let win = yield openEditCertTrustDialog();
|
||||
|
||||
let sslCheckbox = win.document.getElementById("trustSSL");
|
||||
let emailCheckbox = win.document.getElementById("trustEmail");
|
||||
let objSignCheckbox = win.document.getElementById("trustObjSign");
|
||||
Assert.ok(!sslCheckbox.checked,
|
||||
"Cert should not be trusted for SSL in UI");
|
||||
Assert.ok(emailCheckbox.checked,
|
||||
"Cert should be trusted for e-mail in UI");
|
||||
Assert.ok(!objSignCheckbox.checked,
|
||||
"Cert should not be trusted for object signing in UI");
|
||||
|
||||
sslCheckbox.checked = true;
|
||||
emailCheckbox.checked = false;
|
||||
objSignCheckbox.checked = true;
|
||||
|
||||
info("Canceling dialog");
|
||||
win.document.getElementById("editCaCert").cancelDialog();
|
||||
yield BrowserTestUtils.windowClosed(win);
|
||||
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_SSL),
|
||||
"Cert should still not be trusted for SSL");
|
||||
Assert.ok(gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_EMAIL),
|
||||
"Cert should still be trusted for e-mail");
|
||||
Assert.ok(!gCertDB.isCertTrusted(gCert, Ci.nsIX509Cert.CA_CERT,
|
||||
Ci.nsIX509CertDB.TRUSTED_OBJSIGN),
|
||||
"Cert should still not be trusted for object signing");
|
||||
});
|
|
@ -2,23 +2,6 @@
|
|||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
"use strict";
|
||||
|
||||
var gCertDB = Cc["@mozilla.org/security/x509certdb;1"]
|
||||
.getService(Ci.nsIX509CertDB);
|
||||
|
||||
/**
|
||||
* List of certs imported via readCertificate(). Certs in this list are
|
||||
* automatically deleted from the cert DB when a test including this head file
|
||||
* finishes.
|
||||
* @type nsIX509Cert[]
|
||||
*/
|
||||
var gImportedCerts = [];
|
||||
|
||||
registerCleanupFunction(() => {
|
||||
for (let cert of gImportedCerts) {
|
||||
gCertDB.deleteCertificate(cert);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This function serves the same purpose as the one defined in head_psm.js.
|
||||
*/
|
||||
|
@ -33,18 +16,18 @@ function pemToBase64(pem) {
|
|||
* a handle to the certificate when that certificate has been read and imported
|
||||
* with the given trust settings.
|
||||
*
|
||||
* Certs imported via this function will automatically be deleted from the cert
|
||||
* DB once the calling test finishes.
|
||||
*
|
||||
* @param {String} filename
|
||||
* The filename of the certificate (assumed to be in the same directory).
|
||||
* @param {String} trustString
|
||||
* A string describing how the certificate should be trusted (see
|
||||
* `certutil -A --help`).
|
||||
* @param {nsIX509Cert[]} certificates
|
||||
* An array to append the imported cert to. Useful for making sure
|
||||
* imported certs are cleaned up.
|
||||
* @return {Promise}
|
||||
* A promise that will resolve with a handle to the certificate.
|
||||
*/
|
||||
function readCertificate(filename, trustString) {
|
||||
function readCertificate(filename, trustString, certificates) {
|
||||
return OS.File.read(getTestFilePath(filename)).then(data => {
|
||||
let decoder = new TextDecoder();
|
||||
let pem = decoder.decode(data);
|
||||
|
@ -53,7 +36,7 @@ function readCertificate(filename, trustString) {
|
|||
let base64 = pemToBase64(pem);
|
||||
certdb.addCertFromBase64(base64, trustString, "unused");
|
||||
let cert = certdb.constructX509FromBase64(base64);
|
||||
gImportedCerts.push(cert);
|
||||
certificates.push(cert);
|
||||
return cert;
|
||||
}, error => { throw error; });
|
||||
}
|
||||
|
|
|
@ -41,6 +41,8 @@ this.EXPORTED_SYMBOLS = ["BookmarkValidator", "BookmarkProblemData"];
|
|||
* - parentNotFolder (array of ids): list of records that have parents that
|
||||
* aren't folders
|
||||
* - rootOnServer (boolean): true if the root came from the server
|
||||
* - badClientRoots (array of ids): Contains any client-side root ids where
|
||||
* the root is missing or isn't a (direct) child of the places root.
|
||||
*
|
||||
* - clientMissing: Array of ids on the server missing from the client
|
||||
* - serverMissing: Array of ids on the client missing from the server
|
||||
|
@ -70,6 +72,7 @@ class BookmarkProblemData {
|
|||
this.duplicateChildren = [];
|
||||
this.parentNotFolder = [];
|
||||
|
||||
this.badClientRoots = [];
|
||||
this.clientMissing = [];
|
||||
this.serverMissing = [];
|
||||
this.serverDeleted = [];
|
||||
|
@ -122,6 +125,7 @@ class BookmarkProblemData {
|
|||
{ name: "parentChildMismatches", count: this.parentChildMismatches.length },
|
||||
{ name: "cycles", count: this.cycles.length },
|
||||
{ name: "clientCycles", count: this.clientCycles.length },
|
||||
{ name: "badClientRoots", count: this.badClientRoots.length },
|
||||
{ name: "orphans", count: this.orphans.length },
|
||||
{ name: "missingChildren", count: this.missingChildren.length },
|
||||
{ name: "deletedChildren", count: this.deletedChildren.length },
|
||||
|
@ -558,6 +562,24 @@ class BookmarkValidator {
|
|||
return cycles;
|
||||
}
|
||||
|
||||
// Perform client-side sanity checking that doesn't involve server data
|
||||
_validateClient(problemData, clientRecords) {
|
||||
problemData.clientCycles = this._detectCycles(clientRecords);
|
||||
const rootsToCheck = [
|
||||
PlacesUtils.bookmarks.menuGuid,
|
||||
PlacesUtils.bookmarks.toolbarGuid,
|
||||
PlacesUtils.bookmarks.unfiledGuid,
|
||||
PlacesUtils.bookmarks.mobileGuid,
|
||||
];
|
||||
for (let rootGUID of rootsToCheck) {
|
||||
let record = clientRecords.find(record =>
|
||||
record.guid === rootGUID);
|
||||
if (!record || record.parentid !== "places") {
|
||||
problemData.badClientRoots.push(rootGUID);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare the list of server records with the client tree.
|
||||
*
|
||||
|
@ -578,7 +600,7 @@ class BookmarkValidator {
|
|||
serverRecords = inspectionInfo.records;
|
||||
let problemData = inspectionInfo.problemData;
|
||||
|
||||
problemData.clientCycles = this._detectCycles(clientRecords);
|
||||
this._validateClient(problemData, clientRecords);
|
||||
|
||||
let matches = [];
|
||||
|
||||
|
|
|
@ -52,6 +52,26 @@ const MOBILE_ANNO = "MobileBookmarks";
|
|||
// the tracker doesn't currently distinguish between the two.
|
||||
const IGNORED_SOURCES = [SOURCE_SYNC, SOURCE_IMPORT, SOURCE_IMPORT_REPLACE];
|
||||
|
||||
// Returns the constructor for a bookmark record type.
|
||||
function getTypeObject(type) {
|
||||
switch (type) {
|
||||
case "bookmark":
|
||||
case "microsummary":
|
||||
return Bookmark;
|
||||
case "query":
|
||||
return BookmarkQuery;
|
||||
case "folder":
|
||||
return BookmarkFolder;
|
||||
case "livemark":
|
||||
return Livemark;
|
||||
case "separator":
|
||||
return BookmarkSeparator;
|
||||
case "item":
|
||||
return PlacesItem;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
this.PlacesItem = function PlacesItem(collection, id, type) {
|
||||
CryptoWrapper.call(this, collection, id);
|
||||
this.type = type || "item";
|
||||
|
@ -69,22 +89,11 @@ PlacesItem.prototype = {
|
|||
},
|
||||
|
||||
getTypeObject: function PlacesItem_getTypeObject(type) {
|
||||
switch (type) {
|
||||
case "bookmark":
|
||||
case "microsummary":
|
||||
return Bookmark;
|
||||
case "query":
|
||||
return BookmarkQuery;
|
||||
case "folder":
|
||||
return BookmarkFolder;
|
||||
case "livemark":
|
||||
return Livemark;
|
||||
case "separator":
|
||||
return BookmarkSeparator;
|
||||
case "item":
|
||||
return PlacesItem;
|
||||
let recordObj = getTypeObject(type);
|
||||
if (!recordObj) {
|
||||
throw new Error("Unknown places item object type: " + type);
|
||||
}
|
||||
throw "Unknown places item object type: " + type;
|
||||
return recordObj;
|
||||
},
|
||||
|
||||
__proto__: CryptoWrapper.prototype,
|
||||
|
@ -99,6 +108,13 @@ PlacesItem.prototype = {
|
|||
parentSyncId: this.parentid,
|
||||
};
|
||||
},
|
||||
|
||||
// Populates the record from a Sync bookmark object returned from
|
||||
// `PlacesSyncUtils.bookmarks.fetch`.
|
||||
fromSyncBookmark(item) {
|
||||
this.parentid = item.parentSyncId;
|
||||
this.parentName = item.parentTitle;
|
||||
},
|
||||
};
|
||||
|
||||
Utils.deferGetSet(PlacesItem,
|
||||
|
@ -122,6 +138,16 @@ Bookmark.prototype = {
|
|||
info.keyword = this.keyword;
|
||||
return info;
|
||||
},
|
||||
|
||||
fromSyncBookmark(item) {
|
||||
    PlacesItem.prototype.fromSyncBookmark.call(this, item);
    this.title = item.title;
    this.bmkUri = item.url.href;
    this.description = item.description;
    this.loadInSidebar = item.loadInSidebar;
    this.tags = item.tags;
    this.keyword = item.keyword;
  },
};

Utils.deferGetSet(Bookmark,
@@ -142,6 +168,12 @@ BookmarkQuery.prototype = {
    info.query = this.queryId;
    return info;
  },

  fromSyncBookmark(item) {
    Bookmark.prototype.fromSyncBookmark.call(this, item);
    this.folderName = item.folder;
    this.queryId = item.query;
  },
};

Utils.deferGetSet(BookmarkQuery,
@@ -161,6 +193,13 @@ BookmarkFolder.prototype = {
    info.title = this.title;
    return info;
  },

  fromSyncBookmark(item) {
    PlacesItem.prototype.fromSyncBookmark.call(this, item);
    this.title = item.title;
    this.description = item.description;
    this.children = item.childSyncIds;
  },
};

Utils.deferGetSet(BookmarkFolder, "cleartext", ["description", "title",
@@ -179,6 +218,14 @@ Livemark.prototype = {
    info.site = this.siteUri;
    return info;
  },

  fromSyncBookmark(item) {
    BookmarkFolder.prototype.fromSyncBookmark.call(this, item);
    this.feedUri = item.feed.href;
    if (item.site) {
      this.siteUri = item.site.href;
    }
  },
};

Utils.deferGetSet(Livemark, "cleartext", ["siteUri", "feedUri"]);
@@ -189,6 +236,11 @@ this.BookmarkSeparator = function BookmarkSeparator(collection, id) {
BookmarkSeparator.prototype = {
  __proto__: PlacesItem.prototype,
  _logName: "Sync.Record.Separator",

  fromSyncBookmark(item) {
    PlacesItem.prototype.fromSyncBookmark.call(this, item);
    this.pos = item.index;
  },
};

Utils.deferGetSet(BookmarkSeparator, "cleartext", "pos");
@@ -716,121 +768,23 @@ BookmarksStore.prototype = {
    Async.promiseSpinningly(PlacesSyncUtils.bookmarks.changeGuid(oldID, newID));
  },

  _getTags: function BStore__getTags(uri) {
    try {
      if (typeof(uri) == "string")
        uri = Utils.makeURI(uri);
    } catch(e) {
      this._log.warn("Could not parse URI \"" + uri + "\": " + e);
    }
    return PlacesUtils.tagging.getTagsForURI(uri, {});
  },

  _getDescription: function BStore__getDescription(id) {
    try {
      return PlacesUtils.annotations.getItemAnnotation(id,
        PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO);
    } catch (e) {
      return null;
    }
  },

  _isLoadInSidebar: function BStore__isLoadInSidebar(id) {
    return PlacesUtils.annotations.itemHasAnnotation(id,
      PlacesSyncUtils.bookmarks.SIDEBAR_ANNO);
  },

  // Create a record starting from the weave id (places guid)
  createRecord: function createRecord(id, collection) {
    let placeId = this.idForGUID(id);
    let record;
    if (placeId <= 0) { // deleted item
      record = new PlacesItem(collection, id);
    let item = Async.promiseSpinningly(PlacesSyncUtils.bookmarks.fetch(id));
    if (!item) { // deleted item
      let record = new PlacesItem(collection, id);
      record.deleted = true;
      return record;
    }

    let parent = PlacesUtils.bookmarks.getFolderIdForItem(placeId);
    switch (PlacesUtils.bookmarks.getItemType(placeId)) {
    case PlacesUtils.bookmarks.TYPE_BOOKMARK:
      let bmkUri = PlacesUtils.bookmarks.getBookmarkURI(placeId).spec;
      if (bmkUri.indexOf("place:") == 0) {
        record = new BookmarkQuery(collection, id);

        // Get the actual tag name instead of the local itemId
        let folder = bmkUri.match(/[:&]folder=(\d+)/);
        try {
          // There might not be the tag yet when creating on a new client
          if (folder != null) {
            folder = folder[1];
            record.folderName = PlacesUtils.bookmarks.getItemTitle(folder);
            this._log.trace("query id: " + folder + " = " + record.folderName);
          }
        }
        catch(ex) {}

        // Persist the Smart Bookmark anno, if found.
        try {
          let anno = PlacesUtils.annotations.getItemAnnotation(placeId,
            PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO);
          if (anno != null) {
            this._log.trace("query anno: " +
              PlacesSyncUtils.bookmarks.SMART_BOOKMARKS_ANNO +
              " = " + anno);
            record.queryId = anno;
          }
        }
        catch(ex) {}
      }
      else {
        record = new Bookmark(collection, id);
      }
      record.title = PlacesUtils.bookmarks.getItemTitle(placeId);

      record.parentName = PlacesUtils.bookmarks.getItemTitle(parent);
      record.bmkUri = bmkUri;
      record.tags = this._getTags(record.bmkUri);
      record.keyword = PlacesUtils.bookmarks.getKeywordForBookmark(placeId);
      record.description = this._getDescription(placeId);
      record.loadInSidebar = this._isLoadInSidebar(placeId);
      break;

    case PlacesUtils.bookmarks.TYPE_FOLDER:
      if (PlacesUtils.annotations
                     .itemHasAnnotation(placeId, PlacesUtils.LMANNO_FEEDURI)) {
        record = new Livemark(collection, id);
        let as = PlacesUtils.annotations;
        record.feedUri = as.getItemAnnotation(placeId, PlacesUtils.LMANNO_FEEDURI);
        try {
          record.siteUri = as.getItemAnnotation(placeId, PlacesUtils.LMANNO_SITEURI);
        } catch (ex) {}
      } else {
        record = new BookmarkFolder(collection, id);
      }

      if (parent > 0)
        record.parentName = PlacesUtils.bookmarks.getItemTitle(parent);
      record.title = PlacesUtils.bookmarks.getItemTitle(placeId);
      record.description = this._getDescription(placeId);
      record.children = Async.promiseSpinningly(
        PlacesSyncUtils.bookmarks.fetchChildSyncIds(id));
      break;

    case PlacesUtils.bookmarks.TYPE_SEPARATOR:
      record = new BookmarkSeparator(collection, id);
      if (parent > 0)
        record.parentName = PlacesUtils.bookmarks.getItemTitle(parent);
      // Create a positioning identifier for the separator, used by _mapDupe
      record.pos = PlacesUtils.bookmarks.getItemIndex(placeId);
      break;

    default:
      record = new PlacesItem(collection, id);
      this._log.warn("Unknown item type, cannot serialize: " +
        PlacesUtils.bookmarks.getItemType(placeId));
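    // Editorial note (not part of this patch): getTypeObject is assumed to map
    // a Sync record kind such as "bookmark", "query", "folder", "livemark", or
    // "separator" to the matching record constructor; unknown kinds fall back
    // to PlacesItem below.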
    let recordObj = getTypeObject(item.kind);
    if (!recordObj) {
      this._log.warn("Unknown item type, cannot serialize: " + item.kind);
      recordObj = PlacesItem;
    }
    let record = new recordObj(collection, id);
    record.fromSyncBookmark(item);

    record.parentid = this.GUIDForId(parent);
    record.sortindex = this._calculateIndex(record);

    return record;
@@ -1143,9 +1097,9 @@ BookmarksTracker.prototype = {
      PlacesUtils.bookmarks.setItemTitle(mobile[0], title, SOURCE_SYNC);
    }
    let rootTitle =
      PlacesUtils.bookmarks.getItemTitle(BookmarkSpecialIds.mobile);
      PlacesUtils.bookmarks.getItemTitle(PlacesUtils.mobileFolderId);
    if (rootTitle != title) {
      PlacesUtils.bookmarks.setItemTitle(BookmarkSpecialIds.mobile, title,
      PlacesUtils.bookmarks.setItemTitle(PlacesUtils.mobileFolderId, title,
                                         SOURCE_SYNC);
    }
  }

@@ -9,6 +9,7 @@ Cu.import("resource://services-sync/engines/bookmarks.js");
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource:///modules/PlacesUIUtils.jsm");

Service.engineManager.register(BookmarksEngine);
var engine = Service.engineManager.get("bookmarks");
@@ -66,6 +67,13 @@ function* verifyTrackedCount(expected) {
  equal(changes.count(), expected);
}

// Copied from PlacesSyncUtils.jsm.
function findAnnoItems(anno, val) {
  let annos = PlacesUtils.annotations;
  return annos.getItemsWithAnnotation(anno, {}).filter(id =>
    annos.getItemAnnotation(id, anno) == val);
}

add_task(function* test_tracking() {
  _("Test starting and stopping the tracker");

@@ -1433,6 +1441,64 @@ add_task(function* test_onItemDeleted_tree() {
  }
});

add_task(function* test_mobile_query() {
  _("Ensure we correctly create the mobile query");

  try {
    // Creates the organizer queries as a side effect.
    let leftPaneId = PlacesUIUtils.leftPaneFolderId;
    _(`Left pane root ID: ${leftPaneId}`);

    let allBookmarksIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "AllBookmarks");
    equal(allBookmarksIds.length, 1, "Should create folder with all bookmarks queries");
    let allBookmarkGuid = yield PlacesUtils.promiseItemGuid(allBookmarksIds[0]);

    _("Try creating query after organizer is ready");
    tracker._ensureMobileQuery();
    let queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks");
    equal(queryIds.length, 0, "Should not create query without any mobile bookmarks");

    _("Insert mobile bookmark, then create query");
    yield PlacesUtils.bookmarks.insert({
      parentGuid: PlacesUtils.bookmarks.mobileGuid,
      url: "https://mozilla.org",
    });
    tracker._ensureMobileQuery();
    queryIds = findAnnoItems("PlacesOrganizer/OrganizerQuery", "MobileBookmarks", {});
    equal(queryIds.length, 1, "Should create query once mobile bookmarks exist");

    let queryId = queryIds[0];
    let queryGuid = yield PlacesUtils.promiseItemGuid(queryId);

    let queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
    equal(queryInfo.url, `place:folder=${PlacesUtils.mobileFolderId}`, "Query should point to mobile root");
    equal(queryInfo.title, "Mobile Bookmarks", "Query title should be localized");
    equal(queryInfo.parentGuid, allBookmarkGuid, "Should append mobile query to all bookmarks queries");

    _("Rename root and query, then recreate");
    yield PlacesUtils.bookmarks.update({
      guid: PlacesUtils.bookmarks.mobileGuid,
      title: "renamed root",
    });
    yield PlacesUtils.bookmarks.update({
      guid: queryGuid,
      title: "renamed query",
    });
    tracker._ensureMobileQuery();
    let rootInfo = yield PlacesUtils.bookmarks.fetch(PlacesUtils.bookmarks.mobileGuid);
    equal(rootInfo.title, "Mobile Bookmarks", "Should fix root title");
    queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
    equal(queryInfo.title, "Mobile Bookmarks", "Should fix query title");

    _("We shouldn't track the query or the left pane root");
    yield verifyTrackedCount(0);
    do_check_eq(tracker.score, 0);
  } finally {
    _("Clean up.");
    yield cleanup();
  }
});

function run_test() {
  initTestLogging("Trace");

@@ -271,6 +271,7 @@ add_task(function *test_telemetry_integration() {
    equal(bme.validation.took, duration);
    bme.validation.problems.sort((a, b) => String.localeCompare(a.name, b.name));
    deepEqual(bme.validation.problems, [
      { name: "badClientRoots", count: 4 },
      { name: "sdiff:childGUIDs", count: 1 },
      { name: "serverMissing", count: 1 },
      { name: "structuralDifferences", count: 1 },

@@ -264,6 +264,83 @@ const BookmarkSyncUtils = PlacesSyncUtils.bookmarks = Object.freeze({
    let insertInfo = validateNewBookmark(info);
    return insertSyncBookmark(insertInfo);
  }),

  /**
   * Fetches a Sync bookmark object for an item in the tree. The object contains
   * the following properties, depending on the item's kind:
   *
   * - kind (all): A string representing the item's kind.
   * - syncId (all): The item's sync ID.
   * - parentSyncId (all): The sync ID of the item's parent.
   * - parentTitle (all): The title of the item's parent, used for de-duping.
   *   Omitted for the Places root and parents with empty titles.
   * - title ("bookmark", "folder", "livemark", "query"): The item's title.
   *   Omitted if empty.
   * - url ("bookmark", "query"): The item's URL.
   * - tags ("bookmark", "query"): An array containing the item's tags.
   * - keyword ("bookmark"): The bookmark's keyword, if one exists.
   * - description ("bookmark", "folder", "livemark"): The item's description.
   *   Omitted if one isn't set.
   * - loadInSidebar ("bookmark", "query"): Whether to load the bookmark in
   *   the sidebar. Always `false` for queries.
   * - feed ("livemark"): A `URL` object pointing to the livemark's feed URL.
   * - site ("livemark"): A `URL` object pointing to the livemark's site URL,
   *   or `null` if one isn't set.
   * - childSyncIds ("folder"): An array containing the sync IDs of the item's
   *   children, used to determine child order.
   * - folder ("query"): The tag folder name, if this is a tag query.
   * - query ("query"): The smart bookmark query name, if this is a smart
   *   bookmark.
   * - index ("separator"): The separator's position within its parent.
   */
  fetch: Task.async(function* (syncId) {
    let guid = BookmarkSyncUtils.syncIdToGuid(syncId);
    let bookmarkItem = yield PlacesUtils.bookmarks.fetch(guid);
    if (!bookmarkItem) {
      return null;
    }

    // Convert the Places bookmark object to a Sync bookmark and add
    // kind-specific properties.
    let kind = yield getKindForItem(bookmarkItem);
    let item;
    switch (kind) {
      case BookmarkSyncUtils.KINDS.BOOKMARK:
      case BookmarkSyncUtils.KINDS.MICROSUMMARY:
        item = yield fetchBookmarkItem(bookmarkItem);
        break;

      case BookmarkSyncUtils.KINDS.QUERY:
        item = yield fetchQueryItem(bookmarkItem);
        break;

      case BookmarkSyncUtils.KINDS.FOLDER:
        item = yield fetchFolderItem(bookmarkItem);
        break;

      case BookmarkSyncUtils.KINDS.LIVEMARK:
        item = yield fetchLivemarkItem(bookmarkItem);
        break;

      case BookmarkSyncUtils.KINDS.SEPARATOR:
        item = yield placesBookmarkToSyncBookmark(bookmarkItem);
        item.index = bookmarkItem.index;
        break;

      default:
        throw new Error(`Unknown bookmark kind: ${kind}`);
    }

    // Sync uses the parent title for de-duping.
    if (bookmarkItem.parentGuid) {
      let parent = yield PlacesUtils.bookmarks.fetch(bookmarkItem.parentGuid);
      if ("title" in parent) {
        item.parentTitle = parent.title;
      }
    }

    return item;
  }),
});

XPCOMUtils.defineLazyGetter(this, "BookmarkSyncLog", () => {
@@ -972,3 +1049,129 @@ function syncBookmarkToPlacesBookmark(info) {

  return bookmarkInfo;
}

// Creates and returns a Sync bookmark object containing the bookmark's
// tags, keyword, description, and whether it loads in the sidebar.
var fetchBookmarkItem = Task.async(function* (bookmarkItem) {
  let itemId = yield PlacesUtils.promiseItemId(bookmarkItem.guid);
  let item = yield placesBookmarkToSyncBookmark(bookmarkItem);

  item.tags = PlacesUtils.tagging.getTagsForURI(
    PlacesUtils.toURI(bookmarkItem.url), {});

  let keywordEntry = yield PlacesUtils.keywords.fetch({
    url: bookmarkItem.url,
  });
  if (keywordEntry) {
    item.keyword = keywordEntry.keyword;
  }

  let description = getItemDescription(itemId);
  if (description) {
    item.description = description;
  }

  item.loadInSidebar = PlacesUtils.annotations.itemHasAnnotation(itemId,
    BookmarkSyncUtils.SIDEBAR_ANNO);

  return item;
});

// Creates and returns a Sync bookmark object containing the folder's
// description and children.
var fetchFolderItem = Task.async(function* (bookmarkItem) {
  let itemId = yield PlacesUtils.promiseItemId(bookmarkItem.guid);
  let item = yield placesBookmarkToSyncBookmark(bookmarkItem);

  let description = getItemDescription(itemId);
  if (description) {
    item.description = description;
  }

  let db = yield PlacesUtils.promiseDBConnection();
  let children = yield fetchAllChildren(db, bookmarkItem.guid);
  item.childSyncIds = children.map(child =>
    BookmarkSyncUtils.guidToSyncId(child.guid)
  );

  return item;
});

// Creates and returns a Sync bookmark object containing the livemark's
// description, children (none), feed URI, and site URI.
var fetchLivemarkItem = Task.async(function* (bookmarkItem) {
  let itemId = yield PlacesUtils.promiseItemId(bookmarkItem.guid);
  let item = yield placesBookmarkToSyncBookmark(bookmarkItem);

  let description = getItemDescription(itemId);
  if (description) {
    item.description = description;
  }

  let feedAnno = PlacesUtils.annotations.getItemAnnotation(itemId,
    PlacesUtils.LMANNO_FEEDURI);
  item.feed = new URL(feedAnno);

  let siteAnno = null;
  try {
    siteAnno = PlacesUtils.annotations.getItemAnnotation(itemId,
      PlacesUtils.LMANNO_SITEURI);
  } catch (ex) {}
  if (siteAnno != null) {
    item.site = new URL(siteAnno);
  }

  return item;
});

// Creates and returns a Sync bookmark object containing the query's tag
// folder name and smart bookmark query ID.
var fetchQueryItem = Task.async(function* (bookmarkItem) {
  let itemId = yield PlacesUtils.promiseItemId(bookmarkItem.guid);
  let item = yield placesBookmarkToSyncBookmark(bookmarkItem);

  let description = getItemDescription(itemId);
  if (description) {
    item.description = description;
  }

  let folder = null;
  let params = new URLSearchParams(bookmarkItem.url.pathname);
  let tagFolderId = +params.get("folder");
  if (tagFolderId) {
    try {
      let tagFolderGuid = yield PlacesUtils.promiseItemGuid(tagFolderId);
      let tagFolder = yield PlacesUtils.bookmarks.fetch(tagFolderGuid);
      folder = tagFolder.title;
    } catch (ex) {
      BookmarkSyncLog.warn("fetchQueryItem: Query " + bookmarkItem.url.href +
        " points to nonexistent folder " + tagFolderId, ex);
    }
  }
  if (folder != null) {
    item.folder = folder;
  }

  let query = null;
  try {
    // Throws if the bookmark doesn't have the smart bookmark anno.
    query = PlacesUtils.annotations.getItemAnnotation(itemId,
      BookmarkSyncUtils.SMART_BOOKMARKS_ANNO);
  } catch (ex) {}
  if (query != null) {
    item.query = query;
  }

  return item;
});

// Returns an item's description, or `null` if one isn't set.
function getItemDescription(id) {
  try {
    return PlacesUtils.annotations.getItemAnnotation(id,
      BookmarkSyncUtils.DESCRIPTION_ANNO);
  } catch (ex) {}
  return null;
}
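
// Illustrative sketch (editorial, not part of this patch): based on the doc
// comment for `fetch` above and the tests below, a fetched "bookmark" item is
// expected to look roughly like the following; the values are made up.
//
//   {
//     kind: "bookmark",
//     syncId: "<sync id>",
//     parentSyncId: "menu",
//     parentTitle: "Bookmarks Menu",
//     url: new URL("https://example.com/"),
//     tags: ["taggy"],
//     keyword: "kw",
//     description: "Bookmark description",
//     loadInSidebar: false,
//   }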

@@ -343,13 +343,10 @@ add_task(function* test_update_annos() {
  let guids = yield populateTree(PlacesUtils.bookmarks.menuGuid, {
    kind: "folder",
    title: "folder",
    description: "Folder description",
  }, {
    kind: "bookmark",
    title: "bmk",
    url: "https://example.com",
    description: "Bookmark description",
    loadInSidebar: true,
  });

  do_print("Add folder description");
@@ -1004,3 +1001,145 @@ add_task(function* test_insert_orphans() {

  yield PlacesUtils.bookmarks.eraseEverything();
});

add_task(function* test_fetch() {
  let folder = yield PlacesSyncUtils.bookmarks.insert({
    syncId: makeGuid(),
    parentSyncId: "menu",
    kind: "folder",
    description: "Folder description",
  });
  let bmk = yield PlacesSyncUtils.bookmarks.insert({
    syncId: makeGuid(),
    parentSyncId: "menu",
    kind: "bookmark",
    url: "https://example.com",
    description: "Bookmark description",
    loadInSidebar: true,
    tags: ["taggy"],
  });
  let folderBmk = yield PlacesSyncUtils.bookmarks.insert({
    syncId: makeGuid(),
    parentSyncId: folder.syncId,
    kind: "bookmark",
    url: "https://example.org",
    keyword: "kw",
  });
  let folderSep = yield PlacesSyncUtils.bookmarks.insert({
    syncId: makeGuid(),
    parentSyncId: folder.syncId,
    kind: "separator",
  });
  let tagQuery = yield PlacesSyncUtils.bookmarks.insert({
    kind: "query",
    syncId: makeGuid(),
    parentSyncId: "toolbar",
    url: "place:type=7&folder=90",
    folder: "taggy",
    title: "Tagged stuff",
  });
  let [, tagFolderId] = /\bfolder=(\d+)\b/.exec(tagQuery.url.pathname);
  let smartBmk = yield PlacesSyncUtils.bookmarks.insert({
    kind: "query",
    syncId: makeGuid(),
    parentSyncId: "toolbar",
    url: "place:folder=TOOLBAR",
    query: "BookmarksToolbar",
    title: "Bookmarks toolbar query",
  });

  do_print("Fetch empty folder with description");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(folder.syncId);
    deepEqual(item, {
      syncId: folder.syncId,
      kind: "folder",
      parentSyncId: "menu",
      description: "Folder description",
      childSyncIds: [folderBmk.syncId, folderSep.syncId],
      parentTitle: "Bookmarks Menu",
    }, "Should include description, children, and parent title in folder");
  }

  do_print("Fetch bookmark with description, sidebar anno, and tags");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(bmk.syncId);
    deepEqual(Object.keys(item).sort(), ["syncId", "kind", "parentSyncId", "url",
      "tags", "description", "loadInSidebar", "parentTitle"].sort(),
      "Should include bookmark-specific properties");
    equal(item.syncId, bmk.syncId, "Sync ID should match");
    equal(item.url.href, "https://example.com/", "Should return URL");
    equal(item.parentSyncId, "menu", "Should return parent sync ID");
    deepEqual(item.tags, ["taggy"], "Should return tags");
    equal(item.description, "Bookmark description", "Should return bookmark description");
    strictEqual(item.loadInSidebar, true, "Should return sidebar anno");
    equal(item.parentTitle, "Bookmarks Menu", "Should return parent title");
  }

  do_print("Fetch bookmark with keyword; without parent title or annos");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(folderBmk.syncId);
    deepEqual(Object.keys(item).sort(), ["syncId", "kind", "parentSyncId",
      "url", "keyword", "tags", "loadInSidebar"].sort(),
      "Should omit blank bookmark-specific properties");
    strictEqual(item.loadInSidebar, false, "Should not load bookmark in sidebar");
    deepEqual(item.tags, [], "Tags should be empty");
    equal(item.keyword, "kw", "Should return keyword");
  }

  do_print("Fetch separator");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(folderSep.syncId);
    strictEqual(item.index, 1, "Should return separator position");
  }

  do_print("Fetch tag query");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(tagQuery.syncId);
    deepEqual(Object.keys(item).sort(), ["syncId", "kind", "parentSyncId",
      "url", "title", "folder", "parentTitle"].sort(),
      "Should include query-specific properties");
    equal(item.url.href, `place:type=7&folder=${tagFolderId}`, "Should not rewrite outgoing tag queries");
    equal(item.folder, "taggy", "Should return tag name for tag queries");
  }

  do_print("Fetch smart bookmark");
  {
    let item = yield PlacesSyncUtils.bookmarks.fetch(smartBmk.syncId);
    deepEqual(Object.keys(item).sort(), ["syncId", "kind", "parentSyncId",
      "url", "title", "query", "parentTitle"].sort(),
      "Should include smart bookmark-specific properties");
    equal(item.query, "BookmarksToolbar", "Should return query name for smart bookmarks");
  }

  yield PlacesUtils.bookmarks.eraseEverything();
});

add_task(function* test_fetch_livemark() {
  let { server, site, stopServer } = makeLivemarkServer();

  try {
    do_print("Create livemark");
    let livemark = yield PlacesUtils.livemarks.addLivemark({
      parentGuid: PlacesUtils.bookmarks.menuGuid,
      feedURI: uri(site + "/feed/1"),
      siteURI: uri(site),
      index: PlacesUtils.bookmarks.DEFAULT_INDEX,
    });
    PlacesUtils.annotations.setItemAnnotation(livemark.id, DESCRIPTION_ANNO,
      "Livemark description", 0, PlacesUtils.annotations.EXPIRE_NEVER);

    do_print("Fetch livemark");
    let item = yield PlacesSyncUtils.bookmarks.fetch(livemark.guid);
    deepEqual(Object.keys(item).sort(), ["syncId", "kind", "parentSyncId",
      "description", "feed", "site", "parentTitle"].sort(),
      "Should include livemark-specific properties");
    equal(item.description, "Livemark description", "Should return description");
    equal(item.feed.href, site + "/feed/1", "Should return feed URL");
    equal(item.site.href, site + "/", "Should return site URL");
  } finally {
    yield stopServer();
  }

  yield PlacesUtils.bookmarks.eraseEverything();
});

@@ -25,19 +25,9 @@ class BitBuffer {
 public:
  BitBuffer(const uint8_t* bytes, size_t byte_count);

  // Gets the current offset, in bytes/bits, from the start of the buffer. The
  // bit offset is the offset into the current byte, in the range [0,7].
  void GetCurrentOffset(size_t* out_byte_offset, size_t* out_bit_offset);

  // The remaining bits in the byte buffer.
  uint64_t RemainingBitCount() const;

  // Reads byte-sized values from the buffer. Returns false if there isn't
  // enough data left for the specified type.
  bool ReadUInt8(uint8_t* val);
  bool ReadUInt16(uint16_t* val);
  bool ReadUInt32(uint32_t* val);

  // Reads bit-sized values from the buffer. Returns false if there isn't enough
  // data left for the specified bit count.
  bool ReadBits(uint32_t* val, size_t bit_count);
@@ -56,22 +46,11 @@ class BitBuffer {
  // Returns false if there isn't enough data left for the specified type, or if
  // the value wouldn't fit in a uint32_t.
  bool ReadExponentialGolomb(uint32_t* val);
  // Reads signed exponential golomb values at the current offset. Signed
  // exponential golomb values are just the unsigned values mapped to the
  // sequence 0, 1, -1, 2, -2, etc. in order.
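  // (Editorial note, not in the original comment: the unsigned values
  // 0, 1, 2, 3, 4 therefore decode to 0, 1, -1, 2, -2.)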
  bool ReadSignedExponentialGolomb(int32_t* val);

  // Moves current position |byte_count| bytes forward. Returns false if
  // there aren't enough bytes left in the buffer.
  bool ConsumeBytes(size_t byte_count);
  // Moves current position |bit_count| bits forward. Returns false if
  // there aren't enough bits left in the buffer.
  bool ConsumeBits(size_t bit_count);

  // Sets the current offset to the provided byte/bit offsets. The bit
  // offset is from the given byte, in the range [0,7].
  bool Seek(size_t byte_offset, size_t bit_offset);

 protected:
  const uint8_t* const bytes_;
  // The total size of |bytes_|.
@@ -180,30 +159,6 @@ uint64_t BitBuffer::RemainingBitCount() const {
  return (static_cast<uint64_t>(byte_count_) - byte_offset_) * 8 - bit_offset_;
}

bool BitBuffer::ReadUInt8(uint8_t* val) {
  uint32_t bit_val;
  if (!ReadBits(&bit_val, sizeof(uint8_t) * 8)) {
    return false;
  }
  MOZ_ASSERT(bit_val <= std::numeric_limits<uint8_t>::max());
  *val = static_cast<uint8_t>(bit_val);
  return true;
}

bool BitBuffer::ReadUInt16(uint16_t* val) {
  uint32_t bit_val;
  if (!ReadBits(&bit_val, sizeof(uint16_t) * 8)) {
    return false;
  }
  MOZ_ASSERT(bit_val <= std::numeric_limits<uint16_t>::max());
  *val = static_cast<uint16_t>(bit_val);
  return true;
}

bool BitBuffer::ReadUInt32(uint32_t* val) {
  return ReadBits(val, sizeof(uint32_t) * 8);
}

bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) {
  if (!val || bit_count > RemainingBitCount() || bit_count > 32) {
    return false;
@@ -238,10 +193,6 @@ bool BitBuffer::ReadBits(uint32_t* val, size_t bit_count) {
  return PeekBits(val, bit_count) && ConsumeBits(bit_count);
}

bool BitBuffer::ConsumeBytes(size_t byte_count) {
  return ConsumeBits(byte_count * 8);
}

bool BitBuffer::ConsumeBits(size_t bit_count) {
  if (bit_count > RemainingBitCount()) {
    return false;
@@ -273,36 +224,4 @@ bool BitBuffer::ReadExponentialGolomb(uint32_t* val) {
  *val = one_bit_count;
  return true;
}

bool BitBuffer::ReadSignedExponentialGolomb(int32_t* val) {
  uint32_t unsigned_val;
  if (!ReadExponentialGolomb(&unsigned_val)) {
    return false;
  }
  if ((unsigned_val & 1) == 0) {
    *val = -static_cast<int32_t>(unsigned_val / 2);
  } else {
    *val = (unsigned_val + 1) / 2;
  }
  return true;
}

void BitBuffer::GetCurrentOffset(
    size_t* out_byte_offset, size_t* out_bit_offset) {
  MOZ_ASSERT(out_byte_offset != NULL);
  MOZ_ASSERT(out_bit_offset != NULL);
  *out_byte_offset = byte_offset_;
  *out_bit_offset = bit_offset_;
}

bool BitBuffer::Seek(size_t byte_offset, size_t bit_offset) {
  if (byte_offset > byte_count_ || bit_offset > 7 ||
      (byte_offset == byte_count_ && bit_offset > 0)) {
    return false;
  }
  byte_offset_ = byte_offset;
  bit_offset_ = bit_offset;
  return true;
}
}

@@ -196,8 +196,8 @@ PROT_ListManager.prototype.kickoffUpdate_ = function (onDiskTableData)
{
  this.startingUpdate_ = false;
  var initialUpdateDelay = 3000;
  // Add a fuzz of 0-5 minutes.
  initialUpdateDelay += Math.floor(Math.random() * (5 * 60 * 1000));
  // Add a fuzz of 0-1 minutes for both v2 and v4 according to Bug 1305478.
  initialUpdateDelay += Math.floor(Math.random() * (1 * 60 * 1000));

  // If the user has never downloaded tables, do the check now.
  log("needsUpdate: " + JSON.stringify(this.needsUpdate_, undefined, 2));

@@ -31,7 +31,7 @@
    const SAMPLE_TEXT = "Some text in a text field.";
    const SEARCH_TEXT = "Text Test";
    const NOT_FOUND_TEXT = "This text is not on the page."
    const ITERATOR_TIMEOUT = gPrefsvc.getIntPref("findbar.iteratorTimeout") + 20;
    const ITERATOR_TIMEOUT = gPrefsvc.getIntPref("findbar.iteratorTimeout");

    var gFindBar = null;
    var gBrowser;
@@ -255,7 +255,7 @@
        };
        gFindBar.browser.finder.addResultListener(listener);
        // Make sure we resolve _at least_ after five times the find iterator timeout.
        setTimeout(resolve, ITERATOR_TIMEOUT * 5);
        setTimeout(resolve, (ITERATOR_TIMEOUT * 5) + 20);
      });
    }

@@ -523,7 +523,7 @@
      if (matchCase.checked) {
        promise = promiseFindResult();
        matchCase.click();
        yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT));
        yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT + 20));
        yield promise;
      }

@@ -558,7 +558,13 @@
      gFindBar._findField.select();
      gFindBar._findField.focus();

      yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT));
      let timeout = ITERATOR_TIMEOUT;
      if (test.text.length == 1)
        timeout *= 4;
      else if (test.text.length == 2)
        timeout *= 2;
      timeout += 20;
      yield new Promise(resolve => setTimeout(resolve, timeout));
      yield enterStringIntoFindField(test.text, false);
      yield promiseMatchesCountResult();
      let matches = foundMatches.value.match(regex);
@@ -567,7 +573,7 @@
      } else {
        assertMatches(test, matches);
        for (let i = 1; i < test.total; i++) {
          yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT));
          yield new Promise(resolve => setTimeout(resolve, timeout));
          gFindBar.onFindAgainCommand();
          yield promiseMatchesCountResult();
          // test.current + 1, test.current + 2, ..., test.total, 1, ..., test.current
@@ -659,7 +665,7 @@
      let result = yield promise;
      is(result.result, Ci.nsITypeAheadFind.FIND_FOUND, "Text should be found");

      yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT));
      yield new Promise(resolve => setTimeout(resolve, ITERATOR_TIMEOUT + 20));
      promise = promiseFindResult();
      let check = gFindBar.getElement("find-entire-word");
      check.click();

@@ -170,6 +170,10 @@ this.FinderIterator = {
      clearTimeout(this._timer);
      this._timer = null;
    }
    if (this._runningFindResolver) {
      this._runningFindResolver();
      this._runningFindResolver = null;
    }

    if (cachePrevious) {
      this._previousRanges = [].concat(this.ranges);
@@ -220,6 +224,10 @@ this.FinderIterator = {
      clearTimeout(this._timer);
      this._timer = null;
    }
    if (this._runningFindResolver) {
      this._runningFindResolver();
      this._runningFindResolver = null;
    }

    this._catchingUp.clear();
    this._currentParams = this._previousParams = null;
@@ -421,8 +429,22 @@ this.FinderIterator = {
    if (this._timeout) {
      if (this._timer)
        clearTimeout(this._timer);
      yield new Promise(resolve => this._timer = setTimeout(resolve, this._timeout));
      this._timer = null;
      if (this._runningFindResolver)
        this._runningFindResolver();

      let timeout = this._timeout;
      let searchTerm = this._currentParams.word;
      // Wait a little longer when the first or second character is typed into
      // the findbar.
      if (searchTerm.length == 1)
        timeout *= 4;
      else if (searchTerm.length == 2)
        timeout *= 2;
      yield new Promise(resolve => {
        this._runningFindResolver = resolve;
        this._timer = setTimeout(resolve, timeout);
      });
      this._timer = this._runningFindResolver = null;
      // During the timeout, we could have gotten the signal to stop iterating.
      // Make sure we do here.
      if (!this.running || spawnId !== this._spawnId)

@@ -221,7 +221,12 @@ add_task(function* testModalResults() {
    yield promiseOpenFindbar(findbar);
    Assert.ok(!findbar.hidden, "Findbar should be open now.");

    yield new Promise(resolve => setTimeout(resolve, kIteratorTimeout));
    let timeout = kIteratorTimeout;
    if (word.length == 1)
      timeout *= 4;
    else if (word.length == 2)
      timeout *= 2;
    yield new Promise(resolve => setTimeout(resolve, timeout));
    let promise = promiseTestHighlighterOutput(browser, word, expectedResult,
      expectedResult.extraTest);
    yield promiseEnterStringIntoFindField(findbar, word);

@@ -574,6 +574,8 @@ HashNumber(const void* aKey)
  return PLHashNumber(NS_PTR_TO_INT32(aKey));
}

// This method uses MOZ_RELEASE_ASSERT in the unlikely event that
// somebody uses this in a non-debug build.
static intptr_t
GetSerialNumber(void* aPtr, bool aCreate)
{
@@ -581,15 +583,17 @@ GetSerialNumber(void* aPtr, bool aCreate)
                                            HashNumber(aPtr),
                                            aPtr);
  if (hep && *hep) {
    MOZ_RELEASE_ASSERT(!aCreate, "If an object already has a serial number, we should be destroying it.");
    return static_cast<SerialNumberRecord*>((*hep)->value)->serialNumber;
  } else if (aCreate) {
    SerialNumberRecord* record = new SerialNumberRecord();
    WalkTheStackSavingLocations(record->allocationStack);
    PL_HashTableRawAdd(gSerialNumbers, hep, HashNumber(aPtr),
                       aPtr, static_cast<void*>(record));
    return gNextSerialNumber;
  }
  return 0;

  MOZ_RELEASE_ASSERT(aCreate, "If an object does not have a serial number, we should be creating it.");

  SerialNumberRecord* record = new SerialNumberRecord();
  WalkTheStackSavingLocations(record->allocationStack);
  PL_HashTableRawAdd(gSerialNumbers, hep, HashNumber(aPtr),
                     aPtr, static_cast<void*>(record));
  return gNextSerialNumber;
}

static int32_t*