Mirror of https://github.com/mozilla/gecko-dev.git
Merge autoland to m-c a=merge
MozReview-Commit-ID: 4MCb0ASsMxe
This commit is contained in:
Commit 7149e44594
@@ -1518,6 +1518,8 @@ pref("toolkit.telemetry.shutdownPingSender.enabled", true);
pref("toolkit.telemetry.shutdownPingSender.enabledFirstSession", false);
// Enables sending the 'new-profile' ping on new profiles.
pref("toolkit.telemetry.newProfilePing.enabled", true);
// Enables sending 'update' pings on Firefox updates.
pref("toolkit.telemetry.updatePing.enabled", true);

// Telemetry experiments settings.
pref("experiments.enabled", true);
@@ -550,8 +550,9 @@ var BookmarkPropertiesPanel = {
_getTransactionsForURIList: function BPP__getTransactionsForURIList() {
var transactions = [];
for (let uri of this._URIs) {
// uri should be an object in the form { url, title }. Though add-ons
// uri should be an object in the form { uri, title }. Though add-ons
// could still use the legacy form, where it's an nsIURI.
// TODO: Remove This from v57 on.
let [_uri, _title] = uri instanceof Ci.nsIURI ?
[uri, this._getURITitleFromHistory(uri)] : [uri.uri, uri.title];
@@ -664,10 +665,12 @@ var BookmarkPropertiesPanel = {
itemGuid = await PlacesTransactions.NewLivemark(info).transact();
} else if (this._itemType == BOOKMARK_FOLDER) {
itemGuid = await PlacesTransactions.NewFolder(info).transact();
for (let uri of this._URIs) {
let placeInfo = await PlacesUtils.history.fetch(uri);
let title = placeInfo ? placeInfo.title : "";
await PlacesTransactions.transact({ parentGuid: itemGuid, uri, title });
// URIs is an array of objects in the form { uri, title }. It is still
// named URIs because for backwards compatibility it could also be an
// array of nsIURIs. TODO: Fix the property names from v57.
for (let { uri: url, title } of this._URIs) {
await PlacesTransactions.NewBookmark({ parentGuid: itemGuid, url, title })
.transact();
}
} else {
throw new Error(`unexpected value for _itemType: ${this._itemType}`);
@@ -29,6 +29,7 @@ support-files =
[browser_bookmarkProperties_addFolderDefaultButton.js]
[browser_bookmarkProperties_addKeywordForThisSearch.js]
[browser_bookmarkProperties_addLivemark.js]
[browser_bookmarkProperties_bookmarkAllTabs.js]
[browser_bookmarkProperties_editTagContainer.js]
[browser_bookmarkProperties_readOnlyRoot.js]
[browser_bookmarksProperties.js]
@@ -0,0 +1,44 @@
"use strict"

const TEST_URLS = [
"about:robots",
"about:mozilla",
];

add_task(async function() {
let tabs = [];
for (let url of TEST_URLS) {
tabs.push(await BrowserTestUtils.openNewForegroundTab(gBrowser, url));
}
registerCleanupFunction(async function() {
for (let tab of tabs) {
await BrowserTestUtils.removeTab(tab)
}
});

await withBookmarksDialog(true,
function open() {
document.getElementById("Browser:BookmarkAllTabs").doCommand();
},
async dialog => {
let acceptBtn = dialog.document.documentElement.getButton("accept");
ok(!acceptBtn.disabled, "Accept button is enabled");

let namepicker = dialog.document.getElementById("editBMPanel_namePicker");
Assert.ok(!namepicker.readOnly, "Name field is writable");
let folderName = dialog.document.getElementById("stringBundle").getString("bookmarkAllTabsDefault");
Assert.equal(namepicker.value, folderName, "Name field is correct.");

let promiseTitleChange = promiseBookmarksNotification(
"onItemChanged", (id, prop, isAnno, val) => prop == "title" && val == "folder");
fillBookmarkTextField("editBMPanel_namePicker", "folder", dialog);
await promiseTitleChange;
},
dialog => {
let savedItemId = dialog.gEditItemOverlay.itemId;
ok(savedItemId > 0, "Found the itemId");
return PlacesTestUtils.waitForNotification("onItemRemoved",
id => id === savedItemId);
}
);
});
@@ -50,18 +50,14 @@ add_task(async function test() {
await BrowserTestUtils.removeTab(tab);
});

async function getTransitionForUrl(url) {
// Ensure all the transactions completed.
await PlacesTestUtils.promiseAsyncUpdates();
let db = await PlacesUtils.promiseDBConnection();
let rows = await db.execute(`
SELECT visit_type
FROM moz_historyvisits
JOIN moz_places h ON place_id = h.id
WHERE url_hash = hash(:url) AND url = :url`,
{ url });
if (rows.length) {
return rows[0].getResultByName("visit_type");
}
return null;
function getTransitionForUrl(url) {
return PlacesUtils.withConnectionWrapper("browser_markPageAsFollowedLink", async db => {
let rows = await db.execute(`
SELECT visit_type
FROM moz_historyvisits
JOIN moz_places h ON place_id = h.id
WHERE url_hash = hash(:url) AND url = :url
`, { url });
return rows.length ? rows[0].getResultByName("visit_type") : null;
});
}
@@ -225,6 +225,11 @@ var gSearchResultsPane = {
this.removeAllSearchTooltips();
this.removeAllSearchMenuitemIndicators();

// Clear telemetry request if user types very frequently.
if (this.telemetryTimer) {
clearTimeout(this.telemetryTimer);
}

let srHeader = document.getElementById("header-searchResults");

if (this.query) {
@@ -274,6 +279,13 @@ var gSearchResultsPane = {
} else {
// Creating tooltips for all the instances found
this.listSearchTooltips.forEach((anchorNode) => this.createSearchTooltip(anchorNode, this.query));

// Implant search telemetry probe after user stops typing for a while
if (this.query.length >= 2) {
this.telemetryTimer = setTimeout(() => {
Services.telemetry.keyedScalarAdd("preferences.search_query", this.query, 1);
}, 1000);
}
}
} else {
document.getElementById("sorry-message").textContent = "";
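The two hunks above debounce the new preferences-search telemetry probe: every keystroke clears the pending timer, and the keyed scalar preferences.search_query is only incremented once the user has been idle for one second with a query of at least two characters. The following self-contained C++ sketch only illustrates that debounce decision (the Firefox code is JavaScript and uses setTimeout/clearTimeout; here the clock is passed in explicitly and all names are invented):

// Illustrative sketch, not part of the patch: debounce logic equivalent to the
// telemetryTimer handling above, with an explicit clock instead of setTimeout.
#include <cstdio>
#include <optional>
#include <string>

struct SearchTelemetry {
  std::optional<double> pendingSince;   // stands in for this.telemetryTimer
  std::string pendingQuery;

  void OnKeystroke(const std::string& query, double nowSeconds) {
    pendingSince.reset();               // clearTimeout(this.telemetryTimer)
    if (query.size() >= 2) {
      pendingSince = nowSeconds;        // setTimeout(..., 1000)
      pendingQuery = query;
    }
  }

  void Tick(double nowSeconds) {
    if (pendingSince && nowSeconds - *pendingSince >= 1.0) {
      std::printf("record preferences.search_query[%s] += 1\n",
                  pendingQuery.c_str());
      pendingSince.reset();
    }
  }
};

int main() {
  SearchTelemetry t;
  t.OnKeystroke("p", 0.0);    // too short, nothing scheduled
  t.OnKeystroke("pr", 0.2);   // probe scheduled
  t.OnKeystroke("pri", 0.6);  // previous probe cancelled, rescheduled
  t.Tick(1.0);                // only 0.4 s idle, nothing recorded
  t.Tick(1.7);                // 1.1 s idle, recorded exactly once
  return 0;
}

Rapid typing therefore produces at most one recorded query per pause, which is the point of clearing the timer at the top of the search handler.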
@@ -117,8 +117,8 @@ FormAutofillParent.prototype = {
}

case "formautofill-storage-changed": {
// Early exit if the action is not "add" nor "remove"
if (data != "add" && data != "remove") {
// Early exit if only metadata is changed
if (data == "notifyUsed") {
break;
}
@@ -47,22 +47,18 @@ add_task(async function test_activeStatus_observe() {
formAutofillParent.observe(null, "nsPref:changed", "extensions.formautofill.addresses.enabled");
do_check_eq(formAutofillParent._onStatusChanged.called, true);

// profile added => Need to trigger _onStatusChanged
formAutofillParent._computeStatus.returns(!formAutofillParent._active);
formAutofillParent._onStatusChanged.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", "add");
do_check_eq(formAutofillParent._onStatusChanged.called, true);
// profile changed => Need to trigger _onStatusChanged
["add", "update", "remove", "reconcile", "merge"].forEach(event => {
formAutofillParent._computeStatus.returns(!formAutofillParent._active);
formAutofillParent._onStatusChanged.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", event);
do_check_eq(formAutofillParent._onStatusChanged.called, true);
});

// profile removed => Need to trigger _onStatusChanged
// profile metadata updated => No need to trigger _onStatusChanged
formAutofillParent._computeStatus.returns(!formAutofillParent._active);
formAutofillParent._onStatusChanged.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", "remove");
do_check_eq(formAutofillParent._onStatusChanged.called, true);

// profile updated => no need to trigger _onStatusChanged
formAutofillParent._computeStatus.returns(!formAutofillParent._active);
formAutofillParent._onStatusChanged.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", "update");
formAutofillParent.observe(null, "formautofill-storage-changed", "notifyUsed");
do_check_eq(formAutofillParent._onStatusChanged.called, false);
});
@@ -24,18 +24,15 @@ add_task(async function test_profileSavedFieldNames_observe() {

await formAutofillParent.init();

// profile added => Need to trigger updateValidFields
formAutofillParent.observe(null, "formautofill-storage-changed", "add");
do_check_eq(formAutofillParent._updateSavedFieldNames.called, true);
// profile changed => Need to trigger updateValidFields
["add", "update", "remove", "reconcile", "merge"].forEach(event => {
formAutofillParent.observe(null, "formautofill-storage-changed", "add");
do_check_eq(formAutofillParent._updateSavedFieldNames.called, true);
});

// profile removed => Need to trigger updateValidFields
// profile metadata updated => no need to trigger updateValidFields
formAutofillParent._updateSavedFieldNames.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", "remove");
do_check_eq(formAutofillParent._updateSavedFieldNames.called, true);

// profile updated => no need to trigger updateValidFields
formAutofillParent._updateSavedFieldNames.reset();
formAutofillParent.observe(null, "formautofill-storage-changed", "update");
formAutofillParent.observe(null, "formautofill-storage-changed", "notifyUsed");
do_check_eq(formAutofillParent._updateSavedFieldNames.called, false);
});
@@ -19,6 +19,9 @@
}

:root:-moz-lwtheme-brighttext {

%ifndef MOZ_PHOTON_THEME

/* Chrome */
--chrome-background-color: #272b35;
--chrome-color: #F5F7FA;
@@ -37,12 +40,40 @@
--tab-selection-color: #f5f7fa;
--tab-selection-background-color: #5675B9;
--tab-selection-box-shadow: none;
%ifndef MOZ_PHOTON_THEME

--pinned-tab-glow: radial-gradient(22px at center calc(100% - 2px), rgba(76,158,217,0.9) 13%, rgba(0,0,0,0.4) 16%, transparent 70%);

%else

/* Chrome */
--chrome-background-color: hsl(240, 5%, 5%);
--chrome-color: #F5F7FA;
--chrome-secondary-background-color: hsl(240, 1%, 20%);
--chrome-navigator-toolbox-separator-color: hsla(240, 5%, 5%, .1);
--chrome-nav-bar-separator-color: rgba(0,0,0,.2);
--chrome-nav-buttons-background: hsla(240, 5%, 5%, .1);
--chrome-nav-buttons-hover-background: hsla(240, 5%, 5%, .15);
--chrome-nav-bar-controls-border-color: hsla(240, 5%, 5%, .3);
--chrome-selection-color: #fff;
--chrome-selection-background-color: #5675B9;

/* Tabs */
--tab-hover-background-color: hsla(240, 9%, 98%, .1);
--tab-selection-color: #f5f7fa;
--tab-selection-background-color: hsl(240, 1%, 20%);
--tab-selection-box-shadow: none;

--toolbarbutton-icon-fill-inverted: rgba(249, 249, 250, .7);

%endif

/* Url and search bars */
%ifndef MOZ_PHOTON_THEME
--url-and-searchbar-background-color: #171B1F;
%else
--url-and-searchbar-background-color: hsla(0, 0%, 100%, .1);
%endif
--urlbar-separator-color: #5F6670;
--urlbar-dropmarker-url: url("chrome://browser/skin/compacttheme/urlbar-history-dropmarker.svg");
--urlbar-dropmarker-region: rect(0px, 11px, 14px, 0px);
@@ -20,7 +20,7 @@ gyp_vars.update({
'use_official_google_api_keys': 0,
'have_clock_monotonic': 1 if CONFIG['HAVE_CLOCK_MONOTONIC'] else 0,
'have_ethtool_cmd_speed_hi': 1 if CONFIG['MOZ_WEBRTC_HAVE_ETHTOOL_SPEED_HI'] else 0,
'include_alsa_audio': 1 if CONFIG['MOZ_ALSA'] else 0,
'include_alsa_audio': 0,
'include_pulse_audio': 1 if CONFIG['MOZ_PULSEAUDIO'] else 0,
# basic stuff for everything
'include_internal_video_render': 0,
@@ -50,6 +50,11 @@ add_task(function* () {
yield onDataChanged;

checkScrubberIsAt(scrubberEl, timeHeaderEl, 0);

// Wait for promise of setCurrentTimes if setCurrentTimes is running.
if (panel.setCurrentTimeAllPromise) {
yield panel.setCurrentTimeAllPromise;
}
});

function* synthesizeInHeaderAndWaitForChange(timeline, x, y, type) {
@@ -246,6 +246,17 @@ TextComposition::DispatchCompositionEvent(
{
mWasCompositionStringEmpty = mString.IsEmpty();

// If this instance has requested to commit or cancel composition but
// is not synthesizing commit event, that means that the IME commits or
// cancels the composition asynchronously. Typically, iBus behaves so.
// Then, synthesized events which were dispatched immediately after
// the request has already committed our editor's composition string and
// told it to web apps. Therefore, we should ignore the delayed events.
if (mRequestedToCommitOrCancel && !aIsSynthesized) {
*aStatus = nsEventStatus_eConsumeNoDefault;
return;
}

// If the content is a container of TabParent, composition should be in the
// remote process.
if (mTabParent) {
@@ -294,17 +305,6 @@ TextComposition::DispatchCompositionEvent(
return;
}

// If this instance has requested to commit or cancel composition but
// is not synthesizing commit event, that means that the IME commits or
// cancels the composition asynchronously. Typically, iBus behaves so.
// Then, synthesized events which were dispatched immediately after
// the request has already committed our editor's composition string and
// told it to web apps. Therefore, we should ignore the delayed events.
if (mRequestedToCommitOrCancel && !aIsSynthesized) {
*aStatus = nsEventStatus_eConsumeNoDefault;
return;
}

// IME may commit composition with empty string for a commit request or
// with non-empty string for a cancel request. We should prevent such
// unexpected result. E.g., web apps may be confused if they implement
@@ -641,27 +641,29 @@ GamepadManager::VibrateHaptic(uint32_t aControllerIdx, uint32_t aHapticIndex,
double aIntensity, double aDuration,
nsIGlobalObject* aGlobal, ErrorResult& aRv)
{
const char* kGamepadHapticEnabledPref = "dom.gamepad.haptic_feedback.enabled";
RefPtr<Promise> promise = Promise::Create(aGlobal, aRv);
if (NS_WARN_IF(aRv.Failed())) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}

if (aControllerIdx >= VR_GAMEPAD_IDX_OFFSET) {
if (gfx::VRManagerChild::IsCreated()) {
const uint32_t index = aControllerIdx - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->AddPromise(mPromiseID, promise);
vm->SendVibrateHaptic(index, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
}
} else {
for (const auto& channelChild: mChannelChildren) {
channelChild->AddPromise(mPromiseID, promise);
channelChild->SendVibrateHaptic(aControllerIdx, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
if (Preferences::GetBool(kGamepadHapticEnabledPref)) {
if (aControllerIdx >= VR_GAMEPAD_IDX_OFFSET) {
if (gfx::VRManagerChild::IsCreated()) {
const uint32_t index = aControllerIdx - VR_GAMEPAD_IDX_OFFSET;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
vm->AddPromise(mPromiseID, promise);
vm->SendVibrateHaptic(index, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
}
} else {
for (const auto& channelChild: mChannelChildren) {
channelChild->AddPromise(mPromiseID, promise);
channelChild->SendVibrateHaptic(aControllerIdx, aHapticIndex,
aIntensity, aDuration,
mPromiseID);
}
}
}

@@ -672,6 +674,11 @@ GamepadManager::VibrateHaptic(uint32_t aControllerIdx, uint32_t aHapticIndex,
void
GamepadManager::StopHaptics()
{
const char* kGamepadHapticEnabledPref = "dom.gamepad.haptic_feedback.enabled";
if (!Preferences::GetBool(kGamepadHapticEnabledPref)) {
return;
}

for (auto iter = mGamepads.Iter(); !iter.Done(); iter.Next()) {
const uint32_t gamepadIndex = iter.UserData()->HashKey();
if (gamepadIndex >= VR_GAMEPAD_IDX_OFFSET) {
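Both VibrateHaptic and StopHaptics are now gated on the dom.gamepad.haptic_feedback.enabled pref, so the haptics path can be switched off at runtime without touching callers. A minimal sketch of that gating pattern, with a std::map standing in for Gecko's Preferences::GetBool (illustration only, not the Gecko API):

// Illustrative sketch, not part of the patch.
#include <cstdio>
#include <map>
#include <string>

static std::map<std::string, bool> gPrefs = {
  {"dom.gamepad.haptic_feedback.enabled", true},
};

// Stand-in for Preferences::GetBool().
static bool GetBoolPref(const std::string& aName) {
  auto it = gPrefs.find(aName);
  return it != gPrefs.end() && it->second;
}

static void StopHaptics() {
  if (!GetBoolPref("dom.gamepad.haptic_feedback.enabled")) {
    return;  // the whole API is pref-gated, early exit
  }
  std::puts("stopping haptic actuators");
}

int main() {
  StopHaptics();                                           // runs
  gPrefs["dom.gamepad.haptic_feedback.enabled"] = false;
  StopHaptics();                                           // silently gated off
  return 0;
}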
@@ -608,7 +608,8 @@ RemoteDataDecoder::ReturnDecodedData()
if (!mDecodePromise.IsEmpty()) {
mDecodePromise.Resolve(mDecodedData, __func__);
mDecodedData.Clear();
} else if (!mDrainPromise.IsEmpty()) {
} else if (!mDrainPromise.IsEmpty() &&
(!mDecodedData.IsEmpty() || mDrainStatus == DrainStatus::DRAINED)) {
mDrainPromise.Resolve(mDecodedData, __func__);
mDecodedData.Clear();
}
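The extra condition above keeps ReturnDecodedData from resolving a pending drain promise until there is decoded output to hand back or the decoder has reported DRAINED. A simplified sketch of that guard (plain fields and an assumed DrainStatus enum instead of the MozPromise machinery):

// Illustrative sketch, not part of the patch.
#include <cstdio>
#include <vector>

enum class DrainStatus { DRAINABLE, DRAINING, DRAINED };

struct Decoder {
  std::vector<int> mDecodedData;
  bool mDrainPending = false;
  DrainStatus mDrainStatus = DrainStatus::DRAINABLE;

  void ReturnDecodedData() {
    if (mDrainPending &&
        (!mDecodedData.empty() || mDrainStatus == DrainStatus::DRAINED)) {
      std::printf("resolving drain with %zu samples\n", mDecodedData.size());
      mDecodedData.clear();
      mDrainPending = false;
    } else {
      std::puts("drain stays pending: no data and not drained yet");
    }
  }
};

int main() {
  Decoder d;
  d.mDrainPending = true;
  d.ReturnDecodedData();                    // nothing yet, stays pending
  d.mDrainStatus = DrainStatus::DRAINED;
  d.ReturnDecodedData();                    // now resolves, possibly empty
  return 0;
}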
@@ -1721,7 +1721,7 @@ fuzzy(2,40000) == 722923-1.html 722923-1-ref.html
== 723484-1.html 723484-1-ref.html
random-if(Android) == 728983-1.html 728983-1-ref.html
== 729143-1.html 729143-1-ref.html
fails-if(styloVsGecko||stylo) == 731521-1.html 731521-1-ref.html
== 731521-1.html 731521-1-ref.html
needs-focus == 731726-1.html 731726-1-ref.html
== 735481-1.html 735481-1-ref.html
fuzzy-if(cocoaWidget,1,300000) fuzzy-if(skiaContent,2,300000) == 745934-1.html 745934-1-ref.html
@@ -26,7 +26,7 @@ RuleNodeCacheConditions::Matches(nsStyleContext* aStyleContext) const
return false;
}
if ((mBits & eHaveWritingMode) &&
(GetWritingMode() != WritingMode(aStyleContext).GetBits())) {
(mWritingMode != WritingMode(aStyleContext).GetBits())) {
return false;
}
return true;
@@ -46,7 +46,7 @@ RuleNodeCacheConditions::List() const
if (!first) {
printf(", ");
}
printf("WritingMode(0x%x)", GetWritingMode());
printf("WritingMode(0x%x)", mWritingMode);
}
printf(" }");
}
@@ -40,20 +40,32 @@ class RuleNodeCacheConditions
{
public:
RuleNodeCacheConditions()
: mFontSize(0), mBits(0) {}
: mFontSize(0)
, mBits(0)
, mWritingMode(0)
{}

RuleNodeCacheConditions(const RuleNodeCacheConditions& aOther)
: mFontSize(aOther.mFontSize), mBits(aOther.mBits) {}
: mFontSize(aOther.mFontSize)
, mBits(aOther.mBits)
, mWritingMode(aOther.mWritingMode)
{}

RuleNodeCacheConditions& operator=(const RuleNodeCacheConditions& aOther)
{
mFontSize = aOther.mFontSize;
mBits = aOther.mBits;
mWritingMode = aOther.mWritingMode;
return *this;
}

bool operator==(const RuleNodeCacheConditions& aOther) const
{
return mFontSize == aOther.mFontSize &&
mBits == aOther.mBits;
mBits == aOther.mBits &&
mWritingMode == aOther.mWritingMode;
}

bool operator!=(const RuleNodeCacheConditions& aOther) const
{
return !(*this == aOther);
@@ -85,9 +97,9 @@ public:
*/
void SetWritingModeDependency(uint8_t aWritingMode)
{
MOZ_ASSERT(!(mBits & eHaveWritingMode) || GetWritingMode() == aWritingMode);
mBits |= (static_cast<uint64_t>(aWritingMode) << eWritingModeShift) |
eHaveWritingMode;
MOZ_ASSERT(!(mBits & eHaveWritingMode) || mWritingMode == aWritingMode);
mWritingMode = aWritingMode;
mBits |= eHaveWritingMode;
}

void SetUncacheable()
@@ -107,15 +119,14 @@ public:

bool CacheableWithDependencies() const
{
return !(mBits & eUncacheable) &&
(mBits & eHaveBitsMask) != 0;
return Cacheable() && mBits;
}

bool CacheableWithoutDependencies() const
{
// We're not uncacheable and we have don't have a font-size or
// writing mode value.
return (mBits & eHaveBitsMask) == 0;
return mBits == 0;
}

#ifdef DEBUG
@@ -124,20 +135,11 @@ public:

private:
enum {
eUncacheable = 0x0001,
eHaveFontSize = 0x0002,
eHaveWritingMode = 0x0004,
eHaveBitsMask = 0x00ff,
eWritingModeMask = 0xff00,
eWritingModeShift = 8,
eUncacheable = 1 << 0,
eHaveFontSize = 1 << 1,
eHaveWritingMode = 1 << 2,
};

uint8_t GetWritingMode() const
{
return static_cast<uint8_t>(
(mBits & eWritingModeMask) >> eWritingModeShift);
}

// The font size from which em units are derived.
nscoord mFontSize;

@@ -145,10 +147,8 @@ private:
// bit 0: are we set to "uncacheable"?
// bit 1: do we have a font size value?
// bit 2: do we have a writing mode value?
// bits 3-7: unused
// bits 8-15: writing mode (uint8_t)
// bits 16-31: unused
uint32_t mBits;
uint8_t mBits;
uint8_t mWritingMode;
};

} // namespace mozilla
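The header change above stops packing the writing-mode byte into bits 8-15 of a 32-bit mBits and stores it in a dedicated uint8_t next to an 8-bit flag field, which is what lets CacheableWithoutDependencies() collapse to mBits == 0. A stand-alone sketch (not the Gecko class) contrasting the two layouts:

// Illustrative sketch, not part of the patch.
#include <cassert>
#include <cstdint>

struct PackedConditions {            // old layout: mode packed into mBits
  uint32_t mBits = 0;
  static constexpr uint32_t eHaveWritingMode = 0x0004;
  static constexpr uint32_t eWritingModeMask = 0xff00;
  static constexpr uint32_t eWritingModeShift = 8;

  void SetWritingMode(uint8_t aMode) {
    mBits |= (static_cast<uint32_t>(aMode) << eWritingModeShift) | eHaveWritingMode;
  }
  uint8_t GetWritingMode() const {
    return static_cast<uint8_t>((mBits & eWritingModeMask) >> eWritingModeShift);
  }
};

struct SplitConditions {             // new layout: separate byte, plain flags
  uint8_t mBits = 0;
  uint8_t mWritingMode = 0;
  static constexpr uint8_t eHaveWritingMode = 1 << 2;

  void SetWritingMode(uint8_t aMode) {
    mWritingMode = aMode;
    mBits |= eHaveWritingMode;
  }
  uint8_t GetWritingMode() const { return mWritingMode; }
};

int main() {
  PackedConditions packed;
  SplitConditions split;
  packed.SetWritingMode(3);
  split.SetWritingMode(3);
  assert(packed.GetWritingMode() == split.GetWritingMode());
  assert(split.mBits != 0);          // "has dependencies" is now just mBits != 0
  return 0;
}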
@@ -5,4 +5,4 @@ Makefile.in build files for the Mozilla build system.

The cubeb git repository is: git://github.com/kinetiknz/cubeb.git

The git commit ID used was a329c6a4184de3ccedb264cf2c3b0903fc506b94 (2017-07-14 15:35:51 +0300)
The git commit ID used was 09aeb59259487a2fbeeca1824f1f68f55b6787f4 (2017-07-22 09:54:44 +1200)
@@ -107,6 +107,21 @@ to_string(io_side side)
}
}

typedef uint32_t device_flags_value;

enum device_flags {
DEV_UKNOWN = 0x00, /* Unkown */
DEV_INPUT = 0x01, /* Record device like mic */
DEV_OUTPUT = 0x02, /* Playback device like speakers */
DEV_SYSTEM_DEFAULT = 0x04, /* System default device */
DEV_SELECTED_DEFAULT = 0x08, /* User selected to use the system default device */
};

struct device_info {
AudioDeviceID id = kAudioObjectUnknown;
device_flags_value flags = DEV_UKNOWN;
};

struct cubeb_stream {
explicit cubeb_stream(cubeb * context);

@@ -118,9 +133,8 @@ struct cubeb_stream {
/* Stream creation parameters */
cubeb_stream_params input_stream_params = { CUBEB_SAMPLE_FLOAT32NE, 0, 0, CUBEB_LAYOUT_UNDEFINED };
cubeb_stream_params output_stream_params = { CUBEB_SAMPLE_FLOAT32NE, 0, 0, CUBEB_LAYOUT_UNDEFINED };
bool is_default_input;
AudioDeviceID input_device = 0;
AudioDeviceID output_device = 0;
device_info input_device;
device_info output_device;
/* User pointer of data_callback */
void * user_ptr = nullptr;
/* Format descriptions */
@@ -554,75 +568,64 @@ audiounit_get_backend_id(cubeb * /* ctx */)
}

#if !TARGET_OS_IPHONE
static int
audiounit_get_output_device_id(AudioDeviceID * device_id)
{
UInt32 size;
OSStatus r;
AudioObjectPropertyAddress output_device_address = {
kAudioHardwarePropertyDefaultOutputDevice,
kAudioObjectPropertyScopeGlobal,
kAudioObjectPropertyElementMaster
};

size = sizeof(*device_id);

r = AudioObjectGetPropertyData(kAudioObjectSystemObject,
&output_device_address,
0,
NULL,
&size,
device_id);
if (r != noErr) {
LOG("output_device_id rv=%d", r);
return CUBEB_ERROR;
}

return CUBEB_OK;
}

static int
audiounit_get_input_device_id(AudioDeviceID * device_id)
{
UInt32 size;
OSStatus r;
AudioObjectPropertyAddress input_device_address = {
kAudioHardwarePropertyDefaultInputDevice,
kAudioObjectPropertyScopeGlobal,
kAudioObjectPropertyElementMaster
};

size = sizeof(*device_id);

r = AudioObjectGetPropertyData(kAudioObjectSystemObject,
&input_device_address,
0,
NULL,
&size,
device_id);
if (r != noErr) {
return CUBEB_ERROR;
}

return CUBEB_OK;
}

static int audiounit_stream_get_volume(cubeb_stream * stm, float * volume);
static int audiounit_stream_set_volume(cubeb_stream * stm, float volume);
static int audiounit_uninstall_device_changed_callback(cubeb_stream * stm);
static AudioObjectID audiounit_get_default_device_id(cubeb_device_type type);

static void
audiounit_set_device_info(cubeb_stream * stm, AudioDeviceID id, io_side side)
{
assert(stm);

device_info * info = nullptr;
cubeb_device_type type = CUBEB_DEVICE_TYPE_UNKNOWN;

if (side == INPUT) {
info = &stm->input_device;
type = CUBEB_DEVICE_TYPE_INPUT;
} else if (side == OUTPUT) {
info = &stm->output_device;
type = CUBEB_DEVICE_TYPE_OUTPUT;
}
memset(info, 0, sizeof(device_info));

if (side == INPUT) {
info->flags |= DEV_INPUT;
} else if (side == OUTPUT) {
info->flags |= DEV_OUTPUT;
}

AudioDeviceID default_device_id = audiounit_get_default_device_id(type);
if (id == kAudioObjectUnknown) {
info->id = default_device_id;
info->flags |= DEV_SELECTED_DEFAULT;
}

if (info->id == default_device_id) {
info->flags |= DEV_SYSTEM_DEFAULT;
}

assert(info->id);
assert(info->flags & DEV_INPUT && !(info->flags & DEV_OUTPUT) ||
!(info->flags & DEV_INPUT) && info->flags & DEV_OUTPUT);
}

static int
audiounit_reinit_stream(cubeb_stream * stm)
audiounit_reinit_stream(cubeb_stream * stm, device_flags_value flags)
{
auto_lock context_lock(stm->context->mutex);
assert(stm->input_unit || stm->output_unit);
assert((flags & DEV_INPUT && stm->input_unit) ||
(flags & DEV_OUTPUT && stm->output_unit));
if (!stm->shutdown) {
audiounit_stream_stop_internal(stm);
}

int r = audiounit_uninstall_device_changed_callback(stm);
if (r != CUBEB_OK) {
LOG("(%p) Could not uninstall the device changed callback", stm);
LOG("(%p) Could not uninstall all device change listeners.", stm);
}

{
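audiounit_set_device_info(), added in the hunk above, resolves which AudioDeviceID a stream side should use and records how the choice was made in the device_info flags: passing kAudioObjectUnknown means the user asked for the default (DEV_SELECTED_DEFAULT), and whenever the resolved id equals the current system default, DEV_SYSTEM_DEFAULT is set too. A self-contained sketch of that resolution for the output side, with the CoreAudio default-device lookup replaced by a parameter and flag names simplified:

// Illustrative sketch, not part of the patch.
#include <cassert>
#include <cstdint>

using AudioDeviceID = uint32_t;
constexpr AudioDeviceID kAudioObjectUnknown = 0;

enum device_flags : uint32_t {
  DEV_UNKNOWN          = 0x00,
  DEV_INPUT            = 0x01,
  DEV_OUTPUT           = 0x02,
  DEV_SYSTEM_DEFAULT   = 0x04,
  DEV_SELECTED_DEFAULT = 0x08,
};

struct device_info {
  AudioDeviceID id = kAudioObjectUnknown;
  uint32_t flags = DEV_UNKNOWN;
};

static device_info make_output_info(AudioDeviceID requested,
                                    AudioDeviceID system_default) {
  device_info info;
  info.flags |= DEV_OUTPUT;
  if (requested == kAudioObjectUnknown) {
    info.id = system_default;           // caller asked for "the default"
    info.flags |= DEV_SELECTED_DEFAULT;
  } else {
    info.id = requested;
  }
  if (info.id == system_default) {
    info.flags |= DEV_SYSTEM_DEFAULT;   // may also hold for an explicit id
  }
  return info;
}

int main() {
  assert(make_output_info(kAudioObjectUnknown, 42).flags ==
         (DEV_OUTPUT | DEV_SELECTED_DEFAULT | DEV_SYSTEM_DEFAULT));
  assert(make_output_info(42, 42).flags == (DEV_OUTPUT | DEV_SYSTEM_DEFAULT));
  assert(make_output_info(7, 42).flags == DEV_OUTPUT);
  return 0;
}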
@@ -635,6 +638,17 @@ audiounit_reinit_stream(cubeb_stream * stm)

audiounit_close_stream(stm);

/* Reinit occurs in 2 cases. When the device is not alive any more and when the
* default system device change. In both cases cubeb switch on the new default
* device. This is considered the most expected behavior for the user. */
if (flags & DEV_INPUT) {
audiounit_set_device_info(stm, 0, INPUT);
}
/* Always use the default output on reinit. This is not correct in every case
* but it is sufficient for Firefox and prevent reinit from reporting failures.
* It will change soon when reinit mechanism will be updated. */
audiounit_set_device_info(stm, 0, OUTPUT);

if (audiounit_setup_stream(stm) != CUBEB_OK) {
LOG("(%p) Stream reinit failed.", stm);
return CUBEB_ERROR;
@@ -663,6 +677,7 @@ audiounit_property_listener_callback(AudioObjectID /* id */, UInt32 address_coun
{
cubeb_stream * stm = (cubeb_stream*) user;
stm->switching_device = true;
device_flags_value switch_side = DEV_UKNOWN;

LOG("(%p) Audio device changed, %u events.", stm, (unsigned int) address_count);
for (UInt32 i = 0; i < address_count; i++) {
@@ -670,25 +685,25 @@ audiounit_property_listener_callback(AudioObjectID /* id */, UInt32 address_coun
case kAudioHardwarePropertyDefaultOutputDevice: {
LOG("Event[%u] - mSelector == kAudioHardwarePropertyDefaultOutputDevice", (unsigned int) i);
// Allow restart to choose the new default
stm->output_device = 0;
switch_side |= DEV_OUTPUT;
}
break;
case kAudioHardwarePropertyDefaultInputDevice: {
LOG("Event[%u] - mSelector == kAudioHardwarePropertyDefaultInputDevice", (unsigned int) i);
// Allow restart to choose the new default
stm->input_device = 0;
switch_side |= DEV_INPUT;
}
break;
case kAudioDevicePropertyDeviceIsAlive: {
LOG("Event[%u] - mSelector == kAudioDevicePropertyDeviceIsAlive", (unsigned int) i);
// If this is the default input device ignore the event,
// kAudioHardwarePropertyDefaultInputDevice will take care of the switch
if (stm->is_default_input) {
if (stm->input_device.flags & DEV_SYSTEM_DEFAULT) {
LOG("It's the default input device, ignore the event");
return noErr;
}
// Allow restart to choose the new default. Event register only for input.
stm->input_device = 0;
switch_side |= DEV_INPUT;
}
break;
case kAudioDevicePropertyDataSource: {
@@ -720,7 +735,7 @@ audiounit_property_listener_callback(AudioObjectID /* id */, UInt32 address_coun
// Use a new thread, through the queue, to avoid deadlock when calling
// Get/SetProperties method from inside notify callback
dispatch_async(stm->context->serial_queue, ^() {
if (audiounit_reinit_stream(stm) != CUBEB_OK) {
if (audiounit_reinit_stream(stm, switch_side) != CUBEB_OK) {
stm->state_callback(stm, stm->user_ptr, CUBEB_STATE_STOPPED);
LOG("(%p) Could not reopen the stream after switching.", stm);
}
@@ -758,66 +773,43 @@ audiounit_remove_listener(cubeb_stream * stm, AudioDeviceID id,
return AudioObjectRemovePropertyListener(id, &address, listener, stm);
}

static AudioObjectID audiounit_get_default_device_id(cubeb_device_type type);

static AudioObjectID
audiounit_get_input_device_id(cubeb_stream * stm)
{
AudioObjectID input_dev = stm->input_device ? stm->input_device :
audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_INPUT);
assert(input_dev);
return input_dev;
}

static int
audiounit_install_device_changed_callback(cubeb_stream * stm)
{
OSStatus r;
OSStatus rv;
int r = CUBEB_OK;

if (stm->output_unit) {
/* This event will notify us when the data source on the same device changes,
* for example when the user plugs in a normal (non-usb) headset in the
* headphone jack. */
AudioDeviceID output_dev_id;
r = audiounit_get_output_device_id(&output_dev_id);
if (r != noErr) {
return CUBEB_ERROR;
}

r = audiounit_add_listener(stm, output_dev_id, kAudioDevicePropertyDataSource,
rv = audiounit_add_listener(stm, stm->output_device.id, kAudioDevicePropertyDataSource,
kAudioDevicePropertyScopeOutput, &audiounit_property_listener_callback);
if (r != noErr) {
LOG("AudioObjectAddPropertyListener/output/kAudioDevicePropertyDataSource rv=%d", r);
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectAddPropertyListener/output/kAudioDevicePropertyDataSource rv=%d, device id=%d", rv, stm->output_device.id);
r = CUBEB_ERROR;
}
}

if (stm->input_unit) {
/* This event will notify us when the data source on the input device changes. */
AudioDeviceID input_dev_id;
r = audiounit_get_input_device_id(&input_dev_id);
if (r != noErr) {
return CUBEB_ERROR;
}

r = audiounit_add_listener(stm, input_dev_id, kAudioDevicePropertyDataSource,
rv = audiounit_add_listener(stm, stm->input_device.id, kAudioDevicePropertyDataSource,
kAudioDevicePropertyScopeInput, &audiounit_property_listener_callback);
if (r != noErr) {
LOG("AudioObjectAddPropertyListener/input/kAudioDevicePropertyDataSource rv=%d", r);
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectAddPropertyListener/input/kAudioDevicePropertyDataSource rv=%d, device id=%d", rv, stm->input_device.id);
r = CUBEB_ERROR;
}

/* Event to notify when the input is going away. */
AudioDeviceID dev = audiounit_get_input_device_id(stm);
r = audiounit_add_listener(stm, dev, kAudioDevicePropertyDeviceIsAlive,
rv = audiounit_add_listener(stm, stm->input_device.id, kAudioDevicePropertyDeviceIsAlive,
kAudioObjectPropertyScopeGlobal, &audiounit_property_listener_callback);
if (r != noErr) {
LOG("AudioObjectAddPropertyListener/input/kAudioDevicePropertyDeviceIsAlive rv=%d", r);
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectAddPropertyListener/input/kAudioDevicePropertyDeviceIsAlive rv=%d, device id =%d", rv, stm->input_device.id);
r = CUBEB_ERROR;
}
}

return CUBEB_OK;
return r;
}

static int
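In audiounit_install_device_changed_callback() above, a failing AudioObjectAddPropertyListener call no longer aborts the whole installation: each listener is attempted, failures are logged together with the device id, and one aggregated status is returned at the end. A small sketch of that try-them-all pattern with the CoreAudio call stubbed out (names are illustrative):

// Illustrative sketch, not part of the patch.
#include <cstdio>
#include <cstring>

constexpr int CUBEB_OK = 0;
constexpr int CUBEB_ERROR = -1;

// Stand-in for audiounit_add_listener()/AudioObjectAddPropertyListener();
// pretend the DeviceIsAlive listener fails for the demo.
static int add_listener(const char* what) {
  bool failed = std::strstr(what, "DeviceIsAlive") != nullptr;
  if (failed) {
    std::printf("add_listener(%s) failed\n", what);
  }
  return failed ? -1 : 0;
}

static int install_device_changed_callbacks() {
  int r = CUBEB_OK;                 // aggregate result, no early return
  if (add_listener("output/kAudioDevicePropertyDataSource") != 0) {
    r = CUBEB_ERROR;
  }
  if (add_listener("input/kAudioDevicePropertyDataSource") != 0) {
    r = CUBEB_ERROR;
  }
  if (add_listener("input/kAudioDevicePropertyDeviceIsAlive") != 0) {
    r = CUBEB_ERROR;
  }
  return r;                         // callers log the failure but keep going
}

int main() {
  std::printf("aggregate result: %d\n", install_device_changed_callbacks());
  return 0;
}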
@@ -854,44 +846,35 @@ audiounit_install_system_changed_callback(cubeb_stream * stm)
static int
audiounit_uninstall_device_changed_callback(cubeb_stream * stm)
{
OSStatus r;
OSStatus rv;
// Failing to uninstall listeners is not a fatal error.
int r = CUBEB_OK;

if (stm->output_unit) {
AudioDeviceID output_dev_id;
r = audiounit_get_output_device_id(&output_dev_id);
if (r != noErr) {
return CUBEB_ERROR;
}

r = audiounit_remove_listener(stm, output_dev_id, kAudioDevicePropertyDataSource,
rv = audiounit_remove_listener(stm, stm->output_device.id, kAudioDevicePropertyDataSource,
kAudioDevicePropertyScopeOutput, &audiounit_property_listener_callback);
if (r != noErr) {
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectRemovePropertyListener/output/kAudioDevicePropertyDataSource rv=%d, device id=%d", rv, stm->output_device.id);
r = CUBEB_ERROR;
}
}

if (stm->input_unit) {
AudioDeviceID input_dev_id;
r = audiounit_get_input_device_id(&input_dev_id);
if (r != noErr) {
return CUBEB_ERROR;
}

r = audiounit_remove_listener(stm, input_dev_id, kAudioDevicePropertyDataSource,
rv = audiounit_remove_listener(stm, stm->input_device.id, kAudioDevicePropertyDataSource,
kAudioDevicePropertyScopeInput, &audiounit_property_listener_callback);
if (r != noErr) {
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectRemovePropertyListener/input/kAudioDevicePropertyDataSource rv=%d, device id=%d", rv, stm->input_device.id);
r = CUBEB_ERROR;
}

AudioDeviceID dev = audiounit_get_input_device_id(stm);
r = audiounit_remove_listener(stm, dev, kAudioDevicePropertyDeviceIsAlive,
rv = audiounit_remove_listener(stm, stm->input_device.id, kAudioDevicePropertyDeviceIsAlive,
kAudioObjectPropertyScopeGlobal, &audiounit_property_listener_callback);
if (r != noErr) {
LOG("AudioObjectRemovePropertyListener/input/kAudioDevicePropertyDeviceIsAlive rv=%d", r);
return CUBEB_ERROR;
if (rv != noErr) {
LOG("AudioObjectRemovePropertyListener/input/kAudioDevicePropertyDeviceIsAlive rv=%d, device id=%d", rv, stm->input_device.id);
r = CUBEB_ERROR;
}
}
return CUBEB_OK;
return r;
}

static int

@@ -930,7 +913,8 @@ audiounit_get_acceptable_latency_range(AudioValueRange * latency_range)
kAudioObjectPropertyElementMaster
};

if (audiounit_get_output_device_id(&output_device_id) != CUBEB_OK) {
output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (output_device_id == kAudioObjectUnknown) {
LOG("Could not get default output device id.");
return CUBEB_ERROR;
}
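The hunks in this area all apply the same API simplification: instead of an out-parameter plus a CUBEB_OK/CUBEB_ERROR status from audiounit_get_output_device_id(), callers now use audiounit_get_default_device_id(), which returns the id directly and signals failure with the kAudioObjectUnknown sentinel. A sketch of the resulting call shape, with the CoreAudio lookup stubbed out:

// Illustrative sketch, not part of the patch.
#include <cstdint>
#include <cstdio>

using AudioObjectID = uint32_t;
constexpr AudioObjectID kAudioObjectUnknown = 0;
enum cubeb_device_type { CUBEB_DEVICE_TYPE_INPUT, CUBEB_DEVICE_TYPE_OUTPUT };

// Stand-in for audiounit_get_default_device_id(); the real code asks
// AudioObjectGetPropertyData for the kAudioHardwarePropertyDefault*Device.
static AudioObjectID get_default_device_id(cubeb_device_type type) {
  return type == CUBEB_DEVICE_TYPE_OUTPUT ? 42 : kAudioObjectUnknown;
}

static int query_output_latency() {
  AudioObjectID output_device_id =
      get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
  if (output_device_id == kAudioObjectUnknown) {
    std::puts("Could not get default output device id.");
    return -1;  // CUBEB_ERROR in the real code
  }
  std::printf("querying latency on device %u\n", (unsigned) output_device_id);
  return 0;     // CUBEB_OK in the real code
}

int main() { return query_output_latency(); }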
@@ -995,7 +979,8 @@ audiounit_get_max_channel_count(cubeb * ctx, uint32_t * max_channels)

assert(ctx && max_channels);

if (audiounit_get_output_device_id(&output_device_id) != CUBEB_OK) {
output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (output_device_id == kAudioObjectUnknown) {
return CUBEB_ERROR;
}

@@ -1056,7 +1041,8 @@ audiounit_get_preferred_sample_rate(cubeb * /* ctx */, uint32_t * rate)
kAudioObjectPropertyElementMaster
};

if (audiounit_get_output_device_id(&output_device_id) != CUBEB_OK) {
output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (output_device_id == kAudioObjectUnknown) {
return CUBEB_ERROR;
}

@@ -1143,7 +1129,8 @@ audiounit_get_preferred_channel_layout()
UInt32 size = 0;
AudioDeviceID id;

if (audiounit_get_output_device_id(&id) != CUBEB_OK) {
id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (id == kAudioObjectUnknown) {
return CUBEB_LAYOUT_UNDEFINED;
}

@@ -1165,7 +1152,7 @@ audiounit_get_preferred_channel_layout()
return audiounit_convert_channel_layout(layout.get());
}

static int audiounit_create_unit(AudioUnit * unit, io_side side, AudioDeviceID device);
static int audiounit_create_unit(AudioUnit * unit, device_info * device);

static int
audiounit_get_preferred_channel_layout(cubeb * ctx, cubeb_channel_layout * layout)
@@ -1189,8 +1176,13 @@ audiounit_get_preferred_channel_layout(cubeb * ctx, cubeb_channel_layout * layou
// If there is no existed stream, then we create a default ouput unit and
// use it to get the current used channel layout.
AudioUnit output_unit = nullptr;
audiounit_create_unit(&output_unit, OUTPUT, 0);
*layout = audiounit_get_current_channel_layout(output_unit);
device_info default_out_device;
default_out_device.id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
default_out_device.flags = (DEV_OUTPUT | DEV_SYSTEM_DEFAULT);
if (default_out_device.id != kAudioObjectUnknown) {
audiounit_create_unit(&output_unit, &default_out_device);
*layout = audiounit_get_current_channel_layout(output_unit);
}
}

if (*layout == CUBEB_LAYOUT_UNDEFINED) {
@@ -1639,7 +1631,7 @@ audiounit_activate_clock_drift_compensation(const AudioDeviceID aggregate_device
return CUBEB_OK;
}

static int audiounit_destroy_aggregate_device(AudioObjectID plugin_id, AudioDeviceID aggregate_device_id);
static int audiounit_destroy_aggregate_device(AudioObjectID plugin_id, AudioDeviceID * aggregate_device_id);

/*
* Aggregate Device is a virtual audio interface which utilizes inputs and outputs
@@ -1666,30 +1658,28 @@ audiounit_create_aggregate_device(cubeb_stream * stm)
int r = audiounit_create_blank_aggregate_device(&stm->plugin_id, &stm->aggregate_device_id);
if (r != CUBEB_OK) {
LOG("(%p) Failed to create blank aggregate device", stm);
audiounit_destroy_aggregate_device(stm->plugin_id, stm->aggregate_device_id);
audiounit_destroy_aggregate_device(stm->plugin_id, &stm->aggregate_device_id);
return CUBEB_ERROR;
}

AudioDeviceID input_device_id = audiounit_get_input_device_id(stm);
AudioDeviceID output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
r = audiounit_set_aggregate_sub_device_list(stm->aggregate_device_id, input_device_id, output_device_id);
r = audiounit_set_aggregate_sub_device_list(stm->aggregate_device_id, stm->input_device.id, stm->output_device.id);
if (r != CUBEB_OK) {
LOG("(%p) Failed to set aggregate sub-device list", stm);
audiounit_destroy_aggregate_device(stm->plugin_id, stm->aggregate_device_id);
audiounit_destroy_aggregate_device(stm->plugin_id, &stm->aggregate_device_id);
return CUBEB_ERROR;
}

r = audiounit_set_master_aggregate_device(stm->aggregate_device_id);
if (r != CUBEB_OK) {
LOG("(%p) Failed to set master sub-device for aggregate device", stm);
audiounit_destroy_aggregate_device(stm->plugin_id, stm->aggregate_device_id);
audiounit_destroy_aggregate_device(stm->plugin_id, &stm->aggregate_device_id);
return CUBEB_ERROR;
}

r = audiounit_activate_clock_drift_compensation(stm->aggregate_device_id);
if (r != CUBEB_OK) {
LOG("(%p) Failed to activate clock drift compensation for aggregate device", stm);
audiounit_destroy_aggregate_device(stm->plugin_id, stm->aggregate_device_id);
audiounit_destroy_aggregate_device(stm->plugin_id, &stm->aggregate_device_id);
return CUBEB_ERROR;
}

@@ -1697,7 +1687,7 @@ audiounit_create_aggregate_device(cubeb_stream * stm)
}

static int
audiounit_destroy_aggregate_device(AudioObjectID plugin_id, AudioDeviceID aggregate_device_id)
audiounit_destroy_aggregate_device(AudioObjectID plugin_id, AudioDeviceID * aggregate_device_id)
{
AudioObjectPropertyAddress destroy_aggregate_device_addr = { kAudioPlugInDestroyAggregateDevice,
kAudioObjectPropertyScopeGlobal,
@@ -1718,17 +1708,19 @@ audiounit_destroy_aggregate_device(AudioObjectID plugin_id, AudioDeviceID aggreg
0,
NULL,
&size,
&aggregate_device_id);
aggregate_device_id);
if (rv != noErr) {
LOG("AudioObjectGetPropertyData/kAudioPlugInDestroyAggregateDevice, rv=%d", rv);
return CUBEB_ERROR;
}

LOG("Destroyed aggregate device %d", *aggregate_device_id);
*aggregate_device_id = 0;
return CUBEB_OK;
}

static int
audiounit_new_unit_instance(AudioUnit * unit, io_side side, AudioDeviceID device)
audiounit_new_unit_instance(AudioUnit * unit, device_info * device)
{
AudioComponentDescription desc;
AudioComponent comp;
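Changing audiounit_destroy_aggregate_device() to take AudioDeviceID * lets it hand the caller's storage straight to AudioObjectGetPropertyData and then zero it, so a destroyed aggregate id is not reused (audiounit_close_stream() also clears its copy). A minimal sketch of that ownership detail with the CoreAudio call stubbed out:

// Illustrative sketch, not part of the patch.
#include <cstdint>
#include <cstdio>

using AudioDeviceID = uint32_t;
constexpr int CUBEB_OK = 0;
constexpr int CUBEB_ERROR = -1;

// Stand-in for the kAudioPlugInDestroyAggregateDevice property call.
static bool plugin_destroy(AudioDeviceID id) {
  std::printf("destroying aggregate device %u\n", (unsigned) id);
  return true;
}

static int destroy_aggregate(AudioDeviceID * aggregate_device_id) {
  if (*aggregate_device_id == 0) {
    return CUBEB_OK;              // nothing to do, already cleared
  }
  if (!plugin_destroy(*aggregate_device_id)) {
    return CUBEB_ERROR;
  }
  *aggregate_device_id = 0;       // clear the caller's copy after success
  return CUBEB_OK;
}

int main() {
  AudioDeviceID aggregate = 123;    // e.g. stm->aggregate_device_id
  destroy_aggregate(&aggregate);    // destroys and zeroes
  destroy_aggregate(&aggregate);    // second call is a safe no-op
  return 0;
}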
@@ -1736,15 +1728,14 @@ audiounit_new_unit_instance(AudioUnit * unit, io_side side, AudioDeviceID device

desc.componentType = kAudioUnitType_Output;
#if TARGET_OS_IPHONE
bool use_default_output = false;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
#else
// Use the DefaultOutputUnit for output when no device is specified
// so we retain automatic output device switching when the default
// changes. Once we have complete support for device notifications
// and switching, we can use the AUHAL for everything.
bool use_default_output = device == 0 && (side == OUTPUT);
if (use_default_output) {
if ((device->flags & DEV_SYSTEM_DEFAULT)
&& (device->flags & DEV_OUTPUT)) {
desc.componentSubType = kAudioUnitSubType_DefaultOutput;
} else {
desc.componentSubType = kAudioUnitSubType_HALOutput;
@@ -1790,71 +1781,60 @@ audiounit_enable_unit_scope(AudioUnit * unit, io_side side, enable_state state)
}

static int
audiounit_create_unit(AudioUnit * unit, io_side side, AudioDeviceID device)
audiounit_create_unit(AudioUnit * unit, device_info * device)
{
AudioDeviceID devid;
assert(*unit == nullptr);
assert(device);

OSStatus rv;
int r;

assert(*unit == nullptr);
r = audiounit_new_unit_instance(unit, side, device);
r = audiounit_new_unit_instance(unit, device);
if (r != CUBEB_OK) {
return r;
}
assert(*unit);

#if TARGET_OS_IPHONE
bool use_default_output = false;
#else
bool use_default_output = device == 0 && (side == OUTPUT);
#endif
if ((device->flags & DEV_SYSTEM_DEFAULT)
&& (device->flags & DEV_OUTPUT)) {
return CUBEB_OK;
}

if (!use_default_output) {
switch (side) {
case INPUT:
r = audiounit_enable_unit_scope(unit, INPUT, ENABLE);
if (r != CUBEB_OK) {
LOG("Failed to enable audiounit input scope ");
return r;
}
r = audiounit_enable_unit_scope(unit, OUTPUT, DISABLE);
if (r != CUBEB_OK) {
LOG("Failed to disable audiounit output scope ");
return r;
}
break;
case OUTPUT:
r = audiounit_enable_unit_scope(unit, OUTPUT, ENABLE);
if (r != CUBEB_OK) {
LOG("Failed to enable audiounit output scope ");
return r;
}
r = audiounit_enable_unit_scope(unit, INPUT, DISABLE);
if (r != CUBEB_OK) {
LOG("Failed to disable audiounit input scope ");
return r;
}
break;
default:
assert(false);
}

if (device == 0) {
assert(side == INPUT);
devid = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_INPUT);
} else {
devid = device;
if (device->flags & DEV_INPUT) {
r = audiounit_enable_unit_scope(unit, INPUT, ENABLE);
if (r != CUBEB_OK) {
LOG("Failed to enable audiounit input scope ");
return r;
}
r = audiounit_enable_unit_scope(unit, OUTPUT, DISABLE);
if (r != CUBEB_OK) {
LOG("Failed to disable audiounit output scope ");
return r;
}
} else if (device->flags & DEV_OUTPUT) {
r = audiounit_enable_unit_scope(unit, OUTPUT, ENABLE);
if (r != CUBEB_OK) {
LOG("Failed to enable audiounit output scope ");
return r;
}
r = audiounit_enable_unit_scope(unit, INPUT, DISABLE);
if (r != CUBEB_OK) {
LOG("Failed to disable audiounit input scope ");
return r;
}
} else {
assert(false);
}

rv = AudioUnitSetProperty(*unit,
kAudioOutputUnitProperty_CurrentDevice,
kAudioUnitScope_Global,
0,
&devid, sizeof(AudioDeviceID));
if (rv != noErr) {
LOG("AudioUnitSetProperty/kAudioOutputUnitProperty_CurrentDevice rv=%d", rv);
return CUBEB_ERROR;
}
rv = AudioUnitSetProperty(*unit,
kAudioOutputUnitProperty_CurrentDevice,
kAudioUnitScope_Global,
0,
&device->id, sizeof(AudioDeviceID));
if (rv != noErr) {
LOG("AudioUnitSetProperty/kAudioOutputUnitProperty_CurrentDevice rv=%d", rv);
return CUBEB_ERROR;
}

return CUBEB_OK;
@@ -2278,8 +2258,9 @@ audiounit_setup_stream(cubeb_stream * stm)

int r = 0;

AudioDeviceID in_dev = stm->input_device;
AudioDeviceID out_dev = stm->output_device;
device_info in_dev_info = stm->input_device;
device_info out_dev_info = stm->output_device;

if (has_input(stm) && has_output(stm)) {
r = audiounit_create_aggregate_device(stm);
if (r != CUBEB_OK) {
@@ -2291,14 +2272,14 @@ audiounit_setup_stream(cubeb_stream * stm)
// it after a couple of weeks.
return r;
} else {
in_dev = out_dev = stm->aggregate_device_id;
in_dev_info.id = out_dev_info.id = stm->aggregate_device_id;
in_dev_info.flags = DEV_INPUT;
out_dev_info.flags = DEV_OUTPUT;
}
}

if (has_input(stm)) {
r = audiounit_create_unit(&stm->input_unit,
INPUT,
in_dev);
r = audiounit_create_unit(&stm->input_unit, &in_dev_info);
if (r != CUBEB_OK) {
LOG("(%p) AudioUnit creation for input failed.", stm);
return r;
@@ -2306,9 +2287,7 @@ audiounit_setup_stream(cubeb_stream * stm)
}

if (has_output(stm)) {
r = audiounit_create_unit(&stm->output_unit,
OUTPUT,
out_dev);
r = audiounit_create_unit(&stm->output_unit, &out_dev_info);
if (r != CUBEB_OK) {
LOG("(%p) AudioUnit creation for output failed.", stm);
return r;
@@ -2446,8 +2425,7 @@ audiounit_setup_stream(cubeb_stream * stm)

r = audiounit_install_device_changed_callback(stm);
if (r != CUBEB_OK) {
LOG("(%p) Could not install the device change callback.", stm);
return r;
LOG("(%p) Could not install all device change callback.", stm);
}

@@ -2497,13 +2475,11 @@ audiounit_stream_init(cubeb * context,
stm->latency_frames = latency_frames;
if (input_stream_params) {
stm->input_stream_params = *input_stream_params;
stm->input_device = reinterpret_cast<uintptr_t>(input_device);
stm->is_default_input = stm->input_device == 0 ||
(audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_INPUT) == stm->input_device);
audiounit_set_device_info(stm.get(), reinterpret_cast<uintptr_t>(input_device), INPUT);
}
if (output_stream_params) {
stm->output_stream_params = *output_stream_params;
stm->output_device = reinterpret_cast<uintptr_t>(output_device);
audiounit_set_device_info(stm.get(), reinterpret_cast<uintptr_t>(output_device), OUTPUT);
}

auto_lock context_lock(context->mutex);
@@ -2555,7 +2531,7 @@ audiounit_close_stream(cubeb_stream *stm)
stm->mixer.reset();

if (stm->aggregate_device_id) {
audiounit_destroy_aggregate_device(stm->plugin_id, stm->aggregate_device_id);
audiounit_destroy_aggregate_device(stm->plugin_id, &stm->aggregate_device_id);
stm->aggregate_device_id = 0;
}
}
@@ -2572,7 +2548,7 @@ audiounit_stream_destroy(cubeb_stream * stm)

r = audiounit_uninstall_device_changed_callback(stm);
if (r != CUBEB_OK) {
LOG("(%p) Could not uninstall the device changed callback", stm);
LOG("(%p) Could not uninstall all device change listeners", stm);
}

auto_lock context_lock(stm->context->mutex);
@@ -2682,8 +2658,8 @@ audiounit_stream_get_latency(cubeb_stream * stm, uint32_t * latency)
kAudioObjectPropertyElementMaster
};

r = audiounit_get_output_device_id(&output_device_id);
if (r != noErr) {
output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (output_device_id == kAudioObjectUnknown) {
return CUBEB_ERROR;
}

@@ -2806,7 +2782,8 @@ int audiounit_stream_get_current_device(cubeb_stream * stm,

*device = NULL;

if (audiounit_get_output_device_id(&output_device_id) != CUBEB_OK) {
output_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_OUTPUT);
if (output_device_id == kAudioObjectUnknown) {
return CUBEB_ERROR;
}

@@ -2840,7 +2817,8 @@ int audiounit_stream_get_current_device(cubeb_stream * stm,
memcpy((*device)->output_name, strdata, size);
(*device)->output_name[size] = '\0';

if (audiounit_get_input_device_id(&input_device_id) != CUBEB_OK) {
input_device_id = audiounit_get_default_device_id(CUBEB_DEVICE_TYPE_INPUT);
if (input_device_id == kAudioObjectUnknown) {
return CUBEB_ERROR;
}
@@ -57,7 +57,6 @@ public final class CodecProxy {

private class CallbacksForwarder extends ICodecCallbacks.Stub {
private final Callbacks mCallbacks;
private boolean mEndOfInput;
private boolean mCodecProxyReleased;

CallbacksForwarder(Callbacks callbacks) {
@@ -66,14 +65,14 @@ public final class CodecProxy {

@Override
public synchronized void onInputQueued(long timestamp) throws RemoteException {
if (!mEndOfInput && !mCodecProxyReleased) {
if (!mCodecProxyReleased) {
mCallbacks.onInputStatus(timestamp, true /* processed */);
}
}

@Override
public synchronized void onInputPending(long timestamp) throws RemoteException {
if (!mEndOfInput && !mCodecProxyReleased) {
if (!mCodecProxyReleased) {
mCallbacks.onInputStatus(timestamp, false /* processed */);
}
}
@@ -118,10 +117,6 @@ public final class CodecProxy {
}
}

private void setEndOfInput(boolean end) {
mEndOfInput = end;
}

private synchronized void setCodecProxyReleased() {
mCodecProxyReleased = true;
}
@@ -203,7 +198,6 @@ public final class CodecProxy {
}

boolean eos = info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM;
mCallbacks.setEndOfInput(eos);

if (eos) {
return sendInput(Sample.EOS);
@@ -202,6 +202,7 @@ pref("dom.gamepad.non_standard_events.enabled", false);
pref("dom.gamepad.non_standard_events.enabled", true);
#endif
pref("dom.gamepad.extensions.enabled", true);
pref("dom.gamepad.haptic_feedback.enabled", true);

// If this is true, TextEventDispatcher dispatches keydown and keyup events
// even during composition (keypress events are never fired during composition
@@ -444,7 +445,7 @@ pref("media.decoder-doctor.wmf-disabled-is-failure", false);
pref("media.decoder-doctor.new-issue-endpoint", "https://webcompat.com/issues/new");

// Whether to suspend decoding of videos in background tabs.
#ifdef RELEASE
#ifdef RELEASE_OR_BETA
pref("media.suspend-bkgnd-video.enabled", false);
#else
pref("media.suspend-bkgnd-video.enabled", true);
@@ -1671,7 +1671,7 @@ dependencies = [
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tendril 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
@@ -3199,7 +3199,7 @@ dependencies = [

[[package]]
name = "tendril"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3873,7 +3873,7 @@ dependencies = [
"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
"checksum tendril 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "01576be96a211e017bf90b1603b1272baf9fe93a1bf9b4845257c4ba09c9b25f"
"checksum tendril 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1b72f8e2f5b73b65c315b1a70c730f24b9d7a25f39e98de8acbe2bb795caea"
"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
@@ -421,7 +421,7 @@ lazy_static! {
hb_font_funcs_set_glyph_h_kerning_func(
hb_funcs, Some(glyph_h_kerning_func), ptr::null_mut(), None);

ptr::Unique::new(hb_funcs)
ptr::Unique::new_unchecked(hb_funcs)
};
}
@ -256,7 +256,7 @@ impl<'ln> LayoutNode for ServoLayoutNode<'ln> {
|
|||
let ptr: *mut StyleAndLayoutData =
|
||||
Box::into_raw(Box::new(StyleAndLayoutData::new()));
|
||||
let opaque = OpaqueStyleAndLayoutData {
|
||||
ptr: NonZero::new(ptr as *mut StyleData),
|
||||
ptr: NonZero::new_unchecked(ptr as *mut StyleData),
|
||||
};
|
||||
self.init_style_and_layout_data(opaque);
|
||||
};
|
||||
|
|
|
@ -35,7 +35,7 @@ impl ThreadId {
|
|||
#[allow(unsafe_code)]
|
||||
fn new() -> ThreadId {
|
||||
let number = THREAD_COUNT.fetch_add(1, Ordering::SeqCst);
|
||||
ThreadId(unsafe { NonZero::new(number) })
|
||||
ThreadId(NonZero::new(number).unwrap())
|
||||
}
|
||||
pub fn current() -> ThreadId {
|
||||
THREAD_ID.with(|tls| tls.clone())
|
||||
|
@ -59,13 +59,13 @@ impl AtomicOptThreadId {
|
|||
#[allow(unsafe_code)]
|
||||
pub fn load(&self, ordering: Ordering) -> Option<ThreadId> {
|
||||
let number = self.0.load(ordering);
|
||||
if number == 0 { None } else { Some(ThreadId(unsafe { NonZero::new(number) })) }
|
||||
NonZero::new(number).map(ThreadId)
|
||||
}
|
||||
#[allow(unsafe_code)]
|
||||
pub fn swap(&self, value: Option<ThreadId>, ordering: Ordering) -> Option<ThreadId> {
|
||||
let number = value.map(|id| id.0.get()).unwrap_or(0);
|
||||
let number = self.0.swap(number, ordering);
|
||||
if number == 0 { None } else { Some(ThreadId(unsafe { NonZero::new(number) })) }
|
||||
NonZero::new(number).map(ThreadId)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -106,7 +106,7 @@ impl<T: DomObject + JSTraceable + Iterable> IterableIterator<T> {
|
|||
self.index.set(index + 1);
|
||||
result.map(|_| {
|
||||
assert!(!rval.is_null());
|
||||
unsafe { NonZero::new(rval.get()) }
|
||||
unsafe { NonZero::new_unchecked(rval.get()) }
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -81,7 +81,7 @@ impl<T: DomObject> JS<T> {
|
|||
pub fn from_ref(obj: &T) -> JS<T> {
|
||||
debug_assert!(thread_state::get().is_script());
|
||||
JS {
|
||||
ptr: unsafe { NonZero::new(&*obj) },
|
||||
ptr: unsafe { NonZero::new_unchecked(&*obj) },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,7 +135,7 @@ impl<T: Castable> LayoutJS<T> {
|
|||
debug_assert!(thread_state::get().is_layout());
|
||||
let ptr: *const T = self.ptr.get();
|
||||
LayoutJS {
|
||||
ptr: unsafe { NonZero::new(ptr as *const U) },
|
||||
ptr: unsafe { NonZero::new_unchecked(ptr as *const U) },
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -148,7 +148,7 @@ impl<T: Castable> LayoutJS<T> {
|
|||
if (*self.unsafe_get()).is::<U>() {
|
||||
let ptr: *const T = self.ptr.get();
|
||||
Some(LayoutJS {
|
||||
ptr: NonZero::new(ptr as *const U),
|
||||
ptr: NonZero::new_unchecked(ptr as *const U),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
|
@ -223,7 +223,7 @@ impl LayoutJS<Node> {
|
|||
debug_assert!(thread_state::get().is_layout());
|
||||
let TrustedNodeAddress(addr) = inner;
|
||||
LayoutJS {
|
||||
ptr: NonZero::new(addr as *const Node),
|
||||
ptr: NonZero::new_unchecked(addr as *const Node),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -554,7 +554,7 @@ impl<T: DomObject> Root<T> {
|
|||
|
||||
/// Generate a new root from a reference
|
||||
pub fn from_ref(unrooted: &T) -> Root<T> {
|
||||
Root::new(unsafe { NonZero::new(unrooted) })
|
||||
Root::new(unsafe { NonZero::new_unchecked(unrooted) })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -200,7 +200,7 @@ impl<T: DomObject> Trusted<T> {
|
|||
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
|
||||
}));
|
||||
unsafe {
|
||||
Root::new(NonZero::new(self.refcount.0 as *const T))
|
||||
Root::new(NonZero::new_unchecked(self.refcount.0 as *const T))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -58,7 +58,7 @@ pub trait WeakReferenceable: DomObject + Sized {
|
|||
trace!("Creating new WeakBox holder for {:p}.", self);
|
||||
ptr = Box::into_raw(box WeakBox {
|
||||
count: Cell::new(1),
|
||||
value: Cell::new(Some(NonZero::new(self))),
|
||||
value: Cell::new(Some(NonZero::new_unchecked(self))),
|
||||
});
|
||||
JS_SetReservedSlot(object, DOM_WEAK_SLOT, PrivateValue(ptr as *const c_void));
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ pub trait WeakReferenceable: DomObject + Sized {
|
|||
new_count);
|
||||
box_.count.set(new_count);
|
||||
WeakRef {
|
||||
ptr: NonZero::new(ptr),
|
||||
ptr: NonZero::new_unchecked(ptr),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,7 @@ impl CryptoMethods for Crypto {
|
|||
|
||||
self.rng.borrow_mut().fill_bytes(&mut data);
|
||||
|
||||
Ok(NonZero::new(input))
|
||||
Ok(NonZero::new_unchecked(input))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3498,7 +3498,7 @@ impl DocumentMethods for Document {
|
|||
if elements.peek().is_none() {
|
||||
// TODO: Step 2.
|
||||
// Step 3.
|
||||
return Some(NonZero::new(first.reflector().get_jsobject().get()));
|
||||
return Some(NonZero::new_unchecked(first.reflector().get_jsobject().get()));
|
||||
}
|
||||
} else {
|
||||
return None;
|
||||
|
@ -3509,7 +3509,7 @@ impl DocumentMethods for Document {
|
|||
name: name,
|
||||
};
|
||||
let collection = HTMLCollection::create(self.window(), root, box filter);
|
||||
Some(NonZero::new(collection.reflector().get_jsobject().get()))
|
||||
Some(NonZero::new_unchecked(collection.reflector().get_jsobject().get()))
|
||||
}
|
||||
|
||||
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:supported-property-names
|
||||
|
|
|
@ -128,7 +128,7 @@ impl GamepadMethods for Gamepad {
|
|||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/gamepad/#dom-gamepad-axes
|
||||
unsafe fn Axes(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.axes.get())
|
||||
NonZero::new_unchecked(self.axes.get())
|
||||
}
|
||||
|
||||
// https://w3c.github.io/gamepad/#dom-gamepad-buttons
|
||||
|
|
|
@ -161,6 +161,6 @@ impl ImageDataMethods for ImageData {
|
|||
// https://html.spec.whatwg.org/multipage/#dom-imagedata-data
|
||||
unsafe fn Data(&self, _: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
assert!(!self.data.get().is_null());
|
||||
NonZero::new(self.data.get())
|
||||
NonZero::new_unchecked(self.data.get())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -154,7 +154,7 @@ impl TestBindingMethods for TestBinding {
|
|||
unsafe fn ArrayAttribute(&self, cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
rooted!(in(cx) let array = JS_NewUint8ClampedArray(cx, 16));
|
||||
assert!(!array.is_null());
|
||||
NonZero::new(array.get())
|
||||
NonZero::new_unchecked(array.get())
|
||||
}
|
||||
#[allow(unsafe_code)]
|
||||
unsafe fn AnyAttribute(&self, _: *mut JSContext) -> JSVal { NullValue() }
|
||||
|
@ -164,7 +164,7 @@ impl TestBindingMethods for TestBinding {
|
|||
unsafe fn ObjectAttribute(&self, cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
rooted!(in(cx) let obj = JS_NewPlainObject(cx));
|
||||
assert!(!obj.is_null());
|
||||
NonZero::new(obj.get())
|
||||
NonZero::new_unchecked(obj.get())
|
||||
}
|
||||
#[allow(unsafe_code)]
|
||||
unsafe fn SetObjectAttribute(&self, _: *mut JSContext, _: *mut JSObject) {}
|
||||
|
|
|
@ -53,6 +53,6 @@ impl TextEncoderMethods for TextEncoder {
|
|||
rooted!(in(cx) let mut js_object = ptr::null_mut());
|
||||
assert!(Uint8Array::create(cx, CreateWith::Slice(&encoded), js_object.handle_mut()).is_ok());
|
||||
|
||||
NonZero::new(js_object.get())
|
||||
NonZero::new_unchecked(js_object.get())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -61,7 +61,7 @@ impl VREyeParametersMethods for VREyeParameters {
|
|||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
|
||||
unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.offset.get())
|
||||
NonZero::new_unchecked(self.offset.get())
|
||||
}
|
||||
|
||||
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
|
||||
|
|
|
@ -119,25 +119,25 @@ impl VRFrameDataMethods for VRFrameData {
|
|||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vrframedata-leftprojectionmatrix
|
||||
unsafe fn LeftProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.left_proj.get())
|
||||
NonZero::new_unchecked(self.left_proj.get())
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vrframedata-leftviewmatrix
|
||||
unsafe fn LeftViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.left_view.get())
|
||||
NonZero::new_unchecked(self.left_view.get())
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vrframedata-rightprojectionmatrix
|
||||
unsafe fn RightProjectionMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.right_proj.get())
|
||||
NonZero::new_unchecked(self.right_proj.get())
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vrframedata-rightviewmatrix
|
||||
unsafe fn RightViewMatrix(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.right_view.get())
|
||||
NonZero::new_unchecked(self.right_view.get())
|
||||
}
|
||||
|
||||
// https://w3c.github.io/webvr/#dom-vrframedata-pose
|
||||
|
|
|
@ -58,7 +58,7 @@ fn heap_to_option(heap: &Heap<*mut JSObject>) -> Option<NonZero<*mut JSObject>>
|
|||
None
|
||||
} else {
|
||||
unsafe {
|
||||
Some(NonZero::new(js_object))
|
||||
Some(NonZero::new_unchecked(js_object))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -70,7 +70,7 @@ impl VRStageParametersMethods for VRStageParameters {
|
|||
#[allow(unsafe_code)]
|
||||
// https://w3c.github.io/webvr/#dom-vrstageparameters-sittingtostandingtransform
|
||||
unsafe fn SittingToStandingTransform(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
|
||||
NonZero::new(self.transform.get())
|
||||
NonZero::new_unchecked(self.transform.get())
|
||||
}
|
||||
|
||||
// https://w3c.github.io/webvr/#dom-vrstageparameters-sizex
|
||||
|
|
|
@ -57,7 +57,7 @@ impl<T> WebGLExtensionWrapper for TypedWebGLExtensionWrapper<T>
|
|||
self.enable(ext);
|
||||
}
|
||||
unsafe {
|
||||
NonZero::new(extension.reflector().get_jsobject().get())
|
||||
NonZero::new_unchecked(extension.reflector().get_jsobject().get())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -35,6 +35,16 @@ pub struct TreeStyleInvalidator<'a, 'b: 'a, E>
|
|||
|
||||
type InvalidationVector = SmallVec<[Invalidation; 10]>;
|
||||
|
||||
/// The kind of invalidation we're processing.
|
||||
///
|
||||
/// We can use this to avoid pushing invalidations of the same kind to our
|
||||
/// descendants or siblings.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
enum InvalidationKind {
|
||||
Descendant,
|
||||
Sibling,
|
||||
}
|
||||
|
||||
/// An `Invalidation` is a complex selector that describes which elements,
|
||||
/// relative to a current element we are processing, must be restyled.
|
||||
///
|
||||
|
@ -46,6 +56,37 @@ type InvalidationVector = SmallVec<[Invalidation; 10]>;
|
|||
struct Invalidation {
|
||||
selector: Selector<SelectorImpl>,
|
||||
offset: usize,
|
||||
/// Whether the invalidation was already matched by any previous sibling or
|
||||
/// ancestor.
|
||||
///
|
||||
/// If this is the case, we can avoid pushing invalidations generated by
|
||||
/// this one if the generated invalidation is effective for all the siblings
|
||||
/// or descendants after us.
|
||||
matched_by_any_previous: bool,
|
||||
}
|
||||
|
||||
impl Invalidation {
|
||||
/// Whether this invalidation is effective for the next sibling or
|
||||
/// descendant after us.
|
||||
fn effective_for_next(&self) -> bool {
|
||||
// TODO(emilio): For pseudo-elements this should be mostly false, except
|
||||
// for the weird pseudos in <input type="number">.
|
||||
//
|
||||
// We should be able to do better here!
|
||||
match self.selector.combinator_at(self.offset) {
|
||||
Combinator::NextSibling |
|
||||
Combinator::Child => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
|
||||
fn kind(&self) -> InvalidationKind {
|
||||
if self.selector.combinator_at(self.offset).is_ancestor() {
|
||||
InvalidationKind::Descendant
|
||||
} else {
|
||||
InvalidationKind::Sibling
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Invalidation {
|
||||
|
@ -67,9 +108,9 @@ impl fmt::Debug for Invalidation {
|
|||
struct InvalidationResult {
|
||||
/// Whether the element itself was invalidated.
|
||||
invalidated_self: bool,
|
||||
/// Whether the invalidation we've processed is effective for the next
|
||||
/// sibling or descendant after us.
|
||||
effective_for_next: bool,
|
||||
/// Whether the invalidation matched, either invalidating the element or
|
||||
/// generating another invalidation.
|
||||
matched: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
||||
|
@ -460,14 +501,16 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
let result = self.process_invalidation(
|
||||
&sibling_invalidations[i],
|
||||
descendant_invalidations,
|
||||
&mut new_sibling_invalidations
|
||||
&mut new_sibling_invalidations,
|
||||
InvalidationKind::Sibling,
|
||||
);
|
||||
|
||||
invalidated_self |= result.invalidated_self;
|
||||
if !result.effective_for_next {
|
||||
sibling_invalidations.remove(i);
|
||||
} else {
|
||||
sibling_invalidations[i].matched_by_any_previous |= result.matched;
|
||||
if sibling_invalidations[i].effective_for_next() {
|
||||
i += 1;
|
||||
} else {
|
||||
sibling_invalidations.remove(i);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -493,10 +536,13 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
invalidation,
|
||||
descendant_invalidations,
|
||||
sibling_invalidations,
|
||||
InvalidationKind::Descendant,
|
||||
);
|
||||
|
||||
invalidated |= result.invalidated_self;
|
||||
if result.effective_for_next {
|
||||
if invalidation.effective_for_next() {
|
||||
let mut invalidation = invalidation.clone();
|
||||
invalidation.matched_by_any_previous |= result.matched;
|
||||
descendant_invalidations.push(invalidation.clone());
|
||||
}
|
||||
}
|
||||
|
@ -514,10 +560,11 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
&mut self,
|
||||
invalidation: &Invalidation,
|
||||
descendant_invalidations: &mut InvalidationVector,
|
||||
sibling_invalidations: &mut InvalidationVector
|
||||
sibling_invalidations: &mut InvalidationVector,
|
||||
invalidation_kind: InvalidationKind,
|
||||
) -> InvalidationResult {
|
||||
debug!("TreeStyleInvalidator::process_invalidation({:?}, {:?})",
|
||||
self.element, invalidation);
|
||||
debug!("TreeStyleInvalidator::process_invalidation({:?}, {:?}, {:?})",
|
||||
self.element, invalidation, invalidation_kind);
|
||||
|
||||
let mut context =
|
||||
MatchingContext::new_for_visited(
|
||||
|
@ -535,14 +582,17 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
);
|
||||
|
||||
let mut invalidated_self = false;
|
||||
let mut matched = false;
|
||||
match matching_result {
|
||||
CompoundSelectorMatchingResult::Matched { next_combinator_offset: 0 } => {
|
||||
debug!(" > Invalidation matched completely");
|
||||
matched = true;
|
||||
invalidated_self = true;
|
||||
}
|
||||
CompoundSelectorMatchingResult::Matched { next_combinator_offset } => {
|
||||
let next_combinator =
|
||||
invalidation.selector.combinator_at(next_combinator_offset);
|
||||
matched = true;
|
||||
|
||||
if matches!(next_combinator, Combinator::PseudoElement) {
|
||||
let pseudo_selector =
|
||||
|
@ -578,14 +628,90 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
let next_invalidation = Invalidation {
|
||||
selector: invalidation.selector.clone(),
|
||||
offset: next_combinator_offset,
|
||||
matched_by_any_previous: false,
|
||||
};
|
||||
|
||||
debug!(" > Invalidation matched, next: {:?}, ({:?})",
|
||||
next_invalidation, next_combinator);
|
||||
if next_combinator.is_ancestor() {
|
||||
descendant_invalidations.push(next_invalidation);
|
||||
|
||||
let next_invalidation_kind = next_invalidation.kind();
|
||||
|
||||
// We can skip pushing under some circumstances, and we should
|
||||
// because otherwise the invalidation list could grow
|
||||
// exponentially.
|
||||
//
|
||||
// * First of all, both invalidations need to be of the same
|
||||
// kind. This is because of how we propagate them going to
|
||||
// the right of the tree for sibling invalidations and going
|
||||
// down the tree for children invalidations. A sibling
|
||||
// invalidation that ends up generating a children
|
||||
// invalidation ends up (correctly) in five different lists,
|
||||
// not in the same list five different times.
|
||||
//
|
||||
// * Then, the invalidation needs to be matched by a previous
|
||||
// ancestor/sibling, in order to know that this invalidation
|
||||
// has been generated already.
|
||||
//
|
||||
// * Finally, the new invalidation needs to be
|
||||
// `effective_for_next()`, in order for us to know that it is
|
||||
// still in the list, since we remove the dependencies that
|
||||
// aren't from the lists for our children / siblings.
|
||||
//
|
||||
// To go through an example, let's imagine we are processing a
|
||||
// dom subtree like:
|
||||
//
|
||||
// <div><address><div><div/></div></address></div>
|
||||
//
|
||||
// And an invalidation list with a single invalidation like:
|
||||
//
|
||||
// [div div div]
|
||||
//
|
||||
// When we process the invalidation list for the outer div, we
|
||||
// match it, and generate a `div div` invalidation, so for the
|
||||
// <address> child we have:
|
||||
//
|
||||
// [div div div, div div]
|
||||
//
|
||||
// With the first of them marked as `matched`.
|
||||
//
|
||||
// When we process the <address> child, we don't match any of
|
||||
// them, so both invalidations go untouched to our children.
|
||||
//
|
||||
// When we process the second <div>, we match _both_
|
||||
// invalidations.
|
||||
//
|
||||
// However, when matching the first, we can tell it's been
|
||||
// matched, and not push the corresponding `div div`
|
||||
// invalidation, since we know it's necessarily already on the
|
||||
// list.
|
||||
//
|
||||
// Thus, without skipping the push, we'll arrive to the
|
||||
// innermost <div> with:
|
||||
//
|
||||
// [div div div, div div, div div, div]
|
||||
//
|
||||
// While skipping it, we won't arrive here with duplicating
|
||||
// dependencies:
|
||||
//
|
||||
// [div div div, div div, div]
|
||||
//
|
||||
let can_skip_pushing =
|
||||
next_invalidation_kind == invalidation_kind &&
|
||||
invalidation.matched_by_any_previous &&
|
||||
next_invalidation.effective_for_next();
|
||||
|
||||
if can_skip_pushing {
|
||||
debug!(" > Can avoid push, since the invalidation had \
|
||||
already been matched before");
|
||||
} else {
|
||||
sibling_invalidations.push(next_invalidation);
|
||||
match next_invalidation_kind {
|
||||
InvalidationKind::Descendant => {
|
||||
descendant_invalidations.push(next_invalidation);
|
||||
}
|
||||
InvalidationKind::Sibling => {
|
||||
sibling_invalidations.push(next_invalidation);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
CompoundSelectorMatchingResult::NotMatched => {}
|
||||
|
@ -597,21 +723,7 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E>
|
|||
}
|
||||
}
|
||||
|
||||
// TODO(emilio): For pseudo-elements this should be mostly false, except
|
||||
// for the weird pseudos in <input type="number">.
|
||||
//
|
||||
// We should be able to do better here!
|
||||
let effective_for_next =
|
||||
match invalidation.selector.combinator_at(invalidation.offset) {
|
||||
Combinator::NextSibling |
|
||||
Combinator::Child => false,
|
||||
_ => true,
|
||||
};
|
||||
|
||||
InvalidationResult {
|
||||
invalidated_self: invalidated_self,
|
||||
effective_for_next: effective_for_next,
|
||||
}
|
||||
InvalidationResult { invalidated_self, matched, }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -830,12 +942,14 @@ impl<'a, 'b: 'a, E> InvalidationCollector<'a, 'b, E>
|
|||
self.descendant_invalidations.push(Invalidation {
|
||||
selector: dependency.selector.clone(),
|
||||
offset: dependency.selector_offset,
|
||||
matched_by_any_previous: false,
|
||||
});
|
||||
} else if dependency.affects_later_siblings() {
|
||||
debug_assert_ne!(dependency.selector_offset, 0);
|
||||
self.sibling_invalidations.push(Invalidation {
|
||||
selector: dependency.selector.clone(),
|
||||
offset: dependency.selector_offset,
|
||||
matched_by_any_previous: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -378,10 +378,13 @@ impl ${style_struct.gecko_struct_name} {
|
|||
}
|
||||
</%def>
|
||||
|
||||
<%def name="impl_simple_copy(ident, gecko_ffi_name, *kwargs)">
|
||||
<%def name="impl_simple_copy(ident, gecko_ffi_name, on_set=None, *kwargs)">
|
||||
#[allow(non_snake_case)]
|
||||
pub fn copy_${ident}_from(&mut self, other: &Self) {
|
||||
self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name};
|
||||
% if on_set:
|
||||
self.${on_set}();
|
||||
% endif
|
||||
}
|
||||
</%def>
|
||||
|
||||
|
@ -478,7 +481,7 @@ def set_gecko_property(ffi_name, expr):
|
|||
|
||||
<%def name="impl_keyword(ident, gecko_ffi_name, keyword, need_clone, cast_type='u8', **kwargs)">
|
||||
<%call expr="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type, **kwargs)"></%call>
|
||||
<%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>
|
||||
<%call expr="impl_simple_copy(ident, gecko_ffi_name, **kwargs)"></%call>
|
||||
%if need_clone:
|
||||
<%call expr="impl_keyword_clone(ident, gecko_ffi_name, keyword, cast_type)"></%call>
|
||||
% endif
|
||||
|
|
|
@ -1203,7 +1203,7 @@ impl Stylist {
|
|||
&rule_hash_target,
|
||||
applicable_declarations,
|
||||
context,
|
||||
self.quirks_mode,
|
||||
self.quirks_mode,
|
||||
flags_setter,
|
||||
CascadeLevel::AuthorNormal);
|
||||
} else {
|
||||
|
|
|
@ -1 +1 @@
|
|||
504328a31a211814f9cac6de84c5a7ed154f58eb
|
||||
599be0d18f4c6ddf36366d2a5a2ca6dc65886896
|
||||
|
|
|
@ -2017,7 +2017,7 @@ class Marionette(object):
|
|||
window outerWidth and outerHeight values, which include scroll bars,
|
||||
title bars, etc.
|
||||
|
||||
:returns: dictionary representation of current window width and height
|
||||
:returns: Window rect.
|
||||
"""
|
||||
warnings.warn("window_size property has been deprecated, please use get_window_rect()",
|
||||
DeprecationWarning)
|
||||
|
@ -2036,23 +2036,43 @@ class Marionette(object):
|
|||
:param width: The width to resize the window to.
|
||||
:param height: The height to resize the window to.
|
||||
|
||||
:returns: Window rect.
|
||||
"""
|
||||
warnings.warn("set_window_size() has been deprecated, please use set_window_rect()",
|
||||
DeprecationWarning)
|
||||
body = {"width": width, "height": height}
|
||||
return self._send_message("setWindowSize", body)
|
||||
|
||||
def minimize_window(self):
|
||||
"""Iconify the browser window currently receiving commands.
|
||||
The action should be equivalent to the user pressing the minimize
|
||||
button in the OS window.
|
||||
|
||||
Note that this command is not available on Fennec. It may also
|
||||
not be available in certain window managers.
|
||||
|
||||
:returns: Window rect.
|
||||
"""
|
||||
return self._send_message("WebDriver:MinimizeWindow")
|
||||
|
||||
def maximize_window(self):
|
||||
""" Resize the browser window currently receiving commands. The action
|
||||
should be equivalent to the user pressing the maximize button
|
||||
"""Resize the browser window currently receiving commands.
|
||||
The action should be equivalent to the user pressing the maximize
|
||||
button in the OS window.
|
||||
|
||||
|
||||
Note that this command is not available on Fennec. It may also
|
||||
not be available in certain window managers.
|
||||
|
||||
:returns: Window rect.
|
||||
"""
|
||||
return self._send_message("maximizeWindow")
|
||||
|
||||
def fullscreen(self):
|
||||
""" Synchronously sets the user agent window to full screen as if the user
|
||||
had done "View > Enter Full Screen", or restores it if it is already
|
||||
in full screen.
|
||||
"""Synchronously sets the user agent window to full screen as
|
||||
if the user had done "View > Enter Full Screen", or restores
|
||||
it if it is already in full screen.
|
||||
|
||||
:returns: dictionary representation of current window width and height
|
||||
:returns: Window rect.
|
||||
"""
|
||||
return self._send_message("fullscreen")
|
||||
|
|
|
@ -2890,6 +2890,49 @@ GeckoDriver.prototype.setScreenOrientation = function(cmd, resp) {
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Synchronously minimizes the user agent window as if the user pressed
|
||||
* the minimize button, or restores it if it is already minimized.
|
||||
*
|
||||
* Not supported on Fennec.
|
||||
*
|
||||
* @return {Object.<string, number>}
|
||||
* Window rect and window state.
|
||||
*
|
||||
* @throws {UnsupportedOperationError}
|
||||
* Not available for current application.
|
||||
* @throws {NoSuchWindowError}
|
||||
* Top-level browsing context has been discarded.
|
||||
* @throws {UnexpectedAlertOpenError}
|
||||
* A modal dialog is open, blocking this operation.
|
||||
*/
|
||||
GeckoDriver.prototype.minimizeWindow = function* (cmd, resp) {
|
||||
assert.firefox();
|
||||
const win = assert.window(this.getCurrentWindow());
|
||||
assert.noUserPrompt(this.dialog);
|
||||
|
||||
let state;
|
||||
yield new Promise(resolve => {
|
||||
win.addEventListener("sizemodechange", resolve, {once: true});
|
||||
|
||||
if (win.windowState == win.STATE_MINIMIZED) {
|
||||
win.restore();
|
||||
state = "normal";
|
||||
} else {
|
||||
win.minimize();
|
||||
state = "minimized";
|
||||
}
|
||||
});
|
||||
|
||||
resp.body = {
|
||||
x: win.screenX,
|
||||
y: win.screenY,
|
||||
width: win.outerWidth,
|
||||
height: win.outerHeight,
|
||||
state,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Synchronously maximizes the user agent window as if the user pressed
|
||||
* the maximize button, or restores it if it is already maximized.
|
||||
|
@ -3426,6 +3469,7 @@ GeckoDriver.prototype.commands = {
|
|||
"WebDriver:IsElementDisplayed": GeckoDriver.prototype.isElementDisplayed,
|
||||
"WebDriver:IsElementEnabled": GeckoDriver.prototype.isElementEnabled,
|
||||
"WebDriver:IsElementSelected": GeckoDriver.prototype.isElementSelected,
|
||||
"WebDriver:MinimizeWindow": GeckoDriver.prototype.minimizeWindow,
|
||||
"WebDriver:MaximizeWindow": GeckoDriver.prototype.maximizeWindow,
|
||||
"WebDriver:Navigate": GeckoDriver.prototype.get,
|
||||
"WebDriver:NewSession": GeckoDriver.prototype.newSession,
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from marionette_driver.errors import InvalidArgumentException
|
||||
|
||||
from marionette_harness import MarionetteTestCase
|
||||
|
||||
class TestWindowMinimize(MarionetteTestCase):
|
||||
|
||||
def setUp(self):
|
||||
MarionetteTestCase.setUp(self)
|
||||
|
||||
self.original_size = self.marionette.window_size
|
||||
|
||||
def assert_window_minimized(self, resp):
|
||||
self.assertEqual("minimized", resp["state"])
|
||||
|
||||
def assert_window_restored(self, actual):
|
||||
self.assertEqual("normal", actual["state"])
|
||||
self.assertEqual(self.original_size["width"], actual["width"])
|
||||
self.assertEqual(self.original_size["height"], actual["height"])
|
||||
|
||||
def test_minimize_twice_restores(self):
|
||||
resp = self.marionette.minimize_window()
|
||||
self.assert_window_minimized(resp)
|
||||
|
||||
# restore the window
|
||||
resp = self.marionette.minimize_window()
|
||||
self.assert_window_restored(resp)
|
||||
|
||||
def test_minimize_stress(self):
|
||||
for i in range(1, 25):
|
||||
expect_minimized = bool(i % 2)
|
||||
|
||||
resp = self.marionette.minimize_window()
|
||||
if expect_minimized:
|
||||
self.assert_window_minimized(resp)
|
||||
else:
|
||||
self.assert_window_restored(resp)
|
|
@ -69,6 +69,8 @@ skip-if = appname == 'fennec'
|
|||
skip-if = appname == 'fennec'
|
||||
[test_window_maximize.py]
|
||||
skip-if = appname == 'fennec'
|
||||
[test_window_minimize.py]
|
||||
skip-if = appname == 'fennec' || headless
|
||||
[test_window_status_content.py]
|
||||
[test_window_status_chrome.py]
|
||||
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import urllib2
|
||||
import zipfile
|
||||
import hashlib
|
||||
from xml.dom import minidom
|
||||
|
||||
import mozfile
|
||||
|
@ -16,6 +17,9 @@ from mozlog.unstructured import getLogger
|
|||
# Needed for the AMO's rest API -
|
||||
# https://developer.mozilla.org/en/addons.mozilla.org_%28AMO%29_API_Developers%27_Guide/The_generic_AMO_API
|
||||
AMO_API_VERSION = "1.5"
|
||||
_SALT = os.urandom(32).encode('hex')
|
||||
_TEMPORARY_ADDON_SUFFIX = "@temporary-addon"
|
||||
|
||||
|
||||
# Logger for 'mozprofile.addons' module
|
||||
module_logger = getLogger(__name__)
|
||||
|
@ -232,6 +236,12 @@ class AddonManager(object):
|
|||
if node.nodeType == node.TEXT_NODE:
|
||||
return node.data
|
||||
|
||||
@classmethod
|
||||
def _gen_iid(cls, addon_path):
|
||||
hash = hashlib.sha1(_SALT)
|
||||
hash.update(addon_path)
|
||||
return hash.hexdigest() + _TEMPORARY_ADDON_SUFFIX
|
||||
|
||||
@classmethod
|
||||
def addon_details(cls, addon_path):
|
||||
"""
|
||||
|
@ -276,50 +286,73 @@ class AddonManager(object):
|
|||
if not os.path.exists(addon_path):
|
||||
raise IOError('Add-on path does not exist: %s' % addon_path)
|
||||
|
||||
is_webext = False
|
||||
try:
|
||||
if zipfile.is_zipfile(addon_path):
|
||||
# Bug 944361 - We cannot use 'with' together with zipFile because
|
||||
# it will cause an exception thrown in Python 2.6.
|
||||
try:
|
||||
compressed_file = zipfile.ZipFile(addon_path, 'r')
|
||||
manifest = compressed_file.read('install.rdf')
|
||||
filenames = [f.filename for f in (compressed_file).filelist]
|
||||
if 'install.rdf' in filenames:
|
||||
manifest = compressed_file.read('install.rdf')
|
||||
elif 'manifest.json' in filenames:
|
||||
is_webext = True
|
||||
manifest = compressed_file.read('manifest.json')
|
||||
manifest = json.loads(manifest)
|
||||
else:
|
||||
raise KeyError("No manifest")
|
||||
finally:
|
||||
compressed_file.close()
|
||||
elif os.path.isdir(addon_path):
|
||||
with open(os.path.join(addon_path, 'install.rdf'), 'r') as f:
|
||||
manifest = f.read()
|
||||
try:
|
||||
with open(os.path.join(addon_path, 'install.rdf')) as f:
|
||||
manifest = f.read()
|
||||
except IOError:
|
||||
with open(os.path.join(addon_path, 'manifest.json')) as f:
|
||||
manifest = json.loads(f.read())
|
||||
is_webext = True
|
||||
else:
|
||||
raise IOError('Add-on path is neither an XPI nor a directory: %s' % addon_path)
|
||||
except (IOError, KeyError) as e:
|
||||
raise AddonFormatError(str(e)), None, sys.exc_info()[2]
|
||||
|
||||
try:
|
||||
doc = minidom.parseString(manifest)
|
||||
if is_webext:
|
||||
details['version'] = manifest['version']
|
||||
details['name'] = manifest['name']
|
||||
try:
|
||||
details['id'] = manifest['applications']['gecko']['id']
|
||||
except KeyError:
|
||||
details['id'] = cls._gen_iid(addon_path)
|
||||
details['unpack'] = False
|
||||
else:
|
||||
try:
|
||||
doc = minidom.parseString(manifest)
|
||||
|
||||
# Get the namespaces abbreviations
|
||||
em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
|
||||
rdf = get_namespace_id(doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
|
||||
# Get the namespaces abbreviations
|
||||
em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
|
||||
rdf = get_namespace_id(doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
|
||||
|
||||
description = doc.getElementsByTagName(rdf + 'Description').item(0)
|
||||
for entry, value in description.attributes.items():
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = entry.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({entry: value})
|
||||
for node in description.childNodes:
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = node.nodeName.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({entry: get_text(node)})
|
||||
except Exception as e:
|
||||
raise AddonFormatError(str(e)), None, sys.exc_info()[2]
|
||||
description = doc.getElementsByTagName(rdf + 'Description').item(0)
|
||||
for entry, value in description.attributes.items():
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = entry.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({entry: value})
|
||||
for node in description.childNodes:
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = node.nodeName.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({entry: get_text(node)})
|
||||
except Exception as e:
|
||||
raise AddonFormatError(str(e)), None, sys.exc_info()[2]
|
||||
|
||||
# turn unpack into a true/false value
|
||||
if isinstance(details['unpack'], basestring):
|
||||
details['unpack'] = details['unpack'].lower() == 'true'
|
||||
|
||||
# If no ID is set, the add-on is invalid
|
||||
if details.get('id') is None:
|
||||
if details.get('id') is None and not is_webext:
|
||||
raise AddonFormatError('Add-on id could not be found.')
|
||||
|
||||
return details
|
||||
|
@ -331,7 +364,6 @@ class AddonManager(object):
|
|||
:param path: url, path to .xpi, or directory of addons
|
||||
:param unpack: whether to unpack unless specified otherwise in the install.rdf
|
||||
"""
|
||||
|
||||
# if the addon is a URL, download it
|
||||
# note that this won't work with protocols urllib2 doesn't support
|
||||
if mozfile.is_url(path):
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@ -9,6 +9,7 @@ import shutil
|
|||
import tempfile
|
||||
import unittest
|
||||
import urllib2
|
||||
import zipfile
|
||||
|
||||
import mozunit
|
||||
|
||||
|
@ -113,6 +114,45 @@ class TestAddonsManager(unittest.TestCase):
|
|||
|
||||
server.stop()
|
||||
|
||||
def test_install_webextension_from_dir(self):
|
||||
addon = os.path.join(here, 'addons', 'apply-css.xpi')
|
||||
zipped = zipfile.ZipFile(addon)
|
||||
try:
|
||||
zipped.extractall(self.tmpdir)
|
||||
finally:
|
||||
zipped.close()
|
||||
self.am.install_from_path(self.tmpdir)
|
||||
self.assertEqual(len(self.am.installed_addons), 1)
|
||||
self.assertTrue(os.path.isdir(self.am.installed_addons[0]))
|
||||
|
||||
def test_install_webextension(self):
|
||||
server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
|
||||
server.start()
|
||||
try:
|
||||
addon = server.get_url() + 'apply-css.xpi'
|
||||
self.am.install_from_path(addon)
|
||||
finally:
|
||||
server.stop()
|
||||
|
||||
self.assertEqual(len(self.am.downloaded_addons), 1)
|
||||
self.assertTrue(os.path.isfile(self.am.downloaded_addons[0]))
|
||||
self.assertIn('test-webext@quality.mozilla.org.xpi',
|
||||
os.path.basename(self.am.downloaded_addons[0]))
|
||||
|
||||
def test_install_webextension_sans_id(self):
|
||||
server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
|
||||
server.start()
|
||||
try:
|
||||
addon = server.get_url() + 'apply-css-sans-id.xpi'
|
||||
self.am.install_from_path(addon)
|
||||
finally:
|
||||
server.stop()
|
||||
|
||||
self.assertEqual(len(self.am.downloaded_addons), 1)
|
||||
self.assertTrue(os.path.isfile(self.am.downloaded_addons[0]))
|
||||
self.assertIn('temporary-addon.xpi',
|
||||
os.path.basename(self.am.downloaded_addons[0]))
|
||||
|
||||
def test_install_from_path_xpi(self):
|
||||
addons_to_install = []
|
||||
addons_installed = []
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
/* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
|
||||
/* vim: set sts=2 sw=2 et tw=80: */
|
||||
"use strict";
|
||||
|
||||
/* exported IS_OOP, arraySum, clearHistograms, getSnapshots, promiseTelemetryRecorded */
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "ContentTaskUtils",
|
||||
"resource://testing-common/ContentTaskUtils.jsm");
|
||||
|
||||
const IS_OOP = Services.prefs.getBoolPref("extensions.webextensions.remote");
|
||||
|
||||
function arraySum(arr) {
|
||||
return arr.reduce((a, b) => a + b, 0);
|
||||
}
|
||||
|
||||
function clearHistograms() {
|
||||
Services.telemetry.snapshotSubsessionHistograms(true);
|
||||
}
|
||||
|
||||
function getSnapshots(process) {
|
||||
return Services.telemetry.snapshotSubsessionHistograms()[process];
|
||||
}
|
||||
|
||||
// There is no good way to make sure that the parent received the histogram
|
||||
// entries from the extension and content processes.
|
||||
// Let's stick with the ugly approach of spinning the event loop until we
// have a better one (Bug 1357509).
|
||||
function promiseTelemetryRecorded(id, process, expectedCount) {
|
||||
let condition = () => {
|
||||
let snapshot = Services.telemetry.snapshotSubsessionHistograms()[process][id];
|
||||
return snapshot && arraySum(snapshot.counts) >= expectedCount;
|
||||
};
|
||||
return ContentTaskUtils.waitForCondition(condition);
|
||||
}
|
|
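The test updates that follow exercise these helpers. As a condensed usage sketch (assuming the head files above are loaded; the histogram id and expected count are illustrative, borrowed from the storage telemetry test later in this commit):

    add_task(async function example_usage() {
      clearHistograms();
      let process = IS_OOP ? "extension" : "parent";
      ok(!("WEBEXT_STORAGE_LOCAL_SET_MS" in getSnapshots(process)),
         "No data recorded before the extension runs.");

      // ... exercise code that accumulates into the histogram ...

      // Poll until the parent has received the child process accumulation,
      // then assert on the subsession snapshot.
      await promiseTelemetryRecorded("WEBEXT_STORAGE_LOCAL_SET_MS", process, 1);
      equal(arraySum(getSnapshots(process)["WEBEXT_STORAGE_LOCAL_SET_MS"].counts), 1,
            "Histogram accumulated exactly once.");
    });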
@ -41,32 +41,35 @@ add_task(async function test_telemetry() {
|
|||
},
|
||||
});
|
||||
|
||||
let histogram = Services.telemetry.getHistogramById(HISTOGRAM);
|
||||
histogram.clear();
|
||||
equal(histogram.snapshot().sum, 0,
|
||||
`No data recorded for histogram: ${HISTOGRAM}.`);
|
||||
clearHistograms();
|
||||
|
||||
let process = IS_OOP ? "content" : "parent";
|
||||
ok(!(HISTOGRAM in getSnapshots(process)), `No data recorded for histogram: ${HISTOGRAM}.`);
|
||||
|
||||
await extension1.startup();
|
||||
equal(histogram.snapshot().sum, 0,
|
||||
`No data recorded for histogram after startup: ${HISTOGRAM}.`);
|
||||
ok(!(HISTOGRAM in getSnapshots(process)),
|
||||
`No data recorded for histogram after startup: ${HISTOGRAM}.`);
|
||||
|
||||
let contentPage = await ExtensionTestUtils.loadContentPage(`${BASE_URL}/file_sample.html`);
|
||||
await extension1.awaitMessage("content-script-run");
|
||||
let histogramSum = histogram.snapshot().sum;
|
||||
ok(histogramSum > 0,
|
||||
`Data recorded for first extension for histogram: ${HISTOGRAM}.`);
|
||||
await promiseTelemetryRecorded(HISTOGRAM, process, 1);
|
||||
|
||||
equal(arraySum(getSnapshots(process)[HISTOGRAM].counts), 1,
|
||||
`Data recorded for histogram: ${HISTOGRAM}.`);
|
||||
|
||||
await contentPage.close();
|
||||
await extension1.unload();
|
||||
|
||||
await extension2.startup();
|
||||
equal(histogram.snapshot().sum, histogramSum,
|
||||
equal(arraySum(getSnapshots(process)[HISTOGRAM].counts), 1,
|
||||
`No data recorded for histogram after startup: ${HISTOGRAM}.`);
|
||||
|
||||
contentPage = await ExtensionTestUtils.loadContentPage(`${BASE_URL}/file_sample.html`);
|
||||
await extension2.awaitMessage("content-script-run");
|
||||
ok(histogram.snapshot().sum > histogramSum,
|
||||
`Data recorded for second extension for histogram: ${HISTOGRAM}.`);
|
||||
await promiseTelemetryRecorded(HISTOGRAM, process, 2);
|
||||
|
||||
equal(arraySum(getSnapshots(process)[HISTOGRAM].counts), 2,
|
||||
`Data recorded for histogram: ${HISTOGRAM}.`);
|
||||
|
||||
await contentPage.close();
|
||||
await extension2.unload();
|
||||
|
|
|
@ -6,10 +6,6 @@ const HISTOGRAM_IDS = [
|
|||
"WEBEXT_STORAGE_LOCAL_SET_MS", "WEBEXT_STORAGE_LOCAL_GET_MS",
|
||||
];
|
||||
|
||||
function arraySum(arr) {
|
||||
return arr.reduce((a, b) => a + b, 0);
|
||||
}
|
||||
|
||||
add_task(async function test_telemetry_background() {
|
||||
const server = createHttpServer();
|
||||
server.registerDirectory("/data/", do_get_file("data"));
|
||||
|
@ -49,44 +45,58 @@ add_task(async function test_telemetry_background() {
|
|||
let extension1 = ExtensionTestUtils.loadExtension(extInfo);
|
||||
let extension2 = ExtensionTestUtils.loadExtension(extInfo);
|
||||
|
||||
// Initialize and clear histograms.
|
||||
let histograms = {};
|
||||
clearHistograms();
|
||||
|
||||
let process = IS_OOP ? "extension" : "parent";
|
||||
let snapshots = getSnapshots(process);
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
histograms[id] = Services.telemetry.getHistogramById(id);
|
||||
histograms[id].clear();
|
||||
equal(arraySum(histograms[id].snapshot().counts), 0,
|
||||
`No data recorded for histogram: ${id}.`);
|
||||
ok(!(id in snapshots), `No data recorded for histogram: ${id}.`);
|
||||
}
|
||||
|
||||
await extension1.startup();
|
||||
await extension1.awaitMessage("backgroundDone");
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
await promiseTelemetryRecorded(id, process, 1);
|
||||
}
|
||||
|
||||
// Telemetry from extension1's background page should be recorded.
|
||||
for (let id in histograms) {
|
||||
equal(arraySum(histograms[id].snapshot().counts), 1,
|
||||
snapshots = getSnapshots(process);
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
equal(arraySum(snapshots[id].counts), 1,
|
||||
`Data recorded for histogram: ${id}.`);
|
||||
}
|
||||
|
||||
await extension2.startup();
|
||||
await extension2.awaitMessage("backgroundDone");
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
await promiseTelemetryRecorded(id, process, 2);
|
||||
}
|
||||
|
||||
// Telemetry from extension2's background page should be recorded.
|
||||
for (let id in histograms) {
|
||||
equal(arraySum(histograms[id].snapshot().counts), 2,
|
||||
snapshots = getSnapshots(process);
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
equal(arraySum(snapshots[id].counts), 2,
|
||||
`Additional data recorded for histogram: ${id}.`);
|
||||
}
|
||||
|
||||
await extension2.unload();
|
||||
|
||||
// Run a content script.
|
||||
process = IS_OOP ? "content" : "parent";
|
||||
let expectedCount = IS_OOP ? 1 : 3;
|
||||
let contentScriptPromise = extension1.awaitMessage("contentDone");
|
||||
let contentPage = await ExtensionTestUtils.loadContentPage(`${BASE_URL}/file_sample.html`);
|
||||
await contentScriptPromise;
|
||||
await contentPage.close();
|
||||
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
await promiseTelemetryRecorded(id, process, expectedCount);
|
||||
}
|
||||
|
||||
// Telemetry from extension1's content script should be recorded.
|
||||
for (let id in histograms) {
|
||||
equal(arraySum(histograms[id].snapshot().counts), 3,
|
||||
snapshots = getSnapshots(process);
|
||||
for (let id of HISTOGRAM_IDS) {
|
||||
equal(arraySum(snapshots[id].counts), expectedCount,
|
||||
`Data recorded in content script for histogram: ${id}.`);
|
||||
}
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ skip-if = os == "android" # Android does not use Places for history.
|
|||
[test_ext_background_telemetry.js]
|
||||
[test_ext_background_window_properties.js]
|
||||
skip-if = os == "android"
|
||||
[test_ext_browserSettings.js]
|
||||
[test_ext_contextual_identities.js]
|
||||
skip-if = os == "android" # Containers are not exposed to android.
|
||||
[test_ext_debugging_utils.js]
|
||||
|
@ -27,6 +28,8 @@ skip-if = os == "android"
|
|||
[test_ext_extension.js]
|
||||
[test_ext_extensionPreferencesManager.js]
|
||||
[test_ext_extensionSettingsStore.js]
|
||||
[test_ext_extension_content_telemetry.js]
|
||||
skip-if = os == "android" # checking for telemetry needs to be updated: 1384923
|
||||
[test_ext_extension_startup_telemetry.js]
|
||||
[test_ext_idle.js]
|
||||
[test_ext_legacy_extension_context.js]
|
||||
|
@ -56,6 +59,8 @@ head = head.js head_sync.js
|
|||
skip-if = os == "android"
|
||||
[test_ext_storage_sync_crypto.js]
|
||||
skip-if = os == "android"
|
||||
[test_ext_storage_telemetry.js]
|
||||
skip-if = os == "android" # checking for telemetry needs to be updated: 1384923
|
||||
[test_ext_topSites.js]
|
||||
skip-if = os == "android"
|
||||
[test_native_messaging.js]
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
[DEFAULT]
|
||||
head = head.js head_remote.js head_e10s.js
|
||||
head = head.js head_remote.js head_e10s.js head_telemetry.js
|
||||
tail =
|
||||
firefox-appdir = browser
|
||||
skip-if = appname == "thunderbird" || os == "android"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
[DEFAULT]
|
||||
head = head.js
|
||||
head = head.js head_telemetry.js
|
||||
firefox-appdir = browser
|
||||
skip-if = appname == "thunderbird"
|
||||
dupe-manifest =
|
||||
|
|
|
@ -546,10 +546,10 @@ this.History = Object.freeze({
|
|||
: "url_hash = hash(:val) AND url = :val "
|
||||
|
||||
return PlacesUtils.promiseDBConnection().then(async db => {
|
||||
let rows = await db.execute(`SELECT 1 FROM moz_places
|
||||
WHERE ${sqlFragment}
|
||||
AND last_visit_date NOTNULL`,
|
||||
{ val: isGuid ? guidOrURI : guidOrURI.href });
|
||||
let rows = await db.executeCached(`SELECT 1 FROM moz_places
|
||||
WHERE ${sqlFragment}
|
||||
AND last_visit_date NOTNULL`,
|
||||
{ val: isGuid ? guidOrURI : guidOrURI.href });
|
||||
return !!rows.length;
|
||||
});
|
||||
},
|
||||
|
|
|
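The switch from execute() to executeCached() above is a hot-path optimization: Sqlite.jsm keeps the compiled statement on the connection, so repeated hasVisits-style lookups avoid re-preparing the SQL. A minimal sketch of the pattern (the URL value is illustrative):

    async function hasVisits(url) {
      let db = await PlacesUtils.promiseDBConnection();
      // executeCached reuses the prepared statement across calls;
      // execute() would parse and prepare the SQL on every invocation.
      let rows = await db.executeCached(
        `SELECT 1 FROM moz_places
         WHERE url_hash = hash(:val) AND url = :val
           AND last_visit_date NOTNULL`,
        { val: url });
      return rows.length > 0;
    }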
@ -398,6 +398,21 @@ preferences:
|
|||
release_channel_collection: opt-out
|
||||
record_in_processes:
|
||||
- main
|
||||
search_query:
|
||||
bug_numbers:
|
||||
- 1359306
|
||||
description: >-
|
||||
Each key is a search query string when user performs a search action within
|
||||
about:preferences, and each value is the number of times that key is recorded.
|
||||
The telemetry data will be recorded if there is a successful search result highlighted.
|
||||
expires: "62"
|
||||
kind: uint
|
||||
keyed: true
|
||||
notification_emails:
|
||||
- chsiang@mozilla.com
|
||||
release_channel_collection: opt-in
|
||||
record_in_processes:
|
||||
- main
|
||||
|
||||
# The following section contains WebRTC nICEr scalars
|
||||
# For more info on ICE, see https://tools.ietf.org/html/rfc5245
|
||||
|
|
|
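The search_query probe above is a keyed uint scalar in the preferences section. Its recording call site is not part of this commit, so the following is only an assumed sketch (the scalar name preferences.search_query is inferred from the section; keyedScalarAdd is the standard keyed-scalar API):

    // Assumed recording pattern in the about:preferences search code:
    // bump the per-query count when a search successfully highlights a result.
    function recordPreferencesSearch(query) {
      Services.telemetry.keyedScalarAdd("preferences.search_query", query, 1);
    }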
@ -76,6 +76,8 @@ XPCOMUtils.defineLazyModuleGetter(this, "TelemetryReportingPolicy",
|
|||
"resource://gre/modules/TelemetryReportingPolicy.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "TelemetryModules",
|
||||
"resource://gre/modules/TelemetryModules.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "UpdatePing",
|
||||
"resource://gre/modules/UpdatePing.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "TelemetryHealthPing",
|
||||
"resource://gre/modules/TelemetryHealthPing.jsm");
|
||||
|
||||
|
@ -697,6 +699,10 @@ var Impl = {
|
|||
// lead to some stale client ids.
|
||||
this._clientID = ClientID.getCachedClientID();
|
||||
|
||||
// Init the update ping telemetry as early as possible. This won't have
|
||||
// an impact on startup.
|
||||
UpdatePing.earlyInit();
|
||||
|
||||
// Delay full telemetry initialization to give the browser time to
|
||||
// run various late initializers. Otherwise our gathered memory
|
||||
// footprint and other numbers would be too optimistic.
|
||||
|
@ -783,6 +789,8 @@ var Impl = {
|
|||
await this._delayedNewPingTask.finalize();
|
||||
}
|
||||
|
||||
UpdatePing.shutdown();
|
||||
|
||||
// Stop the datachoices infobar display.
|
||||
TelemetryReportingPolicy.shutdown();
|
||||
TelemetryEnvironment.shutdown();
|
||||
|
|
|
@ -37,6 +37,7 @@ this.TelemetryUtils = {
|
|||
ShutdownPingSenderFirstSession: "toolkit.telemetry.shutdownPingSender.enabledFirstSession",
|
||||
TelemetryEnabled: "toolkit.telemetry.enabled",
|
||||
Unified: "toolkit.telemetry.unified",
|
||||
UpdatePing: "toolkit.telemetry.updatePing.enabled",
|
||||
NewProfilePingEnabled: "toolkit.telemetry.newProfilePing.enabled",
|
||||
NewProfilePingDelay: "toolkit.telemetry.newProfilePing.delay",
|
||||
PreviousBuildID: "toolkit.telemetry.previousBuildID",
|
||||
|
|
|
@ -0,0 +1,104 @@
|
|||
/* -*- js-indent-level: 2; indent-tabs-mode: nil -*- */
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
"use strict";
|
||||
|
||||
const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
|
||||
|
||||
Cu.import("resource://gre/modules/Log.jsm", this);
|
||||
Cu.import("resource://gre/modules/Preferences.jsm", this);
|
||||
Cu.import("resource://gre/modules/Services.jsm", this);
|
||||
Cu.import("resource://gre/modules/TelemetryUtils.jsm", this);
|
||||
Cu.import("resource://gre/modules/XPCOMUtils.jsm", this);
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "TelemetryController",
|
||||
"resource://gre/modules/TelemetryController.jsm");
|
||||
|
||||
const LOGGER_NAME = "Toolkit.Telemetry";
|
||||
const PING_TYPE = "update";
|
||||
const UPDATE_DOWNLOADED_TOPIC = "update-downloaded";
|
||||
|
||||
this.EXPORTED_SYMBOLS = ["UpdatePing"];
|
||||
|
||||
/**
|
||||
* This module is responsible for listening to all the relevant update
|
||||
* signals, gathering the needed information and assembling the "update"
|
||||
* ping.
|
||||
*/
|
||||
this.UpdatePing = {
|
||||
earlyInit() {
|
||||
this._log = Log.repository.getLoggerWithMessagePrefix(LOGGER_NAME, "UpdatePing::");
|
||||
this._enabled = Preferences.get(TelemetryUtils.Preferences.UpdatePing, false);
|
||||
|
||||
this._log.trace("init - enabled: " + this._enabled);
|
||||
|
||||
if (!this._enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
Services.obs.addObserver(this, UPDATE_DOWNLOADED_TOPIC);
|
||||
},
|
||||
|
||||
/**
|
||||
* Generate an "update" ping with reason "ready" and dispatch it
|
||||
* to the Telemetry system.
|
||||
*
|
||||
* @param {String} aUpdateState The state of the downloaded patch. See
|
||||
* nsIUpdateService.idl for a list of possible values.
|
||||
*/
|
||||
_handleUpdateReady(aUpdateState) {
|
||||
const ALLOWED_STATES = [
|
||||
"applied", "applied-service", "pending", "pending-service", "pending-elevate"
|
||||
];
|
||||
if (!ALLOWED_STATES.includes(aUpdateState)) {
|
||||
this._log.trace("Unexpected update state: " + aUpdateState);
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the information about the update we're going to apply from the
|
||||
// update manager.
|
||||
let updateManager =
|
||||
Cc["@mozilla.org/updates/update-manager;1"].getService(Ci.nsIUpdateManager);
|
||||
if (!updateManager || !updateManager.activeUpdate) {
|
||||
this._log.trace("Cannot get the update manager or no update is currently active.");
|
||||
return;
|
||||
}
|
||||
|
||||
let update = updateManager.activeUpdate;
|
||||
|
||||
const payload = {
|
||||
reason: "ready",
|
||||
targetChannel: update.channel,
|
||||
targetVersion: update.appVersion,
|
||||
targetBuildId: update.buildID,
|
||||
};
|
||||
|
||||
const options = {
|
||||
addClientId: true,
|
||||
addEnvironment: true,
|
||||
usePingSender: true,
|
||||
};
|
||||
|
||||
TelemetryController.submitExternalPing(PING_TYPE, payload, options)
|
||||
.catch(e => this._log.error("_handleUpdateReady - failed to submit update ping", e));
|
||||
},
|
||||
|
||||
/**
|
||||
* The notifications handler.
|
||||
*/
|
||||
observe(aSubject, aTopic, aData) {
|
||||
this._log.trace("observe - aTopic: " + aTopic);
|
||||
if (aTopic == UPDATE_DOWNLOADED_TOPIC) {
|
||||
this._handleUpdateReady(aData);
|
||||
}
|
||||
},
|
||||
|
||||
shutdown() {
|
||||
if (!this._enabled) {
|
||||
return;
|
||||
}
|
||||
Services.obs.removeObserver(this, UPDATE_DOWNLOADED_TOPIC);
|
||||
},
|
||||
};
|
|
@ -27,6 +27,6 @@ Important examples are:
|
|||
* :doc:`main <../data/main-ping>` - contains the information collected by Telemetry (Histograms, hang stacks, ...)
|
||||
* :doc:`saved-session <../data/main-ping>` - has the same format as a main ping, but it contains the *"classic"* Telemetry payload with measurements covering the whole browser session. This is only a separate type to make storage of saved-session easier server-side. This is temporary and will be removed soon.
|
||||
* :doc:`crash <../data/crash-ping>` - a ping that is captured and sent after Firefox crashes.
|
||||
* :doc:`new-profile <../data/new-profile-ping>` - sent on the first run of a new profile
|
||||
* ``upgrade`` - *planned* - sent right after an upgrade
|
||||
* :doc:`deletion <../data/deletion-ping>` - sent when FHR upload is disabled, requesting deletion of the data associated with this user
|
||||
* :doc:`new-profile <../data/new-profile-ping>` - sent on the first run of a new profile.
|
||||
* :doc:`update <../data/update-ping>` - sent right after an update is downloaded.
|
||||
* :doc:`deletion <../data/deletion-ping>` - sent when FHR upload is disabled, requesting deletion of the data associated with this user.
|
||||
|
|
|
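For reference, custom ping types such as the new update ping reach this pipeline through TelemetryController.submitExternalPing. A minimal sketch mirroring the options used by the UpdatePing module added in this commit (payload values are illustrative; see the documentation below):

    Cu.import("resource://gre/modules/TelemetryController.jsm", this);
    TelemetryController.submitExternalPing("update",
      { reason: "ready", targetChannel: "nightly" },
      { addClientId: true, addEnvironment: true, usePingSender: true });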
@ -0,0 +1,52 @@

"update" ping
=============

This opt-out ping is sent from Firefox Desktop when a browser update is ready to be applied. There is a
plan to also send this ping after an update is successfully applied; that work is tracked in `bug 1380256 <https://bugzilla.mozilla.org/show_bug.cgi?id=1380256>`_.

Structure:

.. code-block:: js

    {
      type: "update",
      ... common ping data
      clientId: <UUID>,
      environment: { ... },
      payload: {
        reason: <string>, // "ready"
        targetChannel: <string>, // "nightly"
        targetVersion: <string>, // "56.01a"
        targetBuildId: <string>, // "20080811053724"
      }
    }

payload.reason
--------------
This field only supports the value ``ready``, meaning that the ping was generated after an update was downloaded
and marked as ready to be processed. For *non-staged* updates this happens as soon as the download
finishes and is verified, while for *staged* updates this happens before the staging step is started.

payload.targetChannel
---------------------
The Firefox channel the update was fetched from (only valid for pings with reason "ready").

payload.targetVersion
---------------------
The Firefox version the browser is updating to. Follows the same format as application.version (only valid for pings with reason "ready").

payload.targetBuildId
---------------------
The Firefox build id the browser is updating to. Follows the same format as application.buildId (only valid for pings with reason "ready").

Expected behaviours
-------------------
The following is a list of conditions and expected behaviours for the ``update`` ping:

- **The ping is generated once every time an update is downloaded, after it was verified:**

  - *for users who saw the privacy policy*, the ``update`` ping is sent immediately;
  - *for users who did not see the privacy policy*, the ``update`` ping is saved to disk and sent after the policy is displayed.

- **If the download of the update retries or another fallback occurs**: the ``update`` ping will not be generated
  multiple times, but only once the download is complete and verified.
|
|
@ -66,6 +66,10 @@ Preferences
|
|||
|
||||
Controls the delay after which the :doc:`../data/new-profile` is sent on new profiles.
|
||||
|
||||
``toolkit.telemetry.updatePing.enabled``
|
||||
|
||||
Enable the :doc:`../data/update-ping` on browser updates.
|
||||
|
||||
Data-choices notification
|
||||
-------------------------
|
||||
|
||||
|
|
|
@ -97,6 +97,7 @@ EXTRA_JS_MODULES += [
|
|||
'TelemetryUtils.jsm',
|
||||
'ThirdPartyCookieProbe.jsm',
|
||||
'UITelemetry.jsm',
|
||||
'UpdatePing.jsm',
|
||||
]
|
||||
|
||||
TESTING_JS_MODULES += [
|
||||
|
|
|
@ -5,6 +5,7 @@ support-files =
|
|||
downloadPage.html
|
||||
testConstants.js
|
||||
|
||||
[browser_TelemetryUpdatePing.js]
|
||||
[browser_updatesBackgroundWindow.js]
|
||||
[browser_updatesBackgroundWindowFailures.js]
|
||||
[browser_updatesBasicPrompt.js]
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/
|
||||
*/
|
||||
|
||||
Cu.import("resource://testing-common/TelemetryArchiveTesting.jsm", this);
|
||||
|
||||
/**
|
||||
* Please note that this is really a Telemetry test, not an
|
||||
* "update UI" test like the rest of the tests in this directory.
|
||||
* This test does not live in toolkit/components/telemetry/tests to prevent
|
||||
* duplicating the code for all the test dependencies. Unfortunately, due
|
||||
* to a limitation in the build system, we were not able to simply reference
|
||||
* the dependencies as "support-files" in the test manifest.
|
||||
*/
|
||||
add_task(async function testUpdatePingReady() {
|
||||
SpecialPowers.pushPrefEnv({set: [
|
||||
[PREF_APP_UPDATE_STAGING_ENABLED, false],
|
||||
[PREF_APP_UPDATE_AUTO, false]
|
||||
]});
|
||||
|
||||
let updateParams = "promptWaitTime=0";
|
||||
|
||||
let archiveChecker = new TelemetryArchiveTesting.Checker();
|
||||
await archiveChecker.promiseInit();
|
||||
|
||||
// Trigger an "update" ping by downloading and applying an update.
|
||||
await runUpdateTest(updateParams, 1, [
|
||||
{
|
||||
notificationId: "update-available",
|
||||
button: "button",
|
||||
beforeClick() {
|
||||
checkWhatsNewLink("update-available-whats-new");
|
||||
}
|
||||
},
|
||||
{
|
||||
notificationId: "update-restart",
|
||||
button: "secondarybutton",
|
||||
cleanup() {
|
||||
AppMenuNotifications.removeNotification(/.*/);
|
||||
}
|
||||
},
|
||||
]);
|
||||
|
||||
// We cannot control when the ping will be generated/archived after we trigger
|
||||
// an update, so let's make sure to have one before moving on with validation.
|
||||
let updatePing;
|
||||
await BrowserTestUtils.waitForCondition(async function() {
|
||||
// Check that the ping made it into the Telemetry archive.
|
||||
// The test data is defined in ../data/sharedUpdateXML.js
|
||||
updatePing = await archiveChecker.promiseFindPing("update", [
|
||||
[["payload", "reason"], "ready"],
|
||||
[["payload", "targetBuildId"], "20080811053724"]
|
||||
]);
|
||||
return !!updatePing;
|
||||
}, "Make sure the ping is generated before trying to validate it.", 500, 100);
|
||||
|
||||
ok(updatePing, "The 'update' ping must be correctly sent.");
|
||||
|
||||
// We don't know the exact value for the other fields, so just check
|
||||
// that they're available.
|
||||
for (let f of ["targetVersion", "targetChannel"]) {
|
||||
ok(f in updatePing.payload,
|
||||
`${f} must be available in the update ping payload.`);
|
||||
ok(typeof(updatePing.payload[f]) == "string",
|
||||
`${f} must have the correct format.`);
|
||||
}
|
||||
|
||||
// Also make sure that the ping contains both a client id and an
|
||||
// environment section.
|
||||
ok("clientId" in updatePing, "The update ping must report a client id.");
|
||||
ok("environment" in updatePing, "The update ping must report the environment.");
|
||||
});
|
|
@ -112,3 +112,6 @@ FINAL_TARGET_FILES += [
|
|||
'TestAUSReadStrings2.ini',
|
||||
'TestAUSReadStrings3.ini',
|
||||
]
|
||||
|
||||
with Files("browser/browser_TelemetryUpdatePing.js"):
|
||||
BUG_COMPONENT = ("Toolkit", "Telemetry")
|
||||
|
|