зеркало из https://github.com/mozilla/gecko-dev.git
Merge mozilla-central to mozilla-inbound
This commit is contained in:
Коммит
161c0e01f4
|
@ -694,7 +694,7 @@ dependencies = [
|
|||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"malloc_size_of 0.0.1",
|
||||
"nsstring 0.1.0",
|
||||
"parking_lot 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"selectors 0.19.0",
|
||||
"servo_arc 0.1.1",
|
||||
"smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -1255,12 +1255,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.4.4"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1650,7 +1649,7 @@ dependencies = [
|
|||
"num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ordered-float 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -1780,15 +1779,6 @@ dependencies = [
|
|||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread-id"
|
||||
version = "3.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "0.3.4"
|
||||
|
@ -2304,7 +2294,7 @@ dependencies = [
|
|||
"checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d"
|
||||
"checksum ordered-float 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "da12c96037889ae0be29dd2bdd260e5a62a7df24e6466d5a15bb8131c1c200a8"
|
||||
"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
|
||||
"checksum parking_lot 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "37f364e2ce5efa24c7d0b6646d5bb61145551a0112f107ffd7499f1a3e322fbd"
|
||||
"checksum parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9fd9d732f2de194336fb02fe11f9eed13d9e76f13f4315b4d88a14ca411750cd"
|
||||
"checksum parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6c677d78851950b3aec390e681a411f78cc250cba277d4f578758a377f727970"
|
||||
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
|
||||
|
@ -2353,7 +2343,6 @@ dependencies = [
|
|||
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
|
||||
"checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
|
||||
"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
|
||||
"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
|
||||
"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
|
||||
"checksum thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf947d192a9be60ef5131cc7a4648886ba89d712f16700ebbf80c8a69d05d48f"
|
||||
"checksum time 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)" = "d5d788d3aa77bc0ef3e9621256885555368b47bd495c13dd2e7413c89f845520"
|
||||
|
|
|
@ -167,7 +167,7 @@
|
|||
<!-- A broadcaster of a number of attributes suitable for "sync now" UI -
|
||||
A 'syncstatus' attribute is set while actively syncing, and the label
|
||||
attribute which changes from "sync now" to "syncing" etc. -->
|
||||
<broadcaster id="sync-status"/>
|
||||
<broadcaster id="sync-status" onmouseover="gSync.refreshSyncButtonsTooltip();"/>
|
||||
<!-- broadcasters of the "hidden" attribute to reflect setup state for
|
||||
menus -->
|
||||
<broadcaster id="sync-setup-state" hidden="true"/>
|
||||
|
|
|
@ -388,7 +388,7 @@ var gSync = {
|
|||
for (let client of clients) {
|
||||
const type = client.formfactor && client.formfactor.includes("tablet") ?
|
||||
"tablet" : client.type;
|
||||
addTargetDevice(client.id, client.name, type, client.serverLastModified * 1000);
|
||||
addTargetDevice(client.id, client.name, type, new Date(client.serverLastModified * 1000));
|
||||
}
|
||||
|
||||
// "Send to All Devices" menu item
|
||||
|
@ -591,6 +591,11 @@ var gSync = {
|
|||
}
|
||||
},
|
||||
|
||||
refreshSyncButtonsTooltip() {
|
||||
const state = UIState.get();
|
||||
this.updateSyncButtonsTooltip(state);
|
||||
},
|
||||
|
||||
/* Update the tooltip for the sync-status broadcaster (which will update the
|
||||
Sync Toolbar button and the Sync spinner in the FxA hamburger area.)
|
||||
If Sync is configured, the tooltip is when the last sync occurred,
|
||||
|
@ -628,32 +633,17 @@ var gSync = {
|
|||
}
|
||||
},
|
||||
|
||||
get withinLastWeekFormat() {
|
||||
delete this.withinLastWeekFormat;
|
||||
return this.withinLastWeekFormat = new Intl.DateTimeFormat(undefined,
|
||||
{weekday: "long", hour: "numeric", minute: "numeric"});
|
||||
},
|
||||
|
||||
get oneWeekOrOlderFormat() {
|
||||
delete this.oneWeekOrOlderFormat;
|
||||
return this.oneWeekOrOlderFormat = new Intl.DateTimeFormat(undefined,
|
||||
{month: "long", day: "numeric"});
|
||||
get relativeTimeFormat() {
|
||||
delete this.relativeTimeFormat;
|
||||
return this.relativeTimeFormat = new Services.intl.RelativeTimeFormat(undefined, {style: "short"});
|
||||
},
|
||||
|
||||
formatLastSyncDate(date) {
|
||||
let sixDaysAgo = (() => {
|
||||
let tempDate = new Date();
|
||||
tempDate.setDate(tempDate.getDate() - 6);
|
||||
tempDate.setHours(0, 0, 0, 0);
|
||||
return tempDate;
|
||||
})();
|
||||
|
||||
// It may be confusing for the user to see "Last Sync: Monday" when the last
|
||||
// sync was indeed a Monday, but 3 weeks ago.
|
||||
let dateFormat = date < sixDaysAgo ? this.oneWeekOrOlderFormat : this.withinLastWeekFormat;
|
||||
|
||||
let lastSyncDateString = dateFormat.format(date);
|
||||
return this.syncStrings.formatStringFromName("lastSync2.label", [lastSyncDateString], 1);
|
||||
if (!date) { // Date can be null before the first sync!
|
||||
return null;
|
||||
}
|
||||
const relativeDateStr = this.relativeTimeFormat.formatBestUnit(date);
|
||||
return this.syncStrings.formatStringFromName("lastSync2.label", [relativeDateStr], 1);
|
||||
},
|
||||
|
||||
onClientsSynced() {
|
||||
|
|
|
@ -22,6 +22,7 @@ add_task(async function test_signedin_no_badge() {
|
|||
|
||||
UIState.get = () => ({
|
||||
status: UIState.STATUS_SIGNED_IN,
|
||||
lastSync: new Date(),
|
||||
email: "foo@bar.com"
|
||||
});
|
||||
Services.obs.notifyObservers(null, UIState.ON_UPDATE);
|
||||
|
|
|
@ -134,21 +134,6 @@ add_task(async function test_ui_state_loginFailed() {
|
|||
checkMenuBarItem("sync-reauthitem");
|
||||
});
|
||||
|
||||
add_task(async function test_FormatLastSyncDateNow() {
|
||||
let now = new Date();
|
||||
let nowString = gSync.formatLastSyncDate(now);
|
||||
is(nowString, "Last sync: " + now.toLocaleDateString(undefined, {weekday: "long", hour: "numeric", minute: "numeric"}),
|
||||
"The date is correctly formatted");
|
||||
});
|
||||
|
||||
add_task(async function test_FormatLastSyncDateMonthAgo() {
|
||||
let monthAgo = new Date();
|
||||
monthAgo.setMonth(monthAgo.getMonth() - 1);
|
||||
let monthAgoString = gSync.formatLastSyncDate(monthAgo);
|
||||
is(monthAgoString, "Last sync: " + monthAgo.toLocaleDateString(undefined, {month: "long", day: "numeric"}),
|
||||
"The date is correctly formatted");
|
||||
});
|
||||
|
||||
function checkPanelUIStatusBar({label, tooltip, fxastatus, avatarURL, syncing, syncNowTooltip}) {
|
||||
let labelNode = document.getElementById("appMenu-fxa-label");
|
||||
let tooltipNode = document.getElementById("appMenu-fxa-status");
|
||||
|
|
|
@ -328,7 +328,7 @@ add_task(async function sendToDevice_syncNotReady_configured() {
|
|||
clientId: client.id,
|
||||
label: client.name,
|
||||
clientType: client.type,
|
||||
tooltiptext: gSync.formatLastSyncDate(lastModifiedFixture * 1000)
|
||||
tooltiptext: gSync.formatLastSyncDate(new Date(lastModifiedFixture * 1000))
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
|
@ -565,26 +565,7 @@ if (Services.prefs.getBoolPref("identity.fxaccounts.enabled")) {
|
|||
},
|
||||
TABS_PER_PAGE: 25,
|
||||
NEXT_PAGE_MIN_TABS: 5, // Minimum number of tabs displayed when we click "Show All"
|
||||
onCreated(aNode) {
|
||||
this._initialize(aNode);
|
||||
},
|
||||
_initialize(aNode) {
|
||||
if (this._initialized) {
|
||||
return;
|
||||
}
|
||||
// Add an observer to the button so we get the animation during sync.
|
||||
// (Note the observer sets many attributes, including label and
|
||||
// tooltiptext, but we only want the 'syncstatus' attribute for the
|
||||
// animation)
|
||||
let doc = aNode.ownerDocument;
|
||||
let obnode = doc.createElementNS(kNSXUL, "observes");
|
||||
obnode.setAttribute("element", "sync-status");
|
||||
obnode.setAttribute("attribute", "syncstatus");
|
||||
aNode.appendChild(obnode);
|
||||
this._initialized = true;
|
||||
},
|
||||
onViewShowing(aEvent) {
|
||||
this._initialize(aEvent.target);
|
||||
let doc = aEvent.target.ownerDocument;
|
||||
this._tabsList = doc.getElementById("PanelUI-remotetabs-tabslist");
|
||||
Services.obs.addObserver(this, SyncedTabs.TOPIC_TABS_CHANGED);
|
||||
|
|
|
@ -199,6 +199,7 @@
|
|||
closemenu="none">
|
||||
<observes element="sync-status" attribute="syncstatus"/>
|
||||
<observes element="sync-status" attribute="tooltiptext"/>
|
||||
<observes element="sync-status" attribute="onmouseover"/>
|
||||
</toolbarbutton>
|
||||
</toolbaritem>
|
||||
<toolbarseparator class="sync-ui-item"/>
|
||||
|
|
|
@ -70,6 +70,7 @@ function mockFunctions() {
|
|||
// mock UIState.get()
|
||||
UIState.get = () => ({
|
||||
status: UIState.STATUS_SIGNED_IN,
|
||||
lastSync: new Date(),
|
||||
email: "user@mozilla.com"
|
||||
});
|
||||
|
||||
|
|
|
@ -159,7 +159,8 @@ add_task(async function() {
|
|||
|
||||
// Test the Connect Another Device button
|
||||
add_task(async function() {
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@bar.com" });
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@bar.com",
|
||||
lastSync: new Date() });
|
||||
|
||||
let button = document.getElementById("PanelUI-remotetabs-connect-device-button");
|
||||
ok(button, "found the button");
|
||||
|
@ -178,7 +179,8 @@ add_task(async function() {
|
|||
|
||||
// Test the "Sync Now" button
|
||||
add_task(async function() {
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@bar.com" });
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@bar.com",
|
||||
lastSync: new Date() });
|
||||
|
||||
await document.getElementById("nav-bar").overflowable.show();
|
||||
let tabsUpdatedPromise = promiseObserverNotified("synced-tabs-menu:test:tabs-updated");
|
||||
|
@ -341,7 +343,8 @@ add_task(async function() {
|
|||
]);
|
||||
};
|
||||
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@bar.com" });
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, lastSync: new Date(),
|
||||
email: "foo@bar.com" });
|
||||
|
||||
await document.getElementById("nav-bar").overflowable.show();
|
||||
let tabsUpdatedPromise = promiseObserverNotified("synced-tabs-menu:test:tabs-updated");
|
||||
|
|
|
@ -266,12 +266,17 @@ var Policies = {
|
|||
|
||||
"EnableTrackingProtection": {
|
||||
onBeforeUIStartup(manager, param) {
|
||||
if (param.Locked) {
|
||||
setAndLockPref("privacy.trackingprotection.enabled", param.Value);
|
||||
setAndLockPref("privacy.trackingprotection.pbmode.enabled", param.Value);
|
||||
if (param.Value) {
|
||||
if (param.Locked) {
|
||||
setAndLockPref("privacy.trackingprotection.enabled", true);
|
||||
setAndLockPref("privacy.trackingprotection.pbmode.enabled", true);
|
||||
} else {
|
||||
setDefaultPref("privacy.trackingprotection.enabled", true);
|
||||
setDefaultPref("privacy.trackingprotection.pbmode.enabled", true);
|
||||
}
|
||||
} else {
|
||||
setDefaultPref("privacy.trackingprotection.enabled", param.Value);
|
||||
setDefaultPref("privacy.trackingprotection.pbmode.enabled", param.Value);
|
||||
setAndLockPref("privacy.trackingprotection.enabled", false);
|
||||
setAndLockPref("privacy.trackingprotection.pbmode.enabled", false);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -131,11 +131,20 @@ TabListView.prototype = {
|
|||
});
|
||||
},
|
||||
|
||||
_updateLastSyncTitle(lastModified, itemNode) {
|
||||
let lastSync = new Date(lastModified);
|
||||
let lastSyncTitle = getChromeWindow(this._window).gSync.formatLastSyncDate(lastSync);
|
||||
itemNode.setAttribute("title", lastSyncTitle);
|
||||
},
|
||||
|
||||
_renderClient(client) {
|
||||
let itemNode = client.tabs.length ?
|
||||
this._createClient(client) :
|
||||
this._createEmptyClient(client);
|
||||
|
||||
itemNode.addEventListener("mouseover", () =>
|
||||
this._updateLastSyncTitle(client.lastModified, itemNode));
|
||||
|
||||
this._updateClient(client, itemNode);
|
||||
|
||||
let tabsList = itemNode.querySelector(".item-tabs-list");
|
||||
|
@ -154,15 +163,15 @@ TabListView.prototype = {
|
|||
return itemNode;
|
||||
},
|
||||
|
||||
_createClient(item) {
|
||||
_createClient() {
|
||||
return this._doc.importNode(this._clientTemplate.content, true).firstElementChild;
|
||||
},
|
||||
|
||||
_createEmptyClient(item) {
|
||||
_createEmptyClient() {
|
||||
return this._doc.importNode(this._emptyClientTemplate.content, true).firstElementChild;
|
||||
},
|
||||
|
||||
_createTab(item) {
|
||||
_createTab() {
|
||||
return this._doc.importNode(this._tabTemplate.content, true).firstElementChild;
|
||||
},
|
||||
|
||||
|
@ -211,9 +220,7 @@ TabListView.prototype = {
|
|||
*/
|
||||
_updateClient(item, itemNode) {
|
||||
itemNode.setAttribute("id", "item-" + item.id);
|
||||
let lastSync = new Date(item.lastModified);
|
||||
let lastSyncTitle = getChromeWindow(this._window).gSync.formatLastSyncDate(lastSync);
|
||||
itemNode.setAttribute("title", lastSyncTitle);
|
||||
this._updateLastSyncTitle(item.lastModified, itemNode);
|
||||
if (item.closed) {
|
||||
itemNode.classList.add("closed");
|
||||
} else {
|
||||
|
|
|
@ -36,7 +36,7 @@ var tests = [
|
|||
await setSignedInUser();
|
||||
let userData = await fxAccounts.getSignedInUser();
|
||||
isnot(userData, null, "Logged in now");
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, email: "foo@example.com" });
|
||||
gSync.updateAllUI({ status: UIState.STATUS_SIGNED_IN, lastSync: new Date(), email: "foo@example.com" });
|
||||
await showMenuPromise("appMenu");
|
||||
await showHighlightPromise("accountStatus");
|
||||
let highlight = document.getElementById("UITourHighlightContainer");
|
||||
|
|
|
@ -107,10 +107,8 @@ class AsyncTabSwitcher {
|
|||
// removed from the set upon MozAfterPaint.
|
||||
this.maybeVisibleTabs = new Set([tabbrowser.selectedTab]);
|
||||
|
||||
// This holds onto the set of tabs that we've been asked to warm up.
|
||||
// This is used only for Telemetry and logging, and (in order to not
|
||||
// over-complicate the async tab switcher any further) has nothing to do
|
||||
// with how warmed tabs are loaded and unloaded.
|
||||
// This holds onto the set of tabs that we've been asked to warm up,
|
||||
// and tabs are evicted once they're done loading or are unloaded.
|
||||
this.warmingTabs = new WeakSet();
|
||||
|
||||
this.STATE_UNLOADED = 0;
|
||||
|
@ -842,16 +840,21 @@ class AsyncTabSwitcher {
|
|||
return false;
|
||||
}
|
||||
|
||||
// Similarly, if the tab is already in STATE_LOADING or
|
||||
// STATE_LOADED somehow, there's no point in trying to
|
||||
// warm it up.
|
||||
let state = this.getTabState(tab);
|
||||
if (state === this.STATE_LOADING ||
|
||||
state === this.STATE_LOADED) {
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
shouldWarmTab(tab) {
|
||||
if (this.canWarmTab(tab)) {
|
||||
// Tabs that are already in STATE_LOADING or STATE_LOADED
|
||||
// have no need to be warmed up.
|
||||
let state = this.getTabState(tab);
|
||||
if (state === this.STATE_UNLOADING ||
|
||||
state === this.STATE_UNLOADED) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
unwarmTab(tab) {
|
||||
|
@ -859,7 +862,7 @@ class AsyncTabSwitcher {
|
|||
}
|
||||
|
||||
warmupTab(tab) {
|
||||
if (!this.canWarmTab(tab)) {
|
||||
if (!this.shouldWarmTab(tab)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -880,15 +883,21 @@ class AsyncTabSwitcher {
|
|||
if (this.tabbrowser.tabWarmingEnabled) {
|
||||
let warmingState = "disqualified";
|
||||
|
||||
if (this.warmingTabs.has(tab)) {
|
||||
if (this.canWarmTab(tab)) {
|
||||
let tabState = this.getTabState(tab);
|
||||
if (tabState == this.STATE_LOADING) {
|
||||
warmingState = "stillLoading";
|
||||
} else if (tabState == this.STATE_LOADED) {
|
||||
warmingState = "loaded";
|
||||
} else if (tabState == this.STATE_UNLOADING ||
|
||||
tabState == this.STATE_UNLOADED) {
|
||||
// At this point, if the tab's browser was being inserted
|
||||
// lazily, we never had a chance to warm it up, and unfortunately
|
||||
// there's no great way to detect that case. Those cases will
|
||||
// end up in the "notWarmed" bucket, along with legitimate cases
|
||||
// where tabs could have been warmed but weren't.
|
||||
warmingState = "notWarmed";
|
||||
}
|
||||
} else if (this.canWarmTab(tab)) {
|
||||
warmingState = "notWarmed";
|
||||
}
|
||||
|
||||
Services.telemetry
|
||||
|
|
|
@ -13,10 +13,6 @@ endif
|
|||
ifeq ($(HOST_OS_ARCH)_$(OS_ARCH),Linux_Darwin)
|
||||
# Use the host compiler instead of the target compiler.
|
||||
CXX := $(HOST_CXX)
|
||||
# expandlibs doesn't know the distinction between host and target toolchains,
|
||||
# and on cross linux/darwin builds, the options to give to the linker for file
|
||||
# lists differ between both, so don't use file lists.
|
||||
EXPAND_MKSHLIB_ARGS :=
|
||||
endif
|
||||
|
||||
# Use the default OS X deployment target to enable using the libc++ headers
|
||||
|
|
|
@ -418,18 +418,6 @@ CREATE_PRECOMPLETE_CMD = $(PYTHON) $(abspath $(MOZILLA_DIR)/config/createprecomp
|
|||
# MDDEPDIR is the subdirectory where dependency files are stored
|
||||
MDDEPDIR := .deps
|
||||
|
||||
EXPAND_LIBS_EXEC = $(PYTHON) $(MOZILLA_DIR)/config/expandlibs_exec.py
|
||||
EXPAND_LIBS_GEN = $(PYTHON) $(MOZILLA_DIR)/config/expandlibs_gen.py
|
||||
EXPAND_AR = $(EXPAND_LIBS_EXEC) --extract -- $(AR)
|
||||
EXPAND_CC = $(EXPAND_LIBS_EXEC) --uselist -- $(CC)
|
||||
EXPAND_CCC = $(EXPAND_LIBS_EXEC) --uselist -- $(CCC)
|
||||
EXPAND_LINK = $(EXPAND_LIBS_EXEC) --uselist -- $(LINKER)
|
||||
EXPAND_MKSHLIB_ARGS = --uselist
|
||||
ifdef SYMBOL_ORDER
|
||||
EXPAND_MKSHLIB_ARGS += --symbol-order $(SYMBOL_ORDER)
|
||||
endif
|
||||
EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB)
|
||||
|
||||
# $(call CHECK_SYMBOLS,lib,PREFIX,dep_name,test)
|
||||
# Checks that the given `lib` doesn't contain dependency on symbols with a
|
||||
# version starting with `PREFIX`_ and matching the `test`. `dep_name` is only
|
||||
|
@ -486,15 +474,18 @@ endef
|
|||
# this file
|
||||
OBJ_SUFFIX := $(_OBJ_SUFFIX)
|
||||
|
||||
OBJS_VAR_SUFFIX := OBJS
|
||||
|
||||
# PGO builds with GCC build objects with instrumentation in a first pass,
|
||||
# then objects optimized, without instrumentation, in a second pass. If
|
||||
# we overwrite the objects from the first pass with those from the second,
|
||||
# we end up not getting instrumentation data for better optimization on
|
||||
# incremental builds. As a consequence, we use a different object suffix
|
||||
# for the first pass.
|
||||
ifndef NO_PROFILE_GUIDED_OPTIMIZE
|
||||
ifdef MOZ_PROFILE_GENERATE
|
||||
ifdef GNU_CC
|
||||
OBJS_VAR_SUFFIX := PGO_OBJS
|
||||
ifndef NO_PROFILE_GUIDED_OPTIMIZE
|
||||
OBJ_SUFFIX := i_o
|
||||
endif
|
||||
endif
|
||||
|
|
|
@ -1,143 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
'''Expandlibs is a system that allows to replace some libraries with a
|
||||
descriptor file containing some linking information about them.
|
||||
|
||||
The descriptor file format is as follows:
|
||||
---8<-----
|
||||
OBJS = a.o b.o ...
|
||||
LIBS = libfoo.a libbar.a ...
|
||||
--->8-----
|
||||
|
||||
(In the example above, OBJ_SUFFIX is o and LIB_SUFFIX is a).
|
||||
|
||||
Expandlibs also canonicalizes how to pass libraries to the linker, such
|
||||
that only the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} form needs to be used:
|
||||
given a list of files, expandlibs will replace items with the form
|
||||
${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
|
||||
|
||||
- If a ${DLL_PREFIX}${ROOT}.${DLL_SUFFIX} or
|
||||
${DLL_PREFIX}${ROOT}.${IMPORT_LIB_SUFFIX} file exists, use that instead
|
||||
- If the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} file exists, use it
|
||||
- If a ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX}.${LIB_DESC_SUFFIX} file exists,
|
||||
replace ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} with the OBJS and LIBS the
|
||||
descriptor contains. And for each of these LIBS, also apply the same
|
||||
rules.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
import sys, os, errno
|
||||
import expandlibs_config as conf
|
||||
|
||||
def ensureParentDir(file):
|
||||
'''Ensures the directory parent to the given file exists'''
|
||||
dir = os.path.dirname(file)
|
||||
if dir and not os.path.exists(dir):
|
||||
try:
|
||||
os.makedirs(dir)
|
||||
except OSError, error:
|
||||
if error.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
def relativize(path):
|
||||
'''Returns a path relative to the current working directory, if it is
|
||||
shorter than the given path'''
|
||||
def splitpath(path):
|
||||
dir, file = os.path.split(path)
|
||||
if os.path.splitdrive(dir)[1] == os.sep:
|
||||
return [file]
|
||||
return splitpath(dir) + [file]
|
||||
|
||||
if not os.path.exists(path):
|
||||
return path
|
||||
curdir = splitpath(os.path.abspath(os.curdir))
|
||||
abspath = splitpath(os.path.abspath(path))
|
||||
while curdir and abspath and curdir[0] == abspath[0]:
|
||||
del curdir[0]
|
||||
del abspath[0]
|
||||
if not curdir and not abspath:
|
||||
return '.'
|
||||
relpath = os.path.join(*[os.pardir for i in curdir] + abspath)
|
||||
if len(path) > len(relpath):
|
||||
return relpath
|
||||
return path
|
||||
|
||||
def isObject(path):
|
||||
'''Returns whether the given path points to an object file, that is,
|
||||
ends with OBJ_SUFFIX or .i_o'''
|
||||
return os.path.splitext(path)[1] in [conf.OBJ_SUFFIX, '.i_o']
|
||||
|
||||
def isDynamicLib(path):
|
||||
'''Returns whether the given path points to a dynamic library, that is,
|
||||
ends with DLL_SUFFIX.'''
|
||||
# On mac, the xul library is named XUL, instead of libxul.dylib. Assume any
|
||||
# file by that name is a dynamic library.
|
||||
return os.path.splitext(path)[1] == conf.DLL_SUFFIX or os.path.basename(path) == 'XUL'
|
||||
|
||||
class LibDescriptor(dict):
|
||||
KEYS = ['OBJS', 'LIBS']
|
||||
|
||||
def __init__(self, content=None):
|
||||
'''Creates an instance of a lib descriptor, initialized with contents
|
||||
from a list of strings when given. This is intended for use with
|
||||
file.readlines()'''
|
||||
if isinstance(content, list) and all([isinstance(item, str) for item in content]):
|
||||
pass
|
||||
elif content is not None:
|
||||
raise TypeError("LibDescriptor() arg 1 must be None or a list of strings")
|
||||
super(LibDescriptor, self).__init__()
|
||||
for key in self.KEYS:
|
||||
self[key] = []
|
||||
if not content:
|
||||
return
|
||||
for key, value in [(s.strip() for s in item.split('=', 2)) for item in content if item.find('=') >= 0]:
|
||||
if key in self.KEYS:
|
||||
self[key] = value.split()
|
||||
|
||||
def __str__(self):
|
||||
'''Serializes the lib descriptor'''
|
||||
return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
|
||||
|
||||
class ExpandArgs(list):
|
||||
def __init__(self, args):
|
||||
'''Creates a clone of the |args| list and performs file expansion on
|
||||
each item it contains'''
|
||||
super(ExpandArgs, self).__init__()
|
||||
self._descs = set()
|
||||
for arg in args:
|
||||
self += self._expand(arg)
|
||||
|
||||
def _expand(self, arg):
|
||||
'''Internal function doing the actual work'''
|
||||
(root, ext) = os.path.splitext(arg)
|
||||
if ext != conf.LIB_SUFFIX or not os.path.basename(root).startswith(conf.LIB_PREFIX):
|
||||
return [relativize(arg)]
|
||||
if conf.LIB_PREFIX:
|
||||
dll = root.replace(conf.LIB_PREFIX, conf.DLL_PREFIX, 1) + conf.DLL_SUFFIX
|
||||
else:
|
||||
dll = root + conf.DLL_SUFFIX
|
||||
if os.path.exists(dll):
|
||||
if conf.IMPORT_LIB_SUFFIX:
|
||||
return [relativize(root + conf.IMPORT_LIB_SUFFIX)]
|
||||
else:
|
||||
return [relativize(dll)]
|
||||
return self._expand_desc(arg)
|
||||
|
||||
def _expand_desc(self, arg):
|
||||
'''Internal function taking care of lib descriptor expansion only'''
|
||||
desc = os.path.abspath(arg + conf.LIBS_DESC_SUFFIX)
|
||||
if os.path.exists(desc):
|
||||
if desc in self._descs:
|
||||
return []
|
||||
self._descs.add(desc)
|
||||
with open(desc, 'r') as f:
|
||||
desc = LibDescriptor(f.readlines())
|
||||
objs = [relativize(o) for o in desc['OBJS']]
|
||||
for lib in desc['LIBS']:
|
||||
objs += self._expand(lib)
|
||||
return objs
|
||||
return [relativize(arg)]
|
||||
|
||||
if __name__ == '__main__':
|
||||
print " ".join(ExpandArgs(sys.argv[1:]))
|
|
@ -1,29 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from buildconfig import substs
|
||||
|
||||
def normalize_suffix(suffix):
|
||||
'''Returns a normalized suffix, i.e. ensures it starts with a dot and
|
||||
doesn't starts or ends with whitespace characters'''
|
||||
value = suffix.strip()
|
||||
if len(value) and not value.startswith('.'):
|
||||
value = '.' + value
|
||||
return value
|
||||
|
||||
# Variables from the build system
|
||||
AR = substs['AR']
|
||||
AR_EXTRACT = substs['AR_EXTRACT'].replace('$(AR)', AR)
|
||||
DLL_PREFIX = substs['DLL_PREFIX']
|
||||
LIB_PREFIX = substs['LIB_PREFIX']
|
||||
RUST_LIB_PREFIX = substs['RUST_LIB_PREFIX']
|
||||
OBJ_SUFFIX = normalize_suffix(substs['OBJ_SUFFIX'])
|
||||
LIB_SUFFIX = normalize_suffix(substs['LIB_SUFFIX'])
|
||||
RUST_LIB_SUFFIX = normalize_suffix(substs['RUST_LIB_SUFFIX'])
|
||||
DLL_SUFFIX = normalize_suffix(substs['DLL_SUFFIX'])
|
||||
IMPORT_LIB_SUFFIX = normalize_suffix(substs['IMPORT_LIB_SUFFIX'])
|
||||
LIBS_DESC_SUFFIX = normalize_suffix(substs['LIBS_DESC_SUFFIX'])
|
||||
EXPAND_LIBS_LIST_STYLE = substs['EXPAND_LIBS_LIST_STYLE']
|
||||
EXPAND_LIBS_ORDER_STYLE = substs['EXPAND_LIBS_ORDER_STYLE']
|
||||
LD_PRINT_ICF_SECTIONS = substs['LD_PRINT_ICF_SECTIONS']
|
|
@ -1,354 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
'''expandlibs-exec.py applies expandlibs rules, and some more (see below) to
|
||||
a given command line, and executes that command line with the expanded
|
||||
arguments.
|
||||
|
||||
With the --extract argument (useful for e.g. $(AR)), it extracts object files
|
||||
from static libraries (or use those listed in library descriptors directly).
|
||||
|
||||
With the --uselist argument (useful for e.g. $(CC)), it replaces all object
|
||||
files with a list file. This can be used to avoid limitations in the length
|
||||
of a command line. The kind of list file format used depends on the
|
||||
EXPAND_LIBS_LIST_STYLE variable: 'list' for MSVC style lists (@file.list)
|
||||
or 'linkerscript' for GNU ld linker scripts.
|
||||
See https://bugzilla.mozilla.org/show_bug.cgi?id=584474#c59 for more details.
|
||||
|
||||
With the --symbol-order argument, followed by a file name, it will add the
|
||||
relevant linker options to change the order in which the linker puts the
|
||||
symbols appear in the resulting binary. Only works for ELF targets.
|
||||
'''
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
import os
|
||||
from expandlibs import (
|
||||
ExpandArgs,
|
||||
relativize,
|
||||
isDynamicLib,
|
||||
isObject,
|
||||
)
|
||||
import expandlibs_config as conf
|
||||
from optparse import OptionParser
|
||||
import subprocess
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import re
|
||||
from mozbuild.makeutil import Makefile
|
||||
|
||||
# The are the insert points for a GNU ld linker script, assuming a more
|
||||
# or less "standard" default linker script. This is not a dict because
|
||||
# order is important.
|
||||
SECTION_INSERT_BEFORE = [
|
||||
('.text', '.fini'),
|
||||
('.rodata', '.rodata1'),
|
||||
('.data.rel.ro', '.dynamic'),
|
||||
('.data', '.data1'),
|
||||
]
|
||||
|
||||
class ExpandArgsMore(ExpandArgs):
    ''' Meant to be used as 'with ExpandArgsMore(args) as ...: '''
    def __enter__(self):
        # Temporary files/directories created while massaging the command
        # line; removed on context exit.
        self.tmp = []
        return self

    def __exit__(self, type, value, tb):
        '''Automatically remove temporary files'''
        for tmp in self.tmp:
            if os.path.isdir(tmp):
                # ignore_errors=True: best-effort cleanup of extraction dirs.
                shutil.rmtree(tmp, True)
            else:
                os.remove(tmp)

    def extract(self):
        '''Replaces, in-place, static library names on the command line with
        the object files they contain.'''
        self[0:] = self._extract(self)

    def _extract(self, args):
        '''When a static library name is found, either extract its contents
        in a temporary directory or use the information found in the
        corresponding lib descriptor.
        '''
        ar_extract = conf.AR_EXTRACT.split()
        newlist = []

        def lookup(base, f):
            # Returns the path of file f under base (searched recursively),
            # or None when not found.
            for root, dirs, files in os.walk(base):
                if f in files:
                    return os.path.join(root, f)

        for arg in args:
            if os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
                if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
                    # A descriptor tells us what the library contains;
                    # recurse into that content instead of extracting.
                    newlist += self._extract(self._expand_desc(arg))
                    continue
                elif os.path.exists(arg) and (len(ar_extract) or conf.AR == 'lib'):
                    tmp = tempfile.mkdtemp(dir=os.curdir)
                    self.tmp.append(tmp)
                    if conf.AR == 'lib':
                        out = subprocess.check_output([conf.AR, '-NOLOGO', '-LIST', arg])
                        files = out.splitlines()
                        # If lib -list returns a list full of dlls, it's an
                        # import lib.
                        if all(isDynamicLib(f) for f in files):
                            newlist += [arg]
                            continue
                        for f in files:
                            subprocess.call([conf.AR, '-NOLOGO', '-EXTRACT:%s' % f, os.path.abspath(arg)], cwd=tmp)
                    else:
                        subprocess.call(ar_extract + [os.path.abspath(arg)], cwd=tmp)
                    objs = []
                    basedir = os.path.dirname(arg)
                    for root, dirs, files in os.walk(tmp):
                        for f in files:
                            if isObject(f):
                                # If the file extracted from the library also
                                # exists in the directory containing the
                                # library, or one of its subdirectories, use
                                # that instead.
                                maybe_obj = lookup(os.path.join(basedir, os.path.relpath(root, tmp)), f)
                                if maybe_obj:
                                    objs.append(relativize(maybe_obj))
                                else:
                                    objs.append(relativize(os.path.join(root, f)))
                    newlist += sorted(objs)
                    continue
            newlist += [arg]
        return newlist

    def makelist(self):
        '''Replaces object file names with a temporary list file, using a
        list format depending on the EXPAND_LIBS_LIST_STYLE variable
        '''
        objs = [o for o in self if isObject(o)]
        if not len(objs):
            return
        fd, tmp = tempfile.mkstemp(suffix=".list", dir=os.curdir)
        if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
            content = ['INPUT("%s")\n' % obj for obj in objs]
            ref = tmp
        elif conf.EXPAND_LIBS_LIST_STYLE == "filelist":
            content = ["%s\n" % obj for obj in objs]
            ref = "-Wl,-filelist," + tmp
        elif conf.EXPAND_LIBS_LIST_STYLE == "list":
            content = ["%s\n" % obj for obj in objs]
            ref = "@" + tmp
        else:
            # Unknown list style: don't use a list file at all.
            os.close(fd)
            os.remove(tmp)
            return
        self.tmp.append(tmp)
        f = os.fdopen(fd, "w")
        f.writelines(content)
        f.close()
        # Insert the list file reference where the first object was, and
        # drop all the objects from the rest of the command line.
        idx = self.index(objs[0])
        newlist = self[0:idx] + [ref] + [os.path.normpath(item) for item in self[idx:] if item not in objs]
        self[0:] = newlist

    def _getFoldedSections(self):
        '''Returns a dict about folded sections.
        When section A and B are folded into section C, the dict contains:
        { 'A': 'C',
          'B': 'C',
          'C': ['A', 'B'] }'''
        if not conf.LD_PRINT_ICF_SECTIONS:
            return {}

        proc = subprocess.Popen(self + [conf.LD_PRINT_ICF_SECTIONS], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        result = {}
        # gold's --print-icf-sections output looks like the following:
        # ld: ICF folding section '.section' in file 'file.o'into '.section' in file 'file.o'
        # In terms of words, chances are this will change in the future,
        # especially considering "into" is misplaced. Splitting on quotes
        # seems safer.
        for l in stderr.split('\n'):
            quoted = l.split("'")
            if len(quoted) > 5 and quoted[1] != quoted[5]:
                # Map the folded-away section to the *name* (a string) of the
                # section it was folded into; _getOrderedSections relies on
                # this being a string (its isinstance(..., str) check), and
                # the docstring above documents it that way. Storing a list
                # here (as a previous revision did) breaks that contract.
                result[quoted[1]] = quoted[5]
                if quoted[5] in result:
                    result[quoted[5]].append(quoted[1])
                else:
                    result[quoted[5]] = [quoted[1]]
        return result

    def _getOrderedSections(self, ordered_symbols):
        '''Given an ordered list of symbols, returns the corresponding list
        of sections following the order.'''
        if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
        finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
        folded = self._getFoldedSections()
        sections = set()
        ordered_sections = []
        for symbol in ordered_symbols:
            symbol_sections = finder.getSections(symbol)
            all_symbol_sections = []
            for section in symbol_sections:
                if section in folded:
                    if isinstance(folded[section], str):
                        # This section was folded into another one; order the
                        # surviving section instead.
                        section = folded[section]
                    all_symbol_sections.append(section)
                    # Also pull in everything that was folded into it.
                    all_symbol_sections.extend(folded[section])
                else:
                    all_symbol_sections.append(section)
            for section in all_symbol_sections:
                if not section in sections:
                    ordered_sections.append(section)
                    sections.add(section)
        return ordered_sections

    def orderSymbols(self, order):
        '''Given a file containing a list of symbols, adds the appropriate
        argument to make the linker put the symbols in that order.'''
        with open(order) as file:
            sections = self._getOrderedSections([l.strip() for l in file.readlines() if l.strip()])
        split_sections = {}
        linked_sections = [s[0] for s in SECTION_INSERT_BEFORE]
        # Group ordered sections under the known output section they
        # belong to (by prefix).
        for s in sections:
            for linked_section in linked_sections:
                if s.startswith(linked_section):
                    if linked_section in split_sections:
                        split_sections[linked_section].append(s)
                    else:
                        split_sections[linked_section] = [s]
                    break
        content = []
        # Order is important
        linked_sections = [s for s in linked_sections if s in split_sections]

        if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
            option = '-Wl,--section-ordering-file,%s'
            content = sections
            for linked_section in linked_sections:
                content.extend(split_sections[linked_section])
                content.append('%s.*' % linked_section)
                content.append(linked_section)

        elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
            option = '-Wl,-T,%s'
            section_insert_before = dict(SECTION_INSERT_BEFORE)
            for linked_section in linked_sections:
                content.append('SECTIONS {')
                content.append('  %s : {' % linked_section)
                content.extend('    *(%s)' % s for s in split_sections[linked_section])
                content.append('  }')
                content.append('}')
                content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
        else:
            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)

        fd, tmp = tempfile.mkstemp(dir=os.curdir)
        f = os.fdopen(fd, "w")
        f.write('\n'.join(content)+'\n')
        f.close()
        self.tmp.append(tmp)
        self.append(option % tmp)
|
||||
|
||||
class SectionFinder(object):
    '''Instances of this class allow to map symbol names to sections in
    object files.'''

    def __init__(self, objs):
        '''Creates an instance, given a list of object files.'''
        if conf.EXPAND_LIBS_ORDER_STYLE not in ('linkerscript', 'section-ordering-file'):
            raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
        self.mapping = {}
        for obj in objs:
            if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
                raise Exception('%s is not an object nor a static library' % obj)
            for symbol, section in SectionFinder._getSymbols(obj):
                key = SectionFinder._normalize(symbol)
                known_sections = self.mapping.setdefault(key, [])
                if section not in known_sections:
                    known_sections.append(section)

    def getSections(self, symbol):
        '''Given a symbol, returns a list of sections containing it or the
        corresponding thunks. When the given symbol is a thunk, returns the
        list of sections containing its corresponding normal symbol and the
        other thunks for that symbol.'''
        return self.mapping.get(SectionFinder._normalize(symbol), [])

    @staticmethod
    def _normalize(symbol):
        '''For normal symbols, return the given symbol. For thunks, return
        the corresponding normal symbol.'''
        thunk = re.match('^_ZThn[0-9]+_', symbol)
        if thunk:
            # Strip the thunk marker, keeping the mangled '_Z...' name.
            return '_Z' + symbol[thunk.end():]
        return symbol

    @staticmethod
    def _getSymbols(obj):
        '''Returns a list of (symbol, section) contained in the given object
        file.'''
        proc = subprocess.Popen(['objdump', '-t', obj], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        syms = []
        for line in stdout.splitlines():
            # Each line has the following format:
            # <addr> [lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<length> <symbol>
            parts = line.split(' ', 1)
            # This gives us ["<addr>", "[lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<length> <symbol>"]
            # We only need to consider cases where "<section>\t<length> <symbol>" is present,
            # and where the [FfO] flag is either F (function) or O (object).
            if len(parts) > 1 and len(parts[1]) > 6 and parts[1][6] in ('O', 'F'):
                fields = parts[1][8:].split()
                # That gives us ["<section>", "<length>", "<symbol>"]
                syms.append((fields[-1], fields[0]))
        return syms
|
||||
|
||||
def print_command(out, args):
    '''Prints the command being executed, followed by the content of every
    temporary list file referenced by it, to the given output stream.

    args is assumed to be an ExpandArgsMore (it must have a .tmp list of
    temporary file names) -- NOTE(review): confirm against callers.
    '''
    print >>out, "Executing: " + " ".join(args)
    for tmp in [f for f in args.tmp if os.path.isfile(f)]:
        print >>out, tmp + ":"
        with open(tmp) as file:
            # Indent the file content so it reads as a sub-item of the
            # "tmp:" header above.
            print >>out, "".join(["    " + l for l in file.readlines()])
    out.flush()
|
||||
|
||||
def main(args, proc_callback=None):
    '''Expands the given command-line arguments (library descriptors,
    extraction, list files, symbol ordering, depending on options), runs the
    resulting command, forwards its output to stderr, and returns its exit
    status (0 on success).

    proc_callback, when given, is called with the subprocess.Popen instance
    right after the command is launched.
    '''
    parser = OptionParser()
    parser.add_option("--extract", action="store_true", dest="extract",
        help="when a library has no descriptor file, extract it first, when possible")
    parser.add_option("--uselist", action="store_true", dest="uselist",
        help="use a list file for objects when executing a command")
    parser.add_option("--verbose", action="store_true", dest="verbose",
        help="display executed command and temporary files content")
    parser.add_option("--symbol-order", dest="symbol_order", metavar="FILE",
        help="use the given list of symbols to order symbols in the resulting binary when using with a linker")

    (options, args) = parser.parse_args(args)

    # The with block ensures the temporary files created while expanding
    # are removed once the wrapped command has run.
    with ExpandArgsMore(args) as args:
        if options.extract:
            args.extract()
        if options.symbol_order:
            args.orderSymbols(options.symbol_order)
        if options.uselist:
            args.makelist()

        if options.verbose:
            print_command(sys.stderr, args)
        try:
            # Capture stdout and fold stderr into it, so the child's output
            # can be forwarded to our stderr after completion.
            proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
            if proc_callback:
                proc_callback(proc)
        except Exception, e:
            print >>sys.stderr, 'error: Launching', args, ':', e
            raise e
        (stdout, stderr) = proc.communicate()
        # On failure, print the command even when not verbose (when verbose,
        # it was already printed above).
        if proc.returncode and not options.verbose:
            print_command(sys.stderr, args)
        sys.stderr.write(stdout)
        sys.stderr.flush()
        if proc.returncode:
            return proc.returncode
    return 0
|
||||
|
||||
if __name__ == '__main__':
    # Command-line entry point: expand and execute the given command.
    exit(main(sys.argv[1:]))
|
|
@ -1,41 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
'''Given a list of object files and library names, prints a library
|
||||
descriptor to standard output'''
|
||||
|
||||
from __future__ import with_statement
|
||||
import sys
|
||||
import os
|
||||
import expandlibs_config as conf
|
||||
from expandlibs import LibDescriptor, isObject, ensureParentDir
|
||||
from optparse import OptionParser
|
||||
|
||||
def generate(args):
    '''Builds and returns a LibDescriptor for the given list of object file
    and static library names, raising when a referenced file is missing.'''
    desc = LibDescriptor()
    for arg in args:
        if isObject(arg):
            if not os.path.exists(arg):
                raise Exception("File not found: %s" % arg)
            desc['OBJS'].append(os.path.abspath(arg))
        elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
            # A static library is acceptable when either the library itself
            # or its descriptor exists.
            if not (os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX)):
                raise Exception("File not found: %s" % arg)
            desc['LIBS'].append(os.path.abspath(arg))
    return desc
|
||||
|
||||
if __name__ == '__main__':
    # Command-line entry point: write the descriptor for the given files to
    # the file named by -o.
    parser = OptionParser()
    parser.add_option("-o", dest="output", metavar="FILE",
        help="send output to the given file")

    (options, args) = parser.parse_args()
    if not options.output:
        raise Exception("Missing option: -o")

    # Make sure the destination directory exists before writing.
    ensureParentDir(options.output)
    with open(options.output, 'w') as outfile:
        print >>outfile, generate(args)
|
|
@ -100,28 +100,12 @@ endif # ENABLE_TESTS
|
|||
|
||||
ifndef LIBRARY
|
||||
ifdef REAL_LIBRARY
|
||||
# Don't build actual static library if a shared library is also built
|
||||
ifdef FORCE_SHARED_LIB
|
||||
# ... except when we really want one
|
||||
ifdef NO_EXPAND_LIBS
|
||||
# Only build actual library if it is requested.
|
||||
LIBRARY := $(REAL_LIBRARY)
|
||||
else
|
||||
LIBRARY := $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
|
||||
endif
|
||||
else
|
||||
# Only build actual library if it is installed in DIST/lib
|
||||
ifeq (,$(DIST_INSTALL)$(NO_EXPAND_LIBS))
|
||||
LIBRARY := $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
|
||||
else
|
||||
ifdef NO_EXPAND_LIBS
|
||||
LIBRARY := $(REAL_LIBRARY)
|
||||
else
|
||||
LIBRARY := $(REAL_LIBRARY) $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif # REAL_LIBRARY
|
||||
endif # LIBRARY
|
||||
|
||||
ifndef HOST_LIBRARY
|
||||
ifdef HOST_LIBRARY_NAME
|
||||
|
@ -446,9 +430,6 @@ everything::
|
|||
$(MAKE) clean
|
||||
$(MAKE) all
|
||||
|
||||
STATIC_LIB_DEP = $(if $(wildcard $(1).$(LIBS_DESC_SUFFIX)),$(1).$(LIBS_DESC_SUFFIX),$(1))
|
||||
STATIC_LIBS_DEPS := $(foreach l,$(STATIC_LIBS),$(call STATIC_LIB_DEP,$(l)))
|
||||
|
||||
# Dependencies which, if modified, should cause everything to rebuild
|
||||
GLOBAL_DEPS += Makefile $(addprefix $(DEPTH)/config/,$(INCLUDED_AUTOCONF_MK)) $(MOZILLA_DIR)/config/config.mk
|
||||
|
||||
|
@ -462,6 +443,12 @@ host:: $(HOST_LIBRARY) $(HOST_PROGRAM) $(HOST_SIMPLE_PROGRAMS) $(HOST_RUST_PROGR
|
|||
|
||||
target:: $(LIBRARY) $(SHARED_LIBRARY) $(PROGRAM) $(SIMPLE_PROGRAMS) $(RUST_LIBRARY_FILE) $(RUST_PROGRAMS)
|
||||
|
||||
ifndef LIBRARY
|
||||
ifdef OBJS
|
||||
target:: $(OBJS)
|
||||
endif
|
||||
endif
|
||||
|
||||
syms::
|
||||
|
||||
include $(MOZILLA_DIR)/config/makefiles/target_binaries.mk
|
||||
|
@ -545,18 +532,18 @@ alltags:
|
|||
find $(topsrcdir) -name dist -prune -o \( -name '*.[hc]' -o -name '*.cp' -o -name '*.cpp' -o -name '*.idl' \) -print | $(TAG_PROGRAM)
|
||||
|
||||
define EXPAND_CC_OR_CXX
|
||||
$(if $(PROG_IS_C_ONLY_$(1)),$(EXPAND_CC),$(EXPAND_CCC))
|
||||
$(if $(PROG_IS_C_ONLY_$(1)),$(CC),$(CCC))
|
||||
endef
|
||||
|
||||
#
|
||||
# PROGRAM = Foo
|
||||
# creates OBJS, links with LIBS to create Foo
|
||||
#
|
||||
$(PROGRAM): $(PROGOBJS) $(STATIC_LIBS_DEPS) $(EXTRA_DEPS) $(RESFILE) $(GLOBAL_DEPS) $(call mkdir_deps,$(FINAL_TARGET))
|
||||
$(PROGRAM): $(PROGOBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(RESFILE) $(GLOBAL_DEPS) $(call mkdir_deps,$(FINAL_TARGET))
|
||||
$(REPORT_BUILD)
|
||||
@$(RM) $@.manifest
|
||||
ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
|
||||
$(EXPAND_LINK) -NOLOGO -OUT:$@ -PDB:$(LINK_PDBFILE) -IMPLIB:$(basename $(@F)).lib $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $(PROGOBJS) $(RESFILE) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(LINKER) -NOLOGO -OUT:$@ -PDB:$(LINK_PDBFILE) -IMPLIB:$(basename $(@F)).lib $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $($(notdir $@)_$(OBJS_VAR_SUFFIX)) $(RESFILE) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
ifdef MSMANIFEST_TOOL
|
||||
@if test -f $@.manifest; then \
|
||||
if test -f '$(srcdir)/$(notdir $@).manifest'; then \
|
||||
|
@ -577,7 +564,7 @@ ifdef MOZ_PROFILE_GENERATE
|
|||
touch -t `date +%Y%m%d%H%M.%S -d 'now+5seconds'` pgo.relink
|
||||
endif
|
||||
else # !WINNT || GNU_CC
|
||||
$(call EXPAND_CC_OR_CXX,$@) -o $@ $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $(PROGOBJS) $(RESFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call EXPAND_CC_OR_CXX,$@) -o $@ $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $($(notdir $@)_$(OBJS_VAR_SUFFIX)) $(RESFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call CHECK_BINARY,$@)
|
||||
endif # WINNT && !GNU_CC
|
||||
|
||||
|
@ -591,7 +578,7 @@ endif
|
|||
$(HOST_PROGRAM): $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(REPORT_BUILD)
|
||||
ifeq (_WINNT,$(GNU_CC)_$(HOST_OS_ARCH))
|
||||
$(EXPAND_LIBS_EXEC) -- $(LINKER) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $(HOST_OBJS) $(WIN32_EXE_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(LINKER) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $(HOST_OBJS) $(WIN32_EXE_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
ifdef MSMANIFEST_TOOL
|
||||
@if test -f $@.manifest; then \
|
||||
if test -f '$(srcdir)/$@.manifest'; then \
|
||||
|
@ -608,9 +595,9 @@ ifdef MSMANIFEST_TOOL
|
|||
endif # MSVC with manifest tool
|
||||
else
|
||||
ifeq ($(HOST_CPP_PROG_LINK),1)
|
||||
$(EXPAND_LIBS_EXEC) -- $(HOST_CXX) -o $@ $(HOST_CXX_LDFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(HOST_CXX) -o $@ $(HOST_CXX_LDFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
else
|
||||
$(EXPAND_LIBS_EXEC) -- $(HOST_CC) -o $@ $(HOST_C_LDFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(HOST_CC) -o $@ $(HOST_C_LDFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
endif # HOST_CPP_PROG_LINK
|
||||
endif
|
||||
ifndef CROSS_COMPILE
|
||||
|
@ -625,10 +612,10 @@ endif
|
|||
# SIMPLE_PROGRAMS = Foo Bar
|
||||
# creates Foo.o Bar.o, links with LIBS to create Foo, Bar.
|
||||
#
|
||||
$(SIMPLE_PROGRAMS): %$(BIN_SUFFIX): %.$(OBJ_SUFFIX) $(STATIC_LIBS_DEPS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(SIMPLE_PROGRAMS): %$(BIN_SUFFIX): %.$(OBJ_SUFFIX) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(REPORT_BUILD)
|
||||
ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
|
||||
$(EXPAND_LINK) -nologo -out:$@ -pdb:$(LINK_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(LINKER) -nologo -out:$@ -pdb:$(LINK_PDBFILE) $($@_$(OBJS_VAR_SUFFIX)) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
ifdef MSMANIFEST_TOOL
|
||||
@if test -f $@.manifest; then \
|
||||
$(MT) -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \
|
||||
|
@ -636,7 +623,7 @@ ifdef MSMANIFEST_TOOL
|
|||
fi
|
||||
endif # MSVC with manifest tool
|
||||
else
|
||||
$(call EXPAND_CC_OR_CXX,$@) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) -o $@ $< $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call EXPAND_CC_OR_CXX,$@) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) -o $@ $($@_$(OBJS_VAR_SUFFIX)) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
|
||||
$(call CHECK_BINARY,$@)
|
||||
endif # WINNT && !GNU_CC
|
||||
|
||||
|
@ -650,29 +637,22 @@ endif
|
|||
$(HOST_SIMPLE_PROGRAMS): host_%$(HOST_BIN_SUFFIX): host_%.$(OBJ_SUFFIX) $(HOST_LIBS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(REPORT_BUILD)
|
||||
ifeq (WINNT_,$(HOST_OS_ARCH)_$(GNU_CC))
|
||||
$(EXPAND_LIBS_EXEC) -- $(LINKER) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(LINKER) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
else
|
||||
ifneq (,$(HOST_CPPSRCS)$(USE_HOST_CXX))
|
||||
$(EXPAND_LIBS_EXEC) -- $(HOST_CXX) $(HOST_OUTOPTION)$@ $(HOST_CXX_LDFLAGS) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(HOST_CXX) $(HOST_OUTOPTION)$@ $(HOST_CXX_LDFLAGS) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
else
|
||||
$(EXPAND_LIBS_EXEC) -- $(HOST_CC) $(HOST_OUTOPTION)$@ $(HOST_C_LDFLAGS) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
$(HOST_CC) $(HOST_OUTOPTION)$@ $(HOST_C_LDFLAGS) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
|
||||
endif
|
||||
endif
|
||||
ifndef CROSS_COMPILE
|
||||
$(call CHECK_STDCXX,$@)
|
||||
endif
|
||||
|
||||
$(filter %.$(LIB_SUFFIX),$(LIBRARY)): $(OBJS) $(STATIC_LIBS_DEPS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(LIBRARY): $(OBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(REPORT_BUILD)
|
||||
# Always remove both library and library descriptor
|
||||
$(RM) $(REAL_LIBRARY) $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
|
||||
$(EXPAND_AR) $(AR_FLAGS) $(OBJS) $(STATIC_LIBS)
|
||||
|
||||
$(filter-out %.$(LIB_SUFFIX),$(LIBRARY)): $(filter %.$(LIB_SUFFIX),$(LIBRARY)) $(OBJS) $(STATIC_LIBS_DEPS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
# When we only build a library descriptor, blow out any existing library
|
||||
$(REPORT_BUILD)
|
||||
$(if $(filter %.$(LIB_SUFFIX),$(LIBRARY)),,$(RM) $(REAL_LIBRARY))
|
||||
$(EXPAND_LIBS_GEN) -o $@ $(OBJS) $(STATIC_LIBS)
|
||||
$(RM) $(REAL_LIBRARY)
|
||||
$(AR) $(AR_FLAGS) $(OBJS) $($@_$(OBJS_VAR_SUFFIX))
|
||||
|
||||
ifeq ($(OS_ARCH),WINNT)
|
||||
# Import libraries are created by the rules creating shared libraries.
|
||||
|
@ -688,19 +668,19 @@ endif
|
|||
$(HOST_LIBRARY): $(HOST_OBJS) Makefile
|
||||
$(REPORT_BUILD)
|
||||
$(RM) $@
|
||||
$(EXPAND_LIBS_EXEC) --extract -- $(HOST_AR) $(HOST_AR_FLAGS) $(HOST_OBJS)
|
||||
$(HOST_AR) $(HOST_AR_FLAGS) $(HOST_OBJS)
|
||||
|
||||
# On Darwin (Mac OS X), dwarf2 debugging uses debug info left in .o files,
|
||||
# so instead of deleting .o files after repacking them into a dylib, we make
|
||||
# symlinks back to the originals. The symlinks are a no-op for stabs debugging,
|
||||
# so no need to conditionalize on OS version or debugging format.
|
||||
|
||||
$(SHARED_LIBRARY): $(OBJS) $(RESFILE) $(RUST_STATIC_LIB_FOR_SHARED_LIB) $(STATIC_LIBS_DEPS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(SHARED_LIBRARY): $(OBJS) $(RESFILE) $(RUST_STATIC_LIB_FOR_SHARED_LIB) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
|
||||
$(REPORT_BUILD)
|
||||
ifndef INCREMENTAL_LINKER
|
||||
$(RM) $@
|
||||
endif
|
||||
$(EXPAND_MKSHLIB) $(OBJS) $(RESFILE) $(LDFLAGS) $(STATIC_LIBS) $(RUST_STATIC_LIB_FOR_SHARED_LIB) $(SHARED_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS)
|
||||
$(MKSHLIB) $($@_$(OBJS_VAR_SUFFIX)) $(RESFILE) $(LDFLAGS) $(STATIC_LIBS) $(RUST_STATIC_LIB_FOR_SHARED_LIB) $(SHARED_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS)
|
||||
$(call CHECK_BINARY,$@)
|
||||
|
||||
ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
[test_mozbuild_reading.py]
|
||||
[unit-expandlibs.py]
|
||||
[unit-mozunit.py]
|
||||
[unit-nsinstall.py]
|
||||
[unit-printprereleasesuffix.py]
|
||||
|
|
|
@ -1,431 +0,0 @@
|
|||
import subprocess
|
||||
import unittest
|
||||
import sys
|
||||
import os
|
||||
import imp
|
||||
from tempfile import mkdtemp
|
||||
from shutil import rmtree
|
||||
import mozunit
|
||||
|
||||
from UserString import UserString
|
||||
# Create a controlled configuration for use by expandlibs
# MSVC-style configuration (lib.exe archiver, @file list style).
config_win = {
    'AR': 'lib',
    'AR_EXTRACT': '',
    'DLL_PREFIX': '',
    'LIB_PREFIX': '',
    'OBJ_SUFFIX': '.obj',
    'LIB_SUFFIX': '.lib',
    'DLL_SUFFIX': '.dll',
    'IMPORT_LIB_SUFFIX': '.lib',
    'LIBS_DESC_SUFFIX': '.desc',
    'EXPAND_LIBS_LIST_STYLE': 'list',
}
# GNU-style configuration (ar archiver, linkerscript list style).
config_unix = {
    'AR': 'ar',
    'AR_EXTRACT': 'ar -x',
    'DLL_PREFIX': 'lib',
    'LIB_PREFIX': 'lib',
    'OBJ_SUFFIX': '.o',
    'LIB_SUFFIX': '.a',
    'DLL_SUFFIX': '.so',
    'IMPORT_LIB_SUFFIX': '',
    'LIBS_DESC_SUFFIX': '.desc',
    'EXPAND_LIBS_LIST_STYLE': 'linkerscript',
}

# Install an empty, synthesized expandlibs_config module; wrap_method()
# populates it with one of the dicts above before each replicated test runs.
# This must happen BEFORE the imports below, which resolve expandlibs_config.
config = sys.modules['expandlibs_config'] = imp.new_module('expandlibs_config')

from expandlibs import LibDescriptor, ExpandArgs, relativize
from expandlibs_gen import generate
from expandlibs_exec import ExpandArgsMore, SectionFinder
|
||||
|
||||
def Lib(name):
    '''Returns the static library file name for the active configuration.'''
    return ''.join([config.LIB_PREFIX, name, config.LIB_SUFFIX])

def Obj(name):
    '''Returns the object file name for the active configuration.'''
    return ''.join([name, config.OBJ_SUFFIX])

def Dll(name):
    '''Returns the dynamic library file name for the active configuration.'''
    return ''.join([config.DLL_PREFIX, name, config.DLL_SUFFIX])

def ImportLib(name):
    '''Returns the import library file name, falling back to the dynamic
    library name when the configuration has no import library suffix.'''
    if not config.IMPORT_LIB_SUFFIX:
        return Dll(name)
    return ''.join([config.LIB_PREFIX, name, config.IMPORT_LIB_SUFFIX])
|
||||
|
||||
class TestRelativize(unittest.TestCase):
    def test_relativize(self):
        '''Test relativize()'''
        # Stub out os.path.exists so relativize believes every path exists.
        os_path_exists = os.path.exists
        def exists(path):
            return True
        os.path.exists = exists
        try:
            self.assertEqual(relativize(os.path.abspath(os.curdir)), os.curdir)
            self.assertEqual(relativize(os.path.abspath(os.pardir)), os.pardir)
            self.assertEqual(relativize(os.path.join(os.curdir, 'a')), 'a')
            self.assertEqual(relativize(os.path.join(os.path.abspath(os.curdir), 'a')), 'a')
            # relativize is expected to return the absolute path if it is shorter
            self.assertEqual(relativize(os.sep), os.sep)
        finally:
            # Restore the real os.path.exists. The previous code assigned
            # os.path.exists to itself here, leaving the always-True stub
            # monkeypatched in for every subsequent test.
            os.path.exists = os_path_exists
|
||||
|
||||
class TestLibDescriptor(unittest.TestCase):
    def test_serialize(self):
        '''Test LibDescriptor's serialization'''
        key0, key1 = LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]
        desc = LibDescriptor()
        desc[key0] = ['a', 'b']
        self.assertEqual(str(desc), "{0} = a b".format(key0))
        # Unknown keys are ignored by the serializer.
        desc['unsupported-key'] = ['a']
        self.assertEqual(str(desc), "{0} = a b".format(key0))
        desc[key1] = ['c', 'd', 'e']
        self.assertEqual(str(desc),
                         "{0} = a b\n{1} = c d e".format(key0, key1))
        # Keys with empty values are omitted from the serialized form.
        desc[key0] = []
        self.assertEqual(str(desc), "{0} = c d e".format(key1))

    def test_read(self):
        '''Test LibDescriptor's initialization'''
        key0, key1 = LibDescriptor.KEYS[0], LibDescriptor.KEYS[1]
        lines = [
            "# Comment",
            "{0} = a b".format(key1),
            "",            # Empty line
            "foo = bar",   # Should be discarded
            "{0} = c d e".format(key0),
        ]
        desc = LibDescriptor(lines)
        self.assertEqual(desc[key1], ['a', 'b'])
        self.assertEqual(desc[key0], ['c', 'd', 'e'])
        self.assertEqual(False, 'foo' in desc)
|
||||
|
||||
def wrap_method(conf, wrapped_method):
    '''Wrapper used to call a test with a specific configuration.

    Returns a method that copies the conf dict into the synthesized
    expandlibs_config module, runs self.init(), then the wrapped test, and
    always runs self.cleanup() afterwards.
    '''
    def _method(self):
        for key in conf:
            setattr(config, key, conf[key])
        self.init()
        # The previous 'except: raise' before the finally clause was a
        # no-op (a bare re-raise); try/finally is exactly equivalent.
        try:
            wrapped_method(self)
        finally:
            self.cleanup()
    return _method
|
||||
|
||||
class ReplicateTests(type):
    '''Replicates tests for unix and windows variants'''
    def __new__(cls, clsName, bases, dict):
        # For every test_* method, register a _unix and a _win variant bound
        # to the corresponding configuration dict, then drop the original so
        # only the replicated variants run.
        # NOTE: assumes every test_* method has a docstring (its __doc__ is
        # concatenated with the variant suffix).
        for name in [key for key in dict if key.startswith('test_')]:
            dict[name + '_unix'] = wrap_method(config_unix, dict[name])
            dict[name + '_unix'].__doc__ = dict[name].__doc__ + ' (unix)'
            dict[name + '_win'] = wrap_method(config_win, dict[name])
            dict[name + '_win'].__doc__ = dict[name].__doc__ + ' (win)'
            del dict[name]
        return type.__new__(cls, clsName, bases, dict)
|
||||
|
||||
class TestCaseWithTmpDir(unittest.TestCase):
    # Python 2 metaclass hook: replicates each test_* into _unix/_win
    # variants (see ReplicateTests).
    __metaclass__ = ReplicateTests

    def init(self):
        # Called by wrap_method before each replicated test; creates a
        # scratch directory under the current directory.
        self.tmpdir = os.path.abspath(mkdtemp(dir=os.curdir))

    def cleanup(self):
        # Called by wrap_method after each replicated test, even on failure.
        rmtree(self.tmpdir)

    def touch(self, files):
        # Creates each of the given files, empty.
        for f in files:
            open(f, 'w').close()

    def tmpfile(self, *args):
        # Returns a path joining the scratch directory with the given parts.
        return os.path.join(self.tmpdir, *args)
|
||||
|
||||
class TestExpandLibsGen(TestCaseWithTmpDir):
    def test_generate(self):
        '''Test library descriptor generation'''
        names = [Lib('a'), Obj('b'), Lib('c'), Obj('d'), Obj('e'), Lib('f')]
        files = [self.tmpfile(name) for name in names]
        # All inputs but the last exist as real files; the last library only
        # has a descriptor.
        self.touch(files[:-1])
        self.touch([files[-1] + config.LIBS_DESC_SUFFIX])

        desc = generate(files)
        self.assertEqual(desc['OBJS'], [self.tmpfile(Obj(n)) for n in 'bde'])
        self.assertEqual(desc['LIBS'], [self.tmpfile(Lib(n)) for n in 'acf'])

        # Missing files (with no descriptor either) must raise.
        self.assertRaises(Exception, generate, files + [self.tmpfile(Obj('z'))])
        self.assertRaises(Exception, generate, files + [self.tmpfile(Lib('y'))])
|
||||
|
||||
class TestExpandInit(TestCaseWithTmpDir):
    def init(self):
        ''' Initializes test environment for library expansion tests'''
        super(TestExpandInit, self).init()
        # Create 2 fake libraries, each containing 3 objects, and the second
        # including the first one and another library.
        os.mkdir(self.tmpfile('libx'))
        os.mkdir(self.tmpfile('liby'))
        self.libx_files = [self.tmpfile('libx', Obj(f)) for f in ['g', 'h', 'i']]
        self.liby_files = [self.tmpfile('liby', Obj(f)) for f in ['j', 'k', 'l']] + [self.tmpfile('liby', Lib('z'))]
        self.touch(self.libx_files + self.liby_files)
        # Write descriptors for libx and liby; liby's descriptor references
        # libx, so expanding liby pulls in libx's content too.
        with open(self.tmpfile('libx', Lib('x') + config.LIBS_DESC_SUFFIX), 'w') as f:
            f.write(str(generate(self.libx_files)))
        with open(self.tmpfile('liby', Lib('y') + config.LIBS_DESC_SUFFIX), 'w') as f:
            f.write(str(generate(self.liby_files + [self.tmpfile('libx', Lib('x'))])))

        # Create various objects and libraries
        self.arg_files = [self.tmpfile(f) for f in [Lib('a'), Obj('b'), Obj('c'), Lib('d'), Obj('e')]]
        # We always give library names (LIB_PREFIX/SUFFIX), even for
        # dynamic/import libraries
        self.files = self.arg_files + [self.tmpfile(ImportLib('f'))]
        self.arg_files += [self.tmpfile(Lib('f'))]
        self.touch(self.files)

    def assertRelEqual(self, args1, args2):
        # Compares args1 against the relativized form of every path in args2.
        self.assertEqual(args1, [relativize(a) for a in args2])
|
||||
|
||||
class TestExpandArgs(TestExpandInit):
    def test_expand(self):
        '''Test library expansion'''
        # Expanding arguments means libraries with a descriptor are expanded
        # with the descriptor content, and import libraries are used when
        # a library doesn't exist
        liby = self.tmpfile('liby', Lib('y'))
        expected = ['foo', '-bar'] + self.files + self.liby_files + self.libx_files
        args = ExpandArgs(['foo', '-bar'] + self.arg_files + [liby])
        self.assertRelEqual(args, expected)

        # When a library exists at the same time as a descriptor, we still use
        # the descriptor.
        self.touch([self.tmpfile('libx', Lib('x'))])
        args = ExpandArgs(['foo', '-bar'] + self.arg_files + [liby])
        self.assertRelEqual(args, expected)

        self.touch([liby])
        args = ExpandArgs(['foo', '-bar'] + self.arg_files + [liby])
        self.assertRelEqual(args, expected)
|
||||
|
||||
class TestExpandArgsMore(TestExpandInit):
|
||||
def test_makelist(self):
|
||||
'''Test grouping object files in lists'''
|
||||
# ExpandArgsMore does the same as ExpandArgs
|
||||
with ExpandArgsMore(['foo', '-bar'] + self.arg_files + [self.tmpfile('liby', Lib('y'))]) as args:
|
||||
self.assertRelEqual(args, ['foo', '-bar'] + self.files + self.liby_files + self.libx_files)
|
||||
|
||||
# But also has an extra method replacing object files with a list
|
||||
args.makelist()
|
||||
# self.files has objects at #1, #2, #4
|
||||
self.assertRelEqual(args[:3], ['foo', '-bar'] + self.files[:1])
|
||||
self.assertRelEqual(args[4:], [self.files[3]] + self.files[5:] + [self.tmpfile('liby', Lib('z'))])
|
||||
|
||||
# Check the list file content
|
||||
objs = [f for f in self.files + self.liby_files + self.libx_files if f.endswith(config.OBJ_SUFFIX)]
|
||||
if config.EXPAND_LIBS_LIST_STYLE == "linkerscript":
|
||||
self.assertNotEqual(args[3][0], '@')
|
||||
filename = args[3]
|
||||
content = ['INPUT("{0}")'.format(relativize(f)) for f in objs]
|
||||
with open(filename, 'r') as f:
|
||||
self.assertEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
|
||||
elif config.EXPAND_LIBS_LIST_STYLE == "list":
|
||||
self.assertEqual(args[3][0], '@')
|
||||
filename = args[3][1:]
|
||||
content = objs
|
||||
with open(filename, 'r') as f:
|
||||
self.assertRelEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
|
||||
|
||||
tmp = args.tmp
|
||||
# Check that all temporary files are properly removed
|
||||
self.assertEqual(True, all([not os.path.exists(f) for f in tmp]))
|
||||
|
||||
def test_extract(self):
|
||||
'''Test library extraction'''
|
||||
# Divert subprocess.call
|
||||
subprocess_call = subprocess.call
|
||||
subprocess_check_output = subprocess.check_output
|
||||
def call(args, **kargs):
|
||||
if config.AR == 'lib':
|
||||
self.assertEqual(args[:2], [config.AR, '-NOLOGO'])
|
||||
self.assertTrue(args[2].startswith('-EXTRACT:'))
|
||||
extract = [args[2][len('-EXTRACT:'):]]
|
||||
self.assertTrue(extract)
|
||||
args = args[3:]
|
||||
else:
|
||||
# The command called is always AR_EXTRACT
|
||||
ar_extract = config.AR_EXTRACT.split()
|
||||
self.assertEqual(args[:len(ar_extract)], ar_extract)
|
||||
args = args[len(ar_extract):]
|
||||
# Remaining argument is always one library
|
||||
self.assertEqual(len(args), 1)
|
||||
arg = args[0]
|
||||
self.assertEqual(os.path.splitext(arg)[1], config.LIB_SUFFIX)
|
||||
# Simulate file extraction
|
||||
lib = os.path.splitext(os.path.basename(arg))[0]
|
||||
if config.AR != 'lib':
|
||||
extract = [lib, lib + '2']
|
||||
extract = [os.path.join(kargs['cwd'], f) for f in extract]
|
||||
if config.AR != 'lib':
|
||||
extract = [Obj(f) for f in extract]
|
||||
if not lib in extracted:
|
||||
extracted[lib] = []
|
||||
extracted[lib].extend(extract)
|
||||
self.touch(extract)
|
||||
subprocess.call = call
|
||||
|
||||
def check_output(args, **kargs):
|
||||
# The command called is always AR
|
||||
ar = config.AR
|
||||
self.assertEqual(args[0:3], [ar, '-NOLOGO', '-LIST'])
|
||||
# Remaining argument is always one library
|
||||
self.assertRelEqual([os.path.splitext(arg)[1] for arg in args[3:]],
|
||||
[config.LIB_SUFFIX])
|
||||
# Simulate LIB -NOLOGO -LIST
|
||||
lib = os.path.splitext(os.path.basename(args[3]))[0]
|
||||
return '%s\n%s\n' % (Obj(lib), Obj(lib + '2'))
|
||||
subprocess.check_output = check_output
|
||||
|
||||
# ExpandArgsMore does the same as ExpandArgs
|
||||
self.touch([self.tmpfile('liby', Lib('y'))])
|
||||
for iteration in (1, 2):
|
||||
with ExpandArgsMore(['foo', '-bar'] + self.arg_files + [self.tmpfile('liby', Lib('y'))]) as args:
|
||||
files = self.files + self.liby_files + self.libx_files
|
||||
|
||||
self.assertRelEqual(args, ['foo', '-bar'] + files)
|
||||
|
||||
extracted = {}
|
||||
# ExpandArgsMore also has an extra method extracting static libraries
|
||||
# when possible
|
||||
args.extract()
|
||||
|
||||
# With AR_EXTRACT, it uses the descriptors when there are, and
|
||||
# actually
|
||||
# extracts the remaining libraries
|
||||
extracted_args = []
|
||||
for f in files:
|
||||
if f.endswith(config.LIB_SUFFIX):
|
||||
base = os.path.splitext(os.path.basename(f))[0]
|
||||
# On the first iteration, we test the behavior of
|
||||
# extracting archives that don't have a copy of their
|
||||
# contents next to them, which is to use the file
|
||||
# extracted from the archive in a temporary directory.
|
||||
# On the second iteration, we test extracting archives
|
||||
# that do have a copy of their contents next to them,
|
||||
# in which case those contents are used instead of the
|
||||
# temporarily extracted files.
|
||||
if iteration == 1:
|
||||
extracted_args.extend(sorted(extracted[base]))
|
||||
else:
|
||||
dirname = os.path.dirname(f[len(self.tmpdir)+1:])
|
||||
if base.endswith('f'):
|
||||
dirname = os.path.join(dirname, 'foo', 'bar')
|
||||
extracted_args.extend([self.tmpfile(dirname, Obj(base)), self.tmpfile(dirname, Obj(base + '2'))])
|
||||
else:
|
||||
extracted_args.append(f)
|
||||
self.assertRelEqual(args, ['foo', '-bar'] + extracted_args)
|
||||
|
||||
tmp = args.tmp
|
||||
# Check that all temporary files are properly removed
|
||||
self.assertEqual(True, all([not os.path.exists(f) for f in tmp]))
|
||||
|
||||
# Create archives contents next to them for the second iteration.
|
||||
base = os.path.splitext(Lib('_'))[0]
|
||||
self.touch(self.tmpfile(Obj(base.replace('_', suffix))) for suffix in ('a', 'a2', 'd', 'd2'))
|
||||
try:
|
||||
os.makedirs(self.tmpfile('foo', 'bar'))
|
||||
except:
|
||||
pass
|
||||
self.touch(self.tmpfile('foo', 'bar', Obj(base.replace('_', suffix))) for suffix in ('f', 'f2'))
|
||||
self.touch(self.tmpfile('liby', Obj(base.replace('_', suffix))) for suffix in ('z', 'z2'))
|
||||
|
||||
# Restore subprocess.call and subprocess.check_output
|
||||
subprocess.call = subprocess_call
|
||||
subprocess.check_output = subprocess_check_output
|
||||
|
||||
class FakeProcess(object):
|
||||
def __init__(self, out, err = ''):
|
||||
self.out = out
|
||||
self.err = err
|
||||
|
||||
def communicate(self):
|
||||
return (self.out, self.err)
|
||||
|
||||
OBJDUMPS = {
|
||||
'foo.o': '''
|
||||
00000000 g F .text\t00000001 foo
|
||||
00000000 g F .text._Z6foobarv\t00000001 _Z6foobarv
|
||||
00000000 g F .text.hello\t00000001 hello
|
||||
00000000 g F .text._ZThn4_6foobarv\t00000001 _ZThn4_6foobarv
|
||||
''',
|
||||
'bar.o': '''
|
||||
00000000 g F .text.hi\t00000001 hi
|
||||
00000000 g F .text.hot._Z6barbazv\t00000001 .hidden _Z6barbazv
|
||||
''',
|
||||
}
|
||||
|
||||
PRINT_ICF = '''
|
||||
ld: ICF folding section '.text.hello' in file 'foo.o'into '.text.hi' in file 'bar.o'
|
||||
ld: ICF folding section '.foo' in file 'foo.o'into '.foo' in file 'bar.o'
|
||||
'''
|
||||
|
||||
class SubprocessPopen(object):
|
||||
def __init__(self, test):
|
||||
self.test = test
|
||||
|
||||
def __call__(self, args, stdout = None, stderr = None):
|
||||
self.test.assertEqual(stdout, subprocess.PIPE)
|
||||
self.test.assertEqual(stderr, subprocess.PIPE)
|
||||
if args[0] == 'objdump':
|
||||
self.test.assertEqual(args[1], '-t')
|
||||
self.test.assertTrue(args[2] in OBJDUMPS)
|
||||
return FakeProcess(OBJDUMPS[args[2]])
|
||||
else:
|
||||
return FakeProcess('', PRINT_ICF)
|
||||
|
||||
class TestSectionFinder(unittest.TestCase):
|
||||
def test_getSections(self):
|
||||
'''Test SectionFinder'''
|
||||
# Divert subprocess.Popen
|
||||
subprocess_popen = subprocess.Popen
|
||||
subprocess.Popen = SubprocessPopen(self)
|
||||
config.EXPAND_LIBS_ORDER_STYLE = 'linkerscript'
|
||||
config.OBJ_SUFFIX = '.o'
|
||||
config.LIB_SUFFIX = '.a'
|
||||
finder = SectionFinder(['foo.o', 'bar.o'])
|
||||
self.assertEqual(finder.getSections('foobar'), [])
|
||||
self.assertEqual(finder.getSections('_Z6barbazv'), ['.text.hot._Z6barbazv'])
|
||||
self.assertEqual(finder.getSections('_Z6foobarv'), ['.text._Z6foobarv', '.text._ZThn4_6foobarv'])
|
||||
self.assertEqual(finder.getSections('_ZThn4_6foobarv'), ['.text._Z6foobarv', '.text._ZThn4_6foobarv'])
|
||||
subprocess.Popen = subprocess_popen
|
||||
|
||||
class TestSymbolOrder(unittest.TestCase):
|
||||
def test_getOrderedSections(self):
|
||||
'''Test ExpandMoreArgs' _getOrderedSections'''
|
||||
# Divert subprocess.Popen
|
||||
subprocess_popen = subprocess.Popen
|
||||
subprocess.Popen = SubprocessPopen(self)
|
||||
config.EXPAND_LIBS_ORDER_STYLE = 'linkerscript'
|
||||
config.OBJ_SUFFIX = '.o'
|
||||
config.LIB_SUFFIX = '.a'
|
||||
config.LD_PRINT_ICF_SECTIONS = ''
|
||||
args = ExpandArgsMore(['foo', '-bar', 'bar.o', 'foo.o'])
|
||||
self.assertEqual(args._getOrderedSections(['_Z6foobarv', '_Z6barbazv']), ['.text._Z6foobarv', '.text._ZThn4_6foobarv', '.text.hot._Z6barbazv'])
|
||||
self.assertEqual(args._getOrderedSections(['_ZThn4_6foobarv', '_Z6barbazv']), ['.text._Z6foobarv', '.text._ZThn4_6foobarv', '.text.hot._Z6barbazv'])
|
||||
subprocess.Popen = subprocess_popen
|
||||
|
||||
def test_getFoldedSections(self):
|
||||
'''Test ExpandMoreArgs' _getFoldedSections'''
|
||||
# Divert subprocess.Popen
|
||||
subprocess_popen = subprocess.Popen
|
||||
subprocess.Popen = SubprocessPopen(self)
|
||||
config.LD_PRINT_ICF_SECTIONS = '-Wl,--print-icf-sections'
|
||||
args = ExpandArgsMore(['foo', '-bar', 'bar.o', 'foo.o'])
|
||||
self.assertEqual(args._getFoldedSections(), {'.text.hello': ['.text.hi'], '.text.hi': ['.text.hello']})
|
||||
subprocess.Popen = subprocess_popen
|
||||
|
||||
def test_getOrderedSectionsWithICF(self):
|
||||
'''Test ExpandMoreArgs' _getOrderedSections, with ICF'''
|
||||
# Divert subprocess.Popen
|
||||
subprocess_popen = subprocess.Popen
|
||||
subprocess.Popen = SubprocessPopen(self)
|
||||
config.EXPAND_LIBS_ORDER_STYLE = 'linkerscript'
|
||||
config.OBJ_SUFFIX = '.o'
|
||||
config.LIB_SUFFIX = '.a'
|
||||
config.LD_PRINT_ICF_SECTIONS = '-Wl,--print-icf-sections'
|
||||
args = ExpandArgsMore(['foo', '-bar', 'bar.o', 'foo.o'])
|
||||
self.assertEqual(args._getOrderedSections(['hello', '_Z6barbazv']), ['.text.hello', '.text.hi', '.text.hot._Z6barbazv'])
|
||||
self.assertEqual(args._getOrderedSections(['_ZThn4_6foobarv', 'hi', '_Z6barbazv']), ['.text._Z6foobarv', '.text._ZThn4_6foobarv', '.text.hi', '.text.hello', '.text.hot._Z6barbazv'])
|
||||
subprocess.Popen = subprocess_popen
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
mozunit.main(runwith='unittest')
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
const { Cc, Ci, Cu, Cr } = require("chrome");
|
||||
const promise = require("promise");
|
||||
const EventEmitter = require("devtools/shared/old-event-emitter");
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
const { WebGLFront } = require("devtools/shared/fronts/webgl");
|
||||
const DevToolsUtils = require("devtools/shared/DevToolsUtils");
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ const promise = require("promise");
|
|||
const defer = require("devtools/shared/defer");
|
||||
const {Task} = require("devtools/shared/task");
|
||||
const Services = require("Services");
|
||||
const EventEmitter = require("devtools/shared/old-event-emitter");
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
const Tooltip = require("devtools/client/shared/widgets/tooltip/Tooltip");
|
||||
const Editor = require("devtools/client/sourceeditor/editor");
|
||||
const {LocalizationHelper} = require("devtools/shared/l10n");
|
||||
|
|
|
@ -20,52 +20,52 @@ async function ifWebGLSupported() {
|
|||
let fsEditor = await ShadersEditorsView._getEditor("fs");
|
||||
|
||||
vsEditor.replaceText("vec3", { line: 7, ch: 22 }, { line: 7, ch: 26 });
|
||||
let [, vertError] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
let vertError = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
info("Error marks added in the vertex shader editor.");
|
||||
|
||||
vsEditor.insertText(" ", { line: 1, ch: 0 });
|
||||
await once(panel.panelWin, EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
await panel.panelWin.once(EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
is(vsEditor.getText(1), " precision lowp float;", "Typed space.");
|
||||
checkHasVertFirstError(false, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
info("Error marks removed while typing in the vertex shader editor.");
|
||||
|
||||
[, vertError] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
vertError = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
info("Error marks were re-added after recompiling the vertex shader.");
|
||||
|
||||
fsEditor.replaceText("vec4", { line: 2, ch: 14 }, { line: 2, ch: 18 });
|
||||
let [, fragError] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
let fragError = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
checkHasFragError(true, fragError);
|
||||
info("Error marks added in the fragment shader editor.");
|
||||
|
||||
fsEditor.insertText(" ", { line: 1, ch: 0 });
|
||||
await once(panel.panelWin, EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
await panel.panelWin.once(EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
is(fsEditor.getText(1), " precision lowp float;", "Typed space.");
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
checkHasFragError(false, fragError);
|
||||
info("Error marks removed while typing in the fragment shader editor.");
|
||||
|
||||
[, fragError] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
fragError = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
checkHasFragError(true, fragError);
|
||||
info("Error marks were re-added after recompiling the fragment shader.");
|
||||
|
||||
vsEditor.replaceText("2", { line: 3, ch: 19 }, { line: 3, ch: 20 });
|
||||
await once(panel.panelWin, EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
await panel.panelWin.once(EVENTS.EDITOR_ERROR_MARKERS_REMOVED);
|
||||
checkHasVertFirstError(false, vertError);
|
||||
checkHasVertSecondError(false, vertError);
|
||||
checkHasFragError(true, fragError);
|
||||
info("Error marks removed while typing in the vertex shader editor again.");
|
||||
|
||||
[, vertError] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
vertError = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
checkHasVertFirstError(true, vertError);
|
||||
checkHasVertSecondError(true, vertError);
|
||||
checkHasFragError(true, fragError);
|
||||
|
|
|
@ -21,7 +21,7 @@ async function ifWebGLSupported() {
|
|||
|
||||
|
||||
vsEditor.replaceText("vec3", { line: 7, ch: 22 }, { line: 7, ch: 26 });
|
||||
let [, error] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
let error = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
|
||||
ok(error,
|
||||
"The new vertex shader source was compiled with errors.");
|
||||
|
@ -42,7 +42,7 @@ async function ifWebGLSupported() {
|
|||
|
||||
|
||||
fsEditor.replaceText("vec4", { line: 2, ch: 14 }, { line: 2, ch: 18 });
|
||||
[, error] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
error = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
|
||||
ok(error,
|
||||
"The new fragment shader source was compiled with errors.");
|
||||
|
@ -61,11 +61,11 @@ async function ifWebGLSupported() {
|
|||
await ensurePixelIs(gFront, { x: 511, y: 511 }, { r: 0, g: 255, b: 0, a: 255 }, true);
|
||||
|
||||
vsEditor.replaceText("vec4", { line: 7, ch: 22 }, { line: 7, ch: 26 });
|
||||
[, error] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
error = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
ok(!error, "The new vertex shader source was compiled successfully.");
|
||||
|
||||
fsEditor.replaceText("vec3", { line: 2, ch: 14 }, { line: 2, ch: 18 });
|
||||
[, error] = await onceSpread(panel.panelWin, EVENTS.SHADER_COMPILED);
|
||||
error = await panel.panelWin.once(EVENTS.SHADER_COMPILED);
|
||||
ok(!error, "The new fragment shader source was compiled successfully.");
|
||||
|
||||
await ensurePixelIs(gFront, { x: 0, y: 0 }, { r: 255, g: 0, b: 0, a: 255 }, true);
|
||||
|
|
|
@ -81,16 +81,6 @@ function createCanvas() {
|
|||
return document.createElementNS("http://www.w3.org/1999/xhtml", "canvas");
|
||||
}
|
||||
|
||||
// Hack around `once`, as that only resolves to a single (first) argument
|
||||
// and discards the rest. `onceSpread` is similar, except resolves to an
|
||||
// array of all of the arguments in the handler. These should be consolidated
|
||||
// into the same function, but many tests will need to be changed.
|
||||
function onceSpread(aTarget, aEvent) {
|
||||
let deferred = defer();
|
||||
aTarget.once(aEvent, (...args) => deferred.resolve(args));
|
||||
return deferred.promise;
|
||||
}
|
||||
|
||||
function observe(aNotificationName, aOwnsWeak = false) {
|
||||
info("Waiting for observer notification: '" + aNotificationName + ".");
|
||||
|
||||
|
|
|
@ -76,9 +76,6 @@ struct DataInfo
|
|||
nsCOMPtr<nsIPrincipal> mPrincipal;
|
||||
nsCString mStack;
|
||||
|
||||
// WeakReferences of nsHostObjectURI objects.
|
||||
nsTArray<nsWeakPtr> mURIs;
|
||||
|
||||
// When a blobURL is revoked, we keep it alive for RELEASING_TIMER
|
||||
// milliseconds in order to support pending operations such as navigation,
|
||||
// download and so on.
|
||||
|
@ -123,7 +120,7 @@ GetDataInfo(const nsACString& aUri, bool aAlsoIfRevoked = false)
|
|||
}
|
||||
|
||||
static DataInfo*
|
||||
GetDataInfoFromURI(nsIURI* aURI)
|
||||
GetDataInfoFromURI(nsIURI* aURI, bool aAlsoIfRevoked = false)
|
||||
{
|
||||
if (!aURI) {
|
||||
return nullptr;
|
||||
|
@ -135,7 +132,7 @@ GetDataInfoFromURI(nsIURI* aURI)
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
return GetDataInfo(spec);
|
||||
return GetDataInfo(spec, aAlsoIfRevoked);
|
||||
}
|
||||
|
||||
// Memory reporting for the hash table.
|
||||
|
@ -540,13 +537,6 @@ private:
|
|||
|
||||
MOZ_ASSERT(info->mRevoked);
|
||||
|
||||
for (uint32_t i = 0; i < info->mURIs.Length(); ++i) {
|
||||
nsCOMPtr<nsIURI> uri = do_QueryReferent(info->mURIs[i]);
|
||||
if (uri) {
|
||||
static_cast<nsHostObjectURI*>(uri.get())->ForgetBlobImpl();
|
||||
}
|
||||
}
|
||||
|
||||
gDataTable->Remove(mURI);
|
||||
if (gDataTable->Count() == 0) {
|
||||
delete gDataTable;
|
||||
|
@ -900,17 +890,12 @@ nsHostObjectProtocolHandler::NewURI(const nsACString& aSpec,
|
|||
nsCOMPtr<nsIURI> uri;
|
||||
rv = NS_MutateURI(new nsHostObjectURI::Mutator())
|
||||
.SetSpec(aSpec)
|
||||
.Apply(NS_MutatorMethod(&nsIBlobURIMutator::SetBlobImpl, blob))
|
||||
.Apply(NS_MutatorMethod(&nsIPrincipalURIMutator::SetPrincipal, principal))
|
||||
.Finalize(uri);
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
uri.forget(aResult);
|
||||
|
||||
if (info && info->mObjectType == DataInfo::eBlobImpl) {
|
||||
info->mURIs.AppendElement(do_GetWeakReference(*aResult));
|
||||
}
|
||||
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
|
@ -921,14 +906,8 @@ nsHostObjectProtocolHandler::NewChannel2(nsIURI* uri,
|
|||
{
|
||||
*result = nullptr;
|
||||
|
||||
nsCOMPtr<nsIURIWithBlobImpl> uriBlobImpl = do_QueryInterface(uri);
|
||||
if (!uriBlobImpl) {
|
||||
return NS_ERROR_DOM_BAD_URI;
|
||||
}
|
||||
|
||||
nsCOMPtr<nsISupports> tmp;
|
||||
MOZ_ALWAYS_SUCCEEDS(uriBlobImpl->GetBlobImpl(getter_AddRefs(tmp)));
|
||||
nsCOMPtr<BlobImpl> blobImpl = do_QueryInterface(tmp);
|
||||
RefPtr<BlobImpl> blobImpl;
|
||||
NS_GetBlobForBlobURI(uri, getter_AddRefs(blobImpl), true);
|
||||
if (!blobImpl) {
|
||||
return NS_ERROR_DOM_BAD_URI;
|
||||
}
|
||||
|
@ -942,6 +921,10 @@ nsHostObjectProtocolHandler::NewChannel2(nsIURI* uri,
|
|||
nsresult rv = uriPrinc->GetPrincipal(getter_AddRefs(principal));
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
if (!principal) {
|
||||
return NS_ERROR_DOM_BAD_URI;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
// Info can be null, in case this blob URL has been revoked already.
|
||||
DataInfo* info = GetDataInfoFromURI(uri);
|
||||
|
@ -1042,24 +1025,12 @@ nsFontTableProtocolHandler::GetScheme(nsACString &result)
|
|||
return NS_OK;
|
||||
}
|
||||
|
||||
/* static */ void
|
||||
nsHostObjectProtocolHandler::StoreClonedURI(const nsACString& aSpec,
|
||||
nsIURI* aURI)
|
||||
{
|
||||
MOZ_ASSERT(aURI);
|
||||
|
||||
DataInfo* info = GetDataInfo(aSpec);
|
||||
if (info) {
|
||||
info->mURIs.AppendElement(do_GetWeakReference(aURI));
|
||||
}
|
||||
}
|
||||
|
||||
nsresult
|
||||
NS_GetBlobForBlobURI(nsIURI* aURI, BlobImpl** aBlob)
|
||||
NS_GetBlobForBlobURI(nsIURI* aURI, BlobImpl** aBlob, bool aAlsoIfRevoked)
|
||||
{
|
||||
*aBlob = nullptr;
|
||||
|
||||
DataInfo* info = GetDataInfoFromURI(aURI);
|
||||
DataInfo* info = GetDataInfoFromURI(aURI, aAlsoIfRevoked);
|
||||
if (!info || info->mObjectType != DataInfo::eBlobImpl) {
|
||||
return NS_ERROR_DOM_BAD_URI;
|
||||
}
|
||||
|
|
|
@ -88,9 +88,6 @@ public:
|
|||
GetAllBlobURLEntries(nsTArray<mozilla::dom::BlobURLRegistrationData>& aRegistrations,
|
||||
mozilla::dom::ContentParent* aCP);
|
||||
|
||||
// This is for nsHostObjectURI.
|
||||
static void StoreClonedURI(const nsACString& aSpec, nsIURI* aURI);
|
||||
|
||||
protected:
|
||||
virtual ~nsHostObjectProtocolHandler() {}
|
||||
|
||||
|
@ -133,7 +130,7 @@ inline bool IsFontTableURI(nsIURI* aUri)
|
|||
}
|
||||
|
||||
extern nsresult
|
||||
NS_GetBlobForBlobURI(nsIURI* aURI, mozilla::dom::BlobImpl** aBlob);
|
||||
NS_GetBlobForBlobURI(nsIURI* aURI, mozilla::dom::BlobImpl** aBlob, bool aAlsoIfRevoked = false);
|
||||
|
||||
extern nsresult
|
||||
NS_GetBlobForBlobURISpec(const nsACString& aSpec, mozilla::dom::BlobImpl** aBlob);
|
||||
|
|
|
@ -22,9 +22,7 @@ NS_IMPL_ADDREF_INHERITED(nsHostObjectURI, mozilla::net::nsSimpleURI)
|
|||
NS_IMPL_RELEASE_INHERITED(nsHostObjectURI, mozilla::net::nsSimpleURI)
|
||||
|
||||
NS_INTERFACE_MAP_BEGIN(nsHostObjectURI)
|
||||
NS_INTERFACE_MAP_ENTRY(nsIURIWithBlobImpl)
|
||||
NS_INTERFACE_MAP_ENTRY(nsIURIWithPrincipal)
|
||||
NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
|
||||
if (aIID.Equals(kHOSTOBJECTURICID))
|
||||
foundInterface = static_cast<nsIURI*>(this);
|
||||
else if (aIID.Equals(kThisSimpleURIImplementationCID)) {
|
||||
|
@ -37,16 +35,6 @@ NS_INTERFACE_MAP_BEGIN(nsHostObjectURI)
|
|||
else
|
||||
NS_INTERFACE_MAP_END_INHERITING(mozilla::net::nsSimpleURI)
|
||||
|
||||
// nsIURIWithBlobImpl methods:
|
||||
|
||||
NS_IMETHODIMP
|
||||
nsHostObjectURI::GetBlobImpl(nsISupports** aBlobImpl)
|
||||
{
|
||||
RefPtr<mozilla::dom::BlobImpl> blobImpl(mBlobImpl);
|
||||
blobImpl.forget(aBlobImpl);
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIURIWithPrincipal methods:
|
||||
|
||||
NS_IMETHODIMP
|
||||
|
@ -149,10 +137,6 @@ nsHostObjectURI::Deserialize(const mozilla::ipc::URIParams& aParams)
|
|||
return false;
|
||||
}
|
||||
|
||||
// If this fails, we still want to complete the operation. Probably this
|
||||
// blobURL has been revoked in the meantime.
|
||||
NS_GetBlobForBlobURI(this, getter_AddRefs(mBlobImpl));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -185,9 +169,6 @@ nsHostObjectURI::CloneInternal(mozilla::net::nsSimpleURI::RefHandlingEnum aRefHa
|
|||
nsHostObjectURI* u = static_cast<nsHostObjectURI*>(simpleClone.get());
|
||||
|
||||
u->mPrincipal = mPrincipal;
|
||||
u->mBlobImpl = mBlobImpl;
|
||||
|
||||
nsHostObjectProtocolHandler::StoreClonedURI(newRef, simpleClone);
|
||||
|
||||
simpleClone.forget(aClone);
|
||||
return NS_OK;
|
||||
|
@ -216,10 +197,6 @@ nsHostObjectURI::EqualsInternal(nsIURI* aOther,
|
|||
return NS_OK;
|
||||
}
|
||||
|
||||
// Compare the piece of additional member data that we add to base class,
|
||||
// but we cannot compare BlobImpl. This should not be a problem, because we
|
||||
// don't support changing the underlying mBlobImpl.
|
||||
|
||||
if (mPrincipal && otherUri->mPrincipal) {
|
||||
// Both of us have mPrincipals. Compare them.
|
||||
return mPrincipal->Equals(otherUri->mPrincipal, aResult);
|
||||
|
@ -233,7 +210,6 @@ nsHostObjectURI::EqualsInternal(nsIURI* aOther,
|
|||
NS_IMPL_NSIURIMUTATOR_ISUPPORTS(nsHostObjectURI::Mutator,
|
||||
nsIURISetters,
|
||||
nsIURIMutator,
|
||||
nsIBlobURIMutator,
|
||||
nsIPrincipalURIMutator,
|
||||
nsISerializable)
|
||||
|
||||
|
@ -305,10 +281,3 @@ nsHostObjectURI::GetClassIDNoAlloc(nsCID *aClassIDNoAlloc)
|
|||
*aClassIDNoAlloc = kHOSTOBJECTURICID;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
void
|
||||
nsHostObjectURI::ForgetBlobImpl()
|
||||
{
|
||||
MOZ_ASSERT(mBlobImpl);
|
||||
mBlobImpl = nullptr;
|
||||
}
|
||||
|
|
|
@ -13,11 +13,9 @@
|
|||
#include "nsIClassInfo.h"
|
||||
#include "nsIPrincipal.h"
|
||||
#include "nsISerializable.h"
|
||||
#include "nsIURIWithBlobImpl.h"
|
||||
#include "nsIURIWithPrincipal.h"
|
||||
#include "nsSimpleURI.h"
|
||||
#include "nsIIPCSerializableURI.h"
|
||||
#include "nsWeakReference.h"
|
||||
|
||||
|
||||
/**
|
||||
|
@ -28,23 +26,20 @@
|
|||
class nsHostObjectURI final
|
||||
: public mozilla::net::nsSimpleURI
|
||||
, public nsIURIWithPrincipal
|
||||
, public nsIURIWithBlobImpl
|
||||
, public nsSupportsWeakReference
|
||||
{
|
||||
private:
|
||||
nsHostObjectURI(nsIPrincipal* aPrincipal,
|
||||
mozilla::dom::BlobImpl* aBlobImpl)
|
||||
explicit nsHostObjectURI(nsIPrincipal* aPrincipal)
|
||||
: mozilla::net::nsSimpleURI()
|
||||
, mPrincipal(aPrincipal)
|
||||
, mBlobImpl(aBlobImpl)
|
||||
{}
|
||||
|
||||
// For use only from deserialization
|
||||
nsHostObjectURI() : mozilla::net::nsSimpleURI() {}
|
||||
explicit nsHostObjectURI()
|
||||
: mozilla::net::nsSimpleURI()
|
||||
{}
|
||||
|
||||
public:
|
||||
NS_DECL_ISUPPORTS_INHERITED
|
||||
NS_DECL_NSIURIWITHBLOBIMPL
|
||||
NS_DECL_NSIURIWITHPRINCIPAL
|
||||
NS_DECL_NSISERIALIZABLE
|
||||
NS_DECL_NSICLASSINFO
|
||||
|
@ -69,10 +64,7 @@ public:
|
|||
|
||||
NS_IMETHOD Mutate(nsIURIMutator * *_retval) override;
|
||||
|
||||
void ForgetBlobImpl();
|
||||
|
||||
nsCOMPtr<nsIPrincipal> mPrincipal;
|
||||
RefPtr<mozilla::dom::BlobImpl> mBlobImpl;
|
||||
|
||||
protected:
|
||||
virtual ~nsHostObjectURI() {}
|
||||
|
@ -84,7 +76,6 @@ public:
|
|||
class Mutator final
|
||||
: public nsIURIMutator
|
||||
, public BaseURIMutator<nsHostObjectURI>
|
||||
, public nsIBlobURIMutator
|
||||
, public nsIPrincipalURIMutator
|
||||
, public nsISerializable
|
||||
{
|
||||
|
@ -104,16 +95,6 @@ public:
|
|||
return InitFromInputStream(aStream);
|
||||
}
|
||||
|
||||
MOZ_MUST_USE NS_IMETHOD
|
||||
SetBlobImpl(mozilla::dom::BlobImpl *aBlobImpl) override
|
||||
{
|
||||
if (!mURI) {
|
||||
return NS_ERROR_NULL_POINTER;
|
||||
}
|
||||
mURI->mBlobImpl = aBlobImpl;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
MOZ_MUST_USE NS_IMETHOD
|
||||
SetPrincipal(nsIPrincipal *aPrincipal) override
|
||||
{
|
||||
|
|
|
@ -13,7 +13,7 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=759124
|
|||
<p id="display"></p>
|
||||
<div id="content" style="display: none"></div>
|
||||
|
||||
<iframe id="svg" src="fragments-helper.svg"></iframe>
|
||||
<iframe id="svg"></iframe>
|
||||
|
||||
<pre id="test">
|
||||
<script class="testbody" type="application/javascript">
|
||||
|
@ -110,6 +110,7 @@ function runTests()
|
|||
}
|
||||
|
||||
$(svg).addEventListener("load", runTests);
|
||||
$(svg).setAttribute("src", "fragments-helper.svg");
|
||||
</script>
|
||||
</pre>
|
||||
</body>
|
||||
|
|
|
@ -1685,10 +1685,12 @@ CompositorBridgeParent::RecvAdoptChild(const uint64_t& child)
|
|||
APZCTreeManagerParent* parent;
|
||||
{
|
||||
MonitorAutoLock lock(*sIndirectLayerTreesLock);
|
||||
// We currently don't support adopting children from one compositor to
|
||||
// another if the two compositors don't have the same options.
|
||||
MOZ_ASSERT(sIndirectLayerTrees[child].mParent->mOptions == mOptions);
|
||||
oldApzSampler = sIndirectLayerTrees[child].mParent->mApzSampler;
|
||||
if (sIndirectLayerTrees[child].mParent) {
|
||||
// We currently don't support adopting children from one compositor to
|
||||
// another if the two compositors don't have the same options.
|
||||
MOZ_ASSERT(sIndirectLayerTrees[child].mParent->mOptions == mOptions);
|
||||
oldApzSampler = sIndirectLayerTrees[child].mParent->mApzSampler;
|
||||
}
|
||||
NotifyChildCreated(child);
|
||||
if (sIndirectLayerTrees[child].mLayerTree) {
|
||||
sIndirectLayerTrees[child].mLayerTree->SetLayerManager(mLayerManager, GetAnimationStorage());
|
||||
|
|
|
@ -127,7 +127,6 @@ XPIDL_SOURCES += [
|
|||
'nsIURI.idl',
|
||||
'nsIURIClassifier.idl',
|
||||
'nsIURIMutator.idl',
|
||||
'nsIURIWithBlobImpl.idl',
|
||||
'nsIURIWithPrincipal.idl',
|
||||
'nsIURL.idl',
|
||||
'nsIURLParser.idl',
|
||||
|
|
|
@ -1,40 +0,0 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "nsISupports.idl"
|
||||
|
||||
interface nsIURI;
|
||||
|
||||
%{C++
|
||||
namespace mozilla {
|
||||
namespace dom {
|
||||
class BlobImpl;
|
||||
}}
|
||||
%}
|
||||
|
||||
[ptr] native BlobImplPtr(mozilla::dom::BlobImpl);
|
||||
|
||||
/**
|
||||
* nsIURIWithBlobImpl is implemented by URIs which are associated with a
|
||||
* specific BlobImpl.
|
||||
*/
|
||||
[builtinclass, builtinclass, uuid(331b41d3-3506-4ab5-bef9-aab41e3202a3)]
|
||||
interface nsIURIWithBlobImpl : nsISupports
|
||||
{
|
||||
/**
|
||||
* The BlobImpl associated with the resource returned when loading this uri.
|
||||
*/
|
||||
readonly attribute nsISupports blobImpl;
|
||||
};
|
||||
|
||||
[builtinclass, uuid(d3e8c9fa-ff07-47cc-90dc-0cc5445ddb59)]
|
||||
interface nsIBlobURIMutator : nsISupports
|
||||
{
|
||||
/**
|
||||
* Associates a blobImpl to the mutated URI.
|
||||
* Would normally return nsIURIMutator, but since it only gets called
|
||||
* from C++, there is no need for that.
|
||||
*/
|
||||
[must_use, noscript] void setBlobImpl(in BlobImplPtr blobImpl);
|
||||
};
|
|
@ -426,6 +426,7 @@ class WinArtifactJob(ArtifactJob):
|
|||
'firefox/**/*.dll',
|
||||
'firefox/*.exe',
|
||||
'firefox/**/interfaces.xpt',
|
||||
'firefox/*.tlb',
|
||||
}
|
||||
|
||||
product = 'firefox'
|
||||
|
|
|
@ -27,8 +27,12 @@ from mozbuild.frontend.data import (
|
|||
FinalTargetFiles,
|
||||
GeneratedSources,
|
||||
GnProjectData,
|
||||
HostLibrary,
|
||||
HostRustLibrary,
|
||||
IPDLCollection,
|
||||
RustLibrary,
|
||||
SharedLibrary,
|
||||
StaticLibrary,
|
||||
UnifiedSources,
|
||||
XPIDLFile,
|
||||
WebIDLCollection,
|
||||
|
@ -40,7 +44,10 @@ from mozbuild.jar import (
|
|||
from mozbuild.preprocessor import Preprocessor
|
||||
from mozpack.chrome.manifest import parse_manifest_line
|
||||
|
||||
from mozbuild.util import group_unified_files
|
||||
from mozbuild.util import (
|
||||
group_unified_files,
|
||||
mkdir,
|
||||
)
|
||||
|
||||
class XPIDLManager(object):
|
||||
"""Helps manage XPCOM IDLs in the context of the build system."""
|
||||
|
@ -196,6 +203,93 @@ class CommonBackend(BuildBackend):
|
|||
}
|
||||
json.dump(d, fh, sort_keys=True, indent=4)
|
||||
|
||||
def _expand_libs(self, input_bin):
|
||||
os_libs = []
|
||||
shared_libs = []
|
||||
static_libs = []
|
||||
objs = []
|
||||
no_pgo_objs = []
|
||||
|
||||
seen_objs = set()
|
||||
seen_libs = set()
|
||||
|
||||
def add_objs(lib):
|
||||
for o in lib.objs:
|
||||
if o not in seen_objs:
|
||||
seen_objs.add(o)
|
||||
objs.append(o)
|
||||
# This is slightly odd, buf for consistency with the
|
||||
# recursivemake backend we don't replace OBJ_SUFFIX if any
|
||||
# object in a library has `no_pgo` set.
|
||||
if lib.no_pgo_objs or lib.no_pgo:
|
||||
no_pgo_objs.append(o)
|
||||
|
||||
def expand(lib, recurse_objs, system_libs):
|
||||
if isinstance(lib, StaticLibrary):
|
||||
if lib.no_expand_lib:
|
||||
static_libs.append(lib)
|
||||
recurse_objs = False
|
||||
elif recurse_objs:
|
||||
add_objs(lib)
|
||||
|
||||
for l in lib.linked_libraries:
|
||||
expand(l, recurse_objs, system_libs)
|
||||
|
||||
if system_libs:
|
||||
for l in lib.linked_system_libs:
|
||||
if l not in seen_libs:
|
||||
seen_libs.add(l)
|
||||
os_libs.append(l)
|
||||
|
||||
elif isinstance(lib, SharedLibrary):
|
||||
if lib not in seen_libs:
|
||||
seen_libs.add(lib)
|
||||
shared_libs.append(lib)
|
||||
|
||||
add_objs(input_bin)
|
||||
|
||||
system_libs = not isinstance(input_bin, StaticLibrary)
|
||||
for lib in input_bin.linked_libraries:
|
||||
if isinstance(lib, RustLibrary):
|
||||
continue
|
||||
elif isinstance(lib, StaticLibrary):
|
||||
expand(lib, True, system_libs)
|
||||
elif isinstance(lib, SharedLibrary):
|
||||
if lib not in seen_libs:
|
||||
seen_libs.add(lib)
|
||||
shared_libs.append(lib)
|
||||
|
||||
for lib in input_bin.linked_system_libs:
|
||||
if lib not in seen_libs:
|
||||
seen_libs.add(lib)
|
||||
os_libs.append(lib)
|
||||
|
||||
return objs, no_pgo_objs, shared_libs, os_libs, static_libs
|
||||
|
||||
def _make_list_file(self, objdir, objs, name):
|
||||
if not objs:
|
||||
return None
|
||||
list_style = self.environment.substs.get('EXPAND_LIBS_LIST_STYLE')
|
||||
list_file_path = mozpath.join(objdir, name)
|
||||
objs = [os.path.relpath(o, objdir) for o in objs]
|
||||
if list_style == 'linkerscript':
|
||||
ref = list_file_path
|
||||
content = '\n'.join('INPUT("%s")' % o for o in objs)
|
||||
elif list_style == 'filelist':
|
||||
ref = "-Wl,-filelist," + list_file_path
|
||||
content = '\n'.join(objs)
|
||||
elif list_style == 'list':
|
||||
ref = "@" + list_file_path
|
||||
content = '\n'.join(objs)
|
||||
else:
|
||||
return None
|
||||
|
||||
mkdir(objdir)
|
||||
with self._write_file(list_file_path) as fh:
|
||||
fh.write(content)
|
||||
|
||||
return ref
|
||||
|
||||
def _handle_generated_sources(self, files):
|
||||
self._generated_sources.update(mozpath.relpath(f, self.environment.topobjdir) for f in files)
|
||||
|
||||
|
|
|
@ -1324,16 +1324,6 @@ class RecursiveMakeBackend(CommonBackend):
|
|||
self.environment.topobjdir), obj.KIND)
|
||||
|
||||
def _process_linked_libraries(self, obj, backend_file):
|
||||
def write_shared_and_system_libs(lib):
|
||||
for l in lib.linked_libraries:
|
||||
if isinstance(l, (StaticLibrary, RustLibrary)):
|
||||
write_shared_and_system_libs(l)
|
||||
else:
|
||||
backend_file.write_once('SHARED_LIBS += %s/%s\n'
|
||||
% (pretty_relpath(l), l.import_name))
|
||||
for l in lib.linked_system_libs:
|
||||
backend_file.write_once('OS_LIBS += %s\n' % l)
|
||||
|
||||
def pretty_relpath(lib):
|
||||
return '$(DEPTH)/%s' % mozpath.relpath(lib.objdir, topobjdir)
|
||||
|
||||
|
@ -1342,36 +1332,88 @@ class RecursiveMakeBackend(CommonBackend):
|
|||
build_target = self._build_target_for_obj(obj)
|
||||
self._compile_graph[build_target]
|
||||
|
||||
objs, no_pgo_objs, shared_libs, os_libs, static_libs = self._expand_libs(obj)
|
||||
|
||||
if obj.KIND == 'target':
|
||||
obj_target = obj.name
|
||||
if isinstance(obj, Program):
|
||||
obj_target = self._pretty_path(obj.output_path, backend_file)
|
||||
|
||||
is_unit_test = isinstance(obj, BaseProgram) and obj.is_unit_test
|
||||
profile_gen_objs = []
|
||||
|
||||
if (self.environment.substs.get('MOZ_PGO') and
|
||||
self.environment.substs.get('GNU_CC')):
|
||||
# We use a different OBJ_SUFFIX for the profile generate phase on
|
||||
# linux. These get picked up via OBJS_VAR_SUFFIX in config.mk.
|
||||
if not is_unit_test and not isinstance(obj, SimpleProgram):
|
||||
profile_gen_objs = [o if o in no_pgo_objs else '%s.%s' %
|
||||
(mozpath.splitext(o)[0], 'i_o') for o in objs]
|
||||
|
||||
def write_obj_deps(target, objs_ref, pgo_objs_ref):
|
||||
if pgo_objs_ref:
|
||||
backend_file.write('ifdef MOZ_PROFILE_GENERATE\n')
|
||||
backend_file.write('%s: %s\n' % (target, pgo_objs_ref))
|
||||
backend_file.write('else\n')
|
||||
backend_file.write('%s: %s\n' % (target, objs_ref))
|
||||
backend_file.write('endif\n')
|
||||
else:
|
||||
backend_file.write('%s: %s\n' % (target, objs_ref))
|
||||
|
||||
objs_ref = ' \\\n '.join(os.path.relpath(o, obj.objdir)
|
||||
for o in objs)
|
||||
pgo_objs_ref = ' \\\n '.join(os.path.relpath(o, obj.objdir)
|
||||
for o in profile_gen_objs)
|
||||
# Don't bother with a list file if we're only linking objects built
|
||||
# in this directory or building a real static library. This
|
||||
# accommodates clang-plugin, where we would otherwise pass an
|
||||
# incorrect list file format to the host compiler as well as when
|
||||
# creating an archive with AR, which doesn't understand list files.
|
||||
if (objs == obj.objs and not isinstance(obj, StaticLibrary) or
|
||||
isinstance(obj, StaticLibrary) and obj.no_expand_lib):
|
||||
backend_file.write_once('%s_OBJS := %s\n' % (obj.name,
|
||||
objs_ref))
|
||||
if profile_gen_objs:
|
||||
backend_file.write_once('%s_PGO_OBJS := %s\n' % (obj.name,
|
||||
pgo_objs_ref))
|
||||
write_obj_deps(obj_target, objs_ref, pgo_objs_ref)
|
||||
elif not isinstance(obj, StaticLibrary):
|
||||
list_file_path = '%s.list' % obj.name.replace('.', '_')
|
||||
list_file_ref = self._make_list_file(obj.objdir, objs,
|
||||
list_file_path)
|
||||
backend_file.write_once('%s_OBJS := %s\n' %
|
||||
(obj.name, list_file_ref))
|
||||
backend_file.write_once('%s: %s\n' % (obj_target, list_file_path))
|
||||
if profile_gen_objs:
|
||||
pgo_list_file_path = '%s_pgo.list' % obj.name.replace('.', '_')
|
||||
pgo_list_file_ref = self._make_list_file(obj.objdir,
|
||||
profile_gen_objs,
|
||||
pgo_list_file_path)
|
||||
backend_file.write_once('%s_PGO_OBJS := %s\n' %
|
||||
(obj.name, pgo_list_file_ref))
|
||||
backend_file.write_once('%s: %s\n' % (obj_target,
|
||||
pgo_list_file_path))
|
||||
write_obj_deps(obj_target, objs_ref, pgo_objs_ref)
|
||||
|
||||
for lib in shared_libs:
|
||||
backend_file.write_once('SHARED_LIBS += %s/%s\n' %
|
||||
(pretty_relpath(lib), lib.import_name))
|
||||
for lib in static_libs:
|
||||
backend_file.write_once('STATIC_LIBS += %s/%s\n' %
|
||||
(pretty_relpath(lib), lib.import_name))
|
||||
for lib in os_libs:
|
||||
if obj.KIND == 'target':
|
||||
backend_file.write_once('OS_LIBS += %s\n' % lib)
|
||||
else:
|
||||
backend_file.write_once('HOST_EXTRA_LIBS += %s\n' % lib)
|
||||
|
||||
for lib in obj.linked_libraries:
|
||||
if not isinstance(lib, ExternalLibrary):
|
||||
self._compile_graph[build_target].add(
|
||||
self._build_target_for_obj(lib))
|
||||
relpath = pretty_relpath(lib)
|
||||
if isinstance(obj, Library):
|
||||
if isinstance(lib, RustLibrary):
|
||||
# We don't need to do anything here; we will handle
|
||||
# linkage for any RustLibrary elsewhere.
|
||||
continue
|
||||
elif isinstance(lib, StaticLibrary):
|
||||
backend_file.write_once('STATIC_LIBS += %s/%s\n'
|
||||
% (relpath, lib.import_name))
|
||||
if isinstance(obj, SharedLibrary):
|
||||
write_shared_and_system_libs(lib)
|
||||
elif isinstance(obj, SharedLibrary):
|
||||
backend_file.write_once('SHARED_LIBS += %s/%s\n'
|
||||
% (relpath, lib.import_name))
|
||||
elif isinstance(obj, (Program, SimpleProgram)):
|
||||
if isinstance(lib, StaticLibrary):
|
||||
backend_file.write_once('STATIC_LIBS += %s/%s\n'
|
||||
% (relpath, lib.import_name))
|
||||
write_shared_and_system_libs(lib)
|
||||
else:
|
||||
backend_file.write_once('SHARED_LIBS += %s/%s\n'
|
||||
% (relpath, lib.import_name))
|
||||
elif isinstance(obj, (HostLibrary, HostProgram, HostSimpleProgram)):
|
||||
assert isinstance(lib, (HostLibrary, HostRustLibrary))
|
||||
backend_file.write_once('HOST_LIBS += %s/%s\n'
|
||||
% (relpath, lib.import_name))
|
||||
if isinstance(lib, (HostLibrary, HostRustLibrary)):
|
||||
backend_file.write_once('HOST_LIBS += %s/%s\n' %
|
||||
(pretty_relpath(lib), lib.import_name))
|
||||
|
||||
# We have to link any Rust libraries after all intermediate static
|
||||
# libraries have been listed to ensure that the Rust libraries are
|
||||
|
@ -1379,12 +1421,6 @@ class RecursiveMakeBackend(CommonBackend):
|
|||
if isinstance(obj, SharedLibrary):
|
||||
self._process_rust_libraries(obj, backend_file, pretty_relpath)
|
||||
|
||||
for lib in obj.linked_system_libs:
|
||||
if obj.KIND == 'target':
|
||||
backend_file.write_once('OS_LIBS += %s\n' % lib)
|
||||
else:
|
||||
backend_file.write_once('HOST_EXTRA_LIBS += %s\n' % lib)
|
||||
|
||||
# Process library-based defines
|
||||
self._process_defines(obj.lib_defines, backend_file)
|
||||
|
||||
|
|
|
@ -388,6 +388,8 @@ class Linkable(ContextDerived):
|
|||
'lib_defines',
|
||||
'linked_libraries',
|
||||
'linked_system_libs',
|
||||
'no_pgo_sources',
|
||||
'no_pgo',
|
||||
'sources',
|
||||
)
|
||||
|
||||
|
@ -398,6 +400,8 @@ class Linkable(ContextDerived):
|
|||
self.linked_system_libs = []
|
||||
self.lib_defines = Defines(context, {})
|
||||
self.sources = defaultdict(list)
|
||||
self.no_pgo_sources = []
|
||||
self.no_pgo = False
|
||||
|
||||
def link_library(self, obj):
|
||||
assert isinstance(obj, BaseLibrary)
|
||||
|
@ -437,8 +441,7 @@ class Linkable(ContextDerived):
|
|||
all_sources += self.sources.get(suffix, [])
|
||||
return all_sources
|
||||
|
||||
@property
|
||||
def objs(self):
|
||||
def _get_objs(self, sources):
|
||||
obj_prefix = ''
|
||||
if self.KIND == 'host':
|
||||
obj_prefix = 'host_'
|
||||
|
@ -446,7 +449,15 @@ class Linkable(ContextDerived):
|
|||
return [mozpath.join(self.objdir, '%s%s.%s' % (obj_prefix,
|
||||
mozpath.splitext(mozpath.basename(f))[0],
|
||||
self.config.substs.get('OBJ_SUFFIX', '')))
|
||||
for f in self.source_files()]
|
||||
for f in sources]
|
||||
|
||||
@property
|
||||
def no_pgo_objs(self):
|
||||
return self._get_objs(self.no_pgo_sources)
|
||||
|
||||
@property
|
||||
def objs(self):
|
||||
return self._get_objs(self.source_files())
|
||||
|
||||
|
||||
class BaseProgram(Linkable):
|
||||
|
@ -487,6 +498,10 @@ class BaseProgram(Linkable):
|
|||
def __repr__(self):
|
||||
return '<%s: %s/%s>' % (type(self).__name__, self.relobjdir, self.program)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.program
|
||||
|
||||
|
||||
class Program(BaseProgram):
|
||||
"""Context derived container object for PROGRAM"""
|
||||
|
@ -601,6 +616,10 @@ class BaseLibrary(Linkable):
|
|||
def __repr__(self):
|
||||
return '<%s: %s/%s>' % (type(self).__name__, self.relobjdir, self.lib_name)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.lib_name
|
||||
|
||||
|
||||
class Library(BaseLibrary):
|
||||
"""Context derived container object for a library"""
|
||||
|
|
|
@ -951,6 +951,10 @@ class TreeMetadataEmitter(LoggingMixin):
|
|||
for target_var in ('SOURCES', 'UNIFIED_SOURCES'):
|
||||
for suffix, srcs in ctxt_sources[target_var].items():
|
||||
linkable.sources[suffix] += srcs
|
||||
if no_pgo_sources:
|
||||
linkable.no_pgo_sources = no_pgo_sources
|
||||
elif no_pgo:
|
||||
linkable.no_pgo = True
|
||||
for host_linkable in host_linkables:
|
||||
for suffix, srcs in ctxt_sources['HOST_SOURCES'].items():
|
||||
host_linkable.sources[suffix] += srcs
|
||||
|
|
|
@ -205,6 +205,18 @@ CONFIGS = defaultdict(lambda: {
|
|||
'BIN_SUFFIX': '.prog',
|
||||
},
|
||||
},
|
||||
'linkage': {
|
||||
'defines': {},
|
||||
'non_global_defines': [],
|
||||
'substs': {
|
||||
'COMPILE_ENVIRONMENT': '1',
|
||||
'LIB_SUFFIX': 'a',
|
||||
'BIN_SUFFIX': '.exe',
|
||||
'DLL_SUFFIX': '.so',
|
||||
'OBJ_SUFFIX': 'o',
|
||||
'EXPAND_LIBS_LIST_STYLE': 'list',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
include('templates.mozbuild')
|
||||
|
||||
DIRS += [
|
||||
'real',
|
||||
'shared',
|
||||
'prog',
|
||||
'static',
|
||||
]
|
|
@ -0,0 +1,11 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += ['qux']
|
||||
|
||||
Program('MyProgram')
|
||||
|
||||
USE_LIBS += [
|
||||
'bar',
|
||||
'baz',
|
||||
]
|
|
@ -0,0 +1,6 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
SOURCES += ['qux1.c']
|
||||
|
||||
SharedLibrary('qux')
|
|
@ -0,0 +1,9 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
SOURCES += [
|
||||
'foo1.c',
|
||||
'foo2.c'
|
||||
]
|
||||
|
||||
FINAL_LIBRARY = 'foo'
|
|
@ -0,0 +1,14 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += [
|
||||
'foo',
|
||||
]
|
||||
|
||||
NO_EXPAND_LIBS = True
|
||||
|
||||
OS_LIBS += ['-lbaz']
|
||||
|
||||
USE_LIBS += ['static:baz']
|
||||
|
||||
Library('foo')
|
|
@ -0,0 +1,6 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
SOURCES += ['baz1.c']
|
||||
|
||||
FINAL_LIBRARY = 'baz'
|
|
@ -0,0 +1,14 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += [
|
||||
'baz',
|
||||
]
|
||||
|
||||
STATIC_LIBRARY_NAME = 'baz_s'
|
||||
FORCE_STATIC_LIB = True
|
||||
|
||||
OS_LIBS += ['-lfoo']
|
||||
USE_LIBS += ['qux']
|
||||
|
||||
SharedLibrary('baz')
|
|
@ -0,0 +1,8 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
SOURCES += [
|
||||
'bar_helper1.cpp',
|
||||
]
|
||||
|
||||
FINAL_LIBRARY = 'bar'
|
|
@ -0,0 +1,13 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
SOURCES += [
|
||||
'bar1.cc',
|
||||
'bar2.cc',
|
||||
]
|
||||
|
||||
DIRS += [
|
||||
'bar_helper',
|
||||
]
|
||||
|
||||
FINAL_LIBRARY = 'bar'
|
|
@ -0,0 +1,12 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
DIRS += [
|
||||
'bar',
|
||||
]
|
||||
|
||||
USE_LIBS += ['foo']
|
||||
|
||||
OS_LIBS += ['-lbar']
|
||||
|
||||
Library('bar')
|
|
@ -0,0 +1,23 @@
|
|||
# Any copyright is dedicated to the Public Domain.
|
||||
# http://creativecommons.org/publicdomain/zero/1.0/
|
||||
|
||||
@template
|
||||
def Library(name):
|
||||
LIBRARY_NAME = name
|
||||
|
||||
@template
|
||||
def SharedLibrary(name):
|
||||
FORCE_SHARED_LIB = True
|
||||
LIBRARY_NAME = name
|
||||
|
||||
@template
|
||||
def Binary():
|
||||
# Add -lfoo for testing purposes.
|
||||
OS_LIBS += ['foo']
|
||||
|
||||
|
||||
@template
|
||||
def Program(name):
|
||||
PROGRAM = name
|
||||
|
||||
Binary()
|
|
@ -1050,6 +1050,73 @@ class TestRecursiveMakeBackend(BackendTester):
|
|||
for line in lines:
|
||||
self.assertNotIn('LIB_IS_C_ONLY', line)
|
||||
|
||||
def test_linkage(self):
|
||||
env = self._consume('linkage', RecursiveMakeBackend)
|
||||
expected_linkage = {
|
||||
'prog': {
|
||||
'SHARED_LIBS': ['$(DEPTH)/prog/qux/qux.so',
|
||||
'$(DEPTH)/shared/baz.so'],
|
||||
'STATIC_LIBS': ['$(DEPTH)/real/foo.a'],
|
||||
'OS_LIBS': ['-lfoo', '-lbaz', '-lbar'],
|
||||
},
|
||||
'shared': {
|
||||
'OS_LIBS': ['-lfoo'],
|
||||
'SHARED_LIBS': ['$(DEPTH)/prog/qux/qux.so'],
|
||||
'STATIC_LIBS': [],
|
||||
},
|
||||
'static': {
|
||||
'STATIC_LIBS': ['$(DEPTH)/real/foo.a'],
|
||||
'OS_LIBS': ['-lbar'],
|
||||
'SHARED_LIBS': ['$(DEPTH)/prog/qux/qux.so'],
|
||||
},
|
||||
'real': {
|
||||
'STATIC_LIBS': [],
|
||||
'SHARED_LIBS': ['$(DEPTH)/prog/qux/qux.so'],
|
||||
'OS_LIBS': ['-lbaz'],
|
||||
}
|
||||
}
|
||||
actual_linkage = {}
|
||||
for name in expected_linkage.keys():
|
||||
with open(os.path.join(env.topobjdir, name, 'backend.mk'), 'rb') as fh:
|
||||
actual_linkage[name] = [line.rstrip() for line in fh.readlines()]
|
||||
for name in expected_linkage:
|
||||
for var in expected_linkage[name]:
|
||||
for val in expected_linkage[name][var]:
|
||||
line = '%s += %s' % (var, val)
|
||||
self.assertIn(line,
|
||||
actual_linkage[name])
|
||||
actual_linkage[name].remove(line)
|
||||
for line in actual_linkage[name]:
|
||||
self.assertNotIn('%s +=' % var, line)
|
||||
|
||||
def test_list_files(self):
|
||||
env = self._consume('linkage', RecursiveMakeBackend)
|
||||
expected_list_files = {
|
||||
'prog/MyProgram_exe.list': [
|
||||
'../static/bar/bar1.o',
|
||||
'../static/bar/bar2.o',
|
||||
'../static/bar/bar_helper/bar_helper1.o',
|
||||
],
|
||||
'shared/baz_so.list': [
|
||||
'baz/baz1.o',
|
||||
],
|
||||
}
|
||||
actual_list_files = {}
|
||||
for name in expected_list_files.keys():
|
||||
with open(os.path.join(env.topobjdir, name), 'rb') as fh:
|
||||
actual_list_files[name] = [mozpath.normsep(line.rstrip())
|
||||
for line in fh.readlines()]
|
||||
for name in expected_list_files:
|
||||
self.assertEqual(actual_list_files[name],
|
||||
expected_list_files[name])
|
||||
|
||||
# We don't produce a list file for a shared library composed only of
|
||||
# object files in its directory, but instead list them in a variable.
|
||||
with open(os.path.join(env.topobjdir, 'prog', 'qux', 'backend.mk'), 'rb') as fh:
|
||||
lines = [line.rstrip() for line in fh.readlines()]
|
||||
|
||||
self.assertIn('qux.so_OBJS := qux1.o', lines)
|
||||
|
||||
def test_jar_manifests(self):
|
||||
env = self._consume('jar-manifests', RecursiveMakeBackend)
|
||||
|
||||
|
|
|
@ -673,6 +673,10 @@ class TestEmitterBasic(unittest.TestCase):
|
|||
self.assertEqual(objs[4].program, 'test_program1.prog')
|
||||
self.assertEqual(objs[5].program, 'test_program2.prog')
|
||||
|
||||
self.assertEqual(objs[3].name, 'test_program.prog')
|
||||
self.assertEqual(objs[4].name, 'test_program1.prog')
|
||||
self.assertEqual(objs[5].name, 'test_program2.prog')
|
||||
|
||||
self.assertEqual(objs[4].objs,
|
||||
[mozpath.join(reader.config.topobjdir,
|
||||
'test_program1.%s' %
|
||||
|
@ -1200,9 +1204,15 @@ class TestEmitterBasic(unittest.TestCase):
|
|||
for obj in self.read_topsrcdir(reader):
|
||||
if isinstance(obj, SharedLibrary):
|
||||
if obj.basename == 'cxx_shared':
|
||||
self.assertEquals(obj.name, '%scxx_shared%s' %
|
||||
(reader.config.dll_prefix,
|
||||
reader.config.dll_suffix))
|
||||
self.assertTrue(obj.cxx_link)
|
||||
got_results += 1
|
||||
elif obj.basename == 'just_c_shared':
|
||||
self.assertEquals(obj.name, '%sjust_c_shared%s' %
|
||||
(reader.config.dll_prefix,
|
||||
reader.config.dll_suffix))
|
||||
self.assertFalse(obj.cxx_link)
|
||||
got_results += 1
|
||||
self.assertEqual(got_results, 2)
|
||||
|
|
|
@ -1163,4 +1163,4 @@ static const TransportSecurityPreload kPublicKeyPinningPreloadList[] = {
|
|||
|
||||
static const int32_t kUnknownId = -1;
|
||||
|
||||
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1530043637929000);
|
||||
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1530130018442000);
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -8,7 +8,7 @@
|
|||
/*****************************************************************************/
|
||||
|
||||
#include <stdint.h>
|
||||
const PRTime gPreloadListExpirationTime = INT64_C(1532462824852000);
|
||||
const PRTime gPreloadListExpirationTime = INT64_C(1532549205674000);
|
||||
%%
|
||||
0-1.party, 1
|
||||
0.me.uk, 1
|
||||
|
@ -152,6 +152,7 @@ const PRTime gPreloadListExpirationTime = INT64_C(1532462824852000);
|
|||
0xabe.io, 1
|
||||
0xacab.org, 1
|
||||
0xda.de, 1
|
||||
0xdc.io, 0
|
||||
0xdefaced.de, 1
|
||||
0xee.eu, 1
|
||||
0xf00.ch, 1
|
||||
|
@ -191,6 +192,7 @@ const PRTime gPreloadListExpirationTime = INT64_C(1532462824852000);
|
|||
10gbit.ovh, 1
|
||||
10hz.de, 1
|
||||
10og.de, 1
|
||||
10ppm.com, 1
|
||||
10v2.com, 1
|
||||
10x.ooo, 1
|
||||
1100.so, 1
|
||||
|
@ -557,7 +559,6 @@ const PRTime gPreloadListExpirationTime = INT64_C(1532462824852000);
|
|||
394922.com, 1
|
||||
396422.com, 1
|
||||
398.info, 1
|
||||
3ags.de, 1
|
||||
3bakayottu.com, 1
|
||||
3bigking.com, 1
|
||||
3c-d.de, 1
|
||||
|
@ -926,7 +927,7 @@ const PRTime gPreloadListExpirationTime = INT64_C(1532462824852000);
|
|||
7graus.pt, 1
|
||||
7kicks.com, 1
|
||||
7links.com.br, 1
|
||||
7nw.eu, 0
|
||||
7nw.eu, 1
|
||||
7proxies.com, 1
|
||||
7sons.de, 1
|
||||
7thcircledesigns.com, 1
|
||||
|
@ -2100,7 +2101,6 @@ alternativet.party, 1
|
|||
alterspalter.de, 1
|
||||
altesses.eu, 1
|
||||
altestore.com, 1
|
||||
altitudemoversdenver.com, 1
|
||||
altkremsmuensterer.at, 1
|
||||
altonblom.com, 1
|
||||
altopartners.com, 1
|
||||
|
@ -3713,7 +3713,6 @@ ballotapi.com, 1
|
|||
ballothero.com, 1
|
||||
ballroom.info, 1
|
||||
balonmano.co, 1
|
||||
bals.org, 1
|
||||
balslev.io, 1
|
||||
balticnetworks.com, 1
|
||||
bambambaby.com.br, 1
|
||||
|
@ -4533,7 +4532,6 @@ binarystud.io, 1
|
|||
binbin9.com, 1
|
||||
binbin9.net, 1
|
||||
binding-problem.com, 1
|
||||
binfind.com, 1
|
||||
bing.com, 1
|
||||
bingcheung.com, 1
|
||||
bingo-wear.com, 1
|
||||
|
@ -5133,6 +5131,7 @@ botlab.ch, 1
|
|||
botmanager.pl, 1
|
||||
botserver.de, 1
|
||||
bottaerisposta.net, 1
|
||||
bottineauneighborhood.org, 1
|
||||
bottke.berlin, 1
|
||||
bou.lt, 1
|
||||
bouah.net, 1
|
||||
|
@ -5751,6 +5750,7 @@ buturyu.net, 1
|
|||
buturyu.org, 1
|
||||
buxum-communication.ch, 1
|
||||
buy-thing.com, 1
|
||||
buyaccessible.gov, 1
|
||||
buybike.shop, 1
|
||||
buycarpet.shop, 1
|
||||
buycook.shop, 1
|
||||
|
@ -6292,7 +6292,6 @@ catfooddispensersreviews.com, 1
|
|||
catgirl.me, 1
|
||||
catgirl.pics, 1
|
||||
catharinesomerville.com, 1
|
||||
catharisme.eu, 1
|
||||
catharisme.net, 1
|
||||
catharisme.org, 1
|
||||
catherineidylle.com, 1
|
||||
|
@ -6306,7 +6305,6 @@ catnet.dk, 0
|
|||
catnmeow.com, 1
|
||||
catsmagic.pp.ua, 1
|
||||
cattivo.nl, 0
|
||||
catuniverse.org, 1
|
||||
catveteran.com, 1
|
||||
caughtredhanded.co.nz, 1
|
||||
caulfieldeastapartments.com.au, 1
|
||||
|
@ -7009,7 +7007,7 @@ cirugiasplasticas.com.mx, 1
|
|||
cirujanooral.com, 1
|
||||
cirurgicagervasio.com.br, 1
|
||||
cirurgicalucena.com.br, 1
|
||||
ciscodude.net, 1
|
||||
ciscodude.net, 0
|
||||
cisoaid.com, 1
|
||||
ciss.ltd, 1
|
||||
cisy.me, 1
|
||||
|
@ -7713,13 +7711,11 @@ consonare.de, 1
|
|||
constancechen.me, 1
|
||||
constant-rough.de, 1
|
||||
constares.de, 1
|
||||
construct-trust.com, 1
|
||||
constructionjobs.com, 1
|
||||
constructive.men, 1
|
||||
consul.io, 1
|
||||
consultcelerity.com, 1
|
||||
consultingroupitaly.com, 1
|
||||
consultorcr.net, 1
|
||||
consultpetkov.com, 1
|
||||
consumer.gov, 1
|
||||
consumeractionlawgroup.com, 1
|
||||
|
@ -7789,6 +7785,7 @@ coolpickz.com, 1
|
|||
coolprylar.se, 1
|
||||
coolrc.me, 1
|
||||
coolviewthermostat.com, 1
|
||||
coolvox.com, 1
|
||||
coopens.com, 1
|
||||
coore.jp, 1
|
||||
coorpacademy.com, 1
|
||||
|
@ -8183,7 +8180,6 @@ cryptoseb.pw, 1
|
|||
cryptoshot.pw, 1
|
||||
cryptract.co, 1
|
||||
crystalchandelierservices.com, 1
|
||||
crystalmachine.net, 0
|
||||
crystone.me, 1
|
||||
cryz.ru, 1
|
||||
cs-colorscreed-betongulve.dk, 1
|
||||
|
@ -8250,8 +8246,8 @@ cubecart-hosting.co.uk, 1
|
|||
cubecraft.net, 1
|
||||
cubecraftstore.com, 1
|
||||
cubecraftstore.net, 1
|
||||
cubekrowd.net, 1
|
||||
cubela.tech, 1
|
||||
cubia.de, 1
|
||||
cubia3.com, 1
|
||||
cubia4.com, 1
|
||||
cubile.xyz, 1
|
||||
|
@ -8371,6 +8367,7 @@ cwrcoding.com, 1
|
|||
cy.technology, 1
|
||||
cybbh.space, 1
|
||||
cyber-computer.club, 1
|
||||
cyber-konzept.de, 1
|
||||
cyber-perikarp.eu, 1
|
||||
cyber.cafe, 1
|
||||
cyberatlantis.com, 1
|
||||
|
@ -8380,6 +8377,7 @@ cybercrew.cc, 1
|
|||
cybercrime-forschung.de, 1
|
||||
cyberdos.de, 1
|
||||
cyberduck.io, 1
|
||||
cyberfrancais.ro, 1
|
||||
cybergrx.com, 1
|
||||
cyberguerrilla.info, 1
|
||||
cyberguerrilla.org, 1
|
||||
|
@ -8755,7 +8753,7 @@ datumou-recipe.com, 1
|
|||
daubecity.de, 1
|
||||
daubehosting.de, 1
|
||||
dave-pearce.com, 1
|
||||
daveaglick.com, 1
|
||||
daveaglick.com, 0
|
||||
davecardwell.com, 1
|
||||
davelynes.com, 1
|
||||
daveoc64.co.uk, 1
|
||||
|
@ -8844,6 +8842,7 @@ dc562.org, 1
|
|||
dc585.info, 1
|
||||
dcards.in.th, 1
|
||||
dcautomacao.com.br, 1
|
||||
dcc.cat, 1
|
||||
dcc.moe, 1
|
||||
dcepler.net, 1
|
||||
dchatelain.ch, 1
|
||||
|
@ -9281,7 +9280,6 @@ devzero.io, 1
|
|||
dewaard.de, 1
|
||||
dewalch.net, 1
|
||||
dewapress.com, 1
|
||||
dewebwerf.nl, 1
|
||||
dewinter.com, 1
|
||||
dexalo.de, 1
|
||||
dezeregio.nl, 1
|
||||
|
@ -9302,6 +9300,7 @@ dgblaw.com.au, 1
|
|||
dgbouncycastlehire.com, 1
|
||||
dgby.org, 1
|
||||
dgeex.eu, 1
|
||||
dggm.ru, 1
|
||||
dggwp.de, 1
|
||||
dgitup.com, 1
|
||||
dgportals.co.uk, 1
|
||||
|
@ -9924,7 +9923,6 @@ dotacni-parazit.cz, 1
|
|||
dotb.dn.ua, 0
|
||||
dotbigbang.com, 1
|
||||
dotbox.org, 1
|
||||
dotbrick.co.th, 1
|
||||
dotconnor.com, 1
|
||||
dotgov.gov, 1
|
||||
dothebangthingsalon.com, 1
|
||||
|
@ -10269,7 +10267,6 @@ dustygroove.com, 1
|
|||
dustyspokesbnb.ca, 1
|
||||
dutch.desi, 1
|
||||
dutch1.nl, 1
|
||||
dutchessuganda.com, 1
|
||||
dutchrank.nl, 1
|
||||
dutchwanderers.nl, 1
|
||||
dutchweballiance.nl, 1
|
||||
|
@ -10497,7 +10494,6 @@ echopaper.com, 1
|
|||
echosim.io, 1
|
||||
echosixmonkey.com, 1
|
||||
echosystem.fr, 1
|
||||
echoteam.gq, 1
|
||||
echoteen.com, 1
|
||||
echoworld.ch, 1
|
||||
ecirtam.net, 1
|
||||
|
@ -10605,7 +10601,6 @@ educators.co.nz, 1
|
|||
educatoys.com.br, 1
|
||||
educatweb.de, 1
|
||||
eductf.org, 1
|
||||
edudrugs.com, 1
|
||||
eduid.se, 1
|
||||
eduif.nl, 0
|
||||
edumundo.nl, 1
|
||||
|
@ -10876,7 +10871,7 @@ elisabeth-strunz.de, 1
|
|||
elisabethkostecki.de, 1
|
||||
elisabethrene.com, 1
|
||||
elisechristie.com, 1
|
||||
elistor6100.xyz, 0
|
||||
elistor6100.xyz, 1
|
||||
elite-box.com, 1
|
||||
elite-box.org, 1
|
||||
elite-porno.ru, 1
|
||||
|
@ -11464,7 +11459,6 @@ esbm4.net, 1
|
|||
esbm5.net, 1
|
||||
esc.chat, 1
|
||||
escael.org, 1
|
||||
escapees.com, 1
|
||||
escapeplaza.de, 1
|
||||
escapetalk.nl, 1
|
||||
escargotbistro.com, 1
|
||||
|
@ -11825,6 +11819,7 @@ exebouncycastles.co.uk, 1
|
|||
exehack.net, 1
|
||||
exeintel.com, 1
|
||||
exekutori.com, 1
|
||||
exembit.com, 1
|
||||
exemples-de-stands.com, 1
|
||||
exeria.de, 1
|
||||
exgaywatch.com, 1
|
||||
|
@ -11901,7 +11896,7 @@ exteriorservices.io, 1
|
|||
extradesktops.com, 0
|
||||
extranetpuc.com.br, 1
|
||||
extrapagetab.com, 1
|
||||
extratorrent.cool, 1
|
||||
extratorrent.cool, 0
|
||||
extratorrent.fyi, 1
|
||||
extratorrent.red, 1
|
||||
extratorrent.world, 1
|
||||
|
@ -11981,7 +11976,6 @@ facanabota.com, 1
|
|||
facanabota.com.br, 1
|
||||
facciadastile.it, 1
|
||||
facealacrise.fr, 1
|
||||
facebattle.com, 1
|
||||
facebook-atom.appspot.com, 1
|
||||
facebook.ax, 1
|
||||
facebook.com, 0
|
||||
|
@ -12059,6 +12053,7 @@ fallenangeldrinks.eu, 1
|
|||
fallenangelspirits.co.uk, 1
|
||||
fallenangelspirits.com, 1
|
||||
fallenspirits.co.uk, 1
|
||||
fallofthecitadel.com, 1
|
||||
falsum.net, 1
|
||||
fam-kreibich.de, 1
|
||||
fam-stemmer.de, 0
|
||||
|
@ -12259,7 +12254,6 @@ feel.aero, 1
|
|||
feelgood-workouts.de, 1
|
||||
feeltennis.net, 1
|
||||
feen.us, 1
|
||||
feeriedesign-event.com, 1
|
||||
feetpa.ws, 1
|
||||
fefelovalex.ru, 1
|
||||
fegli.gov, 1
|
||||
|
@ -12746,6 +12740,7 @@ fm.ie, 1
|
|||
fmapplication.com, 1
|
||||
fmarchal.fr, 1
|
||||
fmdance.cl, 1
|
||||
fmi.gov, 1
|
||||
fminsight.net, 1
|
||||
fmodoux.biz, 1
|
||||
fmovies.fyi, 1
|
||||
|
@ -12779,6 +12774,7 @@ fol.tf, 1
|
|||
foljeton.dk, 1
|
||||
folkadelic.de, 1
|
||||
folkfests.org, 1
|
||||
follandviolins.com, 1
|
||||
followerrocket.com, 1
|
||||
followersya.com, 1
|
||||
followthatpage.com, 1
|
||||
|
@ -12878,6 +12874,7 @@ forsyththeatre.com, 1
|
|||
forteggz.nl, 1
|
||||
fortesanshop.it, 1
|
||||
fortnine.ca, 1
|
||||
fortran.io, 1
|
||||
fortress.no, 1
|
||||
fortress.sk, 1
|
||||
fortricks.in, 1
|
||||
|
@ -13191,6 +13188,7 @@ fromthesoutherncross.com, 1
|
|||
fronteers.nl, 0
|
||||
frontline.cloud, 1
|
||||
frontline6.com, 1
|
||||
fropky.com, 1
|
||||
frosthall.com, 1
|
||||
frostprotection.co.uk, 1
|
||||
frostwarning.com, 1
|
||||
|
@ -13411,6 +13409,7 @@ fysuite.com, 1
|
|||
fzbrweb.cz, 1
|
||||
fzx750.ru, 1
|
||||
g-m-w.eu, 1
|
||||
g-marketing.ro, 1
|
||||
g-o.pl, 1
|
||||
g-rom.net, 1
|
||||
g01.in.ua, 1
|
||||
|
@ -13696,7 +13695,6 @@ gehrke.nrw, 1
|
|||
gehsicht.de, 1
|
||||
geigr.de, 1
|
||||
geiser-family.ch, 1
|
||||
geiser.io, 1
|
||||
gelb-computer.de, 1
|
||||
geld-im-blick.de, 1
|
||||
geld24.nl, 1
|
||||
|
@ -13731,7 +13729,6 @@ general-anaesthetics.com, 1
|
|||
general-anesthesia.com, 1
|
||||
generali-worldwide.com, 1
|
||||
generalinsuranceservices.com, 1
|
||||
generalpants.com.au, 1
|
||||
generationgoat.com, 1
|
||||
generationnext.pl, 1
|
||||
generationsweldom.com, 1
|
||||
|
@ -13987,6 +13984,7 @@ gillyscastles.co.uk, 1
|
|||
gilmoreid.com.au, 1
|
||||
gilnet.be, 1
|
||||
gina-architektur.design, 1
|
||||
ginie.de, 1
|
||||
ginionusedcars.be, 1
|
||||
ginja.co.th, 1
|
||||
ginkel.com, 1
|
||||
|
@ -14245,7 +14243,6 @@ gooddomainna.me, 1
|
|||
goodeats.nyc, 1
|
||||
goodenough.nz, 0
|
||||
goodfeels.net, 1
|
||||
goods-memo.net, 1
|
||||
goodsex4all.com.br, 1
|
||||
goodvibesblog.com, 1
|
||||
goodyearsotn.co.uk, 1
|
||||
|
@ -14637,6 +14634,7 @@ guidelines.gov, 1
|
|||
guideo.ch, 1
|
||||
guides-et-admin.com, 1
|
||||
guides-peche64.com, 1
|
||||
guidetoiceland.is, 0
|
||||
guildbase.de, 1
|
||||
guildgearscore.cf, 0
|
||||
guildofmusicsupervisors.co.uk, 1
|
||||
|
@ -14657,7 +14655,6 @@ gulshankumar.net, 1
|
|||
gumballs.com, 1
|
||||
gume4you.com, 1
|
||||
gumi.ca, 1
|
||||
gummibande.noip.me, 1
|
||||
gunhunter.com, 1
|
||||
guniram.com, 1
|
||||
gunwatch.co.uk, 1
|
||||
|
@ -15690,7 +15687,6 @@ hopconseils.ch, 1
|
|||
hopconseils.com, 1
|
||||
hope-line-earth.jp, 1
|
||||
hopesb.org, 1
|
||||
hopewellproperties.co.uk, 1
|
||||
hopglass.eu, 1
|
||||
hopglass.net, 1
|
||||
hoplongtech.com, 1
|
||||
|
@ -15705,7 +15701,6 @@ horeizai.net, 1
|
|||
horisonttimedia.fi, 1
|
||||
horizonhomes-samui.com, 1
|
||||
horizonlawncare.tk, 1
|
||||
horizonmoto.fr, 1
|
||||
horkel.cf, 1
|
||||
hornyforhanzo.com, 1
|
||||
horodance.dk, 1
|
||||
|
@ -16503,7 +16498,6 @@ imrunner.com, 1
|
|||
imrunner.ru, 1
|
||||
ims-sargans.ch, 1
|
||||
imscompany.com, 1
|
||||
imy.life, 1
|
||||
imydl.com, 1
|
||||
imydl.tech, 1
|
||||
imyvm.com, 1
|
||||
|
@ -16651,7 +16645,6 @@ ingber.com, 1
|
|||
inge-r.nl, 1
|
||||
ingeeibach.de, 1
|
||||
ingenium.si, 1
|
||||
ingenius.ws, 1
|
||||
ingerhy.com, 1
|
||||
ingi.ga, 1
|
||||
ingjobs.ch, 1
|
||||
|
@ -17647,8 +17640,8 @@ jessekaufman.com, 1
|
|||
jessesjumpingcastles.co.uk, 1
|
||||
jessevictors.com, 1
|
||||
jessgranger.com, 1
|
||||
jessicabenedictus.nl, 0
|
||||
jesters-court.net, 1
|
||||
jesuisadmin.fr, 1
|
||||
jet-stream.fr, 1
|
||||
jetapi.org, 1
|
||||
jetbbs.com, 1
|
||||
|
@ -20077,7 +20070,6 @@ lifeinsurancepro.org, 1
|
|||
lifeinsurances.pro, 1
|
||||
lifeinsurances24.com, 1
|
||||
lifekiss.ru, 1
|
||||
lifemarque.co.uk, 1
|
||||
lifematenutrition.com, 1
|
||||
lifemstyle.com, 1
|
||||
lifenexto.com, 1
|
||||
|
@ -20430,6 +20422,7 @@ lofttravel.com, 1
|
|||
log.my, 0
|
||||
logaldeveloper.com, 1
|
||||
loganmarchione.com, 1
|
||||
loganparkneighborhood.org, 1
|
||||
logbook.ch, 1
|
||||
logbot.info, 1
|
||||
logcat.info, 1
|
||||
|
@ -20739,7 +20732,6 @@ lukasztkacz.com, 1
|
|||
lukatz.de, 1
|
||||
luke.ch, 1
|
||||
lukeistschuld.de, 1
|
||||
lukeng.net, 1
|
||||
lukesbouncycastlehire.com, 1
|
||||
lukestebbing.com, 1
|
||||
lukmanulhakim.id, 1
|
||||
|
@ -20801,6 +20793,7 @@ luxusnyvoucher.sk, 1
|
|||
luxvacuos.net, 1
|
||||
luxwatch.com, 1
|
||||
luzat.com, 1
|
||||
luzfaltex.com, 1
|
||||
lv.search.yahoo.com, 0
|
||||
lv0.it, 1
|
||||
lv5.top, 1
|
||||
|
@ -21742,6 +21735,7 @@ medicinskavranje.edu.rs, 1
|
|||
medicocompetente.it, 1
|
||||
medicoresponde.com.br, 1
|
||||
medienweite.de, 1
|
||||
medifab.online, 1
|
||||
medifi.com, 1
|
||||
medigap-quote.net, 1
|
||||
medinside.ch, 1
|
||||
|
@ -21769,6 +21763,7 @@ medwaybouncycastlehire.co.uk, 1
|
|||
medy-me.com, 1
|
||||
medyotan.ga, 1
|
||||
meedoenhartvanwestbrabant.nl, 1
|
||||
meedoennoordkop.nl, 0
|
||||
meehle.com, 1
|
||||
meeko.cc, 1
|
||||
meerutcake.com, 1
|
||||
|
@ -21900,6 +21895,7 @@ menntagatt.is, 1
|
|||
menole.com, 1
|
||||
menole.de, 1
|
||||
menole.net, 1
|
||||
menotag.com, 1
|
||||
mensagemaniversario.com.br, 1
|
||||
mensagemdaluz.com, 1
|
||||
mensagensaniversario.com.br, 1
|
||||
|
@ -22295,6 +22291,7 @@ minorshadows.net, 1
|
|||
minpingvin.dk, 1
|
||||
minschuns.ch, 1
|
||||
mintclass.com, 1
|
||||
mintea-noua.ro, 1
|
||||
minto.cc, 1
|
||||
mintosherbs.com, 1
|
||||
mintrak2.com, 1
|
||||
|
@ -22896,7 +22893,6 @@ mstdn.blue, 1
|
|||
mstdn.club, 1
|
||||
mstdn.fr, 1
|
||||
mstdn.io, 1
|
||||
mstdn.nl, 1
|
||||
mstdn.onl, 0
|
||||
mstiles92.com, 1
|
||||
msuess.me, 1
|
||||
|
@ -24303,6 +24299,7 @@ northern-lakes.com, 1
|
|||
northernhamsterclub.com, 1
|
||||
northernmuscle.ca, 1
|
||||
northernselfstorage.co.za, 1
|
||||
northfieldyarn.com, 1
|
||||
northokanaganbookkeeping.com, 1
|
||||
northpole.dance, 1
|
||||
northridgeelectrical.com, 1
|
||||
|
@ -25270,7 +25267,6 @@ p.ki, 1
|
|||
p1984.nl, 0
|
||||
p1ratrulezzz.me, 1
|
||||
p22.co, 1
|
||||
p3ter.fr, 1
|
||||
p4chivtac.com, 1
|
||||
p5r.uk, 1
|
||||
pa-w.de, 1
|
||||
|
@ -25535,6 +25531,7 @@ partypearl.de, 1
|
|||
partyrocksbounce.co.uk, 1
|
||||
partyschnaps.com, 1
|
||||
partyspaces.co.uk, 1
|
||||
partyspecialists.com, 1
|
||||
partytime-uk.co.uk, 1
|
||||
partytimeltd.ie, 1
|
||||
partytownireland.co.uk, 1
|
||||
|
@ -25853,7 +25850,6 @@ penser-electronique.com, 1
|
|||
pensioenfonds-ey.nl, 1
|
||||
pension-veldzigt.nl, 1
|
||||
pension-waldesruh.de, 1
|
||||
pensionpilot.ca, 1
|
||||
pensiunealido.ro, 1
|
||||
pentandra.com, 1
|
||||
pentest.blog, 1
|
||||
|
@ -26344,6 +26340,7 @@ planetau2.com, 1
|
|||
planetbeauty.com, 1
|
||||
planetbreath.ch, 1
|
||||
planete-cocoon.com, 0
|
||||
planete-lira.fr, 1
|
||||
planete-secu.com, 1
|
||||
planeteroliste.com, 1
|
||||
planeteroliste.fr, 1
|
||||
|
@ -26458,7 +26455,6 @@ plushev.com, 1
|
|||
pluslink.co.jp, 1
|
||||
plussizereviews.com, 1
|
||||
plusstreamfeed.appspot.com, 1
|
||||
plustech.id, 1
|
||||
pluta.net, 1
|
||||
pluto.life, 1
|
||||
plutokorea.com, 1
|
||||
|
@ -27232,6 +27228,7 @@ psychoco.net, 1
|
|||
psychotherapie-kp.de, 1
|
||||
psydix.org, 1
|
||||
psyk.yt, 1
|
||||
psylab.cc, 1
|
||||
psylab.re, 1
|
||||
psylab.vip, 1
|
||||
psynapse.net.au, 1
|
||||
|
@ -27688,6 +27685,7 @@ randomdysfunctions.com, 1
|
|||
randomkoalafacts.com, 1
|
||||
randomprecision.co.uk, 1
|
||||
randomquotesapp.com, 1
|
||||
randy.su, 1
|
||||
rangde.org, 1
|
||||
rangsmo.se, 1
|
||||
rank-net.de, 1
|
||||
|
@ -28721,6 +28719,7 @@ royalrangers.fi, 1
|
|||
royalty-market.com, 1
|
||||
royalvisiongroup.com, 1
|
||||
royzez.com, 1
|
||||
rozalisbengal.ro, 1
|
||||
rozalynne-dawn.ga, 1
|
||||
rozhodce.cz, 1
|
||||
rpadovani.com, 1
|
||||
|
@ -28828,7 +28827,6 @@ ruhrnalist.de, 1
|
|||
ruht.ro, 1
|
||||
ruigomes.me, 1
|
||||
ruiming.me, 1
|
||||
ruja.dk, 1
|
||||
ruk.ca, 1
|
||||
rukhaiyar.com, 1
|
||||
rullzer.com, 1
|
||||
|
@ -29178,7 +29176,6 @@ santmark.org, 1
|
|||
santodomingocg.org, 1
|
||||
santojuken.co.jp, 1
|
||||
sanvitolocapobus.com, 1
|
||||
saoneth.pl, 1
|
||||
saorsat.com, 1
|
||||
saorsat.ie, 1
|
||||
saorview.com, 1
|
||||
|
@ -29233,7 +29230,6 @@ satimagingcorp.com, 1
|
|||
satinn.pl, 1
|
||||
sativatunja.com, 1
|
||||
satmd.de, 1
|
||||
satrent.com, 1
|
||||
saturn.pl, 1
|
||||
saudavel.com.vc, 1
|
||||
saudeealimentos.com, 1
|
||||
|
@ -29472,12 +29468,10 @@ scijinks.gov, 1
|
|||
scimage.com, 1
|
||||
scintilla.nl, 1
|
||||
scintillating.stream, 1
|
||||
scionasset.com, 1
|
||||
scis.com.ua, 1
|
||||
scistarter.com, 1
|
||||
scitopia.me, 1
|
||||
scitopia.net, 1
|
||||
sckc.stream, 1
|
||||
scm-2017.org, 1
|
||||
scoolcode.com, 1
|
||||
scoop6.co.uk, 1
|
||||
|
@ -29545,6 +29539,7 @@ scwilliams.uk, 1
|
|||
sd.af, 1
|
||||
sdcardrecovery.de, 1
|
||||
sdg-tracker.org, 1
|
||||
sdho.org, 1
|
||||
sdns.fr, 1
|
||||
sdsi.us, 1
|
||||
sdsmanagement.me, 1
|
||||
|
@ -30715,7 +30710,6 @@ slash64.uk, 1
|
|||
slashbits.no, 1
|
||||
slashcrypto.org, 1
|
||||
slatemc.fun, 1
|
||||
slatop.org, 0
|
||||
slaughter.com, 1
|
||||
slaughterhouse.fr, 1
|
||||
slavasveta.info, 1
|
||||
|
@ -30780,7 +30774,6 @@ smackhappy.com, 1
|
|||
smadav.ml, 1
|
||||
smallchat.nl, 1
|
||||
smalldata.tech, 1
|
||||
smalldogbreeds.net, 1
|
||||
smalle-voet.de, 1
|
||||
smallhadroncollider.com, 1
|
||||
smallpath.me, 1
|
||||
|
@ -31219,7 +31212,6 @@ sp.com.pl, 1
|
|||
sp.rw, 1
|
||||
space-it.de, 1
|
||||
spacebaseapp.com, 1
|
||||
spacecafe.org, 1
|
||||
spacedirectory.org, 1
|
||||
spacedots.net, 1
|
||||
spacehighway.ms, 1
|
||||
|
@ -31547,6 +31539,7 @@ stainedglass.net.au, 1
|
|||
stair.ch, 1
|
||||
stairfallgames.com, 1
|
||||
stairlin.com, 1
|
||||
staklim-malang.info, 1
|
||||
staktrace.com, 1
|
||||
stalder.work, 1
|
||||
stalker-shop.com, 1
|
||||
|
@ -32069,6 +32062,7 @@ summercampthailand.com, 1
|
|||
summershomes.com, 1
|
||||
sumthing.com, 1
|
||||
sun-leo.co.jp, 1
|
||||
sun-wellness-online.com.vn, 1
|
||||
sunboxstore.jp, 1
|
||||
sunbritetv.com, 1
|
||||
sunchasercats.com, 1
|
||||
|
@ -32409,6 +32403,7 @@ ta-sports.net, 1
|
|||
ta65.com, 1
|
||||
taabe.net, 1
|
||||
taartbesteld.nl, 1
|
||||
taartenfeesies.nl, 1
|
||||
tab.watch, 1
|
||||
tabarnak.ga, 1
|
||||
tabernadovinho.com.br, 1
|
||||
|
@ -33293,6 +33288,7 @@ thetrendspotter.net, 1
|
|||
thetruthhurvitz.com, 1
|
||||
thetuxkeeper.de, 0
|
||||
theunitedstates.io, 1
|
||||
theuucc.org, 1
|
||||
thevacweb.com, 1
|
||||
thevalentineconstitution.com, 1
|
||||
thevgg.com, 0
|
||||
|
@ -33618,6 +33614,7 @@ tkn.tokyo, 1
|
|||
tkts.cl, 1
|
||||
tkusano.jp, 1
|
||||
tkw01536.de, 1
|
||||
tlach.cz, 1
|
||||
tlca.org, 1
|
||||
tlcnet.info, 1
|
||||
tlehseasyads.com, 1
|
||||
|
@ -34303,7 +34300,6 @@ trybooking.com, 1
|
|||
tryfabulousdiet.com, 1
|
||||
tryfabulousskincream.com, 1
|
||||
tryfabulousskinserum.com, 1
|
||||
tryfm.net, 1
|
||||
trygarciniaslimdiet.com, 1
|
||||
tryhard.cz, 1
|
||||
trymegadrol.com, 1
|
||||
|
@ -34628,6 +34624,7 @@ uiberlay.cz, 1
|
|||
uicchy.com, 1
|
||||
uiop.link, 1
|
||||
uitgeverij-deviant.nl, 1
|
||||
uitslagensoftware.nl, 1
|
||||
ujob.com.cn, 1
|
||||
uk.dating, 1
|
||||
uk.search.yahoo.com, 0
|
||||
|
@ -35623,7 +35620,6 @@ volcain.io, 1
|
|||
volcanconcretos.com, 1
|
||||
volga.us, 1
|
||||
volgavibes.ru, 0
|
||||
voliere-info.nl, 0
|
||||
volker-gropp.de, 1
|
||||
volkergropp.de, 1
|
||||
volkerwesselstransfer.nl, 1
|
||||
|
@ -35780,6 +35776,7 @@ waelti.xxx, 1
|
|||
waf.ninja, 1
|
||||
waf.sexy, 1
|
||||
wafa4hw.com, 1
|
||||
wafairhaven.com.au, 1
|
||||
wafelland.be, 1
|
||||
waffle.at, 1
|
||||
wafni.com, 1
|
||||
|
@ -35809,7 +35806,6 @@ wakiminblog.com, 1
|
|||
wala-floor.de, 1
|
||||
walk.onl, 1
|
||||
walkhighlandsandislands.com, 1
|
||||
walkingforhealth.org.uk, 1
|
||||
walkingrehabilitation.com, 1
|
||||
walksedona.com, 1
|
||||
wallabies.org, 1
|
||||
|
@ -35883,7 +35879,6 @@ wasema.com, 1
|
|||
wasfestes.de, 1
|
||||
washingtonregisteredagent.io, 1
|
||||
washingtonviews.com, 1
|
||||
wasielewski.com.de, 1
|
||||
wasil.org, 1
|
||||
waslh.com, 1
|
||||
wasserburg.dk, 1
|
||||
|
@ -36038,6 +36033,7 @@ webliberty.ru, 1
|
|||
weblogic.pl, 1
|
||||
weblogzwolle.nl, 1
|
||||
webmail.gigahost.dk, 0
|
||||
webmail.info, 0
|
||||
webmail.onlime.ch, 0
|
||||
webmail.schokokeks.org, 0
|
||||
webmail.xalqbank.az, 1
|
||||
|
@ -36108,7 +36104,7 @@ webwednesday.nl, 1
|
|||
webwinkelwestland.nl, 1
|
||||
webwit.nl, 1
|
||||
webwolf.co.za, 1
|
||||
webworkshop.ltd, 1
|
||||
webworkshop.ltd, 0
|
||||
webyazilimankara.com, 1
|
||||
webzanem.com, 1
|
||||
wecanvisit.com, 1
|
||||
|
@ -36360,6 +36356,7 @@ whisperinghoperanch.org, 1
|
|||
whisperlab.org, 1
|
||||
whistleb.com, 1
|
||||
whistleblower.gov, 1
|
||||
whistler-transfers.com, 1
|
||||
whitby-brewery.com, 1
|
||||
whitealps.at, 1
|
||||
whitealps.be, 1
|
||||
|
@ -37308,7 +37305,6 @@ xrockx.de, 1
|
|||
xroot.org, 1
|
||||
xs2a.no, 1
|
||||
xscancun.com, 1
|
||||
xscapers.com, 1
|
||||
xsec.me, 1
|
||||
xsmobile.de, 1
|
||||
xss.ht, 1
|
||||
|
@ -37398,7 +37394,7 @@ yamadaya.tv, 1
|
|||
yamaken.jp, 1
|
||||
yamashita-clinic.org, 1
|
||||
yame2.com, 1
|
||||
yamm.io, 1
|
||||
yamm.io, 0
|
||||
yanaduday.com, 1
|
||||
yandere.moe, 1
|
||||
yangjingwen.cn, 1
|
||||
|
@ -38049,7 +38045,6 @@ zorium.org, 1
|
|||
zorki.nl, 1
|
||||
zorntt.fr, 1
|
||||
zorz.info, 1
|
||||
zotero.org, 1
|
||||
zouk.info, 1
|
||||
zouyaoji.top, 1
|
||||
zravypapir.cz, 1
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
# %1: the user name (Ed), %2: the app name (Firefox), %3: the operating system (Android)
|
||||
client.name2 = %1$S’s %2$S on %3$S
|
||||
|
||||
# %S is the date and time at which the last sync successfully completed
|
||||
# %S is the relative time at which the last sync successfully completed (e.g. 5 min. ago)
|
||||
lastSync2.label = Last sync: %S
|
||||
|
||||
# signInToSync.description is the tooltip for the Sync buttons when Sync is
|
||||
|
|
|
@ -986,7 +986,7 @@ dependencies = [
|
|||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"malloc_size_of 0.0.1",
|
||||
"nsstring 0.1.0",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"selectors 0.19.0",
|
||||
"servo_arc 0.1.1",
|
||||
"smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -1406,7 +1406,7 @@ dependencies = [
|
|||
"msg 0.0.1",
|
||||
"net_traits 0.0.1",
|
||||
"ordered-float 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"profile_traits 0.0.1",
|
||||
"range 0.0.1",
|
||||
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -1451,7 +1451,7 @@ dependencies = [
|
|||
"metrics 0.0.1",
|
||||
"msg 0.0.1",
|
||||
"net_traits 0.0.1",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"profile_traits 0.0.1",
|
||||
"range 0.0.1",
|
||||
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -2123,23 +2123,22 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.4.8"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.2.7"
|
||||
version = "0.2.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2499,7 +2498,7 @@ dependencies = [
|
|||
"net_traits 0.0.1",
|
||||
"num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"offscreen_gl_context 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3004,7 +3003,7 @@ dependencies = [
|
|||
"num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ordered-float 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3047,7 +3046,7 @@ dependencies = [
|
|||
"cssparser 0.23.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"euclid 0.17.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"html5ever 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"selectors 0.19.0",
|
||||
"serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3922,8 +3921,8 @@ dependencies = [
|
|||
"checksum osmesa-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88cfece6e95d2e717e0872a7f53a8684712ad13822a7979bc760b9c77ec0013b"
|
||||
"checksum ovr-mobile-sys 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a69b517feac6fc640f0679625defa0998bbcb32871a6901e63063c2abf9c4cbe"
|
||||
"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
|
||||
"checksum parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "149d8f5b97f3c1133e3cfcd8886449959e856b557ff281e292b733d7c69e005e"
|
||||
"checksum parking_lot_core 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6c677d78851950b3aec390e681a411f78cc250cba277d4f578758a377f727970"
|
||||
"checksum parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9fd9d732f2de194336fb02fe11f9eed13d9e76f13f4315b4d88a14ca411750cd"
|
||||
"checksum parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "538ef00b7317875071d5e00f603f24d16f0b474c1a5fc0ccb8b454ca72eafa79"
|
||||
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
|
||||
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
|
||||
|
|
|
@ -28,7 +28,7 @@ malloc_size_of = { path = "../malloc_size_of" }
|
|||
msg = {path = "../msg"}
|
||||
net_traits = {path = "../net_traits"}
|
||||
ordered-float = "0.4"
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
profile_traits = {path = "../profile_traits"}
|
||||
range = {path = "../range"}
|
||||
rayon = "1"
|
||||
|
|
|
@ -30,7 +30,7 @@ malloc_size_of = { path = "../malloc_size_of" }
|
|||
metrics = {path = "../metrics"}
|
||||
msg = {path = "../msg"}
|
||||
net_traits = {path = "../net_traits"}
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
profile_traits = {path = "../profile_traits"}
|
||||
range = {path = "../range"}
|
||||
rayon = "1"
|
||||
|
|
|
@ -67,7 +67,7 @@ msg = {path = "../msg"}
|
|||
net_traits = {path = "../net_traits"}
|
||||
num-traits = "0.1.32"
|
||||
offscreen_gl_context = { version = "0.15", features = ["serde"] }
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
phf = "0.7.18"
|
||||
profile_traits = {path = "../profile_traits"}
|
||||
ref_filter_map = "1.0.1"
|
||||
|
|
|
@ -52,7 +52,7 @@ num-integer = "0.1.32"
|
|||
num-traits = "0.1.32"
|
||||
ordered-float = "0.4"
|
||||
owning_ref = "0.3.3"
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
precomputed-hash = "0.1.1"
|
||||
rayon = "1"
|
||||
selectors = { path = "../selectors" }
|
||||
|
|
|
@ -22,7 +22,7 @@ libc = "0.2"
|
|||
log = {version = "0.3.5", features = ["release_max_level_info"]}
|
||||
malloc_size_of = {path = "../../components/malloc_size_of"}
|
||||
nsstring = {path = "../../support/gecko/nsstring"}
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
selectors = {path = "../../components/selectors"}
|
||||
servo_arc = {path = "../../components/servo_arc"}
|
||||
smallvec = "0.6"
|
||||
|
|
|
@ -15,7 +15,7 @@ app_units = "0.6"
|
|||
cssparser = "0.23.0"
|
||||
euclid = "0.17"
|
||||
html5ever = "0.22"
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
rayon = "1"
|
||||
serde_json = "1.0"
|
||||
selectors = {path = "../../../components/selectors"}
|
||||
|
|
|
@ -441,7 +441,8 @@ element.findByXPathAll = function* (document, startNode, expression) {
|
|||
* Sequence of link elements which text is <var>s</var>.
|
||||
*/
|
||||
element.findByLinkText = function(startNode, linkText) {
|
||||
return filterLinks(startNode, link => link.text.trim() === linkText);
|
||||
return filterLinks(startNode,
|
||||
link => atom.getElementText(link).trim() === linkText);
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -458,7 +459,8 @@ element.findByLinkText = function(startNode, linkText) {
|
|||
* <var>linkText</var>.
|
||||
*/
|
||||
element.findByPartialLinkText = function(startNode, linkText) {
|
||||
return filterLinks(startNode, link => link.text.includes(linkText));
|
||||
return filterLinks(startNode,
|
||||
link => atom.getElementText(link).includes(linkText));
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -551,7 +553,7 @@ function findElement(strategy, selector, document, startNode = undefined) {
|
|||
|
||||
case element.Strategy.LinkText:
|
||||
for (let link of startNode.getElementsByTagName("a")) {
|
||||
if (link.text.trim() === selector) {
|
||||
if (atom.getElementText(link).trim() === selector) {
|
||||
return link;
|
||||
}
|
||||
}
|
||||
|
@ -559,7 +561,7 @@ function findElement(strategy, selector, document, startNode = undefined) {
|
|||
|
||||
case element.Strategy.PartialLinkText:
|
||||
for (let link of startNode.getElementsByTagName("a")) {
|
||||
if (link.text.includes(selector)) {
|
||||
if (atom.getElementText(link).includes(selector)) {
|
||||
return link;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,9 +42,6 @@ class_html = inline("<p class='foo bar'>", doctype="html")
|
|||
class_xhtml = inline('<p class="foo bar"></p>', doctype="xhtml")
|
||||
name_html = inline("<p name=foo>", doctype="html")
|
||||
name_xhtml = inline('<p name="foo"></p>', doctype="xhtml")
|
||||
link_html = inline("<p><a href=#>foo bar</a>", doctype="html")
|
||||
link_html_with_trailing_space = inline("<p><a href=#>a link with a trailing space </a>")
|
||||
link_xhtml = inline('<p><a href="#">foo bar</a></p>', doctype="xhtml")
|
||||
|
||||
|
||||
class TestFindElementHTML(MarionetteTestCase):
|
||||
|
@ -100,27 +97,6 @@ class TestFindElementHTML(MarionetteTestCase):
|
|||
with self.assertRaises(InvalidSelectorException):
|
||||
self.marionette.find_element(By.CSS_SELECTOR, "#")
|
||||
|
||||
def test_link_text(self):
|
||||
self.marionette.navigate(link_html)
|
||||
el = self.marionette.execute_script("return document.querySelector('a')")
|
||||
found = self.marionette.find_element(By.LINK_TEXT, "foo bar")
|
||||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_link_text_with_trailing_space(self):
|
||||
self.marionette.navigate(link_html_with_trailing_space)
|
||||
el = self.marionette.execute_script("return document.querySelector('a')")
|
||||
found = self.marionette.find_element(By.LINK_TEXT, "a link with a trailing space")
|
||||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_partial_link_text(self):
|
||||
self.marionette.navigate(link_html)
|
||||
el = self.marionette.execute_script("return document.querySelector('a')")
|
||||
found = self.marionette.find_element(By.PARTIAL_LINK_TEXT, "foo")
|
||||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_xpath(self):
|
||||
self.marionette.navigate(id_html)
|
||||
el = self.marionette.execute_script("return document.querySelector('#foo')")
|
||||
|
@ -264,20 +240,6 @@ class TestFindElementXHTML(MarionetteTestCase):
|
|||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_link_text(self):
|
||||
self.marionette.navigate(link_xhtml)
|
||||
el = self.marionette.execute_script("return document.querySelector('a')")
|
||||
found = self.marionette.find_element(By.LINK_TEXT, "foo bar")
|
||||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_partial_link_text(self):
|
||||
self.marionette.navigate(link_xhtml)
|
||||
el = self.marionette.execute_script("return document.querySelector('a')")
|
||||
found = self.marionette.find_element(By.PARTIAL_LINK_TEXT, "foo")
|
||||
self.assertIsInstance(found, HTMLElement)
|
||||
self.assertEqual(el, found)
|
||||
|
||||
def test_xpath(self):
|
||||
self.marionette.navigate(id_xhtml)
|
||||
el = self.marionette.execute_script("return document.querySelector('#foo')")
|
||||
|
@ -347,28 +309,6 @@ class TestFindElementsHTML(MarionetteTestCase):
|
|||
with self.assertRaises(InvalidSelectorException):
|
||||
self.marionette.find_elements(By.CSS_SELECTOR, "#")
|
||||
|
||||
def test_link_text(self):
|
||||
self.marionette.navigate(link_html)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('a')")
|
||||
found = self.marionette.find_elements(By.LINK_TEXT, "foo bar")
|
||||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
def test_link_text_with_trailing_space(self):
|
||||
self.marionette.navigate(link_html_with_trailing_space)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('a')")
|
||||
found = self.marionette.find_elements(By.LINK_TEXT, "a link with a trailing space")
|
||||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
|
||||
def test_partial_link_text(self):
|
||||
self.marionette.navigate(link_html)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('a')")
|
||||
found = self.marionette.find_elements(By.PARTIAL_LINK_TEXT, "foo")
|
||||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
def test_xpath(self):
|
||||
self.marionette.navigate(children_html)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('p')")
|
||||
|
@ -455,20 +395,6 @@ class TestFindElementsXHTML(MarionetteTestCase):
|
|||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
def test_link_text(self):
|
||||
self.marionette.navigate(link_xhtml)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('a')")
|
||||
found = self.marionette.find_elements(By.LINK_TEXT, "foo bar")
|
||||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
def test_partial_link_text(self):
|
||||
self.marionette.navigate(link_xhtml)
|
||||
els = self.marionette.execute_script("return document.querySelectorAll('a')")
|
||||
found = self.marionette.find_elements(By.PARTIAL_LINK_TEXT, "foo")
|
||||
self.assertItemsIsInstance(found, HTMLElement)
|
||||
self.assertSequenceEqual(els, found)
|
||||
|
||||
@skip("XHTML namespace not yet supported")
|
||||
def test_xpath(self):
|
||||
self.marionette.navigate(children_xhtml)
|
||||
|
|
|
@ -500894,27 +500894,27 @@
|
|||
"testharness"
|
||||
],
|
||||
"css/css-scoping/shadow-disabled-sheet-001.html": [
|
||||
"a3ddf0fe7fa422f5e712b8771669be76b0e3a798",
|
||||
"3de2d23c1b3339b964ec2c009832a3207a3b9dc4",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-fallback-dynamic-001.html": [
|
||||
"062c99df18077a0205d0170d641b1d1e61199657",
|
||||
"741cd9e29067a4634aa5beb6bd06afa540895d22",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-fallback-dynamic-002.html": [
|
||||
"2f66c8bca48c2ce5c9e82c5d67b152e2d143f4c6",
|
||||
"e9a0d8178387901404030dde1b7ae7b2842f2eca",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-fallback-dynamic-003.html": [
|
||||
"f054b0974277fbee38a96a26559c9a15400266db",
|
||||
"0b75fadbaee366349576e2d6f3ca8d6a49069f66",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-fallback-dynamic-004.html": [
|
||||
"fc33527eaaa7711ecb2c7cd9523e793bce2503f2",
|
||||
"71dcc60c4ff59690927c1575fff2eecf85ee558f",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-fallback-dynamic-005.html": [
|
||||
"46d78b6d6931505bbc4bfc2f83e2bd0bac0d3472",
|
||||
"ab3c3d205e59df800ba5b4217245b83685521c31",
|
||||
"reftest"
|
||||
],
|
||||
"css/css-scoping/shadow-root-insert-into-document.html": [
|
||||
|
@ -593514,19 +593514,19 @@
|
|||
"support"
|
||||
],
|
||||
"webdriver/tests/element_retrieval/find_element.py": [
|
||||
"699b97bd31eed625e2f0bed145aaf94c3e646853",
|
||||
"8c9ed3ac2169f4b65a2e172a6e77b9586d83afa2",
|
||||
"wdspec"
|
||||
],
|
||||
"webdriver/tests/element_retrieval/find_element_from_element.py": [
|
||||
"34f356f2579391289edb31adf5b4d4eb88ffc643",
|
||||
"ecb74fc793a35481e2aa2456544b8b8f32d08546",
|
||||
"wdspec"
|
||||
],
|
||||
"webdriver/tests/element_retrieval/find_elements.py": [
|
||||
"284ae53c5c94d02fb46b26dcd70af02d7917e7b4",
|
||||
"fa8c2de1ffd12432bf14368b9c72000567ab8d20",
|
||||
"wdspec"
|
||||
],
|
||||
"webdriver/tests/element_retrieval/find_elements_from_element.py": [
|
||||
"b062b9f044268f0d9e092def81afae1277a91cd8",
|
||||
"6099bab098d03c0ac17134014b0acaf0b977731a",
|
||||
"wdspec"
|
||||
],
|
||||
"webdriver/tests/element_retrieval/get_active_element.py": [
|
||||
|
@ -594658,7 +594658,7 @@
|
|||
"testharness"
|
||||
],
|
||||
"websockets/Create-on-worker-shutdown.html": [
|
||||
"75112264efdc3b310f4ba2ab4517b7608aacf2f2",
|
||||
"213d7b23d154b930cc20985c2a86509fcdc09a9a",
|
||||
"testharness"
|
||||
],
|
||||
"websockets/Create-protocol-with-space.htm": [
|
||||
|
|
|
@ -51,6 +51,39 @@ def test_find_element(session, using, value):
|
|||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>link text</a>", "link text"),
|
||||
("<a href=#> link text </a>", "link text"),
|
||||
("<a href=#>link<br>text</a>", "link\ntext"),
|
||||
("<a href=#>link&text</a>", "link&text"),
|
||||
("<a href=#>LINK TEXT</a>", "LINK TEXT"),
|
||||
("<a href=# style='text-transform: uppercase'>link text</a>", "LINK TEXT"),
|
||||
])
|
||||
def test_find_element_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline(document)
|
||||
|
||||
response = find_element(session, "link text", value)
|
||||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>partial link text</a>", "link"),
|
||||
("<a href=#> partial link text </a>", "link"),
|
||||
("<a href=#>partial link text</a>", "k t"),
|
||||
("<a href=#>partial link<br>text</a>", "k\nt"),
|
||||
("<a href=#>partial link&text</a>", "k&t"),
|
||||
("<a href=#>PARTIAL LINK TEXT</a>", "LINK"),
|
||||
("<a href=# style='text-transform: uppercase'>partial link text</a>", "LINK"),
|
||||
])
|
||||
def test_find_element_partial_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline(document)
|
||||
|
||||
response = find_element(session, "partial link text", value)
|
||||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("using,value", [("css selector", "#wontExist")])
|
||||
def test_no_element(session, using, value):
|
||||
# Step 8 - 9
|
||||
|
@ -65,7 +98,8 @@ def test_no_element(session, using, value):
|
|||
("tag name", "a"),
|
||||
("xpath", "//*[name()='a']")])
|
||||
def test_xhtml_namespace(session, using, value):
|
||||
session.url = inline("""<a href="#" id="linkText">full link text</a>""", doctype="xhtml")
|
||||
session.url = inline("""<a href="#" id="linkText">full link text</a>""",
|
||||
doctype="xhtml")
|
||||
expected = session.execute_script("return document.links[0]")
|
||||
|
||||
response = find_element(session, using, value)
|
||||
|
|
|
@ -31,7 +31,6 @@ def test_closed_context(session, create_window):
|
|||
session.close()
|
||||
|
||||
response = find_element(session, "notReal", "css selector", "foo")
|
||||
|
||||
assert_error(response, "no such window")
|
||||
|
||||
|
||||
|
@ -49,6 +48,41 @@ def test_find_element(session, using, value):
|
|||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>link text</a>", "link text"),
|
||||
("<a href=#> link text </a>", "link text"),
|
||||
("<a href=#>link<br>text</a>", "link\ntext"),
|
||||
("<a href=#>link&text</a>", "link&text"),
|
||||
("<a href=#>LINK TEXT</a>", "LINK TEXT"),
|
||||
("<a href=# style='text-transform: uppercase'>link text</a>", "LINK TEXT"),
|
||||
])
|
||||
def test_find_element_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<div>{0}</div>".format(document))
|
||||
element = session.find.css("div", all=False)
|
||||
|
||||
response = find_element(session, element.id, "link text", value)
|
||||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>partial link text</a>", "link"),
|
||||
("<a href=#> partial link text </a>", "link"),
|
||||
("<a href=#>partial link text</a>", "k t"),
|
||||
("<a href=#>partial link<br>text</a>", "k\nt"),
|
||||
("<a href=#>partial link&text</a>", "k&t"),
|
||||
("<a href=#>PARTIAL LINK TEXT</a>", "LINK"),
|
||||
("<a href=# style='text-transform: uppercase'>partial link text</a>", "LINK"),
|
||||
])
|
||||
def test_find_element_partial_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<div>{0}</div>".format(document))
|
||||
element = session.find.css("div", all=False)
|
||||
|
||||
response = find_element(session, element.id, "partial link text", value)
|
||||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("using,value",[("css selector", "#wontExist")])
|
||||
def test_no_element(session, using, value):
|
||||
# Step 8 - 9
|
||||
|
@ -65,7 +99,8 @@ def test_no_element(session, using, value):
|
|||
("tag name", "a"),
|
||||
("xpath", "//*[name()='a']")])
|
||||
def test_xhtml_namespace(session, using, value):
|
||||
session.url = inline("""<p><a href="#" id="linkText">full link text</a></p>""", doctype="xhtml")
|
||||
session.url = inline("""<p><a href="#" id="linkText">full link text</a></p>""",
|
||||
doctype="xhtml")
|
||||
from_element = session.execute_script("""return document.querySelector("p")""")
|
||||
expected = session.execute_script("return document.links[0]")
|
||||
|
||||
|
|
|
@ -30,10 +30,7 @@ def test_closed_context(session, create_window):
|
|||
session.window_handle = new_window
|
||||
session.close()
|
||||
|
||||
response = session.transport.send("POST",
|
||||
"session/%s/elements" % session.session_id,
|
||||
{"using": "css selector", "value": "foo"})
|
||||
|
||||
response = find_elements(session, "css selector", "foo")
|
||||
assert_error(response, "no such window")
|
||||
|
||||
|
||||
|
@ -52,6 +49,51 @@ def test_find_elements(session, using, value):
|
|||
assert len(response.body["value"]) == 1
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>link text</a>", "link text"),
|
||||
("<a href=#> link text </a>", "link text"),
|
||||
("<a href=#>link<br>text</a>", "link\ntext"),
|
||||
("<a href=#>link&text</a>", "link&text"),
|
||||
("<a href=#>LINK TEXT</a>", "LINK TEXT"),
|
||||
("<a href=# style='text-transform: uppercase'>link text</a>", "LINK TEXT"),
|
||||
])
|
||||
def test_find_elements_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<a href=#>not wanted</a><br/>{0}".format(document))
|
||||
expected = session.execute_script("return document.links[1];")
|
||||
|
||||
response = find_elements(session, "link text", value)
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
found_element = value[0]
|
||||
assert_same_element(session, found_element, expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>partial link text</a>", "link"),
|
||||
("<a href=#> partial link text </a>", "link"),
|
||||
("<a href=#>partial link text</a>", "k t"),
|
||||
("<a href=#>partial link<br>text</a>", "k\nt"),
|
||||
("<a href=#>partial link&text</a>", "k&t"),
|
||||
("<a href=#>PARTIAL LINK TEXT</a>", "LINK"),
|
||||
("<a href=# style='text-transform: uppercase'>partial link text</a>", "LINK"),
|
||||
])
|
||||
def test_find_elements_partial_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<a href=#>not wanted</a><br/>{0}".format(document))
|
||||
expected = session.execute_script("return document.links[1];")
|
||||
|
||||
response = find_elements(session, "partial link text", value)
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
found_element = value[0]
|
||||
assert_same_element(session, found_element, expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("using,value", [("css selector", "#wontExist")])
|
||||
def test_no_element(session, using, value):
|
||||
# Step 8 - 9
|
||||
|
@ -67,8 +109,9 @@ def test_no_element(session, using, value):
|
|||
("tag name", "a"),
|
||||
("xpath", "//*[name()='a']")])
|
||||
def test_xhtml_namespace(session, using, value):
|
||||
session.url = inline("""<p><a href="#" id="linkText">full link text</a></p>""", doctype="xhtml")
|
||||
expected = session.execute_script("return document.links[0]")
|
||||
session.url = inline("""<a href="#" id="linkText">full link text</a>""",
|
||||
doctype="xhtml")
|
||||
expected = session.execute_script("return document.links[0];")
|
||||
|
||||
response = find_elements(session, using, value)
|
||||
value = assert_success(response)
|
||||
|
|
|
@ -48,6 +48,53 @@ def test_find_elements(session, using, value):
|
|||
assert_success(response)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>link text</a>", "link text"),
|
||||
("<a href=#> link text </a>", "link text"),
|
||||
("<a href=#>link<br>text</a>", "link\ntext"),
|
||||
("<a href=#>link&text</a>", "link&text"),
|
||||
("<a href=#>LINK TEXT</a>", "LINK TEXT"),
|
||||
("<a href=# style='text-transform: uppercase'>link text</a>", "LINK TEXT"),
|
||||
])
|
||||
def test_find_elements_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<div><a href=#>not wanted</a><br/>{0}</div>".format(document))
|
||||
element = session.find.css("div", all=False)
|
||||
expected = session.execute_script("return document.links[1];")
|
||||
|
||||
response = find_elements(session, element.id, "link text", value)
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
found_element = value[0]
|
||||
assert_same_element(session, found_element, expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("document,value", [
|
||||
("<a href=#>partial link text</a>", "link"),
|
||||
("<a href=#> partial link text </a>", "link"),
|
||||
("<a href=#>partial link text</a>", "k t"),
|
||||
("<a href=#>partial link<br>text</a>", "k\nt"),
|
||||
("<a href=#>partial link&text</a>", "k&t"),
|
||||
("<a href=#>PARTIAL LINK TEXT</a>", "LINK"),
|
||||
("<a href=# style='text-transform: uppercase'>partial link text</a>", "LINK"),
|
||||
])
|
||||
def test_find_elements_partial_link_text(session, document, value):
|
||||
# Step 8 - 9
|
||||
session.url = inline("<div><a href=#>not wanted</a><br/>{0}</div>".format(document))
|
||||
element = session.find.css("div", all=False)
|
||||
expected = session.execute_script("return document.links[1];")
|
||||
|
||||
response = find_elements(session, element.id, "partial link text", value)
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
found_element = value[0]
|
||||
assert_same_element(session, found_element, expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("using,value", [("css selector", "#wontExist")])
|
||||
def test_no_element(session, using, value):
|
||||
# Step 8 - 9
|
||||
|
@ -64,7 +111,8 @@ def test_no_element(session, using, value):
|
|||
("tag name", "a"),
|
||||
("xpath", "//*[name()='a']")])
|
||||
def test_xhtml_namespace(session, using, value):
|
||||
session.url = inline("""<p><a href="#" id="linkText">full link text</a></p>""", doctype="xhtml")
|
||||
session.url = inline("""<p><a href="#" id="linkText">full link text</a></p>""",
|
||||
doctype="xhtml")
|
||||
from_element = session.execute_script("""return document.querySelector("p")""")
|
||||
expected = session.execute_script("return document.links[0]")
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{".travis.yml":"8e424960f1e47843f45cae205873e9590e4317b5b2316090f9f94cf2f5d704e8","Cargo.toml":"a31940ea072ae30f6df4c28f4fcbae206929a9e7e8adf19956dd9ed75fa7e75d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"0c248175303f7dc19ce2cb30882c950a55a49da6b8c765c5ba49feb3e6eb7553","appveyor.yml":"cc608360622923f6f693cd68b4d7c1f64daa55f6b38f0df90f270825c6c276bc","src/condvar.rs":"962a3838e95cb664b261a44f536b003a284fe7bfdcb94a80c9a07c7679cae3dd","src/elision.rs":"0fef04d2991afeabafb041e81afeec74e89095d0eca49e5516bdcd5bc90c086f","src/lib.rs":"50951210148941266ce3a7d4017c339f8ad4419a9a8db6f915023890ed27d638","src/mutex.rs":"59cd61dd8deeaacceabd05e15b7fd6d2942e3f6c3c592221898d84a2ca804a6e","src/once.rs":"eada2e82bd8dcb9ed68d4fb2d9f8c336878eeb122f0bf8dde3feb2d77adfb598","src/raw_mutex.rs":"225cbf0ef951be062866da674e5eea8245fcc43ecd8a26da7097dea03b770bf5","src/raw_remutex.rs":"6c6d2aa83abe8f45db04de0efc04c70564cd0c55b6655da8ef4afe841c0add95","src/raw_rwlock.rs":"a7aebf70b8f7a43f96136388be1a54e5ca5b565c9da623f23434c99fb4c0b147","src/remutex.rs":"7f1640fa5a6eb43b592db47d9afa63904895030d246708ec8eac413dc8093514","src/rwlock.rs":"87d648c5fcccda784da165801b888a3592b6a85ddb605c1df3ae0e881dd22417","src/stable.rs":"cc18c58404dc6017924d88fb9f4ed94e5320b8cb0a36985162b23130b8cd7480","src/util.rs":"2d07c0c010a857790ae2ed6a1215eeed8af76859e076797ea1ba8dec82169e84"},"package":"37f364e2ce5efa24c7d0b6646d5bb61145551a0112f107ffd7499f1a3e322fbd"}
|
||||
{"files":{".travis.yml":"04d3d7425ce24e59d25df35da9c54f3ccd429c62ed8c9cf37b5ed2757afe96f1","Cargo.toml":"9e6a70c63617696e07a9130c27a80203180c1f240eb4ebdddde4429570da0c63","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"9d1e4237f1063e54aca1f65fc00a87ad53f75fcc73d054f8dd139f62f4a0b15e","appveyor.yml":"cfa9c3ae2476c879fe4240c306d45de6c2c04025212d8217fa76690888117594","src/condvar.rs":"1a3de60460e832d7ff76a82d5dac3f387fe2255e6a8ad4a686fe37f134c088c7","src/deadlock.rs":"82de990ef5966c852f8019b511e3c60471b562e56fd7ed0ca340399968b44a2d","src/elision.rs":"89072fe0aca87d53abc0f56490ae77bcf9d77e28e291bd13e861b1924bbb079f","src/lib.rs":"02d5716f4f43c2598afa57234e53d1a4c5db4f91ede937a226ee34eabbdc4da5","src/mutex.rs":"d8f557d40c3aab3e36f81961db9eb32831580a3a6a4b2a59674cafe6621e4772","src/once.rs":"1f408083854f918e896fdba8a9ecf25ae79ee06613d8daec75b800fb78dfd3a8","src/raw_mutex.rs":"f98ddd76e1491bc239b7c24e94f3f6a94ae0f5828873e78e1245ef19621a257b","src/raw_remutex.rs":"86e1e339567c12f91e3274ca3126c4af004fd30dff88a6cd261fc67680e33798","src/raw_rwlock.rs":"d3c71098df5e8b22cdfd7f8d7c3f287951d0bac1ac9ede83a94f809576ed9d41","src/remutex.rs":"d73f4a0f22f4a5e8c6126b784c03157f34456b0c1b90570b98db9f1c6b1f4046","src/rwlock.rs":"28e6c3a3d1aea9add4950fa5c67ba79f4aeb2e72830ff4d4a66adc2a9afa12dc","src/util.rs":"2d07c0c010a857790ae2ed6a1215eeed8af76859e076797ea1ba8dec82169e84"},"package":"9fd9d732f2de194336fb02fe11f9eed13d9e76f13f4315b4d88a14ca411750cd"}
|
|
@ -2,7 +2,7 @@ language: rust
|
|||
sudo: false
|
||||
|
||||
rust:
|
||||
- 1.13.0
|
||||
- 1.18.0
|
||||
- stable
|
||||
- beta
|
||||
- nightly
|
||||
|
@ -18,7 +18,8 @@ script:
|
|||
- cd ..;
|
||||
- travis-cargo build
|
||||
- travis-cargo test
|
||||
- travis-cargo doc -- --no-deps -p parking_lot -p parking_lot_core
|
||||
- travis-cargo test -- --features=deadlock_detection
|
||||
- travis-cargo --only nightly doc -- --all-features --no-deps -p parking_lot -p parking_lot_core
|
||||
- if [ "$TRAVIS_RUST_VERSION" != "1.8.0" ]; then
|
||||
cd benchmark;
|
||||
travis-cargo build;
|
||||
|
@ -33,6 +34,7 @@ after_success:
|
|||
env:
|
||||
global:
|
||||
- TRAVIS_CARGO_NIGHTLY_FEATURE=nightly
|
||||
- RUST_TEST_THREADS=1
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
[package]
|
||||
name = "parking_lot"
|
||||
version = "0.4.4"
|
||||
version = "0.5.4"
|
||||
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
|
||||
description = "More compact and efficient implementations of the standard synchronization primitives."
|
||||
documentation = "https://amanieu.github.io/parking_lot/parking_lot/index.html"
|
||||
|
@ -27,10 +27,9 @@ optional = true
|
|||
[dependencies.parking_lot_core]
|
||||
version = "0.2"
|
||||
[dev-dependencies.rand]
|
||||
version = "0.3"
|
||||
version = "0.4"
|
||||
|
||||
[features]
|
||||
deadlock_detection = ["parking_lot_core/deadlock_detection"]
|
||||
default = ["owning_ref"]
|
||||
nightly = ["parking_lot_core/nightly"]
|
||||
[target."cfg(not(target_os = \"emscripten\"))".dependencies.thread-id]
|
||||
version = "3.0"
|
||||
|
|
|
@ -47,22 +47,25 @@ in the Rust standard library:
|
|||
library versions of those types.
|
||||
7. `RwLock` takes advantage of hardware lock elision on processors that
|
||||
support it, which can lead to huge performance wins with many readers.
|
||||
8. `MutexGuard` (and the `RwLock` equivalents) is `Send`, which means it can
|
||||
be unlocked by a different thread than the one that locked it.
|
||||
9. `RwLock` uses a task-fair locking policy, which avoids reader and writer
|
||||
8. `RwLock` uses a task-fair locking policy, which avoids reader and writer
|
||||
starvation, whereas the standard library version makes no guarantees.
|
||||
10. `Condvar` is guaranteed not to produce spurious wakeups. A thread will
|
||||
9. `Condvar` is guaranteed not to produce spurious wakeups. A thread will
|
||||
only be woken up if it timed out or it was woken up by a notification.
|
||||
11. `Condvar::notify_all` will only wake up a single thread and requeue the
|
||||
10. `Condvar::notify_all` will only wake up a single thread and requeue the
|
||||
rest to wait on the associated `Mutex`. This avoids a thundering herd
|
||||
problem where all threads try to acquire the lock at the same time.
|
||||
12. `RwLock` supports atomically downgrading a write lock into a read lock.
|
||||
13. `Mutex` and `RwLock` allow raw unlocking without a RAII guard object.
|
||||
14. `Mutex<()>` and `RwLock<()>` allow raw locking without a RAII guard
|
||||
11. `RwLock` supports atomically downgrading a write lock into a read lock.
|
||||
12. `Mutex` and `RwLock` allow raw unlocking without a RAII guard object.
|
||||
13. `Mutex<()>` and `RwLock<()>` allow raw locking without a RAII guard
|
||||
object.
|
||||
15. `Mutex` and `RwLock` support [eventual fairness](https://trac.webkit.org/changeset/203350)
|
||||
14. `Mutex` and `RwLock` support [eventual fairness](https://trac.webkit.org/changeset/203350)
|
||||
which allows them to be fair on average without sacrificing performance.
|
||||
16. A `ReentrantMutex` type which supports recursive locking.
|
||||
15. A `ReentrantMutex` type which supports recursive locking.
|
||||
16. An *experimental* deadlock detector that works for `Mutex`,
|
||||
`RwLock` and `ReentrantMutex`. This feature is disabled by default and
|
||||
can be enabled via the `deadlock_detection` feature.
|
||||
17. `RwLock` supports atomically upgrading an "upgradable" read lock into a
|
||||
write lock.
|
||||
|
||||
## The parking lot
|
||||
|
||||
|
@ -87,13 +90,16 @@ There are a few restrictions when using this library on stable Rust:
|
|||
- Slightly less efficient code may be generated for `compare_exchange`
|
||||
operations. This should not affect architectures like x86 though.
|
||||
|
||||
To enable nightly-only functionality, you need to enable the `nightly` feature
|
||||
in Cargo (see below).
|
||||
|
||||
## Usage
|
||||
|
||||
Add this to your `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
parking_lot = "0.4"
|
||||
parking_lot = "0.5"
|
||||
```
|
||||
|
||||
and this to your crate root:
|
||||
|
@ -106,9 +112,12 @@ To enable nightly-only features, add this to your `Cargo.toml` instead:
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
parking_lot = {version = "0.4", features = ["nightly"]}
|
||||
parking_lot = {version = "0.5", features = ["nightly"]}
|
||||
```
|
||||
|
||||
The experimental deadlock detector can be enabled with the
|
||||
`deadlock_detection` Cargo feature.
|
||||
|
||||
The core parking lot API is provided by the `parking_lot_core` crate. It is
|
||||
separate from the synchronization primitives in the `parking_lot` crate so that
|
||||
changes to the core API do not cause breaking changes for users of `parking_lot`.
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
environment:
|
||||
TRAVIS_CARGO_NIGHTLY_FEATURE: nightly
|
||||
RUST_TEST_THREADS: 1
|
||||
matrix:
|
||||
- TARGET: nightly-x86_64-pc-windows-msvc
|
||||
- TARGET: nightly-i686-pc-windows-msvc
|
||||
- TARGET: nightly-x86_64-pc-windows-gnu
|
||||
- TARGET: nightly-i686-pc-windows-gnu
|
||||
- TARGET: 1.13.0-x86_64-pc-windows-msvc
|
||||
- TARGET: 1.13.0-i686-pc-windows-msvc
|
||||
- TARGET: 1.13.0-x86_64-pc-windows-gnu
|
||||
- TARGET: 1.13.0-i686-pc-windows-gnu
|
||||
- TARGET: 1.18.0-x86_64-pc-windows-msvc
|
||||
- TARGET: 1.18.0-i686-pc-windows-msvc
|
||||
- TARGET: 1.18.0-x86_64-pc-windows-gnu
|
||||
- TARGET: 1.18.0-i686-pc-windows-gnu
|
||||
|
||||
install:
|
||||
- SET PATH=C:\Python27;C:\Python27\Scripts;%PATH%;%APPDATA%\Python\Scripts
|
||||
|
@ -24,4 +25,5 @@ build_script:
|
|||
|
||||
test_script:
|
||||
- travis-cargo test
|
||||
- travis-cargo test -- --features=deadlock_detection
|
||||
- travis-cargo doc
|
||||
|
|
|
@ -8,9 +8,10 @@
|
|||
use std::sync::atomic::{AtomicPtr, Ordering};
|
||||
use std::time::{Duration, Instant};
|
||||
use std::ptr;
|
||||
use parking_lot_core::{self, ParkResult, UnparkResult, RequeueOp, DEFAULT_PARK_TOKEN};
|
||||
use mutex::{MutexGuard, guard_lock};
|
||||
use raw_mutex::{RawMutex, TOKEN_NORMAL, TOKEN_HANDOFF};
|
||||
use parking_lot_core::{self, ParkResult, RequeueOp, UnparkResult, DEFAULT_PARK_TOKEN};
|
||||
use mutex::{guard_lock, MutexGuard};
|
||||
use raw_mutex::{RawMutex, TOKEN_HANDOFF, TOKEN_NORMAL};
|
||||
use deadlock;
|
||||
|
||||
/// A type indicating whether a timed wait on a condition variable returned
|
||||
/// due to a time out or not.
|
||||
|
@ -88,7 +89,9 @@ impl Condvar {
|
|||
#[cfg(feature = "nightly")]
|
||||
#[inline]
|
||||
pub const fn new() -> Condvar {
|
||||
Condvar { state: AtomicPtr::new(ptr::null_mut()) }
|
||||
Condvar {
|
||||
state: AtomicPtr::new(ptr::null_mut()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new condition variable which is ready to be waited on and
|
||||
|
@ -96,7 +99,9 @@ impl Condvar {
|
|||
#[cfg(not(feature = "nightly"))]
|
||||
#[inline]
|
||||
pub fn new() -> Condvar {
|
||||
Condvar { state: AtomicPtr::new(ptr::null_mut()) }
|
||||
Condvar {
|
||||
state: AtomicPtr::new(ptr::null_mut()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Wakes up one blocked thread on this condvar.
|
||||
|
@ -238,10 +243,11 @@ impl Condvar {
|
|||
/// This function will panic if another thread is waiting on the `Condvar`
|
||||
/// with a different `Mutex` object.
|
||||
#[inline]
|
||||
pub fn wait_until<T: ?Sized>(&self,
|
||||
mutex_guard: &mut MutexGuard<T>,
|
||||
timeout: Instant)
|
||||
-> WaitTimeoutResult {
|
||||
pub fn wait_until<T: ?Sized>(
|
||||
&self,
|
||||
mutex_guard: &mut MutexGuard<T>,
|
||||
timeout: Instant,
|
||||
) -> WaitTimeoutResult {
|
||||
self.wait_until_internal(guard_lock(mutex_guard), Some(timeout))
|
||||
}
|
||||
|
||||
|
@ -285,12 +291,14 @@ impl Condvar {
|
|||
self.state.store(ptr::null_mut(), Ordering::Relaxed);
|
||||
}
|
||||
};
|
||||
result = parking_lot_core::park(addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
timeout);
|
||||
result = parking_lot_core::park(
|
||||
addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
timeout,
|
||||
);
|
||||
}
|
||||
|
||||
// Panic if we tried to use multiple mutexes with a Condvar. Note
|
||||
|
@ -301,7 +309,9 @@ impl Condvar {
|
|||
}
|
||||
|
||||
// ... and re-lock it once we are done sleeping
|
||||
if result != ParkResult::Unparked(TOKEN_HANDOFF) {
|
||||
if result == ParkResult::Unparked(TOKEN_HANDOFF) {
|
||||
deadlock::acquire_resource(mutex as *const _ as usize);
|
||||
} else {
|
||||
mutex.lock();
|
||||
}
|
||||
|
||||
|
@ -328,10 +338,11 @@ impl Condvar {
|
|||
/// Like `wait`, the lock specified will be re-acquired when this function
|
||||
/// returns, regardless of whether the timeout elapsed or not.
|
||||
#[inline]
|
||||
pub fn wait_for<T: ?Sized>(&self,
|
||||
guard: &mut MutexGuard<T>,
|
||||
timeout: Duration)
|
||||
-> WaitTimeoutResult {
|
||||
pub fn wait_for<T: ?Sized>(
|
||||
&self,
|
||||
guard: &mut MutexGuard<T>,
|
||||
timeout: Duration,
|
||||
) -> WaitTimeoutResult {
|
||||
self.wait_until(guard, Instant::now() + timeout)
|
||||
}
|
||||
}
|
||||
|
@ -442,9 +453,10 @@ mod tests {
|
|||
let _g = m2.lock();
|
||||
c2.notify_one();
|
||||
});
|
||||
let timeout_res = c.wait_until(&mut g,
|
||||
Instant::now() +
|
||||
Duration::from_millis(u32::max_value() as u64));
|
||||
let timeout_res = c.wait_until(
|
||||
&mut g,
|
||||
Instant::now() + Duration::from_millis(u32::max_value() as u64),
|
||||
);
|
||||
assert!(!timeout_res.timed_out());
|
||||
drop(g);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,218 @@
|
|||
//! [Experimental] Deadlock detection
|
||||
//!
|
||||
//! This feature is optional and can be enabled via the `deadlock_detection` feature flag.
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! ```
|
||||
//! #[cfg(feature = "deadlock_detection")]
|
||||
//! { // only for #[cfg]
|
||||
//! use std::thread;
|
||||
//! use std::time::Duration;
|
||||
//! use parking_lot::deadlock;
|
||||
//!
|
||||
//! // Create a background thread which checks for deadlocks every 10s
|
||||
//! thread::spawn(move || {
|
||||
//! loop {
|
||||
//! thread::sleep(Duration::from_secs(10));
|
||||
//! let deadlocks = deadlock::check_deadlock();
|
||||
//! if deadlocks.is_empty() {
|
||||
//! continue;
|
||||
//! }
|
||||
//!
|
||||
//! println!("{} deadlocks detected", deadlocks.len());
|
||||
//! for (i, threads) in deadlocks.iter().enumerate() {
|
||||
//! println!("Deadlock #{}", i);
|
||||
//! for t in threads {
|
||||
//! println!("Thread Id {:#?}", t.thread_id());
|
||||
//! println!("{:#?}", t.backtrace());
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//! });
|
||||
//! } // only for #[cfg]
|
||||
//! ```
|
||||
|
||||
#[cfg(feature = "deadlock_detection")]
|
||||
pub use parking_lot_core::deadlock::check_deadlock;
|
||||
pub(crate) use parking_lot_core::deadlock::{acquire_resource, release_resource};
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(feature = "deadlock_detection")]
|
||||
mod tests {
|
||||
use std::thread::{self, sleep};
|
||||
use std::sync::{Arc, Barrier};
|
||||
use std::time::Duration;
|
||||
use {Mutex, ReentrantMutex, RwLock};
|
||||
|
||||
fn check_deadlock() -> bool {
|
||||
use parking_lot_core::deadlock::check_deadlock;
|
||||
!check_deadlock().is_empty()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mutex_deadlock() {
|
||||
let m1: Arc<Mutex<()>> = Default::default();
|
||||
let m2: Arc<Mutex<()>> = Default::default();
|
||||
let m3: Arc<Mutex<()>> = Default::default();
|
||||
let b = Arc::new(Barrier::new(4));
|
||||
|
||||
let m1_ = m1.clone();
|
||||
let m2_ = m2.clone();
|
||||
let m3_ = m3.clone();
|
||||
let b1 = b.clone();
|
||||
let b2 = b.clone();
|
||||
let b3 = b.clone();
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
let _t1 = thread::spawn(move || {
|
||||
let _g = m1.lock();
|
||||
b1.wait();
|
||||
let _ = m2_.lock();
|
||||
});
|
||||
|
||||
let _t2 = thread::spawn(move || {
|
||||
let _g = m2.lock();
|
||||
b2.wait();
|
||||
let _ = m3_.lock();
|
||||
});
|
||||
|
||||
let _t3 = thread::spawn(move || {
|
||||
let _g = m3.lock();
|
||||
b3.wait();
|
||||
let _ = m1_.lock();
|
||||
});
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
b.wait();
|
||||
sleep(Duration::from_millis(50));
|
||||
assert!(check_deadlock());
|
||||
|
||||
assert!(!check_deadlock());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mutex_deadlock_reentrant() {
|
||||
let m1: Arc<Mutex<()>> = Default::default();
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
let _t1 = thread::spawn(move || {
|
||||
let _g = m1.lock();
|
||||
let _ = m1.lock();
|
||||
});
|
||||
|
||||
sleep(Duration::from_millis(50));
|
||||
assert!(check_deadlock());
|
||||
|
||||
assert!(!check_deadlock());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remutex_deadlock() {
|
||||
let m1: Arc<ReentrantMutex<()>> = Default::default();
|
||||
let m2: Arc<ReentrantMutex<()>> = Default::default();
|
||||
let m3: Arc<ReentrantMutex<()>> = Default::default();
|
||||
let b = Arc::new(Barrier::new(4));
|
||||
|
||||
let m1_ = m1.clone();
|
||||
let m2_ = m2.clone();
|
||||
let m3_ = m3.clone();
|
||||
let b1 = b.clone();
|
||||
let b2 = b.clone();
|
||||
let b3 = b.clone();
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
let _t1 = thread::spawn(move || {
|
||||
let _g = m1.lock();
|
||||
let _g = m1.lock();
|
||||
b1.wait();
|
||||
let _ = m2_.lock();
|
||||
});
|
||||
|
||||
let _t2 = thread::spawn(move || {
|
||||
let _g = m2.lock();
|
||||
let _g = m2.lock();
|
||||
b2.wait();
|
||||
let _ = m3_.lock();
|
||||
});
|
||||
|
||||
let _t3 = thread::spawn(move || {
|
||||
let _g = m3.lock();
|
||||
let _g = m3.lock();
|
||||
b3.wait();
|
||||
let _ = m1_.lock();
|
||||
});
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
b.wait();
|
||||
sleep(Duration::from_millis(50));
|
||||
assert!(check_deadlock());
|
||||
|
||||
assert!(!check_deadlock());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rwlock_deadlock() {
|
||||
let m1: Arc<RwLock<()>> = Default::default();
|
||||
let m2: Arc<RwLock<()>> = Default::default();
|
||||
let m3: Arc<RwLock<()>> = Default::default();
|
||||
let b = Arc::new(Barrier::new(4));
|
||||
|
||||
let m1_ = m1.clone();
|
||||
let m2_ = m2.clone();
|
||||
let m3_ = m3.clone();
|
||||
let b1 = b.clone();
|
||||
let b2 = b.clone();
|
||||
let b3 = b.clone();
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
let _t1 = thread::spawn(move || {
|
||||
let _g = m1.read();
|
||||
b1.wait();
|
||||
let _g = m2_.write();
|
||||
});
|
||||
|
||||
let _t2 = thread::spawn(move || {
|
||||
let _g = m2.read();
|
||||
b2.wait();
|
||||
let _g = m3_.write();
|
||||
});
|
||||
|
||||
let _t3 = thread::spawn(move || {
|
||||
let _g = m3.read();
|
||||
b3.wait();
|
||||
let _ = m1_.write();
|
||||
});
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
b.wait();
|
||||
sleep(Duration::from_millis(50));
|
||||
assert!(check_deadlock());
|
||||
|
||||
assert!(!check_deadlock());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rwlock_deadlock_reentrant() {
|
||||
let m1: Arc<RwLock<()>> = Default::default();
|
||||
|
||||
assert!(!check_deadlock());
|
||||
|
||||
let _t1 = thread::spawn(move || {
|
||||
let _g = m1.read();
|
||||
let _ = m1.write();
|
||||
});
|
||||
|
||||
sleep(Duration::from_millis(50));
|
||||
assert!(check_deadlock());
|
||||
|
||||
assert!(!check_deadlock());
|
||||
}
|
||||
}
|
|
@ -5,32 +5,33 @@
|
|||
// http://opensource.org/licenses/MIT>, at your option. This file may not be
|
||||
// copied, modified, or distributed except according to those terms.
|
||||
|
||||
#[cfg(feature = "nightly")]
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
use stable::AtomicUsize;
|
||||
|
||||
// Extension trait to add lock elision primitives to atomic types
|
||||
pub trait AtomicElisionExt {
|
||||
type IntType;
|
||||
|
||||
// Perform a compare_exchange and start a transaction
|
||||
fn elision_acquire(&self,
|
||||
current: Self::IntType,
|
||||
new: Self::IntType)
|
||||
-> Result<Self::IntType, Self::IntType>;
|
||||
fn elision_acquire(
|
||||
&self,
|
||||
current: Self::IntType,
|
||||
new: Self::IntType,
|
||||
) -> Result<Self::IntType, Self::IntType>;
|
||||
// Perform a compare_exchange and end a transaction
|
||||
fn elision_release(&self,
|
||||
current: Self::IntType,
|
||||
new: Self::IntType)
|
||||
-> Result<Self::IntType, Self::IntType>;
|
||||
fn elision_release(
|
||||
&self,
|
||||
current: Self::IntType,
|
||||
new: Self::IntType,
|
||||
) -> Result<Self::IntType, Self::IntType>;
|
||||
}
|
||||
|
||||
// Indicates whether the target architecture supports lock elision
|
||||
#[inline]
|
||||
pub fn have_elision() -> bool {
|
||||
cfg!(all(feature = "nightly",
|
||||
any(target_arch = "x86", target_arch = "x86_64")))
|
||||
cfg!(all(
|
||||
feature = "nightly",
|
||||
any(target_arch = "x86", target_arch = "x86_64"),
|
||||
))
|
||||
}
|
||||
|
||||
// This implementation is never actually called because it is guarded by
|
||||
|
@ -63,7 +64,11 @@ impl AtomicElisionExt for AtomicUsize {
|
|||
: "r" (new), "{eax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current { Ok(prev) } else { Err(prev) }
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -76,12 +81,55 @@ impl AtomicElisionExt for AtomicUsize {
|
|||
: "r" (new), "{eax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current { Ok(prev) } else { Err(prev) }
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "nightly", target_arch = "x86_64"))]
|
||||
#[cfg(all(feature = "nightly", target_arch = "x86_64", target_pointer_width = "32"))]
|
||||
impl AtomicElisionExt for AtomicUsize {
|
||||
type IntType = usize;
|
||||
|
||||
#[inline]
|
||||
fn elision_acquire(&self, current: usize, new: usize) -> Result<usize, usize> {
|
||||
unsafe {
|
||||
let prev: usize;
|
||||
asm!("xacquire; lock; cmpxchgl $2, $1"
|
||||
: "={rax}" (prev), "+*m" (self)
|
||||
: "r" (new), "{rax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn elision_release(&self, current: usize, new: usize) -> Result<usize, usize> {
|
||||
unsafe {
|
||||
let prev: usize;
|
||||
asm!("xrelease; lock; cmpxchgl $2, $1"
|
||||
: "={rax}" (prev), "+*m" (self)
|
||||
: "r" (new), "{rax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "nightly", target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
impl AtomicElisionExt for AtomicUsize {
|
||||
type IntType = usize;
|
||||
|
||||
|
@ -94,7 +142,11 @@ impl AtomicElisionExt for AtomicUsize {
|
|||
: "r" (new), "{rax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current { Ok(prev) } else { Err(prev) }
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -107,7 +159,11 @@ impl AtomicElisionExt for AtomicUsize {
|
|||
: "r" (new), "{rax}" (current)
|
||||
: "memory"
|
||||
: "volatile");
|
||||
if prev == current { Ok(prev) } else { Err(prev) }
|
||||
if prev == current {
|
||||
Ok(prev)
|
||||
} else {
|
||||
Err(prev)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,14 +17,8 @@
|
|||
#[cfg(feature = "owning_ref")]
|
||||
extern crate owning_ref;
|
||||
|
||||
#[cfg(not(target_os = "emscripten"))]
|
||||
extern crate thread_id;
|
||||
|
||||
extern crate parking_lot_core;
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
mod stable;
|
||||
|
||||
mod util;
|
||||
mod elision;
|
||||
mod raw_mutex;
|
||||
|
@ -36,11 +30,16 @@ mod remutex;
|
|||
mod rwlock;
|
||||
mod once;
|
||||
|
||||
pub use once::{Once, ONCE_INIT, OnceState};
|
||||
#[cfg(feature = "deadlock_detection")]
|
||||
pub mod deadlock;
|
||||
#[cfg(not(feature = "deadlock_detection"))]
|
||||
mod deadlock;
|
||||
|
||||
pub use once::{Once, OnceState, ONCE_INIT};
|
||||
pub use mutex::{Mutex, MutexGuard};
|
||||
pub use remutex::{ReentrantMutex, ReentrantMutexGuard};
|
||||
pub use condvar::{Condvar, WaitTimeoutResult};
|
||||
pub use rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard};
|
||||
pub use rwlock::{RwLock, RwLockReadGuard, RwLockUpgradableReadGuard, RwLockWriteGuard};
|
||||
|
||||
#[cfg(feature = "owning_ref")]
|
||||
use owning_ref::OwningRef;
|
||||
|
@ -60,3 +59,8 @@ pub type RwLockReadGuardRef<'a, T, U = T> = OwningRef<RwLockReadGuard<'a, T>, U>
|
|||
/// Typedef of an owning reference that uses a `RwLockWriteGuard` as the owner.
|
||||
#[cfg(feature = "owning_ref")]
|
||||
pub type RwLockWriteGuardRef<'a, T, U = T> = OwningRef<RwLockWriteGuard<'a, T>, U>;
|
||||
|
||||
/// Typedef of an owning reference that uses a `RwLockUpgradableReadGuard` as the owner.
|
||||
#[cfg(feature = "owning_ref")]
|
||||
pub type RwLockUpgradableReadGuardRef<'a, T, U = T> =
|
||||
OwningRef<RwLockUpgradableReadGuard<'a, T>, U>;
|
||||
|
|
|
@ -50,7 +50,6 @@ use owning_ref::StableAddress;
|
|||
/// - No poisoning, the lock is released normally on panic.
|
||||
/// - Only requires 1 byte of space, whereas the standard library boxes the
|
||||
/// `Mutex` due to platform limitations.
|
||||
/// - A `MutexGuard` can be sent to another thread and unlocked there.
|
||||
/// - Can be statically constructed (requires the `const_fn` nightly feature).
|
||||
/// - Does not require any drop glue when dropped.
|
||||
/// - Inline fast path for the uncontended case.
|
||||
|
@ -99,8 +98,8 @@ pub struct Mutex<T: ?Sized> {
|
|||
data: UnsafeCell<T>,
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for Mutex<T> {}
|
||||
unsafe impl<T: Send> Sync for Mutex<T> {}
|
||||
unsafe impl<T: ?Sized + Send> Send for Mutex<T> {}
|
||||
unsafe impl<T: ?Sized + Send> Sync for Mutex<T> {}
|
||||
|
||||
/// An RAII implementation of a "scoped lock" of a mutex. When this structure is
|
||||
/// dropped (falls out of scope), the lock will be unlocked.
|
||||
|
@ -109,10 +108,13 @@ unsafe impl<T: Send> Sync for Mutex<T> {}
|
|||
/// `Deref` and `DerefMut` implementations.
|
||||
#[must_use]
|
||||
pub struct MutexGuard<'a, T: ?Sized + 'a> {
|
||||
mutex: &'a Mutex<T>,
|
||||
raw: &'a RawMutex,
|
||||
data: *mut T,
|
||||
marker: PhantomData<&'a mut T>,
|
||||
}
|
||||
|
||||
unsafe impl<'a, T: ?Sized + Sync + 'a> Sync for MutexGuard<'a, T> {}
|
||||
|
||||
impl<T> Mutex<T> {
|
||||
/// Creates a new mutex in an unlocked state ready for use.
|
||||
#[cfg(feature = "nightly")]
|
||||
|
@ -142,6 +144,15 @@ impl<T> Mutex<T> {
|
|||
}
|
||||
|
||||
impl<T: ?Sized> Mutex<T> {
|
||||
#[inline]
|
||||
fn guard(&self) -> MutexGuard<T> {
|
||||
MutexGuard {
|
||||
raw: &self.raw,
|
||||
data: self.data.get(),
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Acquires a mutex, blocking the current thread until it is able to do so.
|
||||
///
|
||||
/// This function will block the local thread until it is available to acquire
|
||||
|
@ -154,10 +165,7 @@ impl<T: ?Sized> Mutex<T> {
|
|||
#[inline]
|
||||
pub fn lock(&self) -> MutexGuard<T> {
|
||||
self.raw.lock();
|
||||
MutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
}
|
||||
self.guard()
|
||||
}
|
||||
|
||||
/// Attempts to acquire this lock.
|
||||
|
@ -170,10 +178,7 @@ impl<T: ?Sized> Mutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock(&self) -> Option<MutexGuard<T>> {
|
||||
if self.raw.try_lock() {
|
||||
Some(MutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -187,10 +192,7 @@ impl<T: ?Sized> Mutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock_for(&self, timeout: Duration) -> Option<MutexGuard<T>> {
|
||||
if self.raw.try_lock_for(timeout) {
|
||||
Some(MutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -204,10 +206,7 @@ impl<T: ?Sized> Mutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock_until(&self, timeout: Instant) -> Option<MutexGuard<T>> {
|
||||
if self.raw.try_lock_until(timeout) {
|
||||
Some(MutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -300,30 +299,53 @@ impl<'a, T: ?Sized + 'a> MutexGuard<'a, T> {
|
|||
/// using this method instead of dropping the `MutexGuard` normally.
|
||||
#[inline]
|
||||
pub fn unlock_fair(self) {
|
||||
self.mutex.raw.unlock(true);
|
||||
self.raw.unlock(true);
|
||||
mem::forget(self);
|
||||
}
|
||||
|
||||
/// Make a new `MutexGuard` for a component of the locked data.
|
||||
///
|
||||
/// This operation cannot fail as the `MutexGuard` passed
|
||||
/// in already locked the mutex.
|
||||
///
|
||||
/// This is an associated function that needs to be
|
||||
/// used as `MutexGuard::map(...)`. A method would interfere with methods of
|
||||
/// the same name on the contents of the locked data.
|
||||
#[inline]
|
||||
pub fn map<U: ?Sized, F>(orig: Self, f: F) -> MutexGuard<'a, U>
|
||||
where
|
||||
F: FnOnce(&mut T) -> &mut U,
|
||||
{
|
||||
let raw = orig.raw;
|
||||
let data = f(unsafe { &mut *orig.data });
|
||||
mem::forget(orig);
|
||||
MutexGuard {
|
||||
raw,
|
||||
data,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + 'a> Deref for MutexGuard<'a, T> {
|
||||
type Target = T;
|
||||
#[inline]
|
||||
fn deref(&self) -> &T {
|
||||
unsafe { &*self.mutex.data.get() }
|
||||
unsafe { &*self.data }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + 'a> DerefMut for MutexGuard<'a, T> {
|
||||
#[inline]
|
||||
fn deref_mut(&mut self) -> &mut T {
|
||||
unsafe { &mut *self.mutex.data.get() }
|
||||
unsafe { &mut *self.data }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + 'a> Drop for MutexGuard<'a, T> {
|
||||
#[inline]
|
||||
fn drop(&mut self) {
|
||||
self.mutex.raw.unlock(false);
|
||||
self.raw.unlock(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -332,8 +354,8 @@ unsafe impl<'a, T: ?Sized> StableAddress for MutexGuard<'a, T> {}
|
|||
|
||||
// Helper function used by Condvar, not publicly exported
|
||||
#[inline]
|
||||
pub fn guard_lock<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a RawMutex {
|
||||
&guard.mutex.raw
|
||||
pub(crate) fn guard_lock<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a RawMutex {
|
||||
&guard.raw
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -342,7 +364,7 @@ mod tests {
|
|||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::thread;
|
||||
use {Mutex, Condvar};
|
||||
use {Condvar, Mutex};
|
||||
|
||||
struct Packet<T>(Arc<(Mutex<T>, Condvar)>);
|
||||
|
||||
|
@ -476,18 +498,17 @@ mod tests {
|
|||
let arc = Arc::new(Mutex::new(1));
|
||||
let arc2 = arc.clone();
|
||||
let _ = thread::spawn(move || -> () {
|
||||
struct Unwinder {
|
||||
i: Arc<Mutex<i32>>,
|
||||
struct Unwinder {
|
||||
i: Arc<Mutex<i32>>,
|
||||
}
|
||||
impl Drop for Unwinder {
|
||||
fn drop(&mut self) {
|
||||
*self.i.lock() += 1;
|
||||
}
|
||||
impl Drop for Unwinder {
|
||||
fn drop(&mut self) {
|
||||
*self.i.lock() += 1;
|
||||
}
|
||||
}
|
||||
let _u = Unwinder { i: arc2 };
|
||||
panic!();
|
||||
})
|
||||
.join();
|
||||
}
|
||||
let _u = Unwinder { i: arc2 };
|
||||
panic!();
|
||||
}).join();
|
||||
let lock = arc.lock();
|
||||
assert_eq!(*lock, 2);
|
||||
}
|
||||
|
@ -505,10 +526,10 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_mutexguard_send() {
|
||||
fn send<T: Send>(_: T) {}
|
||||
fn test_mutexguard_sync() {
|
||||
fn sync<T: Sync>(_: T) {}
|
||||
|
||||
let mutex = Mutex::new(());
|
||||
send(mutex.lock());
|
||||
sync(mutex.lock());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,15 +5,19 @@
|
|||
// http://opensource.org/licenses/MIT>, at your option. This file may not be
|
||||
// copied, modified, or distributed except according to those terms.
|
||||
|
||||
use std::sync::atomic::{fence, Ordering};
|
||||
#[cfg(feature = "nightly")]
|
||||
use std::sync::atomic::{AtomicU8, ATOMIC_U8_INIT, Ordering, fence};
|
||||
use std::sync::atomic::{ATOMIC_U8_INIT, AtomicU8};
|
||||
#[cfg(feature = "nightly")]
|
||||
type U8 = u8;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
use stable::{AtomicU8, ATOMIC_U8_INIT, Ordering, fence};
|
||||
use std::sync::atomic::AtomicUsize as AtomicU8;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
use std::sync::atomic::ATOMIC_USIZE_INIT as ATOMIC_U8_INIT;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
type U8 = usize;
|
||||
use std::mem;
|
||||
use std::fmt;
|
||||
use parking_lot_core::{self, SpinWait, DEFAULT_PARK_TOKEN, DEFAULT_UNPARK_TOKEN};
|
||||
use util::UncheckedOptionExt;
|
||||
|
||||
|
@ -95,14 +99,14 @@ impl Once {
|
|||
#[cfg(feature = "nightly")]
|
||||
#[inline]
|
||||
pub const fn new() -> Once {
|
||||
Once(AtomicU8::new(0))
|
||||
Once(ATOMIC_U8_INIT)
|
||||
}
|
||||
|
||||
/// Creates a new `Once` value.
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[inline]
|
||||
pub fn new() -> Once {
|
||||
Once(AtomicU8::new(0))
|
||||
Once(ATOMIC_U8_INIT)
|
||||
}
|
||||
|
||||
/// Returns the current state of this `Once`.
|
||||
|
@ -171,7 +175,8 @@ impl Once {
|
|||
/// `call_once` to also panic.
|
||||
#[inline]
|
||||
pub fn call_once<F>(&self, f: F)
|
||||
where F: FnOnce()
|
||||
where
|
||||
F: FnOnce(),
|
||||
{
|
||||
if self.0.load(Ordering::Acquire) == DONE_BIT {
|
||||
return;
|
||||
|
@ -192,15 +197,17 @@ impl Once {
|
|||
/// not).
|
||||
#[inline]
|
||||
pub fn call_once_force<F>(&self, f: F)
|
||||
where F: FnOnce(OnceState)
|
||||
where
|
||||
F: FnOnce(OnceState),
|
||||
{
|
||||
if self.0.load(Ordering::Acquire) == DONE_BIT {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut f = Some(f);
|
||||
self.call_once_slow(true,
|
||||
&mut |state| unsafe { f.take().unchecked_unwrap()(state) });
|
||||
self.call_once_slow(true, &mut |state| unsafe {
|
||||
f.take().unchecked_unwrap()(state)
|
||||
});
|
||||
}
|
||||
|
||||
// This is a non-generic function to reduce the monomorphization cost of
|
||||
|
@ -239,11 +246,12 @@ impl Once {
|
|||
// We also clear the poison bit since we are going to try running
|
||||
// the closure again.
|
||||
if state & LOCKED_BIT == 0 {
|
||||
match self.0
|
||||
.compare_exchange_weak(state,
|
||||
(state | LOCKED_BIT) & !POISON_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed) {
|
||||
match self.0.compare_exchange_weak(
|
||||
state,
|
||||
(state | LOCKED_BIT) & !POISON_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
Ok(_) => break,
|
||||
Err(x) => state = x,
|
||||
}
|
||||
|
@ -258,10 +266,12 @@ impl Once {
|
|||
|
||||
// Set the parked bit
|
||||
if state & PARKED_BIT == 0 {
|
||||
if let Err(x) = self.0.compare_exchange_weak(state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed) {
|
||||
if let Err(x) = self.0.compare_exchange_weak(
|
||||
state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
state = x;
|
||||
continue;
|
||||
}
|
||||
|
@ -274,12 +284,14 @@ impl Once {
|
|||
let validate = || self.0.load(Ordering::Relaxed) == LOCKED_BIT | PARKED_BIT;
|
||||
let before_sleep = || {};
|
||||
let timed_out = |_, _| unreachable!();
|
||||
parking_lot_core::park(addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
None);
|
||||
parking_lot_core::park(
|
||||
addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
// Loop back and check if the done bit was set
|
||||
|
@ -331,6 +343,12 @@ impl Default for Once {
|
|||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Once {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "Once {{ state: {:?} }}", &self.state())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[cfg(feature = "nightly")]
|
||||
|
@ -391,11 +409,15 @@ mod tests {
|
|||
static O: Once = ONCE_INIT;
|
||||
|
||||
// poison the once
|
||||
let t = panic::catch_unwind(|| { O.call_once(|| panic!()); });
|
||||
let t = panic::catch_unwind(|| {
|
||||
O.call_once(|| panic!());
|
||||
});
|
||||
assert!(t.is_err());
|
||||
|
||||
// poisoning propagates
|
||||
let t = panic::catch_unwind(|| { O.call_once(|| {}); });
|
||||
let t = panic::catch_unwind(|| {
|
||||
O.call_once(|| {});
|
||||
});
|
||||
assert!(t.is_err());
|
||||
|
||||
// we can subvert poisoning, however
|
||||
|
@ -416,7 +438,9 @@ mod tests {
|
|||
static O: Once = ONCE_INIT;
|
||||
|
||||
// poison the once
|
||||
let t = panic::catch_unwind(|| { O.call_once(|| panic!()); });
|
||||
let t = panic::catch_unwind(|| {
|
||||
O.call_once(|| panic!());
|
||||
});
|
||||
assert!(t.is_err());
|
||||
|
||||
// make sure someone's waiting inside the once via a force
|
||||
|
@ -435,7 +459,9 @@ mod tests {
|
|||
// put another waiter on the once
|
||||
let t2 = thread::spawn(|| {
|
||||
let mut called = false;
|
||||
O.call_once(|| { called = true; });
|
||||
O.call_once(|| {
|
||||
called = true;
|
||||
});
|
||||
assert!(!called);
|
||||
});
|
||||
|
||||
|
@ -443,6 +469,5 @@ mod tests {
|
|||
|
||||
assert!(t1.join().is_ok());
|
||||
assert!(t2.join().is_ok());
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,16 +5,20 @@
|
|||
// http://opensource.org/licenses/MIT>, at your option. This file may not be
|
||||
// copied, modified, or distributed except according to those terms.
|
||||
|
||||
use std::sync::atomic::Ordering;
|
||||
#[cfg(feature = "nightly")]
|
||||
use std::sync::atomic::{AtomicU8, Ordering};
|
||||
use std::sync::atomic::{ATOMIC_U8_INIT, AtomicU8};
|
||||
#[cfg(feature = "nightly")]
|
||||
type U8 = u8;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
use stable::{AtomicU8, Ordering};
|
||||
use std::sync::atomic::AtomicUsize as AtomicU8;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
use std::sync::atomic::ATOMIC_USIZE_INIT as ATOMIC_U8_INIT;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
type U8 = usize;
|
||||
use std::time::{Duration, Instant};
|
||||
use parking_lot_core::{self, ParkResult, UnparkResult, SpinWait, UnparkToken, DEFAULT_PARK_TOKEN};
|
||||
use parking_lot_core::{self, ParkResult, SpinWait, UnparkResult, UnparkToken, DEFAULT_PARK_TOKEN};
|
||||
use deadlock;
|
||||
|
||||
// UnparkToken used to indicate that that the target thread should attempt to
|
||||
// lock the mutex again as soon as it is unparked.
|
||||
|
@ -35,42 +39,59 @@ impl RawMutex {
|
|||
#[cfg(feature = "nightly")]
|
||||
#[inline]
|
||||
pub const fn new() -> RawMutex {
|
||||
RawMutex { state: AtomicU8::new(0) }
|
||||
RawMutex {
|
||||
state: ATOMIC_U8_INIT,
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[inline]
|
||||
pub fn new() -> RawMutex {
|
||||
RawMutex { state: AtomicU8::new(0) }
|
||||
RawMutex {
|
||||
state: ATOMIC_U8_INIT,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn lock(&self) {
|
||||
if self.state
|
||||
.compare_exchange_weak(0, LOCKED_BIT, Ordering::Acquire, Ordering::Relaxed)
|
||||
.is_ok() {
|
||||
return;
|
||||
.is_err()
|
||||
{
|
||||
self.lock_slow(None);
|
||||
}
|
||||
self.lock_slow(None);
|
||||
unsafe { deadlock::acquire_resource(self as *const _ as usize) };
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn try_lock_until(&self, timeout: Instant) -> bool {
|
||||
if self.state
|
||||
let result = if self.state
|
||||
.compare_exchange_weak(0, LOCKED_BIT, Ordering::Acquire, Ordering::Relaxed)
|
||||
.is_ok() {
|
||||
return true;
|
||||
.is_ok()
|
||||
{
|
||||
true
|
||||
} else {
|
||||
self.lock_slow(Some(timeout))
|
||||
};
|
||||
if result {
|
||||
unsafe { deadlock::acquire_resource(self as *const _ as usize) };
|
||||
}
|
||||
self.lock_slow(Some(timeout))
|
||||
result
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn try_lock_for(&self, timeout: Duration) -> bool {
|
||||
if self.state
|
||||
let result = if self.state
|
||||
.compare_exchange_weak(0, LOCKED_BIT, Ordering::Acquire, Ordering::Relaxed)
|
||||
.is_ok() {
|
||||
return true;
|
||||
.is_ok()
|
||||
{
|
||||
true
|
||||
} else {
|
||||
self.lock_slow(Some(Instant::now() + timeout))
|
||||
};
|
||||
if result {
|
||||
unsafe { deadlock::acquire_resource(self as *const _ as usize) };
|
||||
}
|
||||
self.lock_slow(Some(Instant::now() + timeout))
|
||||
result
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -80,11 +101,16 @@ impl RawMutex {
|
|||
if state & LOCKED_BIT != 0 {
|
||||
return false;
|
||||
}
|
||||
match self.state.compare_exchange_weak(state,
|
||||
state | LOCKED_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed) {
|
||||
Ok(_) => return true,
|
||||
match self.state.compare_exchange_weak(
|
||||
state,
|
||||
state | LOCKED_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
Ok(_) => {
|
||||
unsafe { deadlock::acquire_resource(self as *const _ as usize) };
|
||||
return true;
|
||||
}
|
||||
Err(x) => state = x,
|
||||
}
|
||||
}
|
||||
|
@ -92,9 +118,11 @@ impl RawMutex {
|
|||
|
||||
#[inline]
|
||||
pub fn unlock(&self, force_fair: bool) {
|
||||
unsafe { deadlock::release_resource(self as *const _ as usize) };
|
||||
if self.state
|
||||
.compare_exchange_weak(LOCKED_BIT, 0, Ordering::Release, Ordering::Relaxed)
|
||||
.is_ok() {
|
||||
.is_ok()
|
||||
{
|
||||
return;
|
||||
}
|
||||
self.unlock_slow(force_fair);
|
||||
|
@ -103,16 +131,18 @@ impl RawMutex {
|
|||
// Used by Condvar when requeuing threads to us, must be called while
|
||||
// holding the queue lock.
|
||||
#[inline]
|
||||
pub fn mark_parked_if_locked(&self) -> bool {
|
||||
pub(crate) fn mark_parked_if_locked(&self) -> bool {
|
||||
let mut state = self.state.load(Ordering::Relaxed);
|
||||
loop {
|
||||
if state & LOCKED_BIT == 0 {
|
||||
return false;
|
||||
}
|
||||
match self.state.compare_exchange_weak(state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed) {
|
||||
match self.state.compare_exchange_weak(
|
||||
state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
Ok(_) => return true,
|
||||
Err(x) => state = x,
|
||||
}
|
||||
|
@ -122,7 +152,7 @@ impl RawMutex {
|
|||
// Used by Condvar when requeuing threads to us, must be called while
|
||||
// holding the queue lock.
|
||||
#[inline]
|
||||
pub fn mark_parked(&self) {
|
||||
pub(crate) fn mark_parked(&self) {
|
||||
self.state.fetch_or(PARKED_BIT, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
|
@ -134,11 +164,12 @@ impl RawMutex {
|
|||
loop {
|
||||
// Grab the lock if it isn't locked, even if there is a queue on it
|
||||
if state & LOCKED_BIT == 0 {
|
||||
match self.state
|
||||
.compare_exchange_weak(state,
|
||||
state | LOCKED_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed) {
|
||||
match self.state.compare_exchange_weak(
|
||||
state,
|
||||
state | LOCKED_BIT,
|
||||
Ordering::Acquire,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
Ok(_) => return true,
|
||||
Err(x) => state = x,
|
||||
}
|
||||
|
@ -153,10 +184,12 @@ impl RawMutex {
|
|||
|
||||
// Set the parked bit
|
||||
if state & PARKED_BIT == 0 {
|
||||
if let Err(x) = self.state.compare_exchange_weak(state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed) {
|
||||
if let Err(x) = self.state.compare_exchange_weak(
|
||||
state,
|
||||
state | PARKED_BIT,
|
||||
Ordering::Relaxed,
|
||||
Ordering::Relaxed,
|
||||
) {
|
||||
state = x;
|
||||
continue;
|
||||
}
|
||||
|
@ -173,12 +206,14 @@ impl RawMutex {
|
|||
self.state.fetch_and(!PARKED_BIT, Ordering::Relaxed);
|
||||
}
|
||||
};
|
||||
match parking_lot_core::park(addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
timeout) {
|
||||
match parking_lot_core::park(
|
||||
addr,
|
||||
validate,
|
||||
before_sleep,
|
||||
timed_out,
|
||||
DEFAULT_PARK_TOKEN,
|
||||
timeout,
|
||||
) {
|
||||
// The thread that unparked us passed the lock on to us
|
||||
// directly without unlocking it.
|
||||
ParkResult::Unparked(TOKEN_HANDOFF) => return true,
|
||||
|
@ -206,7 +241,8 @@ impl RawMutex {
|
|||
// Unlock directly if there are no parked threads
|
||||
if self.state
|
||||
.compare_exchange(LOCKED_BIT, 0, Ordering::Release, Ordering::Relaxed)
|
||||
.is_ok() {
|
||||
.is_ok()
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -9,20 +9,11 @@ use std::sync::atomic::{AtomicUsize, Ordering};
|
|||
use std::time::{Duration, Instant};
|
||||
use std::cell::Cell;
|
||||
use raw_mutex::RawMutex;
|
||||
#[cfg(not(target_os = "emscripten"))]
|
||||
use thread_id;
|
||||
|
||||
// Helper function to get a thread id
|
||||
#[cfg(not(target_os = "emscripten"))]
|
||||
fn get_thread_id() -> usize {
|
||||
thread_id::get()
|
||||
}
|
||||
#[cfg(target_os = "emscripten")]
|
||||
fn get_thread_id() -> usize {
|
||||
// pthread_self returns 0 on enscripten, but we use that as a
|
||||
// reserved value to indicate an empty slot. We instead fall
|
||||
// back to using the address of a thread-local variable, which
|
||||
// is slightly slower but guaranteed to produce a non-zero value.
|
||||
// The address of a thread-local variable is guaranteed to be unique to the
|
||||
// current thread, and is also guaranteed to be non-zero.
|
||||
thread_local!(static KEY: u8 = unsafe { ::std::mem::uninitialized() });
|
||||
KEY.with(|x| x as *const _ as usize)
|
||||
}
|
||||
|
@ -59,10 +50,12 @@ impl RawReentrantMutex {
|
|||
fn lock_internal<F: FnOnce() -> bool>(&self, try_lock: F) -> bool {
|
||||
let id = get_thread_id();
|
||||
if self.owner.load(Ordering::Relaxed) == id {
|
||||
self.lock_count.set(self.lock_count
|
||||
.get()
|
||||
.checked_add(1)
|
||||
.expect("ReentrantMutex lock count overflow"));
|
||||
self.lock_count.set(
|
||||
self.lock_count
|
||||
.get()
|
||||
.checked_add(1)
|
||||
.expect("ReentrantMutex lock count overflow"),
|
||||
);
|
||||
} else {
|
||||
if !try_lock() {
|
||||
return false;
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -33,8 +33,8 @@ pub struct ReentrantMutex<T: ?Sized> {
|
|||
data: UnsafeCell<T>,
|
||||
}
|
||||
|
||||
unsafe impl<T: Send> Send for ReentrantMutex<T> {}
|
||||
unsafe impl<T: Send> Sync for ReentrantMutex<T> {}
|
||||
unsafe impl<T: ?Sized + Send> Send for ReentrantMutex<T> {}
|
||||
unsafe impl<T: ?Sized + Send> Sync for ReentrantMutex<T> {}
|
||||
|
||||
/// An RAII implementation of a "scoped lock" of a reentrant mutex. When this structure
|
||||
/// is dropped (falls out of scope), the lock will be unlocked.
|
||||
|
@ -43,13 +43,12 @@ unsafe impl<T: Send> Sync for ReentrantMutex<T> {}
|
|||
/// `Deref` implementation.
|
||||
#[must_use]
|
||||
pub struct ReentrantMutexGuard<'a, T: ?Sized + 'a> {
|
||||
mutex: &'a ReentrantMutex<T>,
|
||||
|
||||
// The raw pointer here ensures that ReentrantMutexGuard is !Send
|
||||
marker: PhantomData<(&'a T, *mut ())>,
|
||||
raw: &'a RawReentrantMutex,
|
||||
data: *const T,
|
||||
marker: PhantomData<&'a T>,
|
||||
}
|
||||
|
||||
unsafe impl<'a, T: ?Sized + 'a + Sync> Sync for ReentrantMutexGuard<'a, T> {}
|
||||
unsafe impl<'a, T: ?Sized + Sync + 'a> Sync for ReentrantMutexGuard<'a, T> {}
|
||||
|
||||
impl<T> ReentrantMutex<T> {
|
||||
/// Creates a new reentrant mutex in an unlocked state ready for use.
|
||||
|
@ -80,6 +79,15 @@ impl<T> ReentrantMutex<T> {
|
|||
}
|
||||
|
||||
impl<T: ?Sized> ReentrantMutex<T> {
|
||||
#[inline]
|
||||
fn guard(&self) -> ReentrantMutexGuard<T> {
|
||||
ReentrantMutexGuard {
|
||||
raw: &self.raw,
|
||||
data: self.data.get(),
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Acquires a reentrant mutex, blocking the current thread until it is able
|
||||
/// to do so.
|
||||
///
|
||||
|
@ -93,10 +101,7 @@ impl<T: ?Sized> ReentrantMutex<T> {
|
|||
#[inline]
|
||||
pub fn lock(&self) -> ReentrantMutexGuard<T> {
|
||||
self.raw.lock();
|
||||
ReentrantMutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
}
|
||||
self.guard()
|
||||
}
|
||||
|
||||
/// Attempts to acquire this lock.
|
||||
|
@ -109,10 +114,7 @@ impl<T: ?Sized> ReentrantMutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock(&self) -> Option<ReentrantMutexGuard<T>> {
|
||||
if self.raw.try_lock() {
|
||||
Some(ReentrantMutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -126,10 +128,7 @@ impl<T: ?Sized> ReentrantMutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock_for(&self, timeout: Duration) -> Option<ReentrantMutexGuard<T>> {
|
||||
if self.raw.try_lock_for(timeout) {
|
||||
Some(ReentrantMutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -143,10 +142,7 @@ impl<T: ?Sized> ReentrantMutex<T> {
|
|||
#[inline]
|
||||
pub fn try_lock_until(&self, timeout: Instant) -> Option<ReentrantMutexGuard<T>> {
|
||||
if self.raw.try_lock_until(timeout) {
|
||||
Some(ReentrantMutexGuard {
|
||||
mutex: self,
|
||||
marker: PhantomData,
|
||||
})
|
||||
Some(self.guard())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -239,23 +235,46 @@ impl<'a, T: ?Sized + 'a> ReentrantMutexGuard<'a, T> {
|
|||
/// using this method instead of dropping the `ReentrantMutexGuard` normally.
|
||||
#[inline]
|
||||
pub fn unlock_fair(self) {
|
||||
self.mutex.raw.unlock(true);
|
||||
self.raw.unlock(true);
|
||||
mem::forget(self);
|
||||
}
|
||||
|
||||
/// Make a new `ReentrantMutexGuard` for a component of the locked data.
|
||||
///
|
||||
/// This operation cannot fail as the `ReentrantMutexGuard` passed
|
||||
/// in already locked the mutex.
|
||||
///
|
||||
/// This is an associated function that needs to be
|
||||
/// used as `ReentrantMutexGuard::map(...)`. A method would interfere with
|
||||
/// methods of the same name on the contents of the locked data.
|
||||
#[inline]
|
||||
pub fn map<U: ?Sized, F>(orig: Self, f: F) -> ReentrantMutexGuard<'a, U>
|
||||
where
|
||||
F: FnOnce(&T) -> &U,
|
||||
{
|
||||
let raw = orig.raw;
|
||||
let data = f(unsafe { &*orig.data });
|
||||
mem::forget(orig);
|
||||
ReentrantMutexGuard {
|
||||
raw,
|
||||
data,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + 'a> Deref for ReentrantMutexGuard<'a, T> {
|
||||
type Target = T;
|
||||
#[inline]
|
||||
fn deref(&self) -> &T {
|
||||
unsafe { &*self.mutex.data.get() }
|
||||
unsafe { &*self.data }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + 'a> Drop for ReentrantMutexGuard<'a, T> {
|
||||
#[inline]
|
||||
fn drop(&mut self) {
|
||||
self.mutex.raw.unlock(false);
|
||||
self.raw.unlock(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -310,10 +329,9 @@ mod tests {
|
|||
let _lock = m.try_lock();
|
||||
let _lock2 = m.try_lock();
|
||||
thread::spawn(move || {
|
||||
let lock = m2.try_lock();
|
||||
assert!(lock.is_none());
|
||||
})
|
||||
.join()
|
||||
let lock = m2.try_lock();
|
||||
assert!(lock.is_none());
|
||||
}).join()
|
||||
.unwrap();
|
||||
let _lock3 = m.try_lock();
|
||||
}
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -1,77 +0,0 @@
|
|||
// Copyright 2016 Amanieu d'Antras
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
|
||||
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
|
||||
// http://opensource.org/licenses/MIT>, at your option. This file may not be
|
||||
// copied, modified, or distributed except according to those terms.
|
||||
|
||||
#![allow(dead_code)]
|
||||
|
||||
use std::sync::atomic;
|
||||
|
||||
// Re-export this for convenience
|
||||
pub use std::sync::atomic::{Ordering, fence};
|
||||
|
||||
// Wrapper around AtomicUsize for non-nightly which has usable compare_exchange
|
||||
// and compare_exchange_weak methods.
|
||||
pub struct AtomicUsize(atomic::AtomicUsize);
|
||||
pub use self::AtomicUsize as AtomicU8;
|
||||
|
||||
// Constants for static initialization
|
||||
pub const ATOMIC_USIZE_INIT: AtomicUsize = AtomicUsize(atomic::ATOMIC_USIZE_INIT);
|
||||
pub use self::ATOMIC_USIZE_INIT as ATOMIC_U8_INIT;
|
||||
|
||||
impl AtomicUsize {
|
||||
#[inline]
|
||||
pub fn new(val: usize) -> AtomicUsize {
|
||||
AtomicUsize(atomic::AtomicUsize::new(val))
|
||||
}
|
||||
#[inline]
|
||||
pub fn load(&self, order: Ordering) -> usize {
|
||||
self.0.load(order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn store(&self, val: usize, order: Ordering) {
|
||||
self.0.store(val, order);
|
||||
}
|
||||
#[inline]
|
||||
pub fn swap(&self, val: usize, order: Ordering) -> usize {
|
||||
self.0.swap(val, order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn fetch_add(&self, val: usize, order: Ordering) -> usize {
|
||||
self.0.fetch_add(val, order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn fetch_sub(&self, val: usize, order: Ordering) -> usize {
|
||||
self.0.fetch_sub(val, order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn fetch_and(&self, val: usize, order: Ordering) -> usize {
|
||||
self.0.fetch_and(val, order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn fetch_or(&self, val: usize, order: Ordering) -> usize {
|
||||
self.0.fetch_or(val, order)
|
||||
}
|
||||
#[inline]
|
||||
pub fn compare_exchange(&self,
|
||||
old: usize,
|
||||
new: usize,
|
||||
order: Ordering,
|
||||
_: Ordering)
|
||||
-> Result<usize, usize> {
|
||||
let res = self.0.compare_and_swap(old, new, order);
|
||||
if res == old { Ok(res) } else { Err(res) }
|
||||
}
|
||||
#[inline]
|
||||
pub fn compare_exchange_weak(&self,
|
||||
old: usize,
|
||||
new: usize,
|
||||
order: Ordering,
|
||||
_: Ordering)
|
||||
-> Result<usize, usize> {
|
||||
let res = self.0.compare_and_swap(old, new, order);
|
||||
if res == old { Ok(res) } else { Err(res) }
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
environment:
|
||||
matrix:
|
||||
- target: 1.8.0-x86_64-pc-windows-msvc
|
||||
- target: 1.8.0-i686-pc-windows-msvc
|
||||
- target: 1.9.0-x86_64-pc-windows-msvc
|
||||
- target: 1.9.0-i686-pc-windows-msvc
|
||||
- target: 1.10.0-x86_64-pc-windows-msvc
|
||||
- target: 1.10.0-i686-pc-windows-msvc
|
||||
- target: 1.11.0-x86_64-pc-windows-msvc
|
||||
- target: 1.11.0-i686-pc-windows-msvc
|
||||
- target: 1.12.0-x86_64-pc-windows-msvc
|
||||
- target: 1.12.0-i686-pc-windows-msvc
|
||||
- target: 1.13.0-x86_64-pc-windows-msvc
|
||||
- target: 1.13.0-i686-pc-windows-msvc
|
||||
- target: 1.14.0-x86_64-pc-windows-msvc
|
||||
- target: 1.14.0-i686-pc-windows-msvc
|
||||
- target: 1.15.1-x86_64-pc-windows-msvc
|
||||
- target: 1.15.1-i686-pc-windows-msvc
|
||||
- target: 1.16.0-x86_64-pc-windows-msvc
|
||||
- target: 1.16.0-i686-pc-windows-msvc
|
||||
- target: 1.17.0-x86_64-pc-windows-msvc
|
||||
- target: 1.17.0-i686-pc-windows-msvc
|
||||
- target: beta-x86_64-pc-windows-msvc
|
||||
- target: beta-i686-pc-windows-msvc
|
||||
- target: beta-x86_64-pc-windows-gnu
|
||||
- target: beta-i686-pc-windows-gnu
|
||||
- target: nightly-x86_64-pc-windows-msvc
|
||||
- target: nightly-i686-pc-windows-msvc
|
||||
- target: nightly-x86_64-pc-windows-gnu
|
||||
- target: nightly-i686-pc-windows-gnu
|
||||
|
||||
install:
|
||||
# Download the Rust and Cargo installer.
|
||||
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:target}.msi"
|
||||
|
||||
# Install Rust and Cargo and wait for installation to finish by using Write-Output.
|
||||
- ps: msiexec /package "rust-${env:target}.msi" /quiet /norestart | Write-Output
|
||||
|
||||
# Pick up the new Path variable after the installer modified it.
|
||||
- ps: $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine")
|
||||
|
||||
# Print versions for future reference.
|
||||
- rustc --version
|
||||
- cargo --version
|
||||
|
||||
build_script:
|
||||
- cargo build
|
||||
|
||||
test_script:
|
||||
- cargo test
|
|
@ -1 +0,0 @@
|
|||
{"files":{".appveyor.yml":"86b8a7bf3ff316a2d8c58ba1fc365c5773c926ee3e4abccf889a95e5ec4f393a",".travis.yml":"731fd15b3f516c8da8241d2c5e7a56e638f0295f637d448fb009131092792264","Cargo.toml":"fbe4f546589b570dff000201397c059cc16a5807211a436c17f03aade65690ff","changelog.md":"71ecf0389739517ce6e84fc230f1b42f7532c545c0690fbcff270c83012f0d30","license-apache":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","license-mit":"69a1ca9eaf8e5276eade8931f42808d7c39c6a26011e34450c4ebb10f11c653f","readme.md":"2dc47b0ce86bb82f007639782c12ba9376be7d31635ba5cdf7cc828e27a9ba54","src/lib.rs":"214ad7a56dd4715387bbdbe91e53cd93f49fedc88f4c818b4e52378b71d98232"},"package":"8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"}
|
|
@ -1,15 +0,0 @@
|
|||
language: rust
|
||||
|
||||
rust:
|
||||
- 1.8.0
|
||||
- 1.9.0
|
||||
- 1.10.0
|
||||
- 1.11.0
|
||||
- 1.12.0
|
||||
- 1.13.0
|
||||
- 1.14.0
|
||||
- 1.15.1
|
||||
- 1.16.0
|
||||
- 1.17.0
|
||||
- beta
|
||||
- nightly
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче