зеркало из https://github.com/mozilla/gecko-dev.git
Merge mozilla-central to autoland a=merge on a CLOSED TREE
This commit is contained in:
Коммит
7bb82fa0dc
|
@ -1,5 +1,5 @@
|
|||
This is the PDF.js project output, https://github.com/mozilla/pdf.js
|
||||
|
||||
Current extension version is: 2.2.154
|
||||
Current extension version is: 2.2.160
|
||||
|
||||
Taken from upstream commit: 762c58e0
|
||||
Taken from upstream commit: 155304a0
|
||||
|
|
|
@ -123,8 +123,8 @@ return /******/ (function(modules) { // webpackBootstrap
|
|||
"use strict";
|
||||
|
||||
|
||||
var pdfjsVersion = '2.2.154';
|
||||
var pdfjsBuild = '762c58e0';
|
||||
var pdfjsVersion = '2.2.160';
|
||||
var pdfjsBuild = '155304a0';
|
||||
|
||||
var pdfjsSharedUtil = __w_pdfjs_require__(1);
|
||||
|
||||
|
@ -1303,7 +1303,7 @@ function _fetchDocument(worker, source, pdfDataRangeTransport, docId) {
|
|||
|
||||
return worker.messageHandler.sendWithPromise('GetDocRequest', {
|
||||
docId,
|
||||
apiVersion: '2.2.154',
|
||||
apiVersion: '2.2.160',
|
||||
source: {
|
||||
data: source.data,
|
||||
url: source.url,
|
||||
|
@ -3097,9 +3097,9 @@ const InternalRenderTask = function InternalRenderTaskClosure() {
|
|||
return InternalRenderTask;
|
||||
}();
|
||||
|
||||
const version = '2.2.154';
|
||||
const version = '2.2.160';
|
||||
exports.version = version;
|
||||
const build = '762c58e0';
|
||||
const build = '155304a0';
|
||||
exports.build = build;
|
||||
|
||||
/***/ }),
|
||||
|
|
|
@ -123,8 +123,8 @@ return /******/ (function(modules) { // webpackBootstrap
|
|||
"use strict";
|
||||
|
||||
|
||||
const pdfjsVersion = '2.2.154';
|
||||
const pdfjsBuild = '762c58e0';
|
||||
const pdfjsVersion = '2.2.160';
|
||||
const pdfjsBuild = '155304a0';
|
||||
|
||||
const pdfjsCoreWorker = __w_pdfjs_require__(1);
|
||||
|
||||
|
@ -378,7 +378,7 @@ var WorkerMessageHandler = {
|
|||
var WorkerTasks = [];
|
||||
const verbosity = (0, _util.getVerbosityLevel)();
|
||||
let apiVersion = docParams.apiVersion;
|
||||
let workerVersion = '2.2.154';
|
||||
let workerVersion = '2.2.160';
|
||||
|
||||
if (apiVersion !== workerVersion) {
|
||||
throw new Error(`The API version "${apiVersion}" does not match ` + `the Worker version "${workerVersion}".`);
|
||||
|
@ -19616,7 +19616,9 @@ var NullOptimizer = function NullOptimizerClosure() {
|
|||
this.queue.argsArray.push(args);
|
||||
},
|
||||
|
||||
flush() {}
|
||||
flush() {},
|
||||
|
||||
reset() {}
|
||||
|
||||
};
|
||||
return NullOptimizer;
|
||||
|
@ -19631,7 +19633,7 @@ var OperatorList = function OperatorListClosure() {
|
|||
this.fnArray = [];
|
||||
this.argsArray = [];
|
||||
|
||||
if (messageHandler && this.intent !== 'oplist') {
|
||||
if (messageHandler && intent !== 'oplist') {
|
||||
this.optimizer = new QueueOptimizer(this);
|
||||
} else {
|
||||
this.optimizer = new NullOptimizer(this);
|
||||
|
@ -20554,7 +20556,8 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
|
|||
});
|
||||
return fontCapability.promise;
|
||||
},
|
||||
buildPath: function PartialEvaluator_buildPath(operatorList, fn, args) {
|
||||
|
||||
buildPath(operatorList, fn, args, parsingText = false) {
|
||||
var lastIndex = operatorList.length - 1;
|
||||
|
||||
if (!args) {
|
||||
|
@ -20562,13 +20565,23 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
|
|||
}
|
||||
|
||||
if (lastIndex < 0 || operatorList.fnArray[lastIndex] !== _util.OPS.constructPath) {
|
||||
if (parsingText) {
|
||||
(0, _util.warn)(`Encountered path operator "${fn}" inside of a text object.`);
|
||||
operatorList.addOp(_util.OPS.save, null);
|
||||
}
|
||||
|
||||
operatorList.addOp(_util.OPS.constructPath, [[fn], args]);
|
||||
|
||||
if (parsingText) {
|
||||
operatorList.addOp(_util.OPS.restore, null);
|
||||
}
|
||||
} else {
|
||||
var opArgs = operatorList.argsArray[lastIndex];
|
||||
opArgs[0].push(fn);
|
||||
Array.prototype.push.apply(opArgs[1], args);
|
||||
}
|
||||
},
|
||||
|
||||
handleColorN: function PartialEvaluator_handleColorN(operatorList, fn, args, cs, patterns, resources, task) {
|
||||
var patternName = args[args.length - 1];
|
||||
var pattern;
|
||||
|
@ -20611,6 +20624,7 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
|
|||
|
||||
var self = this;
|
||||
var xref = this.xref;
|
||||
let parsingText = false;
|
||||
var imageCache = Object.create(null);
|
||||
|
||||
var xobjs = resources.get('XObject') || _primitives.Dict.empty;
|
||||
|
@ -20733,6 +20747,14 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
|
|||
}));
|
||||
return;
|
||||
|
||||
case _util.OPS.beginText:
|
||||
parsingText = true;
|
||||
break;
|
||||
|
||||
case _util.OPS.endText:
|
||||
parsingText = false;
|
||||
break;
|
||||
|
||||
case _util.OPS.endInlineImage:
|
||||
var cacheKey = args[0].cacheKey;
|
||||
|
||||
|
@ -20914,11 +20936,8 @@ var PartialEvaluator = function PartialEvaluatorClosure() {
|
|||
case _util.OPS.curveTo2:
|
||||
case _util.OPS.curveTo3:
|
||||
case _util.OPS.closePath:
|
||||
self.buildPath(operatorList, fn, args);
|
||||
continue;
|
||||
|
||||
case _util.OPS.rectangle:
|
||||
self.buildPath(operatorList, fn, args);
|
||||
self.buildPath(operatorList, fn, args, parsingText);
|
||||
continue;
|
||||
|
||||
case _util.OPS.markPoint:
|
||||
|
|
|
@ -1485,7 +1485,9 @@ let PDFViewerApplication = {
|
|||
};
|
||||
|
||||
window.addEventListener('visibilitychange', webViewerVisibilityChange);
|
||||
window.addEventListener('wheel', webViewerWheel);
|
||||
window.addEventListener('wheel', webViewerWheel, {
|
||||
passive: false
|
||||
});
|
||||
window.addEventListener('click', webViewerClick);
|
||||
window.addEventListener('keydown', webViewerKeyDown);
|
||||
window.addEventListener('resize', _boundEvents.windowResize);
|
||||
|
|
|
@ -20,7 +20,7 @@ origin:
|
|||
|
||||
# Human-readable identifier for this version/release
|
||||
# Generally "version NNN", "tag SSS", "bookmark SSS"
|
||||
release: version 2.2.154
|
||||
release: version 2.2.160
|
||||
|
||||
# The package's license, where possible using the mnemonic from
|
||||
# https://spdx.org/licenses/
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
add_task(async function test() {
|
||||
await BrowserTestUtils.withNewTab({ gBrowser,
|
||||
url: "chrome://global/content/aboutProfiles.xhtml" },
|
||||
url: "chrome://global/content/mozilla.xhtml" },
|
||||
async function(newBrowser) {
|
||||
// NB: We load the chrome:// page in the parent process.
|
||||
await testXFOFrameInChrome(newBrowser);
|
||||
|
|
|
@ -53,6 +53,13 @@ function onLoad() {
|
|||
imgListener(img).then(() => {
|
||||
ok(true, "img shouldn't be loaded");
|
||||
|
||||
// `iframe` is a content iframe, and thus not in the same docgroup with
|
||||
// us, which are a chrome-privileged test.
|
||||
//
|
||||
// Ensure the frame is laid out so that this cross-origin
|
||||
// getComputedStyle call is guaranteed to work after bug 1440537.
|
||||
iframe.getBoundingClientRect();
|
||||
|
||||
// We can't use matches(":-moz-suppressed") here, as -moz-suppressed is
|
||||
// chrome-only, however now we are in a content iframe.
|
||||
is(iframe.contentWindow.getComputedStyle(img).color, "rgb(0, 128, 0)",
|
||||
|
|
|
@ -191,7 +191,8 @@ const char kPrivateBrowsingObserverTopic[] = "last-pb-context-exited";
|
|||
const uint32_t kDefaultNextGen = false;
|
||||
const uint32_t kDefaultOriginLimitKB = 5 * 1024;
|
||||
const uint32_t kDefaultShadowWrites = true;
|
||||
const uint32_t kDefaultSnapshotPrefill = 4096;
|
||||
const uint32_t kDefaultSnapshotPrefill = 16384;
|
||||
const uint32_t kDefaultSnapshotGradualPrefill = 4096;
|
||||
const uint32_t kDefaultClientValidation = true;
|
||||
/**
|
||||
*
|
||||
|
@ -221,6 +222,17 @@ const char kShadowWritesPref[] = "dom.storage.shadow_writes";
|
|||
* sent. See `Snapshot`.
|
||||
*/
|
||||
const char kSnapshotPrefillPref[] = "dom.storage.snapshot_prefill";
|
||||
/**
|
||||
* When a specific value is requested by an LSSnapshot that is not already fully
|
||||
* populated, gradual prefill is used. This preference specifies the number of
|
||||
* bytes to be used to send values beyond the specific value that is requested.
|
||||
* (The size of the explicitly requested value does not impact this preference.)
|
||||
* Setting the value to 0 disables gradual prefill. Tests may set this value to
|
||||
* -1 which is converted to INT_MAX in order to cause gradual prefill to send
|
||||
* all values not previously sent.
|
||||
*/
|
||||
const char kSnapshotGradualPrefillPref[] =
|
||||
"dom.storage.snapshot_gradual_prefill";
|
||||
|
||||
const char kClientValidationPref[] = "dom.storage.client_validation";
|
||||
|
||||
|
@ -1703,10 +1715,12 @@ class Datastore final
|
|||
|
||||
void GetSnapshotInitInfo(nsTHashtable<nsStringHashKey>& aLoadedItems,
|
||||
nsTArray<LSItemInfo>& aItemInfos,
|
||||
uint32_t& aTotalLength, int64_t& aInitialUsage,
|
||||
int64_t& aPeakUsage,
|
||||
uint32_t& aNextLoadIndex, uint32_t& aTotalLength,
|
||||
int64_t& aInitialUsage, int64_t& aPeakUsage,
|
||||
LSSnapshot::LoadState& aLoadState);
|
||||
|
||||
const nsTArray<LSItemInfo>& GetOrderedItems() const { return mOrderedItems; }
|
||||
|
||||
void GetItem(const nsString& aKey, nsString& aValue) const;
|
||||
|
||||
void GetKeys(nsTArray<nsString>& aKeys) const;
|
||||
|
@ -1961,10 +1975,10 @@ class Snapshot final : public PBackgroundLSSnapshotParent {
|
|||
*/
|
||||
nsTHashtable<nsStringHashKey> mLoadedItems;
|
||||
/**
|
||||
* The set of keys for which a RecvLoadItem request was received but there
|
||||
* was no such key, and so null was returned. The child LSSnapshot will also
|
||||
* cache these values, so redundant requests are also handled with fatal
|
||||
* process termination just like for mLoadedItems. Also cleared when
|
||||
* The set of keys for which a RecvLoadValueAndMoreItems request was received
|
||||
* but there was no such key, and so null was returned. The child LSSnapshot
|
||||
* will also cache these values, so redundant requests are also handled with
|
||||
* fatal process termination just like for mLoadedItems. Also cleared when
|
||||
* mLoadedAllItems becomes true because then the child can infer that all
|
||||
* other values must be null. (Note: this could also be done when
|
||||
* mLoadKeysReceived is true as a further optimization, but is not.)
|
||||
|
@ -1989,6 +2003,11 @@ class Snapshot final : public PBackgroundLSSnapshotParent {
|
|||
*/
|
||||
nsTArray<nsString> mKeys;
|
||||
nsString mDocumentURI;
|
||||
/**
|
||||
* The index used for restoring iteration over not yet sent key/value pairs to
|
||||
* the child LSSnapshot.
|
||||
*/
|
||||
uint32_t mNextLoadIndex;
|
||||
/**
|
||||
* The number of key/value pairs that were present in the Datastore at the
|
||||
* time the snapshot was created. Once we have sent this many values to the
|
||||
|
@ -2012,10 +2031,11 @@ class Snapshot final : public PBackgroundLSSnapshotParent {
|
|||
* True if LSSnapshot's mLoadState should be LoadState::AllOrderedItems or
|
||||
* LoadState::AllUnorderedItems. It will be AllOrderedItems if the initial
|
||||
* snapshot contained all the data or if the state was AllOrderedKeys and
|
||||
* successive RecvLoadItem requests have resulted in the LSSnapshot being told
|
||||
* all of the key/value pairs. It will be AllUnorderedItems if the state was
|
||||
* LoadState::Partial and successive RecvLoadItem requests got all the
|
||||
* keys/values but the key ordering was not retrieved.
|
||||
* successive RecvLoadValueAndMoreItems requests have resulted in the
|
||||
* LSSnapshot being told all of the key/value pairs. It will be
|
||||
* AllUnorderedItems if the state was LoadState::Partial and successive
|
||||
* RecvLoadValueAndMoreItem requests got all the keys/values but the key
|
||||
* ordering was not retrieved.
|
||||
*/
|
||||
bool mLoadedAllItems;
|
||||
/**
|
||||
|
@ -2030,25 +2050,29 @@ class Snapshot final : public PBackgroundLSSnapshotParent {
|
|||
// Created in AllocPBackgroundLSSnapshotParent.
|
||||
Snapshot(Database* aDatabase, const nsAString& aDocumentURI);
|
||||
|
||||
void Init(nsTHashtable<nsStringHashKey>& aLoadedItems, uint32_t aTotalLength,
|
||||
void Init(nsTHashtable<nsStringHashKey>& aLoadedItems,
|
||||
uint32_t aNextLoadIndex, uint32_t aTotalLength,
|
||||
int64_t aInitialUsage, int64_t aPeakUsage,
|
||||
LSSnapshot::LoadState aLoadState) {
|
||||
AssertIsOnBackgroundThread();
|
||||
MOZ_ASSERT(aInitialUsage >= 0);
|
||||
MOZ_ASSERT(aPeakUsage >= aInitialUsage);
|
||||
MOZ_ASSERT_IF(aLoadState == LSSnapshot::LoadState::AllOrderedItems,
|
||||
aLoadedItems.Count() == 0);
|
||||
MOZ_ASSERT_IF(aLoadState != LSSnapshot::LoadState::AllOrderedItems,
|
||||
aNextLoadIndex < aTotalLength);
|
||||
MOZ_ASSERT(mTotalLength == 0);
|
||||
MOZ_ASSERT(mUsage == -1);
|
||||
MOZ_ASSERT(mPeakUsage == -1);
|
||||
|
||||
mLoadedItems.SwapElements(aLoadedItems);
|
||||
mNextLoadIndex = aNextLoadIndex;
|
||||
mTotalLength = aTotalLength;
|
||||
mUsage = aInitialUsage;
|
||||
mPeakUsage = aPeakUsage;
|
||||
if (aLoadState == LSSnapshot::LoadState::AllOrderedKeys) {
|
||||
mLoadKeysReceived = true;
|
||||
} else if (aLoadState == LSSnapshot::LoadState::AllOrderedItems) {
|
||||
MOZ_ASSERT(mLoadedItems.Count() == 0);
|
||||
MOZ_ASSERT(mNextLoadIndex == mTotalLength);
|
||||
mLoadedReceived = true;
|
||||
mLoadedAllItems = true;
|
||||
mLoadKeysReceived = true;
|
||||
|
@ -2085,8 +2109,9 @@ class Snapshot final : public PBackgroundLSSnapshotParent {
|
|||
|
||||
mozilla::ipc::IPCResult RecvLoaded() override;
|
||||
|
||||
mozilla::ipc::IPCResult RecvLoadItem(const nsString& aKey,
|
||||
nsString* aValue) override;
|
||||
mozilla::ipc::IPCResult RecvLoadValueAndMoreItems(
|
||||
const nsString& aKey, nsString* aValue,
|
||||
nsTArray<LSItemInfo>* aItemInfos) override;
|
||||
|
||||
mozilla::ipc::IPCResult RecvLoadKeys(nsTArray<nsString>* aKeys) override;
|
||||
|
||||
|
@ -2777,6 +2802,8 @@ Atomic<bool> gNextGen(kDefaultNextGen);
|
|||
Atomic<uint32_t, Relaxed> gOriginLimitKB(kDefaultOriginLimitKB);
|
||||
Atomic<bool> gShadowWrites(kDefaultShadowWrites);
|
||||
Atomic<int32_t, Relaxed> gSnapshotPrefill(kDefaultSnapshotPrefill);
|
||||
Atomic<int32_t, Relaxed> gSnapshotGradualPrefill(
|
||||
kDefaultSnapshotGradualPrefill);
|
||||
Atomic<bool> gClientValidation(kDefaultClientValidation);
|
||||
|
||||
typedef nsDataHashtable<nsCStringHashKey, int64_t> UsageHashtable;
|
||||
|
@ -2964,6 +2991,23 @@ void SnapshotPrefillPrefChangedCallback(const char* aPrefName, void* aClosure) {
|
|||
gSnapshotPrefill = snapshotPrefill;
|
||||
}
|
||||
|
||||
void SnapshotGradualPrefillPrefChangedCallback(const char* aPrefName,
|
||||
void* aClosure) {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(!strcmp(aPrefName, kSnapshotGradualPrefillPref));
|
||||
MOZ_ASSERT(!aClosure);
|
||||
|
||||
int32_t snapshotGradualPrefill =
|
||||
Preferences::GetInt(aPrefName, kDefaultSnapshotGradualPrefill);
|
||||
|
||||
// The magic -1 is for use only by tests.
|
||||
if (snapshotGradualPrefill == -1) {
|
||||
snapshotGradualPrefill = INT32_MAX;
|
||||
}
|
||||
|
||||
gSnapshotGradualPrefill = snapshotGradualPrefill;
|
||||
}
|
||||
|
||||
void ClientValidationPrefChangedCallback(const char* aPrefName,
|
||||
void* aClosure) {
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
|
@ -3035,6 +3079,9 @@ void InitializeLocalStorage() {
|
|||
Preferences::RegisterCallbackAndCall(SnapshotPrefillPrefChangedCallback,
|
||||
kSnapshotPrefillPref);
|
||||
|
||||
Preferences::RegisterCallbackAndCall(
|
||||
SnapshotGradualPrefillPrefChangedCallback, kSnapshotGradualPrefillPref);
|
||||
|
||||
Preferences::RegisterCallbackAndCall(ClientValidationPrefChangedCallback,
|
||||
kClientValidationPref);
|
||||
|
||||
|
@ -4601,6 +4648,7 @@ void Datastore::NoteInactiveDatabase(Database* aDatabase) {
|
|||
|
||||
void Datastore::GetSnapshotInitInfo(nsTHashtable<nsStringHashKey>& aLoadedItems,
|
||||
nsTArray<LSItemInfo>& aItemInfos,
|
||||
uint32_t& aNextLoadIndex,
|
||||
uint32_t& aTotalLength,
|
||||
int64_t& aInitialUsage, int64_t& aPeakUsage,
|
||||
LSSnapshot::LoadState& aLoadState) {
|
||||
|
@ -4620,51 +4668,67 @@ void Datastore::GetSnapshotInitInfo(nsTHashtable<nsStringHashKey>& aLoadedItems,
|
|||
MOZ_ASSERT(mSizeOfItems == sizeOfItems);
|
||||
#endif
|
||||
|
||||
int64_t size = 0;
|
||||
if (mSizeOfKeys <= gSnapshotPrefill) {
|
||||
if (mSizeOfItems <= gSnapshotPrefill) {
|
||||
aItemInfos.AppendElements(mOrderedItems);
|
||||
|
||||
MOZ_ASSERT(aItemInfos.Length() == mValues.Count());
|
||||
aNextLoadIndex = mValues.Count();
|
||||
|
||||
aLoadState = LSSnapshot::LoadState::AllOrderedItems;
|
||||
} else {
|
||||
int64_t size = mSizeOfKeys;
|
||||
nsString value;
|
||||
for (auto item : mOrderedItems) {
|
||||
for (uint32_t index = 0; index < mOrderedItems.Length(); index++) {
|
||||
const LSItemInfo& item = mOrderedItems[index];
|
||||
|
||||
const nsString& key = item.key();
|
||||
|
||||
if (!value.IsVoid()) {
|
||||
value = item.value();
|
||||
|
||||
size += static_cast<int64_t>(item.key().Length()) +
|
||||
static_cast<int64_t>(value.Length());
|
||||
size += static_cast<int64_t>(value.Length());
|
||||
|
||||
if (size <= gSnapshotPrefill) {
|
||||
aLoadedItems.PutEntry(item.key());
|
||||
aLoadedItems.PutEntry(key);
|
||||
} else {
|
||||
value.SetIsVoid(true);
|
||||
|
||||
// We set value to void so that will guard against entering the
|
||||
// parent branch during next iterations. So aNextLoadIndex is set
|
||||
// only once.
|
||||
aNextLoadIndex = index;
|
||||
}
|
||||
}
|
||||
|
||||
LSItemInfo* itemInfo = aItemInfos.AppendElement();
|
||||
itemInfo->key() = item.key();
|
||||
itemInfo->key() = key;
|
||||
itemInfo->value() = value;
|
||||
}
|
||||
|
||||
aLoadState = LSSnapshot::LoadState::AllOrderedKeys;
|
||||
}
|
||||
} else {
|
||||
for (auto iter = mValues.ConstIter(); !iter.Done(); iter.Next()) {
|
||||
const nsAString& key = iter.Key();
|
||||
const nsString& value = iter.Data();
|
||||
int64_t size = 0;
|
||||
for (uint32_t index = 0; index < mOrderedItems.Length(); index++) {
|
||||
const LSItemInfo& item = mOrderedItems[index];
|
||||
|
||||
const nsString& key = item.key();
|
||||
const nsString& value = item.value();
|
||||
|
||||
size += static_cast<int64_t>(key.Length()) +
|
||||
static_cast<int64_t>(value.Length());
|
||||
|
||||
if (size > gSnapshotPrefill) {
|
||||
aNextLoadIndex = index;
|
||||
break;
|
||||
}
|
||||
|
||||
aLoadedItems.PutEntry(key);
|
||||
|
||||
LSItemInfo* itemInfo = aItemInfos.AppendElement();
|
||||
itemInfo->key() = iter.Key();
|
||||
itemInfo->value() = iter.Data();
|
||||
itemInfo->key() = key;
|
||||
itemInfo->value() = value;
|
||||
}
|
||||
|
||||
MOZ_ASSERT(aItemInfos.Length() < mOrderedItems.Length());
|
||||
|
@ -5261,19 +5325,22 @@ mozilla::ipc::IPCResult Database::RecvPBackgroundLSSnapshotConstructor(
|
|||
// creation. For example clear() doesn't need to receive items at all.
|
||||
nsTHashtable<nsStringHashKey> loadedItems;
|
||||
nsTArray<LSItemInfo> itemInfos;
|
||||
uint32_t nextLoadIndex;
|
||||
uint32_t totalLength;
|
||||
int64_t initialUsage;
|
||||
int64_t peakUsage;
|
||||
LSSnapshot::LoadState loadState;
|
||||
mDatastore->GetSnapshotInitInfo(loadedItems, itemInfos, totalLength,
|
||||
initialUsage, peakUsage, loadState);
|
||||
mDatastore->GetSnapshotInitInfo(loadedItems, itemInfos, nextLoadIndex,
|
||||
totalLength, initialUsage, peakUsage,
|
||||
loadState);
|
||||
|
||||
if (aIncreasePeakUsage) {
|
||||
int64_t size = mDatastore->RequestUpdateUsage(aRequestedSize, aMinSize);
|
||||
peakUsage += size;
|
||||
}
|
||||
|
||||
snapshot->Init(loadedItems, totalLength, initialUsage, peakUsage, loadState);
|
||||
snapshot->Init(loadedItems, nextLoadIndex, totalLength, initialUsage,
|
||||
peakUsage, loadState);
|
||||
|
||||
RegisterSnapshot(snapshot);
|
||||
|
||||
|
@ -5339,7 +5406,7 @@ void Snapshot::SaveItem(const nsAString& aKey, const nsAString& aOldValue,
|
|||
mValues.LookupForAdd(aKey).OrInsert([oldValue]() { return oldValue; });
|
||||
}
|
||||
|
||||
if (aAffectsOrder && !mSavedKeys && !mLoadKeysReceived) {
|
||||
if (aAffectsOrder && !mSavedKeys) {
|
||||
mDatastore->GetKeys(mKeys);
|
||||
mSavedKeys = true;
|
||||
}
|
||||
|
@ -5489,10 +5556,11 @@ mozilla::ipc::IPCResult Snapshot::RecvLoaded() {
|
|||
return IPC_OK();
|
||||
}
|
||||
|
||||
mozilla::ipc::IPCResult Snapshot::RecvLoadItem(const nsString& aKey,
|
||||
nsString* aValue) {
|
||||
mozilla::ipc::IPCResult Snapshot::RecvLoadValueAndMoreItems(
|
||||
const nsString& aKey, nsString* aValue, nsTArray<LSItemInfo>* aItemInfos) {
|
||||
AssertIsOnBackgroundThread();
|
||||
MOZ_ASSERT(aValue);
|
||||
MOZ_ASSERT(aItemInfos);
|
||||
MOZ_ASSERT(mDatastore);
|
||||
|
||||
if (NS_WARN_IF(mFinishReceived)) {
|
||||
|
@ -5527,17 +5595,104 @@ mozilla::ipc::IPCResult Snapshot::RecvLoadItem(const nsString& aKey,
|
|||
} else {
|
||||
mLoadedItems.PutEntry(aKey);
|
||||
|
||||
if (mLoadedItems.Count() == mTotalLength) {
|
||||
mLoadedItems.Clear();
|
||||
mUnknownItems.Clear();
|
||||
#ifdef DEBUG
|
||||
for (auto iter = mValues.ConstIter(); !iter.Done(); iter.Next()) {
|
||||
MOZ_ASSERT(iter.Data().IsVoid());
|
||||
}
|
||||
#endif
|
||||
mValues.Clear();
|
||||
mLoadedAllItems = true;
|
||||
// mLoadedItems.Count()==mTotalLength is checked below.
|
||||
}
|
||||
|
||||
// Load some more key/value pairs (as many as the snapshot gradual prefill
|
||||
// byte budget allows).
|
||||
|
||||
if (gSnapshotGradualPrefill > 0) {
|
||||
const nsTArray<LSItemInfo>& orderedItems = mDatastore->GetOrderedItems();
|
||||
|
||||
uint32_t length;
|
||||
if (mSavedKeys) {
|
||||
length = mKeys.Length();
|
||||
} else {
|
||||
length = orderedItems.Length();
|
||||
}
|
||||
|
||||
int64_t size = 0;
|
||||
while (mNextLoadIndex < length) {
|
||||
// If the datastore's ordering has changed, mSavedKeys will be true and
|
||||
// mKeys contains an ordered list of the keys. Otherwise we can use the
|
||||
// datastore's key ordering which is still the same as when the snapshot
|
||||
// was created.
|
||||
|
||||
nsString key;
|
||||
if (mSavedKeys) {
|
||||
key = mKeys[mNextLoadIndex];
|
||||
} else {
|
||||
key = orderedItems[mNextLoadIndex].key();
|
||||
}
|
||||
|
||||
// Normally we would do this:
|
||||
// if (!mLoadedItems.GetEntry(key)) {
|
||||
// ...
|
||||
// mLoadedItems.PutEntry(key);
|
||||
// }
|
||||
// but that requires two hash lookups. We can reduce that to just one
|
||||
// hash lookup if we always call PutEntry and check the number of entries
|
||||
// before and after the put (which is very cheap). However, if we reach
|
||||
// the prefill limit, we need to call RemoveEntry, but that is also cheap
|
||||
// because we pass the entry (not the key).
|
||||
|
||||
uint32_t countBeforePut = mLoadedItems.Count();
|
||||
auto loadedItemEntry = mLoadedItems.PutEntry(key);
|
||||
if (countBeforePut != mLoadedItems.Count()) {
|
||||
// Check mValues first since that contains values as they existed when
|
||||
// our snapshot was created, but have since been changed/removed in the
|
||||
// datastore. If it's not there, then the datastore has the
|
||||
// still-current value. However, if the datastore's key ordering has
|
||||
// changed, we need to do a hash lookup rather than being able to do an
|
||||
// optimized direct access to the index.
|
||||
|
||||
nsString value;
|
||||
auto valueEntry = mValues.Lookup(key);
|
||||
if (valueEntry) {
|
||||
value = valueEntry.Data();
|
||||
} else if (mSavedKeys) {
|
||||
mDatastore->GetItem(nsString(key), value);
|
||||
} else {
|
||||
value = orderedItems[mNextLoadIndex].value();
|
||||
}
|
||||
|
||||
// All not loaded keys must have a value.
|
||||
MOZ_ASSERT(!value.IsVoid());
|
||||
|
||||
size += static_cast<int64_t>(key.Length()) +
|
||||
static_cast<int64_t>(value.Length());
|
||||
|
||||
if (size > gSnapshotGradualPrefill) {
|
||||
mLoadedItems.RemoveEntry(loadedItemEntry);
|
||||
|
||||
// mNextLoadIndex is not incremented, so we will resume at the same
|
||||
// position next time.
|
||||
break;
|
||||
}
|
||||
|
||||
if (valueEntry) {
|
||||
valueEntry.Remove();
|
||||
}
|
||||
|
||||
LSItemInfo* itemInfo = aItemInfos->AppendElement();
|
||||
itemInfo->key() = key;
|
||||
itemInfo->value() = value;
|
||||
}
|
||||
|
||||
mNextLoadIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
if (mLoadedItems.Count() == mTotalLength) {
|
||||
mLoadedItems.Clear();
|
||||
mUnknownItems.Clear();
|
||||
#ifdef DEBUG
|
||||
for (auto iter = mValues.ConstIter(); !iter.Done(); iter.Next()) {
|
||||
MOZ_ASSERT(iter.Data().IsVoid());
|
||||
}
|
||||
#endif
|
||||
mValues.Clear();
|
||||
mLoadedAllItems = true;
|
||||
}
|
||||
|
||||
return IPC_OK();
|
||||
|
|
|
@ -432,7 +432,9 @@ nsresult LSSnapshot::GetItemInternal(const nsAString& aKey,
|
|||
} else if (mLoadedItems.GetEntry(aKey) || mUnknownItems.GetEntry(aKey)) {
|
||||
result.SetIsVoid(true);
|
||||
} else {
|
||||
if (NS_WARN_IF(!mActor->SendLoadItem(nsString(aKey), &result))) {
|
||||
nsTArray<LSItemInfo> itemInfos;
|
||||
if (NS_WARN_IF(!mActor->SendLoadValueAndMoreItems(
|
||||
nsString(aKey), &result, &itemInfos))) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
|
@ -442,12 +444,21 @@ nsresult LSSnapshot::GetItemInternal(const nsAString& aKey,
|
|||
mLoadedItems.PutEntry(aKey);
|
||||
mValues.Put(aKey, result);
|
||||
|
||||
if (mLoadedItems.Count() == mInitLength) {
|
||||
mLoadedItems.Clear();
|
||||
mUnknownItems.Clear();
|
||||
mLength = 0;
|
||||
mLoadState = LoadState::AllUnorderedItems;
|
||||
}
|
||||
// mLoadedItems.Count()==mInitLength is checked below.
|
||||
}
|
||||
|
||||
for (uint32_t i = 0; i < itemInfos.Length(); i++) {
|
||||
const LSItemInfo& itemInfo = itemInfos[i];
|
||||
|
||||
mLoadedItems.PutEntry(itemInfo.key());
|
||||
mValues.Put(itemInfo.key(), itemInfo.value());
|
||||
}
|
||||
|
||||
if (mLoadedItems.Count() == mInitLength) {
|
||||
mLoadedItems.Clear();
|
||||
mUnknownItems.Clear();
|
||||
mLength = 0;
|
||||
mLoadState = LoadState::AllUnorderedItems;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -466,7 +477,9 @@ nsresult LSSnapshot::GetItemInternal(const nsAString& aKey,
|
|||
case LoadState::AllOrderedKeys: {
|
||||
if (mValues.Get(aKey, &result)) {
|
||||
if (result.IsVoid()) {
|
||||
if (NS_WARN_IF(!mActor->SendLoadItem(nsString(aKey), &result))) {
|
||||
nsTArray<LSItemInfo> itemInfos;
|
||||
if (NS_WARN_IF(!mActor->SendLoadValueAndMoreItems(
|
||||
nsString(aKey), &result, &itemInfos))) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
|
@ -475,6 +488,15 @@ nsresult LSSnapshot::GetItemInternal(const nsAString& aKey,
|
|||
mLoadedItems.PutEntry(aKey);
|
||||
mValues.Put(aKey, result);
|
||||
|
||||
// mLoadedItems.Count()==mInitLength is checked below.
|
||||
|
||||
for (uint32_t i = 0; i < itemInfos.Length(); i++) {
|
||||
const LSItemInfo& itemInfo = itemInfos[i];
|
||||
|
||||
mLoadedItems.PutEntry(itemInfo.key());
|
||||
mValues.Put(itemInfo.key(), itemInfo.value());
|
||||
}
|
||||
|
||||
if (mLoadedItems.Count() == mInitLength) {
|
||||
mLoadedItems.Clear();
|
||||
MOZ_ASSERT(mLength == 0);
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
include protocol PBackground;
|
||||
include protocol PBackgroundLSSnapshot;
|
||||
|
||||
include PBackgroundLSSharedTypes;
|
||||
|
||||
include "mozilla/dom/localstorage/SerializationHelpers.h";
|
||||
|
||||
using mozilla::dom::LSSnapshot::LoadState
|
||||
|
@ -13,18 +15,6 @@ using mozilla::dom::LSSnapshot::LoadState
|
|||
namespace mozilla {
|
||||
namespace dom {
|
||||
|
||||
/**
|
||||
* LocalStorage key/value pair wire representations. `value` may be void in
|
||||
* cases where there is a value but it is not being sent for memory/bandwidth
|
||||
* conservation purposes. (It's not possible to have a null/undefined `value`
|
||||
* as Storage is defined explicitly as a String store.)
|
||||
*/
|
||||
struct LSItemInfo
|
||||
{
|
||||
nsString key;
|
||||
nsString value;
|
||||
};
|
||||
|
||||
/**
|
||||
* Initial LSSnapshot state as produced by Datastore::GetSnapshotInitInfo. See
|
||||
* `LSSnapshot::LoadState` for more details about the possible states and a
|
||||
|
|
|
@ -48,5 +48,17 @@ union LSSimpleRequestParams
|
|||
LSSimpleRequestPreloadedParams;
|
||||
};
|
||||
|
||||
/**
|
||||
* LocalStorage key/value pair wire representations. `value` may be void in
|
||||
* cases where there is a value but it is not being sent for memory/bandwidth
|
||||
* conservation purposes. (It's not possible to have a null/undefined `value`
|
||||
* as Storage is defined explicitly as a String store.)
|
||||
*/
|
||||
struct LSItemInfo
|
||||
{
|
||||
nsString key;
|
||||
nsString value;
|
||||
};
|
||||
|
||||
} // namespace dom
|
||||
} // namespace mozilla
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
include protocol PBackground;
|
||||
include protocol PBackgroundLSDatabase;
|
||||
|
||||
include PBackgroundLSSharedTypes;
|
||||
|
||||
namespace mozilla {
|
||||
namespace dom {
|
||||
|
||||
|
@ -50,15 +52,16 @@ parent:
|
|||
|
||||
/**
|
||||
* Invoked on demand to load an item that didn't fit into the initial
|
||||
* snapshot prefill.
|
||||
* snapshot prefill and also some additional key/value pairs to lower down
|
||||
* the need to use this synchronous message again.
|
||||
*
|
||||
* This needs to be synchronous because LocalStorage's semantics are
|
||||
* synchronous. Note that the Snapshot in the PBackground parent already
|
||||
* has the answers to this request immediately available without needing to
|
||||
* consult any other threads or perform any I/O.
|
||||
*/
|
||||
sync LoadItem(nsString key)
|
||||
returns (nsString value);
|
||||
sync LoadValueAndMoreItems(nsString key)
|
||||
returns (nsString value, LSItemInfo[] itemInfos);
|
||||
|
||||
/**
|
||||
* Invoked on demand to load all keys in in their canonical order if they
|
||||
|
|
|
@ -6,6 +6,10 @@
|
|||
async function testSteps() {
|
||||
const url = "http://example.com";
|
||||
|
||||
info("Setting pref");
|
||||
|
||||
Services.prefs.setBoolPref("dom.storage.snapshot_reusing", false);
|
||||
|
||||
const items = [
|
||||
{ key: "key1", value: "value1" },
|
||||
{ key: "key2", value: "value2" },
|
||||
|
@ -19,214 +23,306 @@ async function testSteps() {
|
|||
{ key: "key10", value: "value10" },
|
||||
];
|
||||
|
||||
function getPartialPrefill() {
|
||||
let size = 0;
|
||||
for (let i = 0; i < items.length / 2; i++) {
|
||||
let item = items[i];
|
||||
size += item.key.length + item.value.length;
|
||||
let sizeOfOneKey;
|
||||
let sizeOfOneValue;
|
||||
let sizeOfOneItem;
|
||||
let sizeOfKeys = 0;
|
||||
let sizeOfItems = 0;
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
let item = items[i];
|
||||
let sizeOfKey = item.key.length;
|
||||
let sizeOfValue = item.value.length;
|
||||
let sizeOfItem = sizeOfKey + sizeOfValue;
|
||||
if (i == 0) {
|
||||
sizeOfOneKey = sizeOfKey;
|
||||
sizeOfOneValue = sizeOfValue;
|
||||
sizeOfOneItem = sizeOfItem;
|
||||
}
|
||||
return size;
|
||||
sizeOfKeys += sizeOfKey;
|
||||
sizeOfItems += sizeOfItem;
|
||||
}
|
||||
|
||||
info("Size of one key is " + sizeOfOneKey);
|
||||
info("Size of one value is " + sizeOfOneValue);
|
||||
info("Size of one item is " + sizeOfOneItem);
|
||||
info("Size of keys is " + sizeOfKeys);
|
||||
info("Size of items is " + sizeOfItems);
|
||||
|
||||
const prefillValues = [
|
||||
0, // no prefill
|
||||
getPartialPrefill(), // partial prefill
|
||||
-1, // full prefill
|
||||
// Zero prefill (prefill disabled)
|
||||
0,
|
||||
// Less than one key length prefill
|
||||
sizeOfOneKey - 1,
|
||||
// Greater than one key length and less than one item length prefill
|
||||
sizeOfOneKey + 1,
|
||||
// Precisely one item length prefill
|
||||
sizeOfOneItem,
|
||||
// Precisely two times one item length prefill
|
||||
2 * sizeOfOneItem,
|
||||
// Precisely three times one item length prefill
|
||||
3 * sizeOfOneItem,
|
||||
// Precisely four times one item length prefill
|
||||
4 * sizeOfOneItem,
|
||||
// Precisely size of keys prefill
|
||||
sizeOfKeys,
|
||||
// Less than size of keys plus one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue - 1,
|
||||
// Precisely size of keys plus one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue,
|
||||
// Greater than size of keys plus one value length and less than size of
|
||||
// keys plus two times one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue + 1,
|
||||
// Precisely size of keys plus two times one value length prefill
|
||||
sizeOfKeys + 2 * sizeOfOneValue,
|
||||
// Precisely size of keys plus three times one value length prefill
|
||||
sizeOfKeys + 3 * sizeOfOneValue,
|
||||
// Precisely size of keys plus four times one value length prefill
|
||||
sizeOfKeys + 4 * sizeOfOneValue,
|
||||
// Precisely size of keys plus five times one value length prefill
|
||||
sizeOfKeys + 5 * sizeOfOneValue,
|
||||
// Precisely size of keys plus six times one value length prefill
|
||||
sizeOfKeys + 6 * sizeOfOneValue,
|
||||
// Precisely size of keys plus seven times one value length prefill
|
||||
sizeOfKeys + 7 * sizeOfOneValue,
|
||||
// Precisely size of keys plus eight times one value length prefill
|
||||
sizeOfKeys + 8 * sizeOfOneValue,
|
||||
// Precisely size of keys plus nine times one value length prefill
|
||||
sizeOfKeys + 9 * sizeOfOneValue,
|
||||
// Precisely size of items prefill
|
||||
sizeOfItems,
|
||||
// Unlimited prefill
|
||||
-1,
|
||||
];
|
||||
|
||||
info("Setting pref");
|
||||
|
||||
Services.prefs.setBoolPref("dom.storage.snapshot_reusing", false);
|
||||
|
||||
for (let prefillValue of prefillValues) {
|
||||
info("Setting prefill value");
|
||||
info("Setting prefill value to " + prefillValue);
|
||||
|
||||
Services.prefs.setIntPref("dom.storage.snapshot_prefill", prefillValue);
|
||||
|
||||
info("Getting storage");
|
||||
const gradualPrefillValues = [
|
||||
// Zero gradual prefill
|
||||
0,
|
||||
// Less than one key length gradual prefill
|
||||
sizeOfOneKey - 1,
|
||||
// Greater than one key length and less than one item length gradual
|
||||
// prefill
|
||||
sizeOfOneKey + 1,
|
||||
// Precisely one item length gradual prefill
|
||||
sizeOfOneItem,
|
||||
// Precisely two times one item length gradual prefill
|
||||
2 * sizeOfOneItem,
|
||||
// Precisely three times one item length gradual prefill
|
||||
3 * sizeOfOneItem,
|
||||
// Precisely four times one item length gradual prefill
|
||||
4 * sizeOfOneItem,
|
||||
// Precisely five times one item length gradual prefill
|
||||
5 * sizeOfOneItem,
|
||||
// Precisely six times one item length gradual prefill
|
||||
6 * sizeOfOneItem,
|
||||
// Precisely seven times one item length gradual prefill
|
||||
7 * sizeOfOneItem,
|
||||
// Precisely eight times one item length gradual prefill
|
||||
8 * sizeOfOneItem,
|
||||
// Precisely nine times one item length gradual prefill
|
||||
9 * sizeOfOneItem,
|
||||
// Precisely size of items prefill
|
||||
sizeOfItems,
|
||||
// Unlimited gradual prefill
|
||||
-1,
|
||||
];
|
||||
|
||||
let storage = getLocalStorage(getPrincipal(url));
|
||||
for (let gradualPrefillValue of gradualPrefillValues) {
|
||||
info("Setting gradual prefill value to " + gradualPrefillValue);
|
||||
|
||||
// 1st snapshot
|
||||
Services.prefs.setIntPref("dom.storage.snapshot_gradual_prefill",
|
||||
gradualPrefillValue);
|
||||
|
||||
info("Adding data");
|
||||
info("Getting storage");
|
||||
|
||||
for (let item of items) {
|
||||
storage.setItem(item.key, item.value);
|
||||
let storage = getLocalStorage(getPrincipal(url));
|
||||
|
||||
// 1st snapshot
|
||||
|
||||
info("Adding data");
|
||||
|
||||
for (let item of items) {
|
||||
storage.setItem(item.key, item.value);
|
||||
}
|
||||
|
||||
info("Saving key order");
|
||||
|
||||
// This forces GetKeys to be called internally.
|
||||
let savedKeys = Object.keys(storage);
|
||||
|
||||
// GetKey should match GetKeys
|
||||
for (let i = 0; i < savedKeys.length; i++) {
|
||||
is(storage.key(i), savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
// Returning to event loop forces the internal snapshot to finish.
|
||||
await returnToEventLoop();
|
||||
|
||||
// 2nd snapshot
|
||||
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying key order");
|
||||
|
||||
let keys = Object.keys(storage);
|
||||
|
||||
is(keys.length, savedKeys.length);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
is(keys[i], savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 3rd snapshot
|
||||
|
||||
// Force key2 to load.
|
||||
storage.getItem("key2");
|
||||
|
||||
// Fill out write infos a bit.
|
||||
storage.removeItem("key5");
|
||||
storage.setItem("key5", "value5");
|
||||
storage.removeItem("key5");
|
||||
storage.setItem("key11", "value11");
|
||||
storage.setItem("key5", "value5");
|
||||
|
||||
items.push({ key: "key11", value: "value11" });
|
||||
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
// This forces to get all keys from the parent and then apply write infos
|
||||
// on already cached values.
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
storage.removeItem("key11");
|
||||
|
||||
items.pop();
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 4th snapshot
|
||||
|
||||
// Force loading of all items.
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 5th snapshot
|
||||
|
||||
// Force loading of all keys.
|
||||
info("Saving key order");
|
||||
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
// Force loading of all items.
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 6th snapshot
|
||||
info("Verifying unknown item");
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Verifying unknown item again");
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 7th snapshot
|
||||
|
||||
// Save actual key order.
|
||||
info("Saving key order");
|
||||
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 8th snapshot
|
||||
|
||||
// Force loading of all items, but in reverse order.
|
||||
info("Getting values");
|
||||
|
||||
for (let i = items.length - 1; i >= 0; i--) {
|
||||
let item = items[i];
|
||||
storage.getItem(item.key);
|
||||
}
|
||||
|
||||
info("Verifying key order");
|
||||
|
||||
keys = Object.keys(storage);
|
||||
|
||||
is(keys.length, savedKeys.length);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
is(keys[i], savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 9th snapshot
|
||||
|
||||
info("Clearing");
|
||||
|
||||
storage.clear();
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
}
|
||||
|
||||
info("Saving key order");
|
||||
|
||||
// This forces GetKeys to be called internally.
|
||||
let savedKeys = Object.keys(storage);
|
||||
|
||||
// GetKey should match GetKeys
|
||||
for (let i = 0; i < savedKeys.length; i++) {
|
||||
is(storage.key(i), savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
// Returning to event loop forces the internal snapshot to finish.
|
||||
await returnToEventLoop();
|
||||
|
||||
// 2nd snapshot
|
||||
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying key order");
|
||||
|
||||
let keys = Object.keys(storage);
|
||||
|
||||
is(keys.length, savedKeys.length);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
is(keys[i], savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 3rd snapshot
|
||||
|
||||
// Force key2 to load.
|
||||
storage.getItem("key2");
|
||||
|
||||
// Fill out write infos a bit.
|
||||
storage.removeItem("key5");
|
||||
storage.setItem("key5", "value5");
|
||||
storage.removeItem("key5");
|
||||
storage.setItem("key11", "value11");
|
||||
storage.setItem("key5", "value5");
|
||||
|
||||
items.push({ key: "key11", value: "value11" });
|
||||
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
// This forces to get all keys from the parent and then apply write infos
|
||||
// on already cached values.
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
storage.removeItem("key11");
|
||||
|
||||
items.pop();
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 4th snapshot
|
||||
|
||||
// Force loading of all items.
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 5th snapshot
|
||||
|
||||
// Force loading of all keys.
|
||||
info("Saving key order");
|
||||
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
// Force loading of all items.
|
||||
info("Verifying length");
|
||||
|
||||
is(storage.length, items.length, "Correct length");
|
||||
|
||||
info("Verifying values");
|
||||
|
||||
for (let item of items) {
|
||||
is(storage.getItem(item.key), item.value, "Correct value");
|
||||
}
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 6th snapshot
|
||||
info("Verifying unknown item");
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Verifying unknown item again");
|
||||
|
||||
is(storage.getItem("key11"), null, "Correct value");
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 7th snapshot
|
||||
|
||||
// Save actual key order.
|
||||
info("Saving key order");
|
||||
|
||||
savedKeys = Object.keys(storage);
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 8th snapshot
|
||||
|
||||
// Force loading of all items, but in reverse order.
|
||||
info("Getting values");
|
||||
|
||||
for (let i = items.length - 1; i >= 0; i--) {
|
||||
let item = items[i];
|
||||
storage.getItem(item.key);
|
||||
}
|
||||
|
||||
info("Verifying key order");
|
||||
|
||||
keys = Object.keys(storage);
|
||||
|
||||
is(keys.length, savedKeys.length);
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
is(keys[i], savedKeys[i], "Correct key");
|
||||
}
|
||||
|
||||
await returnToEventLoop();
|
||||
|
||||
// 9th snapshot
|
||||
|
||||
info("Clearing");
|
||||
|
||||
storage.clear();
|
||||
|
||||
info("Returning to event loop");
|
||||
|
||||
await returnToEventLoop();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,6 +36,7 @@ run-sequentially = this test depends on a file produced by test_databaseShadowin
|
|||
[test_migration.js]
|
||||
[test_originInit.js]
|
||||
[test_snapshotting.js]
|
||||
requesttimeoutfactor = 4
|
||||
[test_stringLength.js]
|
||||
[test_stringLength2.js]
|
||||
[test_usage.js]
|
||||
|
|
|
@ -8096,13 +8096,14 @@ void ClearRequestBase::DeleteFiles(QuotaManager* aQuotaManager,
|
|||
initialized = aQuotaManager->IsTemporaryStorageInitialized();
|
||||
}
|
||||
|
||||
bool hasOtherClient = false;
|
||||
|
||||
UsageInfo usageInfo;
|
||||
|
||||
if (!mClientType.IsNull()) {
|
||||
// Checking whether there is any other client in the directory is needed.
|
||||
// If there is not, removing whole directory is needed.
|
||||
nsCOMPtr<nsIDirectoryEnumerator> originEntries;
|
||||
bool hasOtherClient = false;
|
||||
if (NS_WARN_IF(NS_FAILED(
|
||||
file->GetDirectoryEntries(getter_AddRefs(originEntries)))) ||
|
||||
!originEntries) {
|
||||
|
@ -8200,11 +8201,13 @@ void ClearRequestBase::DeleteFiles(QuotaManager* aQuotaManager,
|
|||
}
|
||||
|
||||
if (aPersistenceType != PERSISTENCE_TYPE_PERSISTENT) {
|
||||
if (mClientType.IsNull()) {
|
||||
aQuotaManager->RemoveQuotaForOrigin(aPersistenceType, group, origin);
|
||||
} else {
|
||||
if (hasOtherClient) {
|
||||
MOZ_ASSERT(!mClientType.IsNull());
|
||||
|
||||
aQuotaManager->DecreaseUsageForOrigin(aPersistenceType, group, origin,
|
||||
usageInfo.TotalUsage());
|
||||
} else {
|
||||
aQuotaManager->RemoveQuotaForOrigin(aPersistenceType, group, origin);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -135,24 +135,71 @@ add_task(async function() {
|
|||
key8: "initial8",
|
||||
};
|
||||
|
||||
function getPartialPrefill() {
|
||||
let size = 0;
|
||||
let entries = Object.entries(initialState);
|
||||
for (let i = 0; i < entries.length / 2; i++) {
|
||||
let entry = entries[i];
|
||||
size += entry[0].length + entry[1].length;
|
||||
let sizeOfOneKey;
|
||||
let sizeOfOneValue;
|
||||
let sizeOfOneItem;
|
||||
let sizeOfKeys = 0;
|
||||
let sizeOfItems = 0;
|
||||
|
||||
let entries = Object.entries(initialState);
|
||||
for (let i = 0; i < entries.length; i++) {
|
||||
let entry = entries[i];
|
||||
let sizeOfKey = entry[0].length;
|
||||
let sizeOfValue = entry[1].length;
|
||||
let sizeOfItem = sizeOfKey + sizeOfValue;
|
||||
if (i == 0) {
|
||||
sizeOfOneKey = sizeOfKey;
|
||||
sizeOfOneValue = sizeOfValue;
|
||||
sizeOfOneItem = sizeOfItem;
|
||||
}
|
||||
return size;
|
||||
sizeOfKeys += sizeOfKey;
|
||||
sizeOfItems += sizeOfItem;
|
||||
}
|
||||
|
||||
info("Size of one key is " + sizeOfOneKey);
|
||||
info("Size of one value is " + sizeOfOneValue);
|
||||
info("Size of one item is " + sizeOfOneItem);
|
||||
info("Size of keys is " + sizeOfKeys);
|
||||
info("Size of items is " + sizeOfItems);
|
||||
|
||||
const prefillValues = [
|
||||
0, // no prefill
|
||||
getPartialPrefill(), // partial prefill
|
||||
-1, // full prefill
|
||||
// Zero prefill (prefill disabled)
|
||||
0,
|
||||
// Less than one key length prefill
|
||||
sizeOfOneKey - 1,
|
||||
// Greater than one key length and less than one item length prefill
|
||||
sizeOfOneKey + 1,
|
||||
// Precisely one item length prefill
|
||||
sizeOfOneItem,
|
||||
// Precisely two times one item length prefill
|
||||
2 * sizeOfOneItem,
|
||||
// Precisely size of keys prefill
|
||||
sizeOfKeys,
|
||||
// Less than size of keys plus one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue - 1,
|
||||
// Precisely size of keys plus one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue,
|
||||
// Greater than size of keys plus one value length and less than size of
|
||||
// keys plus two times one value length prefill
|
||||
sizeOfKeys + sizeOfOneValue + 1,
|
||||
// Precisely size of keys plus two times one value length prefill
|
||||
sizeOfKeys + 2 * sizeOfOneValue,
|
||||
// Precisely size of keys plus three times one value length prefill
|
||||
sizeOfKeys + 3 * sizeOfOneValue,
|
||||
// Precisely size of keys plus four times one value length prefill
|
||||
sizeOfKeys + 4 * sizeOfOneValue,
|
||||
// Precisely size of keys plus five times one value length prefill
|
||||
sizeOfKeys + 5 * sizeOfOneValue,
|
||||
// Precisely size of keys plus six times one value length prefill
|
||||
sizeOfKeys + 6 * sizeOfOneValue,
|
||||
// Precisely size of items prefill
|
||||
sizeOfItems,
|
||||
// Unlimited prefill
|
||||
-1,
|
||||
];
|
||||
|
||||
for (let prefillValue of prefillValues) {
|
||||
info("Setting prefill value");
|
||||
info("Setting prefill value to " + prefillValue);
|
||||
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [
|
||||
|
@ -160,209 +207,247 @@ add_task(async function() {
|
|||
],
|
||||
});
|
||||
|
||||
info("Stage 1");
|
||||
|
||||
const setRemoveMutations1 = [
|
||||
["key0", "setRemove10"],
|
||||
["key1", "setRemove11"],
|
||||
["key2", null],
|
||||
["key3", "setRemove13"],
|
||||
["key4", "setRemove14"],
|
||||
["key5", "setRemove15"],
|
||||
["key6", "setRemove16"],
|
||||
["key7", "setRemove17"],
|
||||
["key8", null],
|
||||
["key9", "setRemove19"],
|
||||
const gradualPrefillValues = [
|
||||
// Zero gradual prefill
|
||||
0,
|
||||
// Less than one key length gradual prefill
|
||||
sizeOfOneKey - 1,
|
||||
// Greater than one key length and less than one item length gradual
|
||||
// prefill
|
||||
sizeOfOneKey + 1,
|
||||
// Precisely one item length gradual prefill
|
||||
sizeOfOneItem,
|
||||
// Precisely two times one item length gradual prefill
|
||||
2 * sizeOfOneItem,
|
||||
// Precisely three times one item length gradual prefill
|
||||
3 * sizeOfOneItem,
|
||||
// Precisely four times one item length gradual prefill
|
||||
4 * sizeOfOneItem,
|
||||
// Precisely five times one item length gradual prefill
|
||||
5 * sizeOfOneItem,
|
||||
// Precisely six times one item length gradual prefill
|
||||
6 * sizeOfOneItem,
|
||||
// Precisely size of items prefill
|
||||
sizeOfItems,
|
||||
// Unlimited gradual prefill
|
||||
-1,
|
||||
];
|
||||
|
||||
const setRemoveState1 = {
|
||||
key0: "setRemove10",
|
||||
key1: "setRemove11",
|
||||
key3: "setRemove13",
|
||||
key4: "setRemove14",
|
||||
key5: "setRemove15",
|
||||
key6: "setRemove16",
|
||||
key7: "setRemove17",
|
||||
key9: "setRemove19",
|
||||
};
|
||||
for (let gradualPrefillValue of gradualPrefillValues) {
|
||||
info("Setting gradual prefill value to " + gradualPrefillValue);
|
||||
|
||||
const setRemoveMutations2 = [
|
||||
["key0", "setRemove20"],
|
||||
["key1", null],
|
||||
["key2", "setRemove22"],
|
||||
["key3", "setRemove23"],
|
||||
["key4", "setRemove24"],
|
||||
["key5", "setRemove25"],
|
||||
["key6", "setRemove26"],
|
||||
["key7", null],
|
||||
["key8", "setRemove28"],
|
||||
["key9", "setRemove29"],
|
||||
];
|
||||
await SpecialPowers.pushPrefEnv({
|
||||
set: [
|
||||
["dom.storage.snapshot_gradual_prefill", gradualPrefillValue],
|
||||
],
|
||||
});
|
||||
|
||||
const setRemoveState2 = {
|
||||
key0: "setRemove20",
|
||||
key2: "setRemove22",
|
||||
key3: "setRemove23",
|
||||
key4: "setRemove24",
|
||||
key5: "setRemove25",
|
||||
key6: "setRemove26",
|
||||
key8: "setRemove28",
|
||||
key9: "setRemove29",
|
||||
};
|
||||
info("Stage 1");
|
||||
|
||||
// Apply initial mutations using an explicit snapshot. The explicit
|
||||
// snapshot here ensures that the parent process have received the changes.
|
||||
await beginExplicitSnapshot(writerTab1);
|
||||
await applyMutations(writerTab1, initialMutations);
|
||||
await endExplicitSnapshot(writerTab1);
|
||||
const setRemoveMutations1 = [
|
||||
["key0", "setRemove10"],
|
||||
["key1", "setRemove11"],
|
||||
["key2", null],
|
||||
["key3", "setRemove13"],
|
||||
["key4", "setRemove14"],
|
||||
["key5", "setRemove15"],
|
||||
["key6", "setRemove16"],
|
||||
["key7", "setRemove17"],
|
||||
["key8", null],
|
||||
["key9", "setRemove19"],
|
||||
];
|
||||
|
||||
// Begin explicit snapshots in all tabs except readerTab2. All these tabs
|
||||
// should see the initial state regardless what other tabs are doing.
|
||||
await beginExplicitSnapshot(writerTab1);
|
||||
await beginExplicitSnapshot(writerTab2);
|
||||
await beginExplicitSnapshot(readerTab1);
|
||||
const setRemoveState1 = {
|
||||
key0: "setRemove10",
|
||||
key1: "setRemove11",
|
||||
key3: "setRemove13",
|
||||
key4: "setRemove14",
|
||||
key5: "setRemove15",
|
||||
key6: "setRemove16",
|
||||
key7: "setRemove17",
|
||||
key9: "setRemove19",
|
||||
};
|
||||
|
||||
// Apply first array of set/remove mutations in writerTab1 and end the
|
||||
// explicit snapshot. This will trigger saving of values in other active
|
||||
// snapshots.
|
||||
await applyMutations(writerTab1, setRemoveMutations1);
|
||||
await endExplicitSnapshot(writerTab1);
const setRemoveMutations2 = [
["key0", "setRemove20"],
["key1", null],
["key2", "setRemove22"],
["key3", "setRemove23"],
["key4", "setRemove24"],
["key5", "setRemove25"],
["key6", "setRemove26"],
["key7", null],
["key8", "setRemove28"],
["key9", "setRemove29"],
];

// Begin an explicit snapshot in readerTab2. writerTab1 already ended its
// explicit snapshot, so readerTab2 should see mutations done by
// writerTab1.
await beginExplicitSnapshot(readerTab2);
const setRemoveState2 = {
key0: "setRemove20",
key2: "setRemove22",
key3: "setRemove23",
key4: "setRemove24",
key5: "setRemove25",
key6: "setRemove26",
key8: "setRemove28",
key9: "setRemove29",
};

// Apply second array of set/remove mutations in writerTab2 and end the
// explicit snapshot. This will trigger saving of values in other active
// snapshots, but only if they haven't been saved already.
await applyMutations(writerTab2, setRemoveMutations2);
await endExplicitSnapshot(writerTab2);
// Apply initial mutations using an explicit snapshot. The explicit
// snapshot here ensures that the parent process has received the
// changes.
await beginExplicitSnapshot(writerTab1);
await applyMutations(writerTab1, initialMutations);
await endExplicitSnapshot(writerTab1);

// Verify state in readerTab1, it should match the initial state.
await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);
// Begin explicit snapshots in all tabs except readerTab2. All these tabs
// should see the initial state regardless of what other tabs are doing.
await beginExplicitSnapshot(writerTab1);
await beginExplicitSnapshot(writerTab2);
await beginExplicitSnapshot(readerTab1);

// Verify state in readerTab2, it should match the state after the first
// array of set/remove mutations have been applied and "committed".
await verifyState(readerTab2, setRemoveState1);
await endExplicitSnapshot(readerTab2);
// Apply first array of set/remove mutations in writerTab1 and end the
// explicit snapshot. This will trigger saving of values in other active
// snapshots.
await applyMutations(writerTab1, setRemoveMutations1);
await endExplicitSnapshot(writerTab1);

// Verify final state, it should match the state after the second array of
// set/remove mutations have been applied and "committed". An explicit
// snapshot is used.
await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, setRemoveState2);
await endExplicitSnapshot(readerTab1);
// Begin an explicit snapshot in readerTab2. writerTab1 already ended its
// explicit snapshot, so readerTab2 should see mutations done by
// writerTab1.
await beginExplicitSnapshot(readerTab2);

info("Stage 2");
// Apply second array of set/remove mutations in writerTab2 and end the
// explicit snapshot. This will trigger saving of values in other active
// snapshots, but only if they haven't been saved already.
await applyMutations(writerTab2, setRemoveMutations2);
await endExplicitSnapshot(writerTab2);

const setRemoveClearMutations1 = [
["key0", "setRemoveClear10"],
["key1", null],
[null, null],
];
// Verify state in readerTab1, it should match the initial state.
await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);

const setRemoveClearState1 = {
};
// Verify state in readerTab2, it should match the state after the first
// array of set/remove mutations have been applied and "committed".
await verifyState(readerTab2, setRemoveState1);
await endExplicitSnapshot(readerTab2);

const setRemoveClearMutations2 = [
["key8", null],
["key9", "setRemoveClear29"],
[null, null],
];
// Verify final state, it should match the state after the second array of
// set/remove mutations have been applied and "committed". An explicit
// snapshot is used.
await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, setRemoveState2);
await endExplicitSnapshot(readerTab1);

const setRemoveClearState2 = {
};
info("Stage 2");

// This is very similar to the previous stage except that in addition to
// set/remove, the clear operation is involved too.
await beginExplicitSnapshot(writerTab1);
await applyMutations(writerTab1, initialMutations);
await endExplicitSnapshot(writerTab1);
const setRemoveClearMutations1 = [
["key0", "setRemoveClear10"],
["key1", null],
[null, null],
];

await beginExplicitSnapshot(writerTab1);
await beginExplicitSnapshot(writerTab2);
await beginExplicitSnapshot(readerTab1);
const setRemoveClearState1 = {
};

await applyMutations(writerTab1, setRemoveClearMutations1);
await endExplicitSnapshot(writerTab1);
const setRemoveClearMutations2 = [
["key8", null],
["key9", "setRemoveClear29"],
[null, null],
];

await beginExplicitSnapshot(readerTab2);
const setRemoveClearState2 = {
};

await applyMutations(writerTab2, setRemoveClearMutations2);
await endExplicitSnapshot(writerTab2);
// This is very similar to the previous stage except that in addition to
// set/remove, the clear operation is involved too.
await beginExplicitSnapshot(writerTab1);
await applyMutations(writerTab1, initialMutations);
await endExplicitSnapshot(writerTab1);

await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);
await beginExplicitSnapshot(writerTab1);
await beginExplicitSnapshot(writerTab2);
await beginExplicitSnapshot(readerTab1);

await verifyState(readerTab2, setRemoveClearState1);
await endExplicitSnapshot(readerTab2);
await applyMutations(writerTab1, setRemoveClearMutations1);
await endExplicitSnapshot(writerTab1);

await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, setRemoveClearState2);
await endExplicitSnapshot(readerTab1);
await beginExplicitSnapshot(readerTab2);

info("Stage 3");
await applyMutations(writerTab2, setRemoveClearMutations2);
await endExplicitSnapshot(writerTab2);

const changeOrderMutations = [
["key1", null],
["key2", null],
["key3", null],
["key5", null],
["key6", null],
["key7", null],
["key8", null],
["key8", "initial8"],
["key7", "initial7"],
["key6", "initial6"],
["key5", "initial5"],
["key3", "initial3"],
["key2", "initial2"],
["key1", "initial1"],
];
await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);

// Apply initial mutations using an explicit snapshot. The explicit
// snapshot here ensures that the parent process has received the changes.
await beginExplicitSnapshot(writerTab1);
await applyMutations(writerTab1, initialMutations);
await endExplicitSnapshot(writerTab1);
await verifyState(readerTab2, setRemoveClearState1);
await endExplicitSnapshot(readerTab2);

// Begin explicit snapshots in all tabs except writerTab2 which is not used
// in this stage. All these tabs should see the initial order regardless of
// what other tabs are doing.
await beginExplicitSnapshot(readerTab1);
await beginExplicitSnapshot(writerTab1);
await beginExplicitSnapshot(readerTab2);
await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, setRemoveClearState2);
await endExplicitSnapshot(readerTab1);

// Get all keys in readerTab1 and end the explicit snapshot. No mutations
// have been applied yet.
let tab1Keys = await getKeys(readerTab1);
await endExplicitSnapshot(readerTab1);
info("Stage 3");

// Apply mutations that change the order of keys and end the explicit
// snapshot. The state is unchanged. This will trigger saving of key order
// in other active snapshots, but only if the order hasn't been saved
// already.
await applyMutations(writerTab1, changeOrderMutations);
await endExplicitSnapshot(writerTab1);
const changeOrderMutations = [
["key1", null],
["key2", null],
["key3", null],
["key5", null],
["key6", null],
["key7", null],
["key8", null],
["key8", "initial8"],
["key7", "initial7"],
["key6", "initial6"],
["key5", "initial5"],
["key3", "initial3"],
["key2", "initial2"],
["key1", "initial1"],
];

// Get all keys in readerTab2 and end the explicit snapshot. Change order
// mutations have been applied, but the order should stay unchanged.
let tab2Keys = await getKeys(readerTab2);
await endExplicitSnapshot(readerTab2);
// Apply initial mutations using an explicit snapshot. The explicit
// snapshot here ensures that the parent process has received the
// changes.
await beginExplicitSnapshot(writerTab1);
await applyMutations(writerTab1, initialMutations);
await endExplicitSnapshot(writerTab1);

// Verify the key order is the same.
is(tab2Keys.length, tab1Keys.length, "Correct keys length");
for (let i = 0; i < tab2Keys.length; i++) {
is(tab2Keys[i], tab1Keys[i], "Correct key");
// Begin explicit snapshots in all tabs except writerTab2 which is not
// used in this stage. All these tabs should see the initial order
// regardless of what other tabs are doing.
await beginExplicitSnapshot(readerTab1);
await beginExplicitSnapshot(writerTab1);
await beginExplicitSnapshot(readerTab2);

// Get all keys in readerTab1 and end the explicit snapshot. No mutations
// have been applied yet.
let tab1Keys = await getKeys(readerTab1);
await endExplicitSnapshot(readerTab1);

// Apply mutations that change the order of keys and end the explicit
// snapshot. The state is unchanged. This will trigger saving of key order
// in other active snapshots, but only if the order hasn't been saved
// already.
await applyMutations(writerTab1, changeOrderMutations);
await endExplicitSnapshot(writerTab1);

// Get all keys in readerTab2 and end the explicit snapshot. Change order
// mutations have been applied, but the order should stay unchanged.
let tab2Keys = await getKeys(readerTab2);
await endExplicitSnapshot(readerTab2);

// Verify the key order is the same.
is(tab2Keys.length, tab1Keys.length, "Correct keys length");
for (let i = 0; i < tab2Keys.length; i++) {
is(tab2Keys[i], tab1Keys[i], "Correct key");
}

// Verify final state, it should match the initial state since applied
// mutations only changed the key order. An explicit snapshot is used.
await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);
}

// Verify final state, it should match the initial state since applied
// mutations only changed the key order. An explicit snapshot is used.
await beginExplicitSnapshot(readerTab1);
await verifyState(readerTab1, initialState);
await endExplicitSnapshot(readerTab1);
}

// - Clean up.
@@ -55,6 +55,7 @@ already_AddRefed<DataSourceSurface> SharedSurfacesParent::Get(
const wr::ExternalImageId& aId) {
StaticMutexAutoLock lock(sMutex);
if (!sInstance) {
gfxCriticalNote << "SSP:Get " << aId.mHandle << " shtd";
return nullptr;
}

@@ -68,6 +69,7 @@ already_AddRefed<DataSourceSurface> SharedSurfacesParent::Acquire(
const wr::ExternalImageId& aId) {
StaticMutexAutoLock lock(sMutex);
if (!sInstance) {
gfxCriticalNote << "SSP:Acq " << aId.mHandle << " shtd";
return nullptr;
}

@@ -111,6 +113,7 @@ void SharedSurfacesParent::AddSameProcess(const wr::ExternalImageId& aId,
MOZ_ASSERT(NS_IsMainThread());
StaticMutexAutoLock lock(sMutex);
if (!sInstance) {
gfxCriticalNote << "SSP:Ads " << aId.mHandle << " shtd";
return;
}

@@ -168,6 +171,7 @@ void SharedSurfacesParent::Add(const wr::ExternalImageId& aId,
MOZ_ASSERT(aPid != base::GetCurrentProcId());
StaticMutexAutoLock lock(sMutex);
if (!sInstance) {
gfxCriticalNote << "SSP:Add " << aId.mHandle << " shtd";
return;
}

@@ -176,6 +180,7 @@ void SharedSurfacesParent::Add(const wr::ExternalImageId& aId,
new SourceSurfaceSharedDataWrapper();
if (NS_WARN_IF(!surface->Init(aDesc.size(), aDesc.stride(), aDesc.format(),
aDesc.handle(), aPid))) {
gfxCriticalNote << "SSP:Add " << aId.mHandle << " init";
return;
}
@@ -2595,8 +2595,7 @@ static FeatureState& WebRenderHardwareQualificationStatus(
}
#endif
#ifdef NIGHTLY_BUILD
} else if (adapterVendorID == u"0x8086" ||
adapterVendorID == u"mesa/i965") { // Intel
} else if (adapterVendorID == u"0x8086") { // Intel
const uint16_t supportedDevices[] = {
// skylake gt2+
0x1912,

@@ -2670,7 +2669,11 @@ static FeatureState& WebRenderHardwareQualificationStatus(
featureWebRenderQualified.Disable(
FeatureStatus::Blocked, "Device too old",
NS_LITERAL_CSTRING("FEATURE_FAILURE_DEVICE_TOO_OLD"));
} else if (adapterVendorID == u"mesa/i965") {
}
# ifdef MOZ_WIDGET_GTK
else {
// Performance is not great on 4k screens with WebRender + Linux.
// Disable it for now if it is too large.
const int32_t maxPixels = 3440 * 1440; // UWQHD
int32_t pixels = aScreenSize.width * aScreenSize.height;
if (pixels > maxPixels) {

@@ -2683,7 +2686,8 @@ static FeatureState& WebRenderHardwareQualificationStatus(
NS_LITERAL_CSTRING("FEATURE_FAILURE_SCREEN_SIZE_UNKNOWN"));
}
}
#endif
# endif // MOZ_WIDGET_GTK
#endif // NIGHTLY_BUILD
} else {
featureWebRenderQualified.Disable(
FeatureStatus::Blocked, "Unsupported vendor",
@@ -920,7 +920,7 @@ description =
description =
[PBackgroundLSDatabase::PBackgroundLSSnapshot]
description = See corresponding comment in PBackgroundLSDatabase.ipdl
[PBackgroundLSSnapshot::LoadItem]
[PBackgroundLSSnapshot::LoadValueAndMoreItems]
description = See corresponding comment in PBackgroundLSSnapshot.ipdl
[PBackgroundLSSnapshot::LoadKeys]
description = See corresponding comment in PBackgroundLSSnapshot.ipdl
@@ -1286,6 +1286,7 @@ pref("dom.storage.next_gen", false);
pref("dom.storage.default_quota", 5120);
pref("dom.storage.shadow_writes", true);
pref("dom.storage.snapshot_prefill", 16384);
pref("dom.storage.snapshot_gradual_prefill", 4096);
pref("dom.storage.snapshot_reusing", true);
pref("dom.storage.testing", false);
pref("dom.storage.client_validation", true);
@@ -1765,8 +1765,6 @@ XML_SetAttlistDeclHandler(XML_Parser parser,
if (parser != NULL)
attlistDeclHandler = attdecl;
}
#endif
/* END MOZILLA CHANGE */

void XMLCALL
XML_SetEntityDeclHandler(XML_Parser parser,

@@ -1775,6 +1773,8 @@ XML_SetEntityDeclHandler(XML_Parser parser,
if (parser != NULL)
entityDeclHandler = handler;
}
#endif
/* END MOZILLA CHANGE */

void XMLCALL
XML_SetXmlDeclHandler(XML_Parser parser,
@@ -154,18 +154,6 @@ static int Driver_HandleExternalEntityRef(void* aExternalEntityRefHandler,
aPublicId);
}

static void Driver_HandleEntityDecl(
void* aUserData, const XML_Char* aEntityName, int aIsParameterEntity,
const XML_Char* aValue, int aValueLength, const XML_Char* aBase,
const XML_Char* aSystemId, const XML_Char* aPublicId,
const XML_Char* aNotationName) {
NS_ASSERTION(aUserData, "expat driver should exist");
if (aUserData) {
static_cast<nsExpatDriver*>(aUserData)->HandleEntityDecl(
aEntityName, aValue, aValueLength);
}
}

/***************************** END CALL BACKS ********************************/

/***************************** CATALOG UTILS *********************************/

@@ -486,19 +474,6 @@ nsresult nsExpatDriver::HandleEndDoctypeDecl() {
return NS_OK;
}

void nsExpatDriver::HandleEntityDecl(const char16_t* aEntityName,
const char16_t* aEntityValue,
const uint32_t aLength) {
MOZ_ASSERT(
mInInternalSubset || mInExternalDTD,
"Should only see entity declarations in the internal subset or in DTDs");
auto charLength = aLength / sizeof(char16_t);
nsDependentSubstring entityVal(aEntityValue, charLength);
if (entityVal.FindChar('<') != -1) {
MaybeStopParser(NS_ERROR_UNEXPECTED);
}
}

static nsresult ExternalDTDStreamReaderFunc(nsIUnicharInputStream* aIn,
void* aClosure,
const char16_t* aFromSegment,

@@ -1082,9 +1057,6 @@ nsExpatDriver::WillBuildModel(const CParserContext& aParserContext,

XML_SetParamEntityParsing(mExpatParser,
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE);
if (doc && doc->NodePrincipal()->IsSystemPrincipal()) {
XML_SetEntityDeclHandler(mExpatParser, Driver_HandleEntityDecl);
}
XML_SetDoctypeDeclHandler(mExpatParser, Driver_HandleStartDoctypeDecl,
Driver_HandleEndDoctypeDecl);

@@ -49,8 +49,6 @@ class nsExpatDriver : public nsIDTD, public nsITokenizer {
const char16_t* aPubid,
bool aHasInternalSubset);
nsresult HandleEndDoctypeDecl();
void HandleEntityDecl(const char16_t* aEntityName,
const char16_t* aEntityValue, const uint32_t aLength);

private:
// Load up an external stream to get external entity information
@@ -1,5 +0,0 @@
[2d.shadow.enable.x.html]
[Shadows are drawn if shadowOffsetX is set]
expected:
if not debug and not webrender and not e10s and (os == "android") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): FAIL

@@ -1,2 +0,0 @@
[contain-size-multicol-001.html]
expected: FAIL
@@ -86,3 +86,9 @@
[Initial value for <length> correctly computed [1in\]]
expected: FAIL

[Initial value for <url> correctly computed [url(a)\]]
expected: FAIL

[Initial value for <url>+ correctly computed [url(a) url(a)\]]
expected: FAIL
@@ -1,6 +1,4 @@
[Event-timestamp-safe-resolution.html]
[Event timestamp should not have a resolution better than 5 microseconds]
expected:
if os == "android" and not e10s: PASS
FAIL
expected: FAIL
@@ -8,3 +8,21 @@
[Cross-site iframe]
expected: FAIL

[web-platform.test -> www.not-web-platform.test:8443 iframe: user-activated]
expected: FAIL

[web-platform.test -> web-platform.test:8443 iframe: user-activated]
expected: FAIL

[web-platform.test -> www.web-platform.test:8443 iframe: forced]
expected: FAIL

[web-platform.test -> web-platform.test:8443 iframe: forced]
expected: FAIL

[web-platform.test -> www.web-platform.test:8443 iframe: user-activated]
expected: FAIL

[web-platform.test -> www.not-web-platform.test:8443 iframe: forced]
expected: FAIL
@@ -1,2 +1,2 @@
local: d14e2042e7e706c3ba305d5a517bbaeba3f610af
upstream: add24188a1226f3598ad6b455e71641c9ac6a5fd
local: 8d443e94452a1f0780c9d92cb2fd1bbb69e74f53
upstream: 2f2bf34086414fb3bd8e01e92aca1aa18e7ea730

@@ -0,0 +1 @@
lsan-allowed: [Alloc, NS_GetXPTCallStub, NewPage, nsXPCWrappedJS::GetNewOrUsed]
@@ -2,12 +2,6 @@
expected: TIMEOUT

[idlharness.any.serviceworker.html]
[PerformanceObserver interface: calling observe(PerformanceObserverInit) on observer with too few arguments must throw TypeError]
expected: FAIL

[PerformanceObserver interface: operation observe(PerformanceObserverInit)]
expected: FAIL

[PerformanceMark interface: attribute detail]
expected: FAIL

@@ -22,12 +16,6 @@
[PerformanceMark interface: attribute detail]
expected: FAIL

[PerformanceObserver interface: calling observe(PerformanceObserverInit) on observer with too few arguments must throw TypeError]
expected: FAIL

[PerformanceObserver interface: operation observe(PerformanceObserverInit)]
expected: FAIL

[PerformanceMark interface: mark must inherit property "detail" with the proper type]
expected: FAIL

@@ -39,12 +27,6 @@
[PerformanceMark interface: attribute detail]
expected: FAIL

[PerformanceObserver interface: calling observe(PerformanceObserverInit) on observer with too few arguments must throw TypeError]
expected: FAIL

[PerformanceObserver interface: operation observe(PerformanceObserverInit)]
expected: FAIL

[PerformanceMark interface: mark must inherit property "detail" with the proper type]
expected: FAIL

@@ -56,12 +38,6 @@
[PerformanceMark interface: attribute detail]
expected: FAIL

[PerformanceObserver interface: calling observe(PerformanceObserverInit) on observer with too few arguments must throw TypeError]
expected: FAIL

[PerformanceObserver interface: operation observe(PerformanceObserverInit)]
expected: FAIL

[PerformanceMark interface: mark must inherit property "detail" with the proper type]
expected: FAIL

@@ -1,3 +0,0 @@
[po-observe.html]
expected: TIMEOUT
@@ -1,4 +1,4 @@
[resource-timing-level1.sub.html]
disabled:
if (os == "mac"): https://bugzilla.mozilla.org/show_bug.cgi?id=1543604
if (os == "android"): https://bugzilla.mozilla.org/show_bug.cgi?id=1543604
if os == "mac": https://bugzilla.mozilla.org/show_bug.cgi?id=1543604
if os == "android": https://bugzilla.mozilla.org/show_bug.cgi?id=1543604
@@ -1,60 +1,60 @@
[getdisplaymedia.https.html]
[getDisplayMedia({"video":true}) must succeed with video]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":true,"audio":false}) must succeed with video]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"audio":false}) must succeed with video]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({}) must succeed with video]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia(undefined) must succeed with video]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":true,"audio":true}) must succeed with video maybe audio]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"audio":true}) must succeed with video maybe audio]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL
PASS

[getDisplayMedia({"video":{"width":{"max":360}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"height":{"max":240}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"width":{"max":360},"height":{"max":240}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"frameRate":{"max":4}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"frameRate":{"max":4},"width":{"max":360}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"frameRate":{"max":4},"height":{"max":240}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia({"video":{"frameRate":{"max":4},"width":{"max":360},"height":{"max":240}}}) must be constrained]
expected:
if os == "android" and not e10s: FAIL
if (os == "android") and not e10s: FAIL

[getDisplayMedia() with getSettings]
expected: FAIL
@@ -1,3 +1,3 @@
prefs: [dom.serviceWorkers.enabled:true]
lsan-allowed: [Alloc, CompareNetwork, Create, EntrySlotOrCreate, MakeUnique, Malloc, NS_NewLoadGroup, NewChannelFromURIWithProxyFlagsInternal, NewPage, PLDHashTable::Add, Realloc, SharedMutex, Then, createTable, mozilla::BasePrincipal::CreateCodebasePrincipal, mozilla::SchedulerGroup::CreateEventTargetFor, mozilla::ThrottledEventQueue::Create, mozilla::detail::UniqueSelector, mozilla::dom::PerformanceStorageWorker::Create, mozilla::dom::ServiceWorkerJobQueue::RunJob, mozilla::dom::ServiceWorkerManager::Unregister, mozilla::dom::ServiceWorkerRegistrationMainThread::Unregister, mozilla::dom::UnregisterCallback::UnregisterCallback, mozilla::dom::WorkerCSPEventListener::Create, mozilla::dom::WorkerPrivate::EnsurePerformanceCounter, mozilla::dom::WorkerPrivate::WorkerPrivate, mozilla::dom::cache::CacheOpChild::Recv__delete__, mozilla::dom::serviceWorkerScriptCache::, mozilla::net::HttpBaseChannel::HttpBaseChannel, mozilla::net::HttpChannelChild::HttpChannelChild, mozilla::net::nsHttpHandler::NewProxiedChannel, mozilla::net::nsIOService::NewChannelFromURIWithProxyFlagsInternal, mozilla::net::nsStandardURL::TemplatedMutator, nsPermission::Create, nsTimer, nsTimer::WithEventTarget, operator]
lsan-allowed: [Alloc, CompareNetwork, Create, EntrySlotOrCreate, MakeUnique, Malloc, NS_NewLoadGroup, NewChannelFromURIWithProxyFlagsInternal, NewPage, PLDHashTable::Add, Realloc, SharedMutex, Then, createTable, mozilla::BasePrincipal::CreateCodebasePrincipal, mozilla::SchedulerGroup::CreateEventTargetFor, mozilla::ThrottledEventQueue::Create, mozilla::WeakPtr, mozilla::detail::UniqueSelector, mozilla::dom::PerformanceStorageWorker::Create, mozilla::dom::ServiceWorkerJobQueue::RunJob, mozilla::dom::ServiceWorkerManager::Unregister, mozilla::dom::ServiceWorkerRegistrationMainThread::Unregister, mozilla::dom::UnregisterCallback::UnregisterCallback, mozilla::dom::WorkerCSPEventListener::Create, mozilla::dom::WorkerPrivate::EnsurePerformanceCounter, mozilla::dom::WorkerPrivate::WorkerPrivate, mozilla::dom::cache::CacheOpChild::Recv__delete__, mozilla::dom::serviceWorkerScriptCache::, mozilla::net::HttpBaseChannel::HttpBaseChannel, mozilla::net::HttpChannelChild::HttpChannelChild, mozilla::net::nsHttpHandler::NewProxiedChannel, mozilla::net::nsIOService::NewChannelFromURIWithProxyFlagsInternal, mozilla::net::nsStandardURL::TemplatedMutator, nsPermission::Create, nsTimer, nsTimer::WithEventTarget, operator]
leak-threshold: [default:51200, tab:51200]
@@ -1,4 +1,6 @@
[navigate-window.https.html]
expected:
if sw-e10s: CRASH
[Clients.matchAll() should not show an old window after it navigates.]
expected: FAIL

@@ -1,3 +1,3 @@
[unregister-then-register.https.html]
disabled:
if os == "android" and not e10s: https://bugzilla.mozilla.org/show_bug.cgi?id=1499972
if (os == "android") and not e10s: https://bugzilla.mozilla.org/show_bug.cgi?id=1499972

@@ -1,3 +1,3 @@
[update-not-allowed.https.html]
disabled:
if os == "android" and not e10s: https://bugzilla.mozilla.org/show_bug.cgi?id=1499972
if (os == "android") and not e10s: https://bugzilla.mozilla.org/show_bug.cgi?id=1499972
@@ -1 +1,2 @@
prefs: [media.navigator.permission.disabled:true, media.navigator.streams.fake:true, privacy.resistFingerprinting.reduceTimerPrecision.jitter:false, privacy.reduceTimerPrecision:false]
leak-threshold: [default:51200]

@@ -18,6 +18,7 @@ div {
#test {
background: green;

contain: size;
columns: 2 40px;
column-gap: 20px;
min-height: 100px;
@@ -0,0 +1,55 @@
<!DOCTYPE html>
<html>
<title>CSS Flexbox: min-height: auto with nested flexboxes and justify-content</title>
<link rel="author" title="Google LLC" href="https://www.google.com/" />
<link rel="help" href="https://drafts.csswg.org/css-flexbox/#min-size-auto" />
<link rel="issue" href="https://bugs.chromium.org/p/chromium/issues/detail?id=945214" />
<link rel="match" href="../reference/ref-filled-green-100px-square.xht" />

<style>
.overlapped-green {
position: absolute;
background-color: green;
width: 100px;
height: 100px;
z-index: 1;
}

.outer {
display: flex;
flex-direction: column;
height: 100px;
width: 100px;
background: green;
}

.inner {
display: flex;
flex-direction: column;
justify-content: flex-end;
height: 100%;
background-color: red;
}

.spacer {
height: 30px;
width: 100px;
background: red;
flex: none;
}
</style>
<body>

<p>Test passes if there is a filled green square and <strong>no red</strong>.</p>

<div class="overlapped-green"></div>
<div class="outer">
<div class="spacer"></div>
<div class="inner">
<div class="spacer"></div>
</div>
</div>


</body>
</html>
@@ -0,0 +1,41 @@
<!DOCTYPE html>
<title>CSS Position Absolute: Chrome crash</title>
<link rel="author" href="mailto:atotic@chromium.org">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<link rel="help" href="https://bugs.chromium.org/p/chromium/issues/detail?id=946986">
<meta name="assert" content="Nested abs/fixed/flex do not crash">
<style>
body { overflow: scroll;}
.container {
position: relative;
contain: paint;
}
.flex {
display: flex;
}
.fixed {
position: fixed;
}
.abs {
position: absolute;
}
</style>
<!-- LayoutNG currently does not support display:flex.
Propagation of descendants across flex boundaries is error prone -->
<div id="one" class="container" style="">
<div class="flex">
<div class="abs">
<div class="flex">
<div id="fixed1" class="fixed">
<div id="fixed2" class="fixed"></div>
</div>
</div>
</div>
</div>
</div>
<script>
test(() => {
}, 'test passes if it does not crash');
</script>
@@ -30,6 +30,10 @@ test_initial_value({ syntax: '<color>', initialValue: 'purple' }, 'rgb(128, 0, 1
test_initial_value({ syntax: '<transform-function>', initialValue: 'rotate(42deg)' }, 'rotate(42deg)');
test_initial_value({ syntax: '<transform-list>', initialValue: 'scale(calc(2 + 2))' }, 'scale(4)');
test_initial_value({ syntax: '<transform-list>', initialValue: 'scale(calc(2 + 1)) translateX(calc(3px + 1px))' }, 'scale(3) translateX(4px)');
test_initial_value({ syntax: '<url>', initialValue: 'url(a)' },
`url("${new URL('a', document.baseURI)}")`);
test_initial_value({ syntax: '<url>+', initialValue: 'url(a) url(a)' },
`url("${new URL('a', document.baseURI)}") url("${new URL('a', document.baseURI)}")`);

// Test that the initial value of the custom property 'reg' is successfully
// substituted into 'property'.
@@ -71,7 +71,7 @@ https://chromium.googlesource.com/chromium/src/+/c825d655f6aaf73484f9d56e9012793

verifyComputedStyle("opacity", "initial", "calc(2 / 4)", "0.5", "testing opacity: calc(2 / 4)");

verifyComputedStyle("tab-size", "12345", "calc(2 / 4)", "1", "testing tab-size: calc(2 / 4)");
verifyComputedStyle("tab-size", "12345", "calc(2 / 4)", "0.5", "testing tab-size: calc(2 / 4)");
/*
'tab-size' accepts <number> values.
*/
@@ -38,4 +38,4 @@
== background-attachment-fixed-inside-transform-1.html background-attachment-fixed-inside-transform-1-ref.html

# box-shadow with currentcolor test cases
== box-shadow-currentcolor.html box-shadow-currentcolor-ref.html
== box-shadow-currentcolor.html box-shadow-currentcolor-ref.html
@ -0,0 +1,88 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
overflow: scroll;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic-ref {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth-ref {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
.zeroHeightContents {
|
||||
color: transparent;
|
||||
height: 0px;
|
||||
width: 0px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow (i.e. it produces scrollbars of the appropriate size for the
|
||||
amount of overflow). -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLBasic-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLWidth-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="basic">
|
||||
<!-- We use the out-of-flow "innerContents" to create the correct
|
||||
amount of scrollable overflow to match the testcase, and we
|
||||
use the smaller in-flow "zeroHeightContents" to provide a
|
||||
baseline that we can use to verify the testcase's baseline
|
||||
alignment position. -->
|
||||
<div class="innerContents">inner</div>
|
||||
<div class="zeroHeightContents">i</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,93 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:scroll' block elements should cause them to be sized as if they had no contents</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-block-002-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
contain: size;
|
||||
overflow: scroll;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.maxWidth {
|
||||
max-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with no specified size.-->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified max-width -->
|
||||
<div class="contain maxWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with auto size -->
|
||||
<div class="contain floatLBasic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with
|
||||
specified width -->
|
||||
<div class="contain floatLWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block in a
|
||||
baseline-aligning flex container: should size as if it's empty, but
|
||||
still baseline-align using its contents' baseline -->
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="contain">
|
||||
<div class="innerContents">inner</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,88 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
overflow: auto;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic-ref {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth-ref {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
.zeroHeightContents {
|
||||
color: transparent;
|
||||
height: 0px;
|
||||
width: 0px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow (i.e. it produces scrollbars of the appropriate size for the
|
||||
amount of overflow). -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLBasic-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLWidth-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="basic">
|
||||
<!-- We use the out-of-flow "innerContents" to create the correct
|
||||
amount of scrollable overflow to match the testcase, and we
|
||||
use the smaller in-flow "zeroHeightContents" to provide a
|
||||
baseline that we can use to verify the testcase's baseline
|
||||
alignment position. -->
|
||||
<div class="innerContents">inner</div>
|
||||
<div class="zeroHeightContents">i</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,93 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:auto' block elements should cause them to be sized as if they had no contents</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-block-003-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
contain: size;
|
||||
overflow: auto;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.maxWidth {
|
||||
max-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with no specified size.-->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified max-width -->
|
||||
<div class="contain maxWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with auto size -->
|
||||
<div class="contain floatLBasic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with
|
||||
specified width -->
|
||||
<div class="contain floatLWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block in a
|
||||
baseline-aligning flex container: should size as if it's empty, but
|
||||
still baseline-align using its contents' baseline -->
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="contain">
|
||||
<div class="innerContents">inner</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,87 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic-ref {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth-ref {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
.zeroHeightContents {
|
||||
color: transparent;
|
||||
height: 0px;
|
||||
width: 0px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow. -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLBasic-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic floatLWidth-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="basic">
|
||||
<!-- We use the out-of-flow "innerContents" to create the correct
|
||||
amount of scrollable overflow to match the testcase, and we
|
||||
use the smaller in-flow "zeroHeightContents" to provide a
|
||||
baseline that we can use to verify the testcase's baseline
|
||||
alignment position. -->
|
||||
<div class="innerContents">inner</div>
|
||||
<div class="zeroHeightContents">i</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,93 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:hidden' block elements should cause them to be sized as if they had no contents</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-block-004-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
contain: size;
|
||||
overflow: hidden;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
background: lightblue;
|
||||
}
|
||||
.maxWidth {
|
||||
max-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
.floatLBasic {
|
||||
float: left;
|
||||
}
|
||||
.floatLWidth {
|
||||
float: left;
|
||||
width: 60px;
|
||||
}
|
||||
.flexBaselineCheck {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with no specified size.-->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified max-width -->
|
||||
<div class="contain maxWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with auto size -->
|
||||
<div class="contain floatLBasic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained floated scrollable block with
|
||||
specified width -->
|
||||
<div class="contain floatLWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!--CSS Test: A size-contained scrollable block in a
|
||||
baseline-aligning flex container: should size as if it's empty, but
|
||||
still baseline-align using its contents' baseline -->
|
||||
<div class="flexBaselineCheck">
|
||||
outside before
|
||||
<div class="contain">
|
||||
<div class="innerContents">inner</div>
|
||||
</div>
|
||||
outside after
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,56 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
display: inline-block;
|
||||
overflow: scroll;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow (i.e. it produces scrollbars of the appropriate size for the
|
||||
amount of overflow). -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
outside before
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,67 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:scroll' inline-block elements should cause them to be sized as if they had no contents and baseline-aligned regularly.</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-inline-block-002-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
display: inline-block;
|
||||
overflow: scroll;
|
||||
contain:size;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
}
|
||||
.minWidth {
|
||||
min-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!-- A size-contained scrollable inline-block with no specified size -->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block should perform baseline
|
||||
alignment regularly, based on contents' baseline. -->
|
||||
outside before
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-width -->
|
||||
<div class="contain minWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,56 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
display: inline-block;
|
||||
overflow: auto;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow (i.e. it produces scrollbars of the appropriate size for the
|
||||
amount of overflow). -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
outside before
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,67 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:auto' inline-block elements should cause them to be sized as if they had no contents and baseline-aligned regularly.</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-inline-block-003-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
display: inline-block;
|
||||
overflow: auto;
|
||||
contain:size;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
}
|
||||
.minWidth {
|
||||
min-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!-- A size-contained scrollable inline-block with no specified size -->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block should perform baseline
|
||||
alignment regularly, based on contents' baseline. -->
|
||||
outside before
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-width -->
|
||||
<div class="contain minWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,55 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Reftest Reference</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<style>
|
||||
.basic {
|
||||
display: inline-block;
|
||||
overflow: hidden;
|
||||
position: relative;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.height-ref {
|
||||
height: 60px;
|
||||
}
|
||||
.width-ref {
|
||||
width: 60px;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
position: absolute;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In the reference-case scenarios here, we use the same DOM as in
|
||||
the testcase, and we simply use 'position: absolute' on the descendants
|
||||
wherever the testcase has 'contain: size' on the container. This
|
||||
produces an accurate reference rendering, because out-of-flow content
|
||||
doesn't contribute to the container's sizing, but does create scrollable
|
||||
overflow. -->
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
outside before
|
||||
<div class="basic"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic height-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<div class="basic width-ref"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,67 @@
|
|||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: 'contain: size' on 'overflow:hidden' inline-block elements should cause them to be sized as if they had no contents and baseline-aligned regularly.</title>
|
||||
<link rel="author" title="Daniel Holbert" href="mailto:dholbert@mozilla.com">
|
||||
<link rel="author" title="Morgan Rae Reschenberg" href="mailto:mreschenberg@berkeley.edu">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-contain/#containment-size">
|
||||
<link rel="match" href="contain-size-inline-block-004-ref.html">
|
||||
<style>
|
||||
.contain {
|
||||
display: inline-block;
|
||||
overflow: hidden;
|
||||
contain:size;
|
||||
border: 2px solid green;
|
||||
}
|
||||
.innerContents {
|
||||
color: transparent;
|
||||
height: 100px;
|
||||
width: 100px;
|
||||
}
|
||||
.minHeight {
|
||||
min-height: 60px;
|
||||
}
|
||||
.height {
|
||||
height: 60px;
|
||||
}
|
||||
.minWidth {
|
||||
min-width: 60px;
|
||||
}
|
||||
.width {
|
||||
width: 60px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<!-- NOTE: In all cases below, the expectation is that the size-contained
|
||||
element should be sized as if it had no contents (while honoring
|
||||
whatever sizing properties are provided). -->
|
||||
|
||||
<!-- A size-contained scrollable inline-block with no specified size -->
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block should perform baseline
|
||||
alignment regularly, based on contents' baseline. -->
|
||||
outside before
|
||||
<div class="contain"><div class="innerContents">inner</div></div>
|
||||
outside after
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-height -->
|
||||
<div class="contain minHeight"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified height -->
|
||||
<div class="contain height"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified min-width -->
|
||||
<div class="contain minWidth"><div class="innerContents">inner</div></div>
|
||||
<br>
|
||||
|
||||
<!-- A size-contained scrollable inline-block with specified width -->
|
||||
<div class="contain width"><div class="innerContents">inner</div></div>
|
||||
</body>
|
||||
</html>
|
|
@ -19,7 +19,13 @@
|
|||
== contain-paint-stacking-context-001b.html contain-paint-stacking-context-001-ref.html
|
||||
== contain-size-button-001.html contain-size-button-001-ref.html
|
||||
== contain-size-block-001.html contain-size-block-001-ref.html
|
||||
== contain-size-block-002.html contain-size-block-002-ref.html
|
||||
== contain-size-block-003.html contain-size-block-003-ref.html
|
||||
== contain-size-block-004.html contain-size-block-004-ref.html
|
||||
== contain-size-inline-block-001.html contain-size-inline-block-001-ref.html
|
||||
== contain-size-inline-block-002.html contain-size-inline-block-002-ref.html
|
||||
== contain-size-inline-block-003.html contain-size-inline-block-003-ref.html
|
||||
== contain-size-inline-block-004.html contain-size-inline-block-004-ref.html
|
||||
== contain-size-flex-001.html contain-size-flex-001-ref.html
|
||||
== contain-size-inline-flex-001.html contain-size-inline-flex-001-ref.html
|
||||
== contain-size-grid-001.html contain-size-grid-001-ref.html
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
<!doctype html>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test Reference</title>
|
||||
<link rel="author" href="mailto:dholbert@mozilla.com" title="Daniel Holbert">
|
||||
<style>
|
||||
.outer {
|
||||
margin-bottom: 2px;
|
||||
height: 60px;
|
||||
}
|
||||
.inner {
|
||||
background: lime;
|
||||
height: 100%;
|
||||
}
|
||||
</style>
|
||||
<p>Test passes if you see four 60px-tall lime rows (with platform-appropriate scrollbars on the last one).</p>
|
||||
|
||||
<div class="outer"><div class="inner"></div></div>
|
||||
<div class="outer"><div class="inner"></div></div>
|
||||
<div class="outer"><div class="inner"></div></div>
|
||||
<div class="outer" style="overflow: scroll"><div class="inner"></div></div>
|
|
@ -0,0 +1,47 @@
|
|||
<!doctype html>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Test: nested flex containers with definite max-height</title>
|
||||
<link rel="match" href="flexbox-definite-sizes-005-ref.html">
|
||||
<link rel="author" href="mailto:dholbert@mozilla.com" title="Daniel Holbert">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-flexbox/#definite-sizes">
|
||||
<link rel="help" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1503173">
|
||||
<style>
|
||||
.horizFlex {
|
||||
display: flex;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
.vertFlex {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
flex-direction: column;
|
||||
min-height: 60px;
|
||||
}
|
||||
.item {
|
||||
background: lime;
|
||||
height: 100%;
|
||||
}
|
||||
</style>
|
||||
<p>Test passes if you see four 60px-tall lime rows (with platform-appropriate scrollbars on the last one).</p>
|
||||
|
||||
<div class="horizFlex">
|
||||
<div class="vertFlex">
|
||||
<div class="item"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="horizFlex">
|
||||
<div class="vertFlex" style="overflow: hidden">
|
||||
<div class="item"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="horizFlex">
|
||||
<div class="vertFlex" style="overflow: auto">
|
||||
<div class="item"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="horizFlex">
|
||||
<div class="vertFlex" style="overflow: scroll">
|
||||
<div class="item"></div>
|
||||
</div>
|
||||
</div>
|
|
@ -154,6 +154,7 @@
|
|||
== flexbox-definite-sizes-002.html flexbox-definite-sizes-001-ref.html
|
||||
== flexbox-definite-sizes-003.html flexbox-definite-sizes-001-ref.html
|
||||
== flexbox-definite-sizes-004.html flexbox-definite-sizes-001-ref.html
|
||||
== flexbox-definite-sizes-005.html flexbox-definite-sizes-005-ref.html
|
||||
|
||||
# Tests for flex items as (pseudo) stacking contexts
|
||||
== flexbox-items-as-stacking-contexts-001.xhtml flexbox-items-as-stacking-contexts-001-ref.xhtml
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
let e = document.createElement('embed');
|
||||
e.src = "https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"embed", "site":"same-origin", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"embed", "site":"same-origin", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -35,7 +35,7 @@
|
|||
let e = document.createElement('embed');
|
||||
e.src = "https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"embed", "site":"same-site", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"embed", "site":"same-site", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -54,7 +54,7 @@
|
|||
let e = document.createElement('embed');
|
||||
e.src = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"embed", "site":"cross-site", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"embed", "site":"cross-site", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -24,7 +24,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -37,7 +37,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -51,7 +51,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "same-origin",
|
||||
});
|
||||
});
|
||||
|
@ -64,7 +64,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -77,7 +77,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "no-cors",
|
||||
});
|
||||
});
|
||||
|
|
|
@ -46,7 +46,7 @@
|
|||
promise_test(t => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let key = "font-same-origin";
|
||||
let expected = {"dest":"font", "site":"same-origin", "user":"?F", "mode": "cors"};
|
||||
let expected = {"dest":"font", "site":"same-origin", "user":"", "mode": "cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -58,7 +58,7 @@
|
|||
promise_test(t => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let key = "font-same-site";
|
||||
let expected = {"dest":"font", "site":"same-site", "user":"?F", "mode": "cors"};
|
||||
let expected = {"dest":"font", "site":"same-site", "user":"", "mode": "cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -70,7 +70,7 @@
|
|||
promise_test(t => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let key = "font-cross-site";
|
||||
let expected = {"dest":"font", "site":"cross-site", "user":"?F", "mode": "cors"};
|
||||
let expected = {"dest":"font", "site":"cross-site", "user":"", "mode": "cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -1,63 +1,85 @@
|
|||
<!DOCTYPE html>
|
||||
<script src=/resources/testharness.js></script>
|
||||
<script src=/resources/testharnessreport.js></script>
|
||||
<script src=/resources/testdriver.js></script>
|
||||
<script src=/resources/testdriver-vendor.js></script>
|
||||
<script src=/fetch/sec-metadata/resources/helper.js></script>
|
||||
<script src=/common/utils.js></script>
|
||||
<body>
|
||||
<script>
|
||||
async_test(t => {
|
||||
let i = document.createElement('iframe');
|
||||
i.src = "https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/post-to-owner.py";
|
||||
window.addEventListener('message', t.step_func(e => {
|
||||
if (e.source != i.contentWindow)
|
||||
return;
|
||||
const USER = true;
|
||||
const FORCED = false;
|
||||
|
||||
assert_header_equals(e.data, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
t.done();
|
||||
}));
|
||||
function create_test(host, user_activated, expectations) {
|
||||
async_test(t => {
|
||||
let i = document.createElement('iframe');
|
||||
window.addEventListener('message', t.step_func(e => {
|
||||
if (e.source != i.contentWindow)
|
||||
return;
|
||||
|
||||
document.body.appendChild(i);
|
||||
}, "Same-origin iframe");
|
||||
assert_header_equals(e.data, expectations);
|
||||
t.done();
|
||||
}));
|
||||
|
||||
async_test(t => {
|
||||
let i = document.createElement('iframe');
|
||||
i.src = "https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/post-to-owner.py";
|
||||
window.addEventListener('message', t.step_func(e => {
|
||||
if (e.source != i.contentWindow)
|
||||
return;
|
||||
let url = `https://${host}/fetch/sec-metadata/resources/post-to-owner.py`;
|
||||
if (user_activated == FORCED) {
|
||||
i.src = url;
|
||||
document.body.appendChild(i);
|
||||
} else if (user_activated == USER) {
|
||||
let uuid = token();
|
||||
i.name = uuid;
|
||||
let a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.target = uuid;
|
||||
a.text = "This is a link!";
|
||||
|
||||
assert_header_equals(e.data, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
t.done();
|
||||
}));
|
||||
document.body.appendChild(i);
|
||||
document.body.appendChild(a);
|
||||
|
||||
document.body.appendChild(i);
|
||||
}, "Same-site iframe");
|
||||
test_driver.click(a);
|
||||
}
|
||||
}, `{{host}} -> ${host} iframe: ${user_activated ? "user-activated" : "forced"}`);
|
||||
}
|
||||
|
||||
async_test(t => {
|
||||
let i = document.createElement('iframe');
|
||||
i.src = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/post-to-owner.py";
|
||||
window.addEventListener('message', t.step_func(e => {
|
||||
if (e.source != i.contentWindow)
|
||||
return;
|
||||
create_test("{{host}}:{{ports[https][0]}}", FORCED, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-origin",
|
||||
"user": "",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
|
||||
assert_header_equals(e.data, {
|
||||
"dest": "nested-document",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
t.done();
|
||||
}));
|
||||
create_test("{{hosts[][www]}}:{{ports[https][0]}}", FORCED, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-site",
|
||||
"user": "",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
|
||||
document.body.appendChild(i);
|
||||
}, "Cross-site iframe");
|
||||
create_test("{{hosts[alt][www]}}:{{ports[https][0]}}", FORCED, {
|
||||
"dest": "nested-document",
|
||||
"site": "cross-site",
|
||||
"user": "",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
|
||||
create_test("{{host}}:{{ports[https][0]}}", USER, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-origin",
|
||||
"user": "?T",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
|
||||
create_test("{{hosts[][www]}}:{{ports[https][0]}}", USER, {
|
||||
"dest": "nested-document",
|
||||
"site": "same-site",
|
||||
"user": "?T",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
|
||||
create_test("{{hosts[alt][www]}}:{{ports[https][0]}}", USER, {
|
||||
"dest": "nested-document",
|
||||
"site": "cross-site",
|
||||
"user": "?T",
|
||||
"mode": "nested-navigate"
|
||||
});
|
||||
</script>
|
||||
|
|
|
@ -23,7 +23,9 @@
|
|||
assert_header_equals(got, {
|
||||
"dest": "image",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
// Note that we're using `undefined` here, as opposed to "" elsewhere because of the way
|
||||
// that `image.py` encodes data.
|
||||
"user": undefined,
|
||||
"mode": "cors", // Because `loadImageInWindow` tacks on `crossorigin`
|
||||
});
|
||||
}),
|
||||
|
@ -45,7 +47,9 @@
|
|||
assert_header_equals(got, {
|
||||
"dest": "image",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
// Note that we're using `undefined` here, as opposed to "" elsewhere because of the way
|
||||
// that `image.py` encodes data.
|
||||
"user": undefined,
|
||||
"mode": "cors", // Because `loadImageInWindow` tacks on `crossorigin`
|
||||
});
|
||||
}),
|
||||
|
@ -67,7 +71,9 @@
|
|||
assert_header_equals(got, {
|
||||
"dest": "image",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
// Note that we're using `undefined` here, as opposed to "" elsewhere because of the way
|
||||
// that `image.py` encodes data.
|
||||
"user": undefined,
|
||||
"mode": "cors", // Because `loadImageInWindow` tacks on `crossorigin`
|
||||
});
|
||||
}),
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
let e = document.createElement('object');
|
||||
e.data = "https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"object", "site":"same-origin", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"object", "site":"same-origin", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -35,7 +35,7 @@
|
|||
let e = document.createElement('object');
|
||||
e.data = "https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"object", "site":"same-site", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"object", "site":"same-site", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -54,7 +54,7 @@
|
|||
let e = document.createElement('object');
|
||||
e.data = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"object", "site":"cross-site", "user":"?F", "mode":"no-cors"};
|
||||
let expected = {"dest":"object", "site":"cross-site", "user":"", "mode":"no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
|
@ -41,7 +41,7 @@
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
|
@ -67,7 +67,7 @@
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
e.src = "https://{{host}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=" +// same-origin
|
||||
"https://{{hosts[alt][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=" +// cross-site
|
||||
"https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;// same-origin
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
e.src = "https://{{host}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=" +// same-origin
|
||||
"https://{{hosts[][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=" +// same-site
|
||||
"https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;// same-origin
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "/xhr/resources/redirect.py?location=https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"same-origin", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"same-origin", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -42,7 +42,7 @@ promise_test(t => {
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "/xhr/resources/redirect.py?location=https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -69,7 +69,7 @@ promise_test(t => {
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "/xhr/resources/redirect.py?location=https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -42,7 +42,7 @@ promise_test(t => {
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -69,7 +69,7 @@ promise_test(t => {
|
|||
|
||||
let e = document.createElement('img');
|
||||
e.src = "https://{{hosts[][www]}}:{{ports[https][0]}}/xhr/resources/redirect.py?location=https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"image", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
|
||||
e.onload = e => {
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
|
|
@ -22,9 +22,9 @@
|
|||
document.addEventListener("securitypolicyviolation", (e) => {
|
||||
counter++;
|
||||
if (counter == 3) {
|
||||
generate_test({"dest":"report", "site":"same-origin", "user":"?F", "mode": "no-cors"}, "same-origin");
|
||||
generate_test({"dest":"report", "site":"same-site", "user":"?F", "mode": "no-cors"}, "same-site");
|
||||
generate_test({"dest":"report", "site":"cross-site", "user":"?F", "mode": "no-cors"}, "cross-site");
|
||||
generate_test({"dest":"report", "site":"same-origin", "user":"", "mode": "no-cors"}, "same-origin");
|
||||
generate_test({"dest":"report", "site":"same-site", "user":"", "mode": "no-cors"}, "same-site");
|
||||
generate_test({"dest":"report", "site":"cross-site", "user":"", "mode": "no-cors"}, "cross-site");
|
||||
|
||||
done();
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
assert_header_equals(header, {
|
||||
"dest": "script",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "no-cors",
|
||||
});
|
||||
}, "Same-origin script");
|
||||
|
@ -27,7 +27,7 @@
|
|||
assert_header_equals(header, {
|
||||
"dest": "script",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "no-cors",
|
||||
});
|
||||
}, "Same-site script");
|
||||
|
@ -42,7 +42,7 @@
|
|||
assert_header_equals(header, {
|
||||
"dest": "script",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "no-cors",
|
||||
});
|
||||
}, "Cross-site script");
|
||||
|
@ -57,7 +57,7 @@
|
|||
assert_header_equals(header, {
|
||||
"dest": "script",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
}, "Same-origin CORS script");
|
||||
|
|
|
@ -38,7 +38,7 @@
|
|||
function test_same_origin(){
|
||||
promise_test(t => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let expected = {"dest":"serviceworker", "site":"same-origin", "user":"?F", "mode": "same-origin"};
|
||||
let expected = {"dest":"serviceworker", "site":"same-origin", "user":"", "mode": "same-origin"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -28,7 +28,7 @@
|
|||
function test_same_origin(){
|
||||
promise_test(t => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let expected = {"dest":"sharedworker", "site":"same-origin", "user":"?F", "mode": "same-origin"};
|
||||
let expected = {"dest":"sharedworker", "site":"same-origin", "user":"", "mode": "same-origin"};
|
||||
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
e.rel = "stylesheet";
|
||||
e.href = "https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"style", "site":"same-origin", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"style", "site":"same-origin", "user":"", "mode": "no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -37,7 +37,7 @@
|
|||
e.rel = "stylesheet";
|
||||
e.href = "https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"style", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"style", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -57,7 +57,7 @@
|
|||
e.rel = "stylesheet";
|
||||
e.href = "https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"style", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"style", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
@ -78,7 +78,7 @@
|
|||
e.href = "https://{{host}}:{{ports[https][0]}}/fetch/sec-metadata/resources/record-header.py?file=" + key;
|
||||
e.crossOrigin = "anonymous";
|
||||
e.onload = e => {
|
||||
let expected = {"dest":"style", "site":"same-origin", "user":"?F", "mode": "cors"};
|
||||
let expected = {"dest":"style", "site":"same-origin", "user":"", "mode": "cors"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
expected = {
|
||||
"dest": "track",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors" // Because the `video` element has `crossorigin`
|
||||
};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -59,7 +59,7 @@
|
|||
expected = {
|
||||
"dest": "track",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors" // Because the `video` element has `crossorigin`
|
||||
};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -84,7 +84,7 @@
|
|||
expected = {
|
||||
"dest": "track",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors" // Because the `video` element has `crossorigin`
|
||||
};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
@ -112,7 +112,7 @@
|
|||
expected = {
|
||||
"dest":"track",
|
||||
"site":"same-origin",
|
||||
"user":"?F",
|
||||
"user":"",
|
||||
"mode": "same-origin"
|
||||
};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -24,7 +24,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
@ -37,7 +37,7 @@
|
|||
assert_header_equals(j, {
|
||||
"dest": "empty",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "cors",
|
||||
});
|
||||
});
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
t.done();
|
||||
|
@ -34,7 +34,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
t.done();
|
||||
|
@ -51,7 +51,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
t.done();
|
||||
|
@ -70,7 +70,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "same-origin",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
|
||||
|
@ -94,7 +94,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "same-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
|
||||
|
@ -118,7 +118,7 @@
|
|||
assert_header_equals(e.data, {
|
||||
"dest": "document",
|
||||
"site": "cross-site",
|
||||
"user": "?F",
|
||||
"user": "",
|
||||
"mode": "navigate",
|
||||
});
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
let key = "worker-same-origin" + nonce;
|
||||
let w = new Worker("/fetch/sec-metadata/resources/record-header.py?file=" + key);
|
||||
w.onmessage = e => {
|
||||
let expected = {"dest":"worker", "site":"same-origin", "user":"?F", "mode": "same-origin"};
|
||||
let expected = {"dest":"worker", "site":"same-origin", "user":"", "mode": "same-origin"};
|
||||
fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=" + key)
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected))
|
||||
|
|
|
@ -12,21 +12,21 @@
|
|||
return;
|
||||
|
||||
promise_test(t => {
|
||||
let expected = {"dest":"xslt", "site":"same-origin", "user":"?F", "mode": "same-origin"};
|
||||
let expected = {"dest":"xslt", "site":"same-origin", "user":"", "mode": "same-origin"};
|
||||
return fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=xslt-same-origin")
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected));
|
||||
}, "Same-Origin xslt");
|
||||
|
||||
promise_test(t => {
|
||||
let expected = {"dest":"xslt", "site":"same-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"xslt", "site":"same-site", "user":"", "mode": "no-cors"};
|
||||
return fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=xslt-same-site")
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected));
|
||||
}, "Same-site xslt");
|
||||
|
||||
promise_test(t => {
|
||||
let expected = {"dest":"xslt", "site":"cross-site", "user":"?F", "mode": "no-cors"};
|
||||
let expected = {"dest":"xslt", "site":"cross-site", "user":"", "mode": "no-cors"};
|
||||
return fetch("/fetch/sec-metadata/resources/record-header.py?retrieve=true&file=xslt-cross-site")
|
||||
.then(response => response.text())
|
||||
.then(text => assert_header_equals(text, expected));
|
||||
|
|
|
@ -23,7 +23,7 @@ callback PerformanceObserverCallback = void (PerformanceObserverEntryList entrie
|
|||
PerformanceObserver observer);
|
||||
[Constructor(PerformanceObserverCallback callback), Exposed=(Window,Worker)]
|
||||
interface PerformanceObserver {
|
||||
void observe(PerformanceObserverInit options);
|
||||
void observe(optional PerformanceObserverInit options);
|
||||
void disconnect();
|
||||
PerformanceEntryList takeRecords();
|
||||
static readonly attribute FrozenArray<DOMString> supportedEntryTypes;
|
||||
|
|
|
@ -4,3 +4,4 @@ support-files =
|
|||
data/**
|
||||
|
||||
[test_verify_jar.js]
|
||||
skip-if = true # Bug 1549147 - Disable temporily until non-expired cert is available to sign test XPI.
|
||||
|
|
|
@ -463,6 +463,7 @@ function getGfxAdapter(aSuffix = "") {
|
|||
subsysID: getGfxField("adapterSubsysID" + aSuffix, null),
|
||||
RAM: memoryMB,
|
||||
driver: getGfxField("adapterDriver" + aSuffix, null),
|
||||
driverVendor: getGfxField("adapterDriverVendor" + aSuffix, null),
|
||||
driverVersion: getGfxField("adapterDriverVersion" + aSuffix, null),
|
||||
driverDate: getGfxField("adapterDriverDate" + aSuffix, null),
|
||||
};
|
||||
|
@ -1772,7 +1773,7 @@ EnvironmentCache.prototype = {
|
|||
features: {},
|
||||
};
|
||||
|
||||
if (!["android", "linux"].includes(AppConstants.platform)) {
|
||||
if (AppConstants.platform !== "android") {
|
||||
let gfxInfo = Cc["@mozilla.org/gfx/info;1"].getService(Ci.nsIGfxInfo);
|
||||
try {
|
||||
gfxData.monitors = gfxInfo.getMonitors();
|
||||
|
|
|
@ -164,6 +164,7 @@ Structure:
|
|||
subsysID: <string>, // null on failure
|
||||
RAM: <number>, // in MB, null on failure
|
||||
driver: <string>, // null on failure
|
||||
driverVendor: <string>, // null on failure
|
||||
driverVersion: <string>, // null on failure
|
||||
driverDate: <string>, // null on failure
|
||||
GPUActive: <bool>, // currently always true for the first adapter
|
||||
|
@ -171,7 +172,7 @@ Structure:
|
|||
...
|
||||
],
|
||||
// Note: currently only added on Desktop. On Linux, only a single
|
||||
// monitor is returned representing the entire virtual screen.
|
||||
// monitor is returned for the primary screen.
|
||||
monitors: [
|
||||
{
|
||||
screenWidth: <number>, // screen width in pixels
|
||||
|
|
|
@ -506,6 +506,7 @@ function checkGfxAdapter(data) {
|
|||
subsysID: "string",
|
||||
RAM: "number",
|
||||
driver: "string",
|
||||
driverVendor: "string",
|
||||
driverVersion: "string",
|
||||
driverDate: "string",
|
||||
GPUActive: "boolean",
|
||||
|
@ -631,7 +632,7 @@ function checkSystemSection(data) {
|
|||
Assert.ok(gfxData.adapters[0].GPUActive, "The first GFX adapter must be active.");
|
||||
|
||||
Assert.ok(Array.isArray(gfxData.monitors));
|
||||
if (gIsWindows || gIsMac) {
|
||||
if (gIsWindows || gIsMac || gIsLinux) {
|
||||
Assert.ok(gfxData.monitors.length >= 1, "There is at least one monitor.");
|
||||
Assert.equal(typeof gfxData.monitors[0].screenWidth, "number");
|
||||
Assert.equal(typeof gfxData.monitors[0].screenHeight, "number");
|
||||
|
|
|
@ -507,6 +507,7 @@ var snapshotFormatters = {
|
|||
["adapterDescription", "gpu-description"],
|
||||
["adapterVendorID", "gpu-vendor-id"],
|
||||
["adapterDeviceID", "gpu-device-id"],
|
||||
["driverVendor", "gpu-driver-vendor"],
|
||||
["driverVersion", "gpu-driver-version"],
|
||||
["driverDate", "gpu-driver-date"],
|
||||
["adapterDrivers", "gpu-drivers"],
|
||||
|
|
|
@ -49,6 +49,11 @@ AdapterDeviceID:
|
|||
Graphics adapter name.
|
||||
type: string
|
||||
|
||||
AdapterDriverVendor:
|
||||
description: >
|
||||
Graphics adapter driver vendor.
|
||||
type: string
|
||||
|
||||
AdapterDriverVersion:
|
||||
description: >
|
||||
Graphics adapter driver version.
|
||||
|
|
|
@ -206,6 +206,7 @@ gpu-device-id = Device ID
|
|||
gpu-subsys-id = Subsys ID
|
||||
gpu-drivers = Drivers
|
||||
gpu-ram = RAM
|
||||
gpu-driver-vendor = Driver Vendor
|
||||
gpu-driver-version = Driver Version
|
||||
gpu-driver-date = Driver Date
|
||||
gpu-active = Active
|
||||
|
|
|
@ -443,6 +443,7 @@ var dataProviders = {
|
|||
adapterSubsysID: null,
|
||||
adapterRAM: null,
|
||||
adapterDriver: "adapterDrivers",
|
||||
adapterDriverVendor: "driverVendor",
|
||||
adapterDriverVersion: "driverVersion",
|
||||
adapterDriverDate: "driverDate",
|
||||
|
||||
|
@ -452,6 +453,7 @@ var dataProviders = {
|
|||
adapterSubsysID2: null,
|
||||
adapterRAM2: null,
|
||||
adapterDriver2: "adapterDrivers2",
|
||||
adapterDriverVendor2: "driverVendor2",
|
||||
adapterDriverVersion2: "driverVersion2",
|
||||
adapterDriverDate2: "driverDate2",
|
||||
isGPU2Active: null,
|
||||
|
|
|
@ -326,6 +326,9 @@ const SNAPSHOT_SCHEMA = {
|
|||
adapterDrivers: {
|
||||
type: "string",
|
||||
},
|
||||
driverVendor: {
|
||||
type: "string",
|
||||
},
|
||||
driverVersion: {
|
||||
type: "string",
|
||||
},
|
||||
|
@ -350,6 +353,9 @@ const SNAPSHOT_SCHEMA = {
|
|||
adapterDrivers2: {
|
||||
type: "string",
|
||||
},
|
||||
driverVendor2: {
|
||||
type: "string",
|
||||
},
|
||||
driverVersion2: {
|
||||
type: "string",
|
||||
},
|
||||
|
|
|
@ -12,7 +12,7 @@ Classes = [
|
|||
'constructor': 'BlocklistService',
|
||||
'processes': ProcessSelector.MAIN_PROCESS_ONLY,
|
||||
'categories': ({'profile-after-change': 'nsBlocklistService'}
|
||||
if buildconfig.substs['MOZ_BUILD_APP'] == 'browser'
|
||||
if buildconfig.substs['MOZ_BUILD_APP'] != 'browser'
|
||||
else {}),
|
||||
},
|
||||
{
|
||||
|
|
|
@ -2410,9 +2410,7 @@ this.XPIDatabaseReconcile = {
|
|||
|
||||
if (!aNewAddon) {
|
||||
// Load the manifest from the add-on.
|
||||
let file = new nsIFile(aAddonState.path);
|
||||
aNewAddon = XPIInstall.syncLoadManifestFromFile(file, aLocation);
|
||||
aNewAddon.rootURI = XPIInternal.getURIForResourceInFile(file, "").spec;
|
||||
aNewAddon = XPIInstall.syncLoadManifest(aAddonState, aLocation);
|
||||
}
|
||||
// The add-on in the manifest should match the add-on ID.
|
||||
if (aNewAddon.id != aId) {
|
||||
|
@ -2506,9 +2504,7 @@ this.XPIDatabaseReconcile = {
|
|||
try {
|
||||
// If there isn't an updated install manifest for this add-on then load it.
|
||||
if (!aNewAddon) {
|
||||
let file = new nsIFile(aAddonState.path);
|
||||
aNewAddon = XPIInstall.syncLoadManifestFromFile(file, aLocation, aOldAddon);
|
||||
aNewAddon.rootURI = XPIInternal.getURIForResourceInFile(file, "").spec;
|
||||
aNewAddon = XPIInstall.syncLoadManifest(aAddonState, aLocation, aOldAddon);
|
||||
} else {
|
||||
aNewAddon.rootURI = aOldAddon.rootURI;
|
||||
}
|
||||
|
@ -2586,9 +2582,7 @@ this.XPIDatabaseReconcile = {
|
|||
let manifest = null;
|
||||
if (checkSigning || aReloadMetadata) {
|
||||
try {
|
||||
let file = new nsIFile(aAddonState.path);
|
||||
manifest = XPIInstall.syncLoadManifestFromFile(file, aLocation);
|
||||
manifest.rootURI = aOldAddon.rootURI;
|
||||
manifest = XPIInstall.syncLoadManifest(aAddonState, aLocation);
|
||||
} catch (err) {
|
||||
// If we can no longer read the manifest, it is no longer compatible.
|
||||
aOldAddon.brokenManifest = true;
|
||||
|
@ -2839,6 +2833,9 @@ this.XPIDatabaseReconcile = {
|
|||
|
||||
for (let [id, addon] of previousVisible) {
|
||||
if (addon.location) {
|
||||
if (addon.location.name == KEY_APP_BUILTINS) {
|
||||
continue;
|
||||
}
|
||||
if (addonExists(addon)) {
|
||||
XPIInternal.BootstrapScope.get(addon).uninstall();
|
||||
}
|
||||
|
@ -2909,8 +2906,9 @@ this.XPIDatabaseReconcile = {
|
|||
AddonManagerPrivate.addStartupChange(AddonManager.STARTUP_CHANGE_CHANGED, id);
|
||||
|
||||
if (previousAddon.location &&
|
||||
previousAddon._sourceBundle.exists() &&
|
||||
!previousAddon._sourceBundle.equals(currentAddon._sourceBundle)) {
|
||||
(!previousAddon._sourceBundle ||
|
||||
(previousAddon._sourceBundle.exists() &&
|
||||
!previousAddon._sourceBundle.equals(currentAddon._sourceBundle)))) {
|
||||
promise = XPIInternal.BootstrapScope.get(previousAddon).update(
|
||||
currentAddon);
|
||||
} else if (this.isSystemAddonLocation(currentAddon.location) &&
|
||||
|
|
|
@ -339,6 +339,36 @@ XPIPackage = class XPIPackage extends Package {
|
|||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return an object that implements enough of the Package interface
|
||||
* to allow loadManifest() to work for a built-in addon (ie, one loaded
|
||||
* from a resource: url)
|
||||
*
|
||||
* @param {nsIURL} baseURL The URL for the root of the add-on.
|
||||
* @returns {object}
|
||||
*/
|
||||
function builtinPackage(baseURL) {
|
||||
return {
|
||||
rootURI: baseURL,
|
||||
filePath: baseURL.spec,
|
||||
file: null,
|
||||
verifySignedState() {
|
||||
return {
|
||||
signedState: AddonManager.SIGNEDSTATE_NOT_REQUIRED,
|
||||
cert: null,
|
||||
};
|
||||
},
|
||||
async hasResource(path) {
|
||||
try {
|
||||
let response = await fetch(this.rootURI.resolve(path));
|
||||
return response.ok;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the reason to pass to an extension's bootstrap methods when
|
||||
* switch between versions.
|
||||
|
@ -610,8 +640,23 @@ var loadManifestFromFile = async function(aFile, aLocation, aOldAddon) {
|
|||
* A synchronous method for loading an add-on's manifest. Do not use
|
||||
* this.
|
||||
*/
|
||||
function syncLoadManifestFromFile(aFile, aLocation, aOldAddon) {
|
||||
return XPIInternal.awaitPromise(loadManifestFromFile(aFile, aLocation, aOldAddon));
|
||||
function syncLoadManifest(state, location, oldAddon) {
|
||||
if (location.name == "app-builtin") {
|
||||
let pkg = builtinPackage(Services.io.newURI(state.rootURI));
|
||||
return XPIInternal.awaitPromise(loadManifest(pkg, location, oldAddon));
|
||||
}
|
||||
|
||||
let file = new nsIFile(state.path);
|
||||
let pkg = Package.get(file);
|
||||
return XPIInternal.awaitPromise((async () => {
|
||||
try {
|
||||
let addon = await loadManifest(pkg, location, oldAddon);
|
||||
addon.rootURI = getURIForResourceInFile(file, "").spec;
|
||||
return addon;
|
||||
} finally {
|
||||
pkg.close();
|
||||
}
|
||||
})());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -3226,7 +3271,7 @@ var XPIInstall = {
|
|||
flushJarCache,
|
||||
newVersionReason,
|
||||
recursiveRemove,
|
||||
syncLoadManifestFromFile,
|
||||
syncLoadManifest,
|
||||
|
||||
// Keep track of in-progress operations that support cancel()
|
||||
_inProgress: [],
|
||||
|
@ -3695,27 +3740,7 @@ var XPIInstall = {
|
|||
throw new Error("Built-in addons must use resource: URLS");
|
||||
}
|
||||
|
||||
// Enough of the Package interface to allow loadManifest() to work.
|
||||
let pkg = {
|
||||
rootURI: baseURL,
|
||||
filePath: baseURL.spec,
|
||||
file: null,
|
||||
verifySignedState() {
|
||||
return {
|
||||
signedState: AddonManager.SIGNEDSTATE_NOT_REQUIRED,
|
||||
cert: null,
|
||||
};
|
||||
},
|
||||
async hasResource(path) {
|
||||
try {
|
||||
let response = await fetch(this.rootURI.resolve(path));
|
||||
return response.ok;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
let pkg = builtinPackage(baseURL);
|
||||
let addon = await loadManifest(pkg, XPIInternal.BuiltInLocation);
|
||||
addon.rootURI = base;
|
||||
|
||||
|
|
|
@ -106,7 +106,7 @@ const XPI_PERMISSION = "install";
|
|||
|
||||
const XPI_SIGNATURE_CHECK_PERIOD = 24 * 60 * 60;
|
||||
|
||||
const DB_SCHEMA = 29;
|
||||
const DB_SCHEMA = 30;
|
||||
|
||||
XPCOMUtils.defineLazyPreferenceGetter(this, "enabledScopesPref",
|
||||
PREF_EM_ENABLED_SCOPES,
|
||||
|
@ -1987,6 +1987,29 @@ class BootstrapScope {
|
|||
}
|
||||
}
|
||||
|
||||
// https://bugzilla.mozilla.org/show_bug.cgi?id=1548973
|
||||
const MISSING_INTERMEDIATE_CERTIFICATE = "MIIHLTCCBRWgAwIBAgIDEAAIMA0GCSqGSIb3DQEBDAUAMH0xCzAJBgNVBAYTAlVTMRwwGgYDVQQKExNNb3ppbGxhIENvcnBvcmF0aW9uMS8wLQYDVQQLEyZNb3ppbGxhIEFNTyBQcm9kdWN0aW9uIFNpZ25pbmcgU2VydmljZTEfMB0GA1UEAxMWcm9vdC1jYS1wcm9kdWN0aW9uLWFtbzAeFw0xNTA0MDQwMDAwMDBaFw0yNTA0MDQwMDAwMDBaMIGnMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTTW96aWxsYSBDb3Jwb3JhdGlvbjEvMC0GA1UECxMmTW96aWxsYSBBTU8gUHJvZHVjdGlvbiBTaWduaW5nIFNlcnZpY2UxJjAkBgNVBAMTHXNpZ25pbmdjYTEuYWRkb25zLm1vemlsbGEub3JnMSEwHwYJKoZIhvcNAQkBFhJmb3hzZWNAbW96aWxsYS5jb20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC/qluiiI+wO6qGA4vH7cHvWvXpdju9JnvbwnrbYmxhtUpfS68LbdjGGtv7RP6F1XhHT4MU3v4GuMulH0E4Wfalm8evsb3tBJRMJPICJX5UCLi6VJ6J2vipXSWBf8xbcOB+PY5Kk6L+EZiWaepiM23CdaZjNOJCAB6wFHlGe+zUk87whpLa7GrtrHjTb8u9TSS+mwjhvgfP8ILZrWhzb5H/ybgmD7jYaJGIDY/WDmq1gVe03fShxD09Ml1P7H38o5kbFLnbbqpqC6n8SfUI31MiJAXAN2e6rAOM8EmocAY0EC5KUooXKRsYvHzhwwHkwIbbe6QpTUlIqvw1MPlQPs7Zu/MBnVmyGTSqJxtYoklr0MaEXnJNY3g3FDf1R0Opp2/BEY9Vh3Fc9Pq6qWIhGoMyWdueoSYa+GURqDbsuYnk7ZkysxK+yRoFJu4x3TUBmMKM14jQKLgxvuIzWVn6qg6cw7ye/DYNufc+DSPSTSakSsWJ9IPxiAU7xJ+GCMzaZ10Y3VGOybGLuPxDlSd6KALAoMcl9ghB2mvfB0N3wv6uWnbKuxihq/qDps+FjliNvr7C66mIVH+9rkyHIy6GgIUlwr7E88Qqw+SQeNeph6NIY85PL4p0Y8KivKP4J928tpp18wLuHNbIG+YaUk5WUDZ6/2621pi19UZQ8iiHxN/XKQIDAQABo4IBiTCCAYUwDAYDVR0TBAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwMwHQYDVR0OBBYEFBY++xz/DCuT+JsV1y2jwuZ4YdztMIGoBgNVHSMEgaAwgZ2AFLO86lh0q+FueCqyq5wjHqhjLJe3oYGBpH8wfTELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE01vemlsbGEgQ29ycG9yYXRpb24xLzAtBgNVBAsTJk1vemlsbGEgQU1PIFByb2R1Y3Rpb24gU2lnbmluZyBTZXJ2aWNlMR8wHQYDVQQDExZyb290LWNhLXByb2R1Y3Rpb24tYW1vggEBMDMGCWCGSAGG+EIBBAQmFiRodHRwOi8vYWRkb25zLm1vemlsbGEub3JnL2NhL2NybC5wZW0wTgYDVR0eBEcwRaFDMCCCHi5jb250ZW50LXNpZ25hdHVyZS5tb3ppbGxhLm9yZzAfgh1jb250ZW50LXNpZ25hdHVyZS5tb3ppbGxhLm9yZzANBgkqhkiG9w0BAQwFAAOCAgEAX1PNli/zErw3tK3S9Bv803RV4tHkrMa5xztxzlWja0VAUJKEQx7f1yM8vmcQJ9g5RE8WFc43IePwzbAoum5F4BTM7tqM//+e476F1YUgB7SnkDTVpBOnV5vRLz1Si4iJ/U0HUvMUvNJEweXvKg/DNbXuCreSvTEAawmRIxqNYoaigQD8x4hCzGcVtIi5Xk2aMCJW2K/6JqkN50pnLBNkPx6FeiYMJCP8z0FIz3fv53FHgu3oeDhi2u3VdONjK3aaFWTlKNiGeDU0/lr0suWfQLsNyphTMbYKyTqQYHxXYJno9PuNi7e1903PvM47fKB5bFmSLyzB1hB1YIVLj0/YqD4nz3lADDB91gMBB7vR2h5bRjFqLOxuOutNNcNRnv7UPqtVCtLF2jVb4/AmdJU78jpfDs+BgY/t2bnGBVFBuwqS2Kult/2kth4YMrL5DrURIM8oXWVQRBKxzr843yDmHo8+2rqxLnZcmWoe8yQ41srZ4IB+V3w2TIAd4gxZAB0Xa6KfnR4D8RgE5sgmgQoK7Y/hdvd9Ahu0WEZI8Eg+mDeCeojWcyjF+dt6c2oERiTmFTIFUoojEjJwLyIqHKt+eApEYpF7imaWcumFN1jR+iUjE4ZSUoVxGtZ/Jdnkf8VVQMhiBA+i7r5PsfrHq+lqTTGOg+GzYx7OmoeJAT0zo4c=";
|
||||
|
||||
function addMissingIntermediateCertificate() {
|
||||
const PREF_SIGNER_HOTFIXED = "extensions.signer.hotfixed";
|
||||
let hotfixApplied = Services.prefs.getBoolPref(PREF_SIGNER_HOTFIXED, false);
|
||||
if (hotfixApplied) {
|
||||
return;
|
||||
}
|
||||
logger.debug("hotfix for addon signing cert has not been applied; applying");
|
||||
|
||||
try {
|
||||
let certDB = Cc["@mozilla.org/security/x509certdb;1"].getService(Ci.nsIX509CertDB);
|
||||
certDB.addCertFromBase64(MISSING_INTERMEDIATE_CERTIFICATE, ",,");
|
||||
logger.debug("new intermediate certificate added");
|
||||
} catch (e) {
|
||||
logger.error("failed to add new intermediate certificate:", e);
|
||||
return;
|
||||
}
|
||||
|
||||
Services.prefs.setBoolPref(PREF_SIGNER_HOTFIXED, true);
|
||||
}
|
||||
|
||||
let resolveDBReady;
|
||||
let dbReadyPromise = new Promise(resolve => {
|
||||
resolveDBReady = resolve;
|
||||
|
@ -2232,6 +2255,11 @@ var XPIProvider = {
|
|||
* if it is a new profile or the version is unknown
|
||||
*/
|
||||
startup(aAppChanged, aOldAppVersion, aOldPlatformVersion) {
|
||||
// Add missing certificate (bug 1548973). Mistakenly disabled add-ons are
|
||||
// going to be re-enabled because the schema version bump forces a new
|
||||
// signature verification check.
|
||||
addMissingIntermediateCertificate();
|
||||
|
||||
try {
|
||||
AddonManagerPrivate.recordTimestamp("XPI_startup_begin");
|
||||
|
||||
|
|
|
@ -43,6 +43,29 @@ add_task(async function test_builtin_location() {
|
|||
notEqual(addon, null, "Addon is installed");
|
||||
ok(addon.isActive, "Addon is active");
|
||||
|
||||
// After a restart that causes a database rebuild, it should still work
|
||||
await promiseRestartManager("2");
|
||||
await wrapper.awaitStartup();
|
||||
await wrapper.awaitMessage("started");
|
||||
ok(true, "Extension in built-in location ran after restart");
|
||||
|
||||
addon = await promiseAddonByID(id);
|
||||
notEqual(addon, null, "Addon is installed");
|
||||
ok(addon.isActive, "Addon is active");
|
||||
|
||||
// After a restart that changes the schema version, it should still work
|
||||
await promiseShutdownManager();
|
||||
Services.prefs.setIntPref("extensions.databaseSchema", 0);
|
||||
await promiseStartupManager();
|
||||
|
||||
await wrapper.awaitStartup();
|
||||
await wrapper.awaitMessage("started");
|
||||
ok(true, "Extension in built-in location ran after restart");
|
||||
|
||||
addon = await promiseAddonByID(id);
|
||||
notEqual(addon, null, "Addon is installed");
|
||||
ok(addon.isActive, "Addon is active");
|
||||
|
||||
await wrapper.unload();
|
||||
|
||||
addon = await promiseAddonByID(id);
|
||||
|
|
|
@ -16,11 +16,13 @@ GfxDeviceFamily* const GfxDriverInfo::allDevices = nullptr;
|
|||
|
||||
GfxDeviceFamily* GfxDriverInfo::sDeviceFamilies[DeviceFamilyMax];
|
||||
nsAString* GfxDriverInfo::sDeviceVendors[DeviceVendorMax];
|
||||
nsAString* GfxDriverInfo::sDriverVendors[DriverVendorMax];
|
||||
|
||||
GfxDriverInfo::GfxDriverInfo()
|
||||
: mOperatingSystem(OperatingSystem::Unknown),
|
||||
mOperatingSystemVersion(0),
|
||||
mAdapterVendor(GfxDriverInfo::GetDeviceVendor(VendorAll)),
|
||||
mDriverVendor(GfxDriverInfo::GetDriverVendor(DriverVendorAll)),
|
||||
mDevices(allDevices),
|
||||
mDeleteDevices(false),
|
||||
mFeature(allFeatures),
|
||||
|
@ -32,7 +34,8 @@ GfxDriverInfo::GfxDriverInfo()
|
|||
mRuleId(nullptr),
|
||||
mGpu2(false) {}
|
||||
|
||||
GfxDriverInfo::GfxDriverInfo(OperatingSystem os, nsAString& vendor,
|
||||
GfxDriverInfo::GfxDriverInfo(OperatingSystem os, const nsAString& vendor,
|
||||
const nsAString& driverVendor,
|
||||
GfxDeviceFamily* devices, int32_t feature,
|
||||
int32_t featureStatus, VersionComparisonOp op,
|
||||
uint64_t driverVersion, const char* ruleId,
|
||||
|
@ -42,6 +45,7 @@ GfxDriverInfo::GfxDriverInfo(OperatingSystem os, nsAString& vendor,
|
|||
: mOperatingSystem(os),
|
||||
mOperatingSystemVersion(0),
|
||||
mAdapterVendor(vendor),
|
||||
mDriverVendor(driverVendor),
|
||||
mDevices(devices),
|
||||
mDeleteDevices(ownDevices),
|
||||
mFeature(feature),
|
||||
|
@ -57,6 +61,7 @@ GfxDriverInfo::GfxDriverInfo(const GfxDriverInfo& aOrig)
|
|||
: mOperatingSystem(aOrig.mOperatingSystem),
|
||||
mOperatingSystemVersion(aOrig.mOperatingSystemVersion),
|
||||
mAdapterVendor(aOrig.mAdapterVendor),
|
||||
mDriverVendor(aOrig.mDriverVendor),
|
||||
mFeature(aOrig.mFeature),
|
||||
mFeatureStatus(aOrig.mFeatureStatus),
|
||||
mComparisonOp(aOrig.mComparisonOp),
|
||||
|
@@ -381,14 +386,37 @@ const nsAString& GfxDriverInfo::GetDeviceVendor(DeviceVendor id) {
    // Choose an arbitrary Qualcomm PCI VENdor ID for now.
    // TODO: This should be "QCOM" when Windows device ID parsing is reworked.
    DECLARE_VENDOR_ID(VendorQualcomm, "0x5143");
    DECLARE_VENDOR_ID(VendorMesaAll, "mesa/all");
    DECLARE_VENDOR_ID(VendorMesaLLVMPipe, "mesa/llvmpipe");
    DECLARE_VENDOR_ID(VendorMesaSoftPipe, "mesa/softpipe");
    DECLARE_VENDOR_ID(VendorMesaSWRast, "mesa/swrast");
    DECLARE_VENDOR_ID(VendorMesaUnknown, "mesa/unknown");
    // Suppress a warning.
    DECLARE_VENDOR_ID(DeviceVendorMax, "");
  }

  return *sDeviceVendors[id];
}

// Macro for assigning a driver vendor id to a string.
#define DECLARE_DRIVER_VENDOR_ID(name, driverVendorId)   \
  case name:                                             \
    sDriverVendors[id]->AssignLiteral(driverVendorId);   \
    break;

const nsAString& GfxDriverInfo::GetDriverVendor(DriverVendor id) {
  NS_ASSERTION(id >= 0 && id < DriverVendorMax,
               "DriverVendor id is out of range");

  if (sDriverVendors[id]) return *sDriverVendors[id];

  sDriverVendors[id] = new nsString();

  switch (id) {
    DECLARE_DRIVER_VENDOR_ID(DriverVendorAll, "");
    DECLARE_DRIVER_VENDOR_ID(DriverMesaAll, "mesa/all");
    DECLARE_DRIVER_VENDOR_ID(DriverMesaLLVMPipe, "mesa/llvmpipe");
    DECLARE_DRIVER_VENDOR_ID(DriverMesaSoftPipe, "mesa/softpipe");
    DECLARE_DRIVER_VENDOR_ID(DriverMesaSWRast, "mesa/swrast");
    DECLARE_DRIVER_VENDOR_ID(DriverMesaUnknown, "mesa/unknown");
    // Suppress a warning.
    DECLARE_DRIVER_VENDOR_ID(DriverVendorMax, "");
  }

  return *sDriverVendors[id];
}

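
The GetDriverVendor() accessor added above follows the same shape as GetDeviceVendor(): the string table is allocated lazily on first lookup and freed later during shutdown. Below is a minimal standalone sketch of that lazily-initialized lookup-table pattern, using std::string and std::unique_ptr in place of nsString and the explicit shutdown cleanup; every name and value in it is illustrative, not part of the patch.

// Sketch only: mirrors the lazy string-table lookup used by GetDriverVendor().
#include <array>
#include <cassert>
#include <iostream>
#include <memory>
#include <string>

enum DriverVendorId { kDriverVendorAll, kDriverMesaAll, kDriverMesaLLVMPipe, kDriverVendorMax };

// One slot per vendor id; unique_ptr frees the strings at process exit,
// whereas the patch frees sDriverVendors explicitly in its ShutdownObserver.
static std::array<std::unique_ptr<std::string>, kDriverVendorMax> sVendorTable;

const std::string& LookupDriverVendor(DriverVendorId id) {
  assert(id < kDriverVendorMax && "DriverVendor id is out of range");
  if (sVendorTable[id]) {
    return *sVendorTable[id];  // Already materialized; return the cached string.
  }
  sVendorTable[id] = std::make_unique<std::string>();
  switch (id) {
    case kDriverVendorAll:    *sVendorTable[id] = "";              break;  // wildcard
    case kDriverMesaAll:      *sVendorTable[id] = "mesa/all";      break;
    case kDriverMesaLLVMPipe: *sVendorTable[id] = "mesa/llvmpipe"; break;
    default:                  break;  // leave the string empty
  }
  return *sVendorTable[id];
}

int main() {
  std::cout << LookupDriverVendor(kDriverMesaLLVMPipe) << "\n";  // prints "mesa/llvmpipe"
  return 0;
}
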
@@ -9,45 +9,47 @@
#include "nsString.h"

// Macros for adding a blocklist item to the static list.
#define APPEND_TO_DRIVER_BLOCKLIST(os, vendor, devices, feature,              \
                                   featureStatus, driverComparator,           \
                                   driverVersion, ruleId, suggestedVersion)   \
  sDriverInfo->AppendElement(GfxDriverInfo(                                   \
      os, vendor, devices, feature, featureStatus, driverComparator,          \
      driverVersion, ruleId, suggestedVersion))
#define APPEND_TO_DRIVER_BLOCKLIST2(os, vendor, devices, feature,             \
                                    featureStatus, driverComparator,          \
                                    driverVersion, ruleId)                    \
  sDriverInfo->AppendElement(GfxDriverInfo(os, vendor, devices, feature,      \
                                           featureStatus, driverComparator,   \
                                           driverVersion, ruleId))
#define APPEND_TO_DRIVER_BLOCKLIST(os, vendor, driverVendor, devices, feature, \
                                   featureStatus, driverComparator,            \
                                   driverVersion, ruleId, suggestedVersion)    \
  sDriverInfo->AppendElement(GfxDriverInfo(                                    \
      os, vendor, driverVendor, devices, feature, featureStatus,               \
      driverComparator, driverVersion, ruleId, suggestedVersion))
#define APPEND_TO_DRIVER_BLOCKLIST2(os, vendor, driverVendor, devices,         \
                                    feature, featureStatus, driverComparator,  \
                                    driverVersion, ruleId)                     \
  sDriverInfo->AppendElement(                                                  \
      GfxDriverInfo(os, vendor, driverVendor, devices, feature, featureStatus, \
                    driverComparator, driverVersion, ruleId))

#define APPEND_TO_DRIVER_BLOCKLIST_RANGE(                                  \
    os, vendor, devices, feature, featureStatus, driverComparator,         \
    driverVersion, driverVersionMax, ruleId, suggestedVersion)             \
  do {                                                                     \
    MOZ_ASSERT(driverComparator == DRIVER_BETWEEN_EXCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE_START);        \
    GfxDriverInfo info(os, vendor, devices, feature, featureStatus,        \
                       driverComparator, driverVersion, ruleId,            \
                       suggestedVersion);                                  \
    info.mDriverVersionMax = driverVersionMax;                             \
    sDriverInfo->AppendElement(info);                                      \
#define APPEND_TO_DRIVER_BLOCKLIST_RANGE(                                  \
    os, vendor, driverVendor, devices, feature, featureStatus,             \
    driverComparator, driverVersion, driverVersionMax, ruleId,             \
    suggestedVersion)                                                      \
  do {                                                                     \
    MOZ_ASSERT(driverComparator == DRIVER_BETWEEN_EXCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE_START);        \
    GfxDriverInfo info(os, vendor, driverVendor, devices, feature,         \
                       featureStatus, driverComparator, driverVersion,    \
                       ruleId, suggestedVersion);                          \
    info.mDriverVersionMax = driverVersionMax;                             \
    sDriverInfo->AppendElement(info);                                      \
  } while (false)

#define APPEND_TO_DRIVER_BLOCKLIST_RANGE_GPU2(                             \
    os, vendor, devices, feature, featureStatus, driverComparator,         \
    driverVersion, driverVersionMax, ruleId, suggestedVersion)             \
  do {                                                                     \
    MOZ_ASSERT(driverComparator == DRIVER_BETWEEN_EXCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE_START);        \
    GfxDriverInfo info(os, vendor, devices, feature, featureStatus,        \
                       driverComparator, driverVersion, ruleId,            \
                       suggestedVersion, false, true);                     \
    info.mDriverVersionMax = driverVersionMax;                             \
    sDriverInfo->AppendElement(info);                                      \
#define APPEND_TO_DRIVER_BLOCKLIST_RANGE_GPU2(                             \
    os, vendor, driverVendor, devices, feature, featureStatus,             \
    driverComparator, driverVersion, driverVersionMax, ruleId,             \
    suggestedVersion)                                                      \
  do {                                                                     \
    MOZ_ASSERT(driverComparator == DRIVER_BETWEEN_EXCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE ||             \
               driverComparator == DRIVER_BETWEEN_INCLUSIVE_START);        \
    GfxDriverInfo info(os, vendor, driverVendor, devices, feature,         \
                       featureStatus, driverComparator, driverVersion,    \
                       ruleId, suggestedVersion, false, true);             \
    info.mDriverVersionMax = driverVersionMax;                             \
    sDriverInfo->AppendElement(info);                                      \
  } while (false)

namespace mozilla {

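
For reference, here is a hypothetical call site, not part of this patch, showing the extra driverVendor argument that users of these macros now pass. Such calls normally live inside a platform GfxInfo::GetGfxDriverInfo() implementation; the feature, comparator, version, and rule-id values below are purely illustrative.

// Hypothetical blocklist rule: the third argument is the new driverVendor.
APPEND_TO_DRIVER_BLOCKLIST2(
    OperatingSystem::Linux, GfxDriverInfo::GetDeviceVendor(VendorAll),
    GfxDriverInfo::GetDriverVendor(DriverMesaLLVMPipe),
    GfxDriverInfo::allDevices, nsIGfxInfo::FEATURE_WEBRENDER,
    nsIGfxInfo::FEATURE_BLOCKED_DEVICE, DRIVER_COMPARISON_IGNORED,
    GfxDriverInfo::allDriverVersions, "FEATURE_FAILURE_EXAMPLE_RULE");

With the constructor now taking const nsAString&, the vendor and driver-vendor strings returned by GetDeviceVendor()/GetDriverVendor() can be passed directly, without the non-const casts the old signature required.
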
@@ -128,18 +130,23 @@ enum DeviceVendor {
  VendorParallels,
  VendorQualcomm,

  DeviceVendorMax
};

enum DriverVendor {
  DriverVendorAll,  // There is an assumption that this is the first enum
  // Wildcard for all Mesa drivers.
  VendorMesaAll,
  DriverMesaAll,
  // Note that the following list of Mesa drivers is not comprehensive; we pull
  // the DRI driver at runtime. These drivers are provided for convenience when
  // populating the local blocklist.
  VendorMesaLLVMPipe,
  VendorMesaSoftPipe,
  VendorMesaSWRast,
  DriverMesaLLVMPipe,
  DriverMesaSoftPipe,
  DriverMesaSWRast,
  // A generic ID to be provided when we can't determine the DRI driver on Mesa.
  VendorMesaUnknown,
  DriverMesaUnknown,

  DeviceVendorMax
  DriverVendorMax
};

/* Array of devices to match, or an empty array for all devices */

@@ -148,7 +155,8 @@ typedef nsTArray<nsString> GfxDeviceFamily;
struct GfxDriverInfo {
  // If |ownDevices| is true, you are transferring ownership of the devices
  // array, and it will be deleted when this GfxDriverInfo is destroyed.
  GfxDriverInfo(OperatingSystem os, nsAString& vendor, GfxDeviceFamily* devices,
  GfxDriverInfo(OperatingSystem os, const nsAString& vendor,
                const nsAString& driverVendor, GfxDeviceFamily* devices,
                int32_t feature, int32_t featureStatus, VersionComparisonOp op,
                uint64_t driverVersion, const char* ruleId,
                const char* suggestedVersion = nullptr, bool ownDevices = false,

@@ -162,6 +170,7 @@ struct GfxDriverInfo {
  uint32_t mOperatingSystemVersion;

  nsString mAdapterVendor;
  nsString mDriverVendor;

  static GfxDeviceFamily* const allDevices;
  GfxDeviceFamily* mDevices;

@@ -193,6 +202,9 @@ struct GfxDriverInfo {
  static const nsAString& GetDeviceVendor(DeviceVendor id);
  static nsAString* sDeviceVendors[DeviceVendorMax];

  static const nsAString& GetDriverVendor(DriverVendor id);
  static nsAString* sDriverVendors[DriverVendorMax];

  nsString mModel, mHardware, mProduct, mManufacturer;

  bool mGpu2;

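
The |ownDevices| comment above describes an ownership transfer: when the flag is set, the GfxDriverInfo instance deletes the device array it was handed, which is what mDeleteDevices tracks in the .cpp hunks earlier in this diff. A tiny self-contained sketch of that contract, with illustrative names rather than the real classes:

// Sketch only: illustrates the ownDevices / mDeleteDevices ownership contract.
#include <cstdio>
#include <string>
#include <vector>

using DeviceFamily = std::vector<std::string>;

struct BlocklistRule {
  BlocklistRule(DeviceFamily* aDevices, bool aOwnDevices)
      : mDevices(aDevices), mDeleteDevices(aOwnDevices) {}
  BlocklistRule(const BlocklistRule&) = delete;  // avoid double-delete in the sketch
  ~BlocklistRule() {
    if (mDeleteDevices) {
      delete mDevices;  // Ownership was transferred to this rule.
    }
  }
  DeviceFamily* mDevices;
  bool mDeleteDevices;
};

int main() {
  auto* devices = new DeviceFamily{"0x1234", "0x5678"};
  {
    BlocklistRule rule(devices, /* ownDevices = */ true);
    std::printf("%zu devices in rule\n", rule.mDevices->size());
  }  // |devices| is deleted here by ~BlocklistRule(); it must not be reused.
  return 0;
}
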
@@ -71,6 +71,11 @@ class ShutdownObserver : public nsIObserver {
      GfxDriverInfo::sDeviceVendors[i] = nullptr;
    }

    for (uint32_t i = 0; i < DriverVendorMax; i++) {
      delete GfxDriverInfo::sDriverVendors[i];
      GfxDriverInfo::sDriverVendors[i] = nullptr;
    }

    GfxInfoBase::sShutdownOccurred = true;

    return NS_OK;

@@ -471,6 +476,8 @@ static bool BlacklistEntryToDriverInfo(nsCString& aBlacklistEntry,
      aDriverInfo.mOperatingSystemVersion = strtoul(value.get(), nullptr, 10);
    } else if (key.EqualsLiteral("vendor")) {
      aDriverInfo.mAdapterVendor = dataValue;
    } else if (key.EqualsLiteral("driverVendor")) {
      aDriverInfo.mDriverVendor = dataValue;
    } else if (key.EqualsLiteral("feature")) {
      aDriverInfo.mFeature = BlacklistFeatureToGfxFeature(dataValue);
      if (aDriverInfo.mFeature < 0) {

@@ -679,16 +686,19 @@ int32_t GfxInfoBase::FindBlocklistedDeviceInList(
  // Get the adapters once then reuse below
  nsAutoString adapterVendorID[2];
  nsAutoString adapterDeviceID[2];
  nsAutoString adapterDriverVendor[2];
  nsAutoString adapterDriverVersionString[2];
  bool adapterInfoFailed[2];

  adapterInfoFailed[0] =
      (NS_FAILED(GetAdapterVendorID(adapterVendorID[0])) ||
       NS_FAILED(GetAdapterDeviceID(adapterDeviceID[0])) ||
       NS_FAILED(GetAdapterDriverVendor(adapterDriverVendor[0])) ||
       NS_FAILED(GetAdapterDriverVersion(adapterDriverVersionString[0])));
  adapterInfoFailed[1] =
      (NS_FAILED(GetAdapterVendorID2(adapterVendorID[1])) ||
       NS_FAILED(GetAdapterDeviceID2(adapterDeviceID[1])) ||
       NS_FAILED(GetAdapterDriverVendor2(adapterDriverVendor[1])) ||
       NS_FAILED(GetAdapterDriverVersion2(adapterDriverVersionString[1])));
  // No point in going on if we don't have adapter info
  if (adapterInfoFailed[0] && adapterInfoFailed[1]) {

@@ -732,6 +742,11 @@ int32_t GfxInfoBase::FindBlocklistedDeviceInList(
      continue;
    }

    if (!DoesDriverVendorMatch(info[i].mDriverVendor,
                               adapterDriverVendor[infoIndex])) {
      continue;
    }

    if (info[i].mDevices != GfxDriverInfo::allDevices &&
        info[i].mDevices->Length()) {
      bool deviceMatches = false;

@@ -887,6 +902,15 @@ bool GfxInfoBase::DoesVendorMatch(const nsAString& aBlocklistVendor,
                                  nsCaseInsensitiveStringComparator());
}

bool GfxInfoBase::DoesDriverVendorMatch(const nsAString& aBlocklistVendor,
                                        const nsAString& aDriverVendor) {
  return aBlocklistVendor.Equals(aDriverVendor,
                                 nsCaseInsensitiveStringComparator()) ||
         aBlocklistVendor.Equals(
             GfxDriverInfo::GetDriverVendor(DriverVendorAll),
             nsCaseInsensitiveStringComparator());
}

nsresult GfxInfoBase::GetFeatureStatusImpl(
    int32_t aFeature, int32_t* aStatus, nsAString& aSuggestedVersion,
    const nsTArray<GfxDriverInfo>& aDriverInfo, nsACString& aFailureId,

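
DoesDriverVendorMatch() accepts an entry either when the blocklisted driver vendor equals the adapter's driver vendor case-insensitively, or when the entry uses the DriverVendorAll wildcard, which GetDriverVendor() maps to an empty string. A standalone sketch of that predicate, with std::string and a simple ASCII lower-casing standing in for nsAString and nsCaseInsensitiveStringComparator:

// Sketch only: same matching rule as DoesDriverVendorMatch(), ASCII-only.
#include <algorithm>
#include <cassert>
#include <cctype>
#include <string>

static std::string Lower(std::string s) {
  std::transform(s.begin(), s.end(), s.begin(),
                 [](unsigned char c) { return static_cast<char>(std::tolower(c)); });
  return s;
}

static bool DoesDriverVendorMatch(const std::string& blocklistVendor,
                                  const std::string& adapterDriverVendor) {
  const std::string kDriverVendorAll;  // "" is the match-everything wildcard.
  return Lower(blocklistVendor) == Lower(adapterDriverVendor) ||
         blocklistVendor == kDriverVendorAll;
}

int main() {
  assert(DoesDriverVendorMatch("mesa/llvmpipe", "Mesa/LLVMpipe"));  // case-insensitive
  assert(DoesDriverVendorMatch("", "mesa/swrast"));                 // wildcard entry
  assert(!DoesDriverVendorMatch("mesa/llvmpipe", "mesa/swrast"));
  return 0;
}
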
@@ -1161,6 +1185,30 @@ void GfxInfoBase::RemoveCollector(GfxInfoCollectorBase* collector) {
  }
}

nsresult GfxInfoBase::FindMonitors(JSContext* aCx, JS::HandleObject aOutArray) {
  // If we have no platform specific implementation for detecting monitors, we
  // can just get the screen size from gfxPlatform as the best guess.
  if (!gfxPlatform::Initialized()) {
    return NS_OK;
  }

  // If the screen size is empty, we are probably in xpcshell.
  gfx::IntSize screenSize = gfxPlatform::GetPlatform()->GetScreenSize();

  JS::Rooted<JSObject*> obj(aCx, JS_NewPlainObject(aCx));

  JS::Rooted<JS::Value> screenWidth(aCx, JS::Int32Value(screenSize.width));
  JS_SetProperty(aCx, obj, "screenWidth", screenWidth);

  JS::Rooted<JS::Value> screenHeight(aCx, JS::Int32Value(screenSize.height));
  JS_SetProperty(aCx, obj, "screenHeight", screenHeight);

  JS::Rooted<JS::Value> element(aCx, JS::ObjectValue(*obj));
  JS_SetElement(aCx, aOutArray, 0, element);

  return NS_OK;
}

NS_IMETHODIMP
GfxInfoBase::GetMonitors(JSContext* aCx, JS::MutableHandleValue aResult) {
  JS::Rooted<JSObject*> array(aCx, JS_NewArrayObject(aCx, 0));

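
FindMonitors() above is the generic fallback: when no platform-specific monitor detection is available, it reports a single pseudo-monitor sized to the overall screen, and an empty size suggests an xpcshell run. Assuming, as that comment implies, that platform GfxInfo implementations supply their own detection, the overall shape is an ordinary virtual-fallback pattern; a generic sketch with illustrative names (not Gecko's classes):

// Sketch only: the base class supplies a best-guess fallback, subclasses override.
#include <iostream>
#include <vector>

struct Monitor {
  int width;
  int height;
};

class InfoBase {
 public:
  virtual ~InfoBase() = default;
  // Fallback: report one pseudo-monitor using the overall screen size.
  virtual std::vector<Monitor> FindMonitors() const {
    return {Monitor{ScreenWidth(), ScreenHeight()}};
  }

 protected:
  virtual int ScreenWidth() const { return 1920; }   // illustrative defaults
  virtual int ScreenHeight() const { return 1080; }
};

class DesktopInfo final : public InfoBase {
 public:
  // Platform-specific enumeration replaces the fallback entirely.
  std::vector<Monitor> FindMonitors() const override {
    return {Monitor{2560, 1440}, Monitor{1920, 1080}};
  }
};

int main() {
  DesktopInfo info;
  for (const Monitor& m : info.FindMonitors()) {
    std::cout << m.width << "x" << m.height << "\n";
  }
  return 0;
}
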
Some files were not shown in this view because too many files changed in this commit.