Mirror of https://github.com/mozilla/gecko-dev.git
merge autoland to mozilla-central a=merge
Commit 34bdd112b2
@@ -4144,7 +4144,9 @@ OverflowableToolbar.prototype = {
        this._disable();
        break;
      case "dragover":
        if (this._enabled) {
          this._showWithTimeout();
        }
        break;
      case "dragend":
        this._panel.hidePopup();

@@ -4181,10 +4183,9 @@ OverflowableToolbar.prototype = {
    this._panel.openPopup(anchor || this._chevron);
    this._chevron.open = true;

    let overflowableToolbarInstance = this;
    this._panel.addEventListener("popupshown", function(aEvent) {
      this.addEventListener("dragover", overflowableToolbarInstance);
      this.addEventListener("dragend", overflowableToolbarInstance);
    this._panel.addEventListener("popupshown", aEvent => {
      this._panel.addEventListener("dragover", this);
      this._panel.addEventListener("dragend", this);
      resolve();
    }, {once: true});
  });
@@ -640,6 +640,12 @@ const PanelUI = {
      }
    },

  onAreaReset(aArea, aContainer) {
    if (gPhotonStructure && aContainer == this.overflowFixedList) {
      this.updateOverflowStatus();
    }
  },

  /**
   * Signal that we're about to make a lot of changes to the contents of the
   * panels all at once. For performance, we ignore the mutations.
@@ -139,7 +139,7 @@ FirefoxProfileMigrator.prototype._getResourcesInternal = function(sourceProfileD
  let dictionary = getFileResource(types.OTHERDATA, ["persdict.dat"]);

  let sessionCheckpoints = this._getFileObject(sourceProfileDir, "sessionCheckpoints.json");
  let sessionFile = this._getFileObject(sourceProfileDir, "sessionstore.js");
  let sessionFile = this._getFileObject(sourceProfileDir, "sessionstore.jsonlz4");
  let session;
  if (sessionFile) {
    session = {

@@ -147,7 +147,7 @@ FirefoxProfileMigrator.prototype._getResourcesInternal = function(sourceProfileD
      migrate(aCallback) {
        sessionCheckpoints.copyTo(currentProfileDir, "sessionCheckpoints.json");
        let newSessionFile = currentProfileDir.clone();
        newSessionFile.append("sessionstore.js");
        newSessionFile.append("sessionstore.jsonlz4");
        let migrationPromise = SessionMigration.migrate(sessionFile.path, newSessionFile.path);
        migrationPromise.then(function() {
          let buildID = Services.appinfo.platformBuildID;
@@ -10,6 +10,31 @@ const PTV_interfaces = [Ci.nsITreeView,
                        Ci.nsINavHistoryResultTreeViewer,
                        Ci.nsISupportsWeakReference];

/**
 * This returns the key for any node/details object.
 *
 * @param nodeOrDetails
 *        A node, or an object containing the following properties:
 *        - uri
 *        - time
 *        - itemId
 *        In case any of these is missing, an empty string will be returned. This is
 *        to facilitate easy delete statements which occur due to assignment to items in `this._rows`,
 *        since the item we are deleting may be undefined in the array.
 *
 * @return key or empty string.
 */
function makeNodeDetailsKey(nodeOrDetails) {
  if (nodeOrDetails &&
      typeof nodeOrDetails === "object" &&
      "uri" in nodeOrDetails &&
      "time" in nodeOrDetails &&
      "itemId" in nodeOrDetails) {
    return `${nodeOrDetails.uri}*${nodeOrDetails.time}*${nodeOrDetails.itemId}`;
  }
  return "";
}

function PlacesTreeView(aFlatList, aOnOpenFlatContainer, aController) {
  this._tree = null;
  this._result = null;

@@ -17,6 +42,7 @@ function PlacesTreeView(aFlatList, aOnOpenFlatContainer, aController) {
  this._rootNode = null;
  this._rows = [];
  this._flatList = aFlatList;
  this._nodeDetails = new Map();
  this._openContainerCallback = aOnOpenFlatContainer;
  this._controller = aController;
}
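The hunks above introduce a _nodeDetails Map keyed by makeNodeDetailsKey(node), and the rest of the patch keeps that map in sync with this._rows. A rough sketch of the bookkeeping pattern (the helper name replaceRow is illustrative, not part of the patch):

// Whenever a slot in this._rows is reassigned, the old entry is dropped from
// the details map and the new node is indexed by its uri*time*itemId key.
function replaceRow(view, row, newNode) {
  view._nodeDetails.delete(makeNodeDetailsKey(view._rows[row]));
  view._nodeDetails.set(makeNodeDetailsKey(newNode), newNode);
  view._rows[row] = newNode;
}
// A node can later be re-resolved without a findNodeByDetails() call:
// let newNode = view._nodeDetails.get(makeNodeDetailsKey(aOldNode));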
@ -187,8 +213,11 @@ PlacesTreeView.prototype = {
|
|||
}
|
||||
}
|
||||
|
||||
if (row != -1)
|
||||
if (row != -1) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[row]));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(aNode), aNode);
|
||||
this._rows[row] = aNode;
|
||||
}
|
||||
|
||||
return row;
|
||||
},
|
||||
|
@ -233,16 +262,27 @@ PlacesTreeView.prototype = {
|
|||
|
||||
// If there's no container prior to the given row, it's a child of
|
||||
// the root node (remember: all containers are listed in the rows array).
|
||||
if (!rowNode)
|
||||
return this._rows[aRow] = this._rootNode.getChild(aRow);
|
||||
if (!rowNode) {
|
||||
let newNode = this._rootNode.getChild(aRow);
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[aRow]));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(newNode), newNode);
|
||||
return this._rows[aRow] = newNode;
|
||||
}
|
||||
|
||||
// Unset elements may exist only in plain containers. Thus, if the nearest
|
||||
// node is a container, it's the row's parent, otherwise, it's a sibling.
|
||||
if (rowNode instanceof Ci.nsINavHistoryContainerResultNode)
|
||||
return this._rows[aRow] = rowNode.getChild(aRow - row - 1);
|
||||
if (rowNode instanceof Ci.nsINavHistoryContainerResultNode) {
|
||||
let newNode = rowNode.getChild(aRow - row - 1);
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[aRow]));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(newNode), newNode);
|
||||
return this._rows[aRow] = newNode;
|
||||
}
|
||||
|
||||
let [parent, parentRow] = this._getParentByChildRow(row);
|
||||
return this._rows[aRow] = parent.getChild(aRow - parentRow - 1);
|
||||
let newNode = parent.getChild(aRow - parentRow - 1);
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[aRow]));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(newNode), newNode);
|
||||
return this._rows[aRow] = newNode;
|
||||
},
|
||||
|
||||
/**
|
||||
|
@ -271,6 +311,10 @@ PlacesTreeView.prototype = {
|
|||
// iteration.
|
||||
let cc = aContainer.childCount;
|
||||
let newElements = new Array(cc);
|
||||
// We need to clean up the node details from aFirstChildRow + 1 to the end of rows.
|
||||
for (let i = aFirstChildRow + 1; i < this._rows.length; i++) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[i]));
|
||||
}
|
||||
this._rows = this._rows.splice(0, aFirstChildRow)
|
||||
.concat(newElements, this._rows);
|
||||
|
||||
|
@ -292,11 +336,14 @@ PlacesTreeView.prototype = {
|
|||
// Remove the element for the filtered separator.
|
||||
// Notice that the rows array was initially resized to include all
|
||||
// children.
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[row]));
|
||||
this._rows.splice(row, 1);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(this._rows[row]));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(curChild), curChild);
|
||||
this._rows[row] = curChild;
|
||||
rowsInserted++;
|
||||
|
||||
|
@ -407,9 +454,10 @@ PlacesTreeView.prototype = {
|
|||
// invisible.
|
||||
let ancestors = PlacesUtils.nodeAncestors(aOldNode);
|
||||
for (let ancestor of ancestors) {
|
||||
if (!ancestor.containerOpen)
|
||||
if (!ancestor.containerOpen) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
return this._getRowForNode(aOldNode, true);
|
||||
}
|
||||
|
@ -419,10 +467,8 @@ PlacesTreeView.prototype = {
|
|||
// the old node, we'll select the first one after refresh. There's
|
||||
// nothing we could do about that, because aOldNode.parent is
|
||||
// gone by the time invalidateContainer is called.
|
||||
let newNode = aUpdatedContainer.findNodeByDetails(aOldNode.uri,
|
||||
aOldNode.time,
|
||||
aOldNode.itemId,
|
||||
true);
|
||||
let newNode = this._nodeDetails.get(makeNodeDetailsKey(aOldNode));
|
||||
|
||||
if (!newNode)
|
||||
return -1;
|
||||
|
||||
|
@ -649,6 +695,7 @@ PlacesTreeView.prototype = {
|
|||
}
|
||||
}
|
||||
|
||||
this._nodeDetails.set(makeNodeDetailsKey(aNode), aNode);
|
||||
this._rows.splice(row, 0, aNode);
|
||||
this._tree.rowCountChanged(row, 1);
|
||||
|
||||
|
@ -700,7 +747,9 @@ PlacesTreeView.prototype = {
|
|||
|
||||
// Remove the node and its children, if any.
|
||||
let count = this._countVisibleRowsForNodeAtRow(oldRow);
|
||||
this._rows.splice(oldRow, count);
|
||||
for (let splicedNode of this._rows.splice(oldRow, count)) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(splicedNode));
|
||||
}
|
||||
this._tree.rowCountChanged(oldRow, -count);
|
||||
|
||||
// Redraw the parent if its twisty state has changed.
|
||||
|
@ -753,7 +802,9 @@ PlacesTreeView.prototype = {
|
|||
}
|
||||
|
||||
// Remove node and its children, if any, from the old position.
|
||||
this._rows.splice(oldRow, count);
|
||||
for (let splicedNode of this._rows.splice(oldRow, count)) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(splicedNode));
|
||||
}
|
||||
this._tree.rowCountChanged(oldRow, -count);
|
||||
|
||||
// Insert the node into the new position.
|
||||
|
@ -817,6 +868,10 @@ PlacesTreeView.prototype = {
|
|||
},
|
||||
|
||||
nodeURIChanged: function PTV_nodeURIChanged(aNode, aOldURI) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey({uri: aOldURI,
|
||||
itemId: aNode.itemId,
|
||||
time: aNode.time}));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(aNode), aNode);
|
||||
this._invalidateCellValue(aNode, this.COLUMN_TYPE_URI);
|
||||
},
|
||||
|
||||
|
@ -827,6 +882,10 @@ PlacesTreeView.prototype = {
|
|||
nodeHistoryDetailsChanged:
|
||||
function PTV_nodeHistoryDetailsChanged(aNode, aOldVisitDate,
|
||||
aOldVisitCount) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey({uri: aNode.uri,
|
||||
itemId: aNode.itemId,
|
||||
time: aOldVisitDate}));
|
||||
this._nodeDetails.set(makeNodeDetailsKey(aNode), aNode);
|
||||
if (aNode.parent && this._controller.hasCachedLivemarkInfo(aNode.parent)) {
|
||||
// Find the node in the parent.
|
||||
let parentRow = this._flatList ? 0 : this._getRowForNode(aNode.parent);
|
||||
|
@ -918,6 +977,7 @@ PlacesTreeView.prototype = {
|
|||
|
||||
// If the root node is now closed, the tree is empty.
|
||||
if (!this._rootNode.containerOpen) {
|
||||
this._nodeDetails.clear();
|
||||
this._rows = [];
|
||||
if (replaceCount)
|
||||
this._tree.rowCountChanged(startReplacement, -replaceCount);
|
||||
|
@ -944,7 +1004,9 @@ PlacesTreeView.prototype = {
|
|||
this.selection.selectEventsSuppressed = true;
|
||||
|
||||
// First remove the old elements
|
||||
this._rows.splice(startReplacement, replaceCount);
|
||||
for (let splicedNode of this._rows.splice(startReplacement, replaceCount)) {
|
||||
this._nodeDetails.delete(makeNodeDetailsKey(splicedNode));
|
||||
}
|
||||
|
||||
// If the container is now closed, we're done.
|
||||
if (!aContainer.containerOpen) {
|
||||
|
|
|
@@ -98,11 +98,11 @@ var SessionFileInternal = {
  Paths: Object.freeze({
    // The path to the latest version of sessionstore written during a clean
    // shutdown. After startup, it is renamed `cleanBackup`.
    clean: Path.join(profileDir, "sessionstore.js"),
    clean: Path.join(profileDir, "sessionstore.jsonlz4"),

    // The path at which we store the previous version of `clean`. Updated
    // whenever we successfully load from `clean`.
    cleanBackup: Path.join(profileDir, "sessionstore-backups", "previous.js"),
    cleanBackup: Path.join(profileDir, "sessionstore-backups", "previous.jsonlz4"),

    // The directory containing all sessionstore backups.
    backups: Path.join(profileDir, "sessionstore-backups"),

@@ -112,7 +112,7 @@ var SessionFileInternal = {
    // privacy-sensitive information than |clean|, and this file is
    // therefore removed during clean shutdown. This file is designed to protect
    // against crashes / sudden shutdown.
    recovery: Path.join(profileDir, "sessionstore-backups", "recovery.js"),
    recovery: Path.join(profileDir, "sessionstore-backups", "recovery.jsonlz4"),

    // The path to the previous version of the sessionstore written
    // during runtime (e.g. 15 seconds before recovery). In case of a

@@ -121,13 +121,13 @@ var SessionFileInternal = {
    // this file is therefore removed during clean shutdown. This
    // file is designed to protect against crashes that are nasty
    // enough to corrupt |recovery|.
    recoveryBackup: Path.join(profileDir, "sessionstore-backups", "recovery.bak"),
    recoveryBackup: Path.join(profileDir, "sessionstore-backups", "recovery.baklz4"),

    // The path to a backup created during an upgrade of Firefox.
    // Having this backup protects the user essentially from bugs in
    // Firefox or add-ons, especially for users of Nightly. This file
    // does not contain any information more sensitive than |clean|.
    upgradeBackupPrefix: Path.join(profileDir, "sessionstore-backups", "upgrade.js-"),
    upgradeBackupPrefix: Path.join(profileDir, "sessionstore-backups", "upgrade.jsonlz4-"),

    // The path to the backup of the version of the session store used
    // during the latest upgrade of Firefox. During load/recovery,
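The path changes above move the session store to lz4-compressed files (.jsonlz4, .baklz4); the reads and writes elsewhere in this patch pass compression: "lz4" to OS.File. A minimal round-trip sketch of that option, with an illustrative file name:

const {OS} = Cu.import("resource://gre/modules/osfile.jsm", {});

async function roundTrip(profileDir, state) {
  // Write the state lz4-compressed, then read it back and parse it.
  let path = OS.Path.join(profileDir, "sessionstore.jsonlz4");
  await OS.File.writeAtomic(path, JSON.stringify(state),
                            {tmpPath: path + ".tmp", encoding: "utf-8", compression: "lz4"});
  let source = await OS.File.read(path, {encoding: "utf-8", compression: "lz4"});
  return JSON.parse(source);
}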
@@ -207,21 +207,28 @@ var SessionFileInternal = {
      }
    },

  // Find the correct session file, read it and setup the worker.
  async read() {
    this._initializationStarted = true;

  async _readInternal(useOldExtension) {
    let result;
    let noFilesFound = true;

    // Attempt to load by order of priority from the various backups
    for (let key of this.Paths.loadOrder) {
      let corrupted = false;
      let exists = true;
      try {
        let path = this.Paths[key];
        let path;
        let startMs = Date.now();

        let source = await OS.File.read(path, { encoding: "utf-8" });
        let options = {encoding: "utf-8"};
        if (useOldExtension) {
          path = this.Paths[key]
                     .replace("jsonlz4", "js")
                     .replace("baklz4", "bak");
        } else {
          path = this.Paths[key];
          options.compression = "lz4";
        }
        let source = await OS.File.read(path, options);
        let parsed = JSON.parse(source);

        if (!SessionStore.isFormatVersionCompatible(parsed.version || ["sessionrestore", 0] /* fallback for old versions*/)) {
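_readInternal(true) retries the same load order against the pre-migration file names by rewriting the extensions, and the read() method shown further below only falls back to it when the compressed pass yields nothing. A simplified sketch of that two-pass flow (self stands in for SessionFileInternal):

async function readWithFallback(self) {
  // First pass: lz4-compressed session files.
  let {result, noFilesFound} = await self._readInternal(false);
  if (!result) {
    // Fallback for profiles that still have sessionstore.js / recovery.bak.
    result = (await self._readInternal(true)).result;
  }
  return {result, noFilesFound};
}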
@ -232,7 +239,8 @@ var SessionFileInternal = {
|
|||
result = {
|
||||
origin: key,
|
||||
source,
|
||||
parsed
|
||||
parsed,
|
||||
useOldExtension
|
||||
};
|
||||
Telemetry.getHistogramById("FX_SESSION_RESTORE_CORRUPT_FILE").
|
||||
add(false);
|
||||
|
@ -260,6 +268,21 @@ var SessionFileInternal = {
|
|||
}
|
||||
}
|
||||
}
|
||||
return {result, noFilesFound};
|
||||
},
|
||||
|
||||
// Find the correct session file, read it and setup the worker.
|
||||
async read() {
|
||||
this._initializationStarted = true;
|
||||
|
||||
// Load session files with lz4 compression.
|
||||
let {result, noFilesFound} = await this._readInternal(false);
|
||||
if (!result) {
|
||||
// No result? Probably because of migration, let's
|
||||
// load uncompressed session files.
|
||||
let r = await this._readInternal(true);
|
||||
result = r.result;
|
||||
}
|
||||
|
||||
// All files are corrupted if files found but none could deliver a result.
|
||||
let allCorrupt = !noFilesFound && !result;
|
||||
|
@ -271,7 +294,8 @@ var SessionFileInternal = {
|
|||
result = {
|
||||
origin: "empty",
|
||||
source: "",
|
||||
parsed: null
|
||||
parsed: null,
|
||||
useOldExtension: false
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -279,7 +303,7 @@ var SessionFileInternal = {
|
|||
|
||||
// Initialize the worker (in the background) to let it handle backups and also
|
||||
// as a workaround for bug 964531.
|
||||
let promiseInitialized = SessionWorker.post("init", [result.origin, this.Paths, {
|
||||
let promiseInitialized = SessionWorker.post("init", [result.origin, result.useOldExtension, this.Paths, {
|
||||
maxUpgradeBackups: Preferences.get(PREF_MAX_UPGRADE_BACKUPS, 3),
|
||||
maxSerializeBack: Preferences.get(PREF_MAX_SERIALIZE_BACK, 10),
|
||||
maxSerializeForward: Preferences.get(PREF_MAX_SERIALIZE_FWD, -1)
|
||||
|
|
|
@ -72,7 +72,7 @@ var SessionMigrationInternal = {
|
|||
*/
|
||||
readState(aPath) {
|
||||
return (async function() {
|
||||
let bytes = await OS.File.read(aPath);
|
||||
let bytes = await OS.File.read(aPath, {compression: "lz4"});
|
||||
let text = gDecoder.decode(bytes);
|
||||
let state = JSON.parse(text);
|
||||
return state;
|
||||
|
@ -83,7 +83,7 @@ var SessionMigrationInternal = {
|
|||
*/
|
||||
writeState(aPath, aState) {
|
||||
let bytes = gEncoder.encode(JSON.stringify(aState));
|
||||
return OS.File.writeAtomic(aPath, bytes, {tmpPath: aPath + ".tmp"});
|
||||
return OS.File.writeAtomic(aPath, bytes, {tmpPath: aPath + ".tmp", compression: "lz4"});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -79,6 +79,11 @@ var Agent = {
|
|||
*/
|
||||
state: null,
|
||||
|
||||
/**
|
||||
* A flag that indicates we loaded a session file with the deprecated .js extension.
|
||||
*/
|
||||
useOldExtension: false,
|
||||
|
||||
/**
|
||||
* Number of old upgrade backups that are being kept
|
||||
*/
|
||||
|
@ -89,10 +94,11 @@ var Agent = {
|
|||
*
|
||||
* @param {string} origin Which of sessionstore.js or its backups
|
||||
* was used. One of the `STATE_*` constants defined above.
|
||||
* @param {boolean} a flag indicate whether we loaded a session file with ext .js
|
||||
* @param {object} paths The paths at which to find the various files.
|
||||
* @param {object} prefs The preferences the worker needs to known.
|
||||
*/
|
||||
init(origin, paths, prefs = {}) {
|
||||
init(origin, useOldExtension, paths, prefs = {}) {
|
||||
if (!(origin in paths || origin == STATE_EMPTY)) {
|
||||
throw new TypeError("Invalid origin: " + origin);
|
||||
}
|
||||
|
@ -104,6 +110,7 @@ var Agent = {
|
|||
}
|
||||
}
|
||||
|
||||
this.useOldExtension = useOldExtension;
|
||||
this.state = origin;
|
||||
this.Paths = paths;
|
||||
this.maxUpgradeBackups = prefs.maxUpgradeBackups;
|
||||
|
@ -165,10 +172,20 @@ var Agent = {
|
|||
if (this.state == STATE_CLEAN) {
|
||||
// Move $Path.clean out of the way, to avoid any ambiguity as
|
||||
// to which file is more recent.
|
||||
if (!this.useOldExtension) {
|
||||
File.move(this.Paths.clean, this.Paths.cleanBackup);
|
||||
} else {
|
||||
// Since we are migrating from .js to .jsonlz4,
|
||||
// we need to compress the deprecated $Path.clean
|
||||
// and write it to $Path.cleanBackup.
|
||||
let oldCleanPath = this.Paths.clean.replace("jsonlz4", "js");
|
||||
let d = File.read(oldCleanPath);
|
||||
File.writeAtomic(this.Paths.cleanBackup, d, {compression: "lz4"});
|
||||
}
|
||||
}
|
||||
|
||||
let startWriteMs = Date.now();
|
||||
let fileStat;
|
||||
|
||||
if (options.isFinalWrite) {
|
||||
// We are shutting down. At this stage, we know that
|
||||
|
@ -177,8 +194,10 @@ var Agent = {
|
|||
// $Paths.cleanBackup a long time ago. We can therefore write
|
||||
// with the guarantees that we erase no important data.
|
||||
File.writeAtomic(this.Paths.clean, data, {
|
||||
tmpPath: this.Paths.clean + ".tmp"
|
||||
tmpPath: this.Paths.clean + ".tmp",
|
||||
compression: "lz4"
|
||||
});
|
||||
fileStat = File.stat(this.Paths.clean);
|
||||
} else if (this.state == STATE_RECOVERY) {
|
||||
// At this stage, either $Paths.recovery was written >= 15
|
||||
// seconds ago during this session or we have just started
|
||||
|
@ -188,19 +207,23 @@ var Agent = {
|
|||
// file.
|
||||
File.writeAtomic(this.Paths.recovery, data, {
|
||||
tmpPath: this.Paths.recovery + ".tmp",
|
||||
backupTo: this.Paths.recoveryBackup
|
||||
backupTo: this.Paths.recoveryBackup,
|
||||
compression: "lz4"
|
||||
});
|
||||
fileStat = File.stat(this.Paths.recovery);
|
||||
} else {
|
||||
// In other cases, either $Path.recovery is not necessary, or
|
||||
// it doesn't exist or it has been corrupted. Regardless,
|
||||
// don't backup $Path.recovery.
|
||||
File.writeAtomic(this.Paths.recovery, data, {
|
||||
tmpPath: this.Paths.recovery + ".tmp"
|
||||
tmpPath: this.Paths.recovery + ".tmp",
|
||||
compression: "lz4"
|
||||
});
|
||||
fileStat = File.stat(this.Paths.recovery);
|
||||
}
|
||||
|
||||
telemetry.FX_SESSION_RESTORE_WRITE_FILE_MS = Date.now() - startWriteMs;
|
||||
telemetry.FX_SESSION_RESTORE_FILE_SIZE_BYTES = data.byteLength;
|
||||
telemetry.FX_SESSION_RESTORE_FILE_SIZE_BYTES = fileStat.size;
|
||||
|
||||
} catch (ex) {
|
||||
// Don't throw immediately
|
||||
|
@ -293,6 +316,8 @@ var Agent = {
|
|||
// Erase main session state file
|
||||
try {
|
||||
File.remove(this.Paths.clean);
|
||||
// Remove old extension ones.
|
||||
File.remove(this.Paths.clean.replace("jsonlz4", "js"), {ignoreAbsent: true});
|
||||
} catch (ex) {
|
||||
// Don't stop immediately.
|
||||
exn = exn || ex;
|
||||
|
|
|
@ -14,7 +14,7 @@ function test() {
|
|||
get("ProfD", Ci.nsIFile);
|
||||
function getSessionstoreFile() {
|
||||
let sessionStoreJS = profilePath.clone();
|
||||
sessionStoreJS.append("sessionstore.js");
|
||||
sessionStoreJS.append("sessionstore.jsonlz4");
|
||||
return sessionStoreJS;
|
||||
}
|
||||
function getSessionstorejsModificationTime() {
|
||||
|
|
|
@ -13,13 +13,13 @@ const Paths = SessionFile.Paths;
|
|||
|
||||
// A text decoder.
|
||||
var gDecoder = new TextDecoder();
|
||||
// Global variables that contain sessionstore.js and sessionstore.bak data for
|
||||
// Global variables that contain sessionstore.jsonlz4 and sessionstore.baklz4 data for
|
||||
// comparison between tests.
|
||||
var gSSData;
|
||||
var gSSBakData;
|
||||
|
||||
function promiseRead(path) {
|
||||
return File.read(path, {encoding: "utf-8"});
|
||||
return File.read(path, {encoding: "utf-8", compression: "lz4"});
|
||||
}
|
||||
|
||||
add_task(async function init() {
|
||||
|
@ -31,8 +31,8 @@ add_task(async function init() {
|
|||
|
||||
add_task(async function test_creation() {
|
||||
// Create dummy sessionstore backups
|
||||
let OLD_BACKUP = Path.join(Constants.Path.profileDir, "sessionstore.bak");
|
||||
let OLD_UPGRADE_BACKUP = Path.join(Constants.Path.profileDir, "sessionstore.bak-0000000");
|
||||
let OLD_BACKUP = Path.join(Constants.Path.profileDir, "sessionstore.baklz4");
|
||||
let OLD_UPGRADE_BACKUP = Path.join(Constants.Path.profileDir, "sessionstore.baklz4-0000000");
|
||||
|
||||
await File.writeAtomic(OLD_BACKUP, "sessionstore.bak");
|
||||
await File.writeAtomic(OLD_UPGRADE_BACKUP, "sessionstore upgrade backup");
|
||||
|
@ -111,15 +111,15 @@ add_task(async function test_recovery() {
|
|||
// Create Paths.recovery, ensure that we can recover from it.
|
||||
let SOURCE = await promiseSource("Paths.recovery");
|
||||
await File.makeDir(Paths.backups);
|
||||
await File.writeAtomic(Paths.recovery, SOURCE);
|
||||
await File.writeAtomic(Paths.recovery, SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
is((await SessionFile.read()).source, SOURCE, "Recovered the correct source from the recovery file");
|
||||
await SessionFile.wipe();
|
||||
|
||||
info("Corrupting recovery file, attempting to recover from recovery backup");
|
||||
SOURCE = await promiseSource("Paths.recoveryBackup");
|
||||
await File.makeDir(Paths.backups);
|
||||
await File.writeAtomic(Paths.recoveryBackup, SOURCE);
|
||||
await File.writeAtomic(Paths.recovery, "<Invalid JSON>");
|
||||
await File.writeAtomic(Paths.recoveryBackup, SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
await File.writeAtomic(Paths.recovery, "<Invalid JSON>", {encoding: "utf-8", compression: "lz4"});
|
||||
is((await SessionFile.read()).source, SOURCE, "Recovered the correct source from the recovery file");
|
||||
await SessionFile.wipe();
|
||||
});
|
||||
|
@ -134,10 +134,10 @@ add_task(async function test_recovery_inaccessible() {
|
|||
let SOURCE_RECOVERY = await promiseSource("Paths.recovery");
|
||||
let SOURCE = await promiseSource("Paths.recoveryBackup");
|
||||
await File.makeDir(Paths.backups);
|
||||
await File.writeAtomic(Paths.recoveryBackup, SOURCE);
|
||||
await File.writeAtomic(Paths.recoveryBackup, SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
|
||||
// Write a valid recovery file but make it inaccessible.
|
||||
await File.writeAtomic(Paths.recovery, SOURCE_RECOVERY);
|
||||
await File.writeAtomic(Paths.recovery, SOURCE_RECOVERY, {encoding: "utf-8", compression: "lz4"});
|
||||
await File.setPermissions(Paths.recovery, { unixMode: 0 });
|
||||
|
||||
is((await SessionFile.read()).source, SOURCE, "Recovered the correct source from the recovery file");
|
||||
|
@ -147,7 +147,7 @@ add_task(async function test_recovery_inaccessible() {
|
|||
add_task(async function test_clean() {
|
||||
await SessionFile.wipe();
|
||||
let SOURCE = await promiseSource("Paths.clean");
|
||||
await File.writeAtomic(Paths.clean, SOURCE);
|
||||
await File.writeAtomic(Paths.clean, SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
await SessionFile.read();
|
||||
await SessionSaver.run();
|
||||
is((await promiseRead(Paths.cleanBackup)), SOURCE, "After first read/write, clean shutdown file has been moved to cleanBackup");
|
||||
|
@ -166,7 +166,7 @@ add_task(async function test_version() {
|
|||
|
||||
// Create Paths.clean file
|
||||
await File.makeDir(Paths.backups);
|
||||
await File.writeAtomic(Paths.clean, SOURCE);
|
||||
await File.writeAtomic(Paths.clean, SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
|
||||
info("Attempting to recover from the clean file");
|
||||
// Ensure that we can recover from Paths.recovery
|
||||
|
@ -189,15 +189,15 @@ add_task(async function test_version_fallback() {
|
|||
info("Modifying format version number to something incorrect, to make sure that we disregard the file.");
|
||||
let parsedSource = JSON.parse(SOURCE);
|
||||
parsedSource.version[0] = "bookmarks";
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(parsedSource));
|
||||
await File.writeAtomic(Paths.cleanBackup, BACKUP_SOURCE);
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(parsedSource), {encoding: "utf-8", compression: "lz4"});
|
||||
await File.writeAtomic(Paths.cleanBackup, BACKUP_SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
is((await SessionFile.read()).source, BACKUP_SOURCE, "Recovered the correct source from the backup recovery file");
|
||||
|
||||
info("Modifying format version number to a future version, to make sure that we disregard the file.");
|
||||
parsedSource = JSON.parse(SOURCE);
|
||||
parsedSource.version[1] = Number.MAX_SAFE_INTEGER;
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(parsedSource));
|
||||
await File.writeAtomic(Paths.cleanBackup, BACKUP_SOURCE);
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(parsedSource), {encoding: "utf-8", compression: "lz4"});
|
||||
await File.writeAtomic(Paths.cleanBackup, BACKUP_SOURCE, {encoding: "utf-8", compression: "lz4"});
|
||||
is((await SessionFile.read()).source, BACKUP_SOURCE, "Recovered the correct source from the backup recovery file");
|
||||
});
|
||||
|
||||
|
|
|
@ -61,7 +61,7 @@ add_task(async function init() {
|
|||
add_task(async function test_upgrade_backup() {
|
||||
let test = await prepareTest();
|
||||
info("Let's check if we create an upgrade backup");
|
||||
await OS.File.writeAtomic(Paths.clean, test.contents);
|
||||
await OS.File.writeAtomic(Paths.clean, test.contents, {encoding: "utf-8", compression: "lz4"});
|
||||
await SessionFile.read(); // First call to read() initializes the SessionWorker
|
||||
await SessionFile.write(""); // First call to write() triggers the backup
|
||||
|
||||
|
@ -69,15 +69,15 @@ add_task(async function test_upgrade_backup() {
|
|||
|
||||
is((await OS.File.exists(Paths.upgradeBackup)), true, "upgrade backup file has been created");
|
||||
|
||||
let data = await OS.File.read(Paths.upgradeBackup);
|
||||
let data = await OS.File.read(Paths.upgradeBackup, {compression: "lz4"});
|
||||
is(test.contents, (new TextDecoder()).decode(data), "upgrade backup contains the expected contents");
|
||||
|
||||
info("Let's check that we don't overwrite this upgrade backup");
|
||||
let newContents = JSON.stringify({"something else entirely": Math.random()});
|
||||
await OS.File.writeAtomic(Paths.clean, newContents);
|
||||
await OS.File.writeAtomic(Paths.clean, newContents, {encoding: "utf-8", compression: "lz4"});
|
||||
await SessionFile.read(); // Reinitialize the SessionWorker
|
||||
await SessionFile.write(""); // Next call to write() shouldn't trigger the backup
|
||||
data = await OS.File.read(Paths.upgradeBackup);
|
||||
data = await OS.File.read(Paths.upgradeBackup, {compression: "lz4"});
|
||||
is(test.contents, (new TextDecoder()).decode(data), "upgrade backup hasn't changed");
|
||||
});
|
||||
|
||||
|
@ -85,7 +85,7 @@ add_task(async function test_upgrade_backup_removal() {
|
|||
let test = await prepareTest();
|
||||
let maxUpgradeBackups = Preferences.get(PREF_MAX_UPGRADE_BACKUPS, 3);
|
||||
info("Let's see if we remove backups if there are too many");
|
||||
await OS.File.writeAtomic(Paths.clean, test.contents);
|
||||
await OS.File.writeAtomic(Paths.clean, test.contents, {encoding: "utf-8", compression: "lz4"});
|
||||
|
||||
// if the nextUpgradeBackup already exists (from another test), remove it
|
||||
if (OS.File.exists(Paths.nextUpgradeBackup)) {
|
||||
|
@ -93,12 +93,12 @@ add_task(async function test_upgrade_backup_removal() {
|
|||
}
|
||||
|
||||
// create dummy backups
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20080101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20090101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20100101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20110101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20120101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20130101010101", "");
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20080101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20090101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20100101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20110101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20120101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.writeAtomic(Paths.upgradeBackupPrefix + "20130101010101", "", {encoding: "utf-8", compression: "lz4"});
|
||||
|
||||
// get currently existing backups
|
||||
let backups = await getUpgradeBackups();
|
||||
|
|
|
@ -270,7 +270,7 @@ function forceSaveState() {
|
|||
function promiseRecoveryFileContents() {
|
||||
let promise = forceSaveState();
|
||||
return promise.then(function() {
|
||||
return OS.File.read(SessionFile.Paths.recovery, { encoding: "utf-8" });
|
||||
return OS.File.read(SessionFile.Paths.recovery, { encoding: "utf-8", compression: "lz4" });
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -278,7 +278,7 @@ var promiseForEachSessionRestoreFile = async function(cb) {
|
|||
for (let key of SessionFile.Paths.loadOrder) {
|
||||
let data = "";
|
||||
try {
|
||||
data = await OS.File.read(SessionFile.Paths[key], { encoding: "utf-8" });
|
||||
data = await OS.File.read(SessionFile.Paths[key], { encoding: "utf-8", compression: "lz4" });
|
||||
} catch (ex) {
|
||||
// Ignore missing files
|
||||
if (!(ex instanceof OS.File.Error && ex.becauseNoSuchFile)) {
|
||||
|
|
|
@ -3,6 +3,7 @@ var Cc = Components.classes;
|
|||
var Ci = Components.interfaces;
|
||||
|
||||
Components.utils.import("resource://gre/modules/Services.jsm");
|
||||
const {OS} = Cu.import("resource://gre/modules/osfile.jsm", {});
|
||||
|
||||
// Call a function once initialization of SessionStartup is complete
|
||||
function afterSessionStartupInitialization(cb) {
|
||||
|
@ -30,3 +31,11 @@ function afterSessionStartupInitialization(cb) {
|
|||
Services.obs.notifyObservers(null, "final-ui-startup");
|
||||
Services.obs.addObserver(observer, "sessionstore-state-finalized");
|
||||
}
|
||||
|
||||
// Compress the source file using lz4 and put the result to destination file.
|
||||
// After that, source file is deleted.
|
||||
async function writeCompressedFile(source, destination) {
|
||||
let s = await OS.File.read(source);
|
||||
await OS.File.writeAtomic(destination, s, {compression: "lz4"});
|
||||
await OS.File.remove(source);
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
|
||||
"use strict";
|
||||
|
||||
var {OS} = Cu.import("resource://gre/modules/osfile.jsm", {});
|
||||
var {XPCOMUtils} = Cu.import("resource://gre/modules/XPCOMUtils.jsm", {});
|
||||
var {SessionWorker} = Cu.import("resource:///modules/sessionstore/SessionWorker.jsm", {});
|
||||
|
||||
|
@ -30,9 +29,9 @@ add_task(async function init() {
|
|||
SessionFile = Cu.import("resource:///modules/sessionstore/SessionFile.jsm", {}).SessionFile;
|
||||
Paths = SessionFile.Paths;
|
||||
|
||||
|
||||
let source = do_get_file("data/sessionstore_valid.js");
|
||||
source.copyTo(profd, "sessionstore.js");
|
||||
await writeCompressedFile(Paths.clean.replace("jsonlz4", "js"), Paths.clean);
|
||||
|
||||
// Finish initialization of SessionFile
|
||||
await SessionFile.read();
|
||||
|
@ -54,7 +53,7 @@ function promise_check_exist(path, shouldExist) {
|
|||
function promise_check_contents(path, expect) {
|
||||
return (async function() {
|
||||
do_print("Checking whether " + path + " has the right contents");
|
||||
let actual = await OS.File.read(path, { encoding: "utf-8"});
|
||||
let actual = await OS.File.read(path, { encoding: "utf-8", compression: "lz4" });
|
||||
Assert.deepEqual(JSON.parse(actual), expect, `File ${path} contains the expected data.`);
|
||||
})();
|
||||
}
|
||||
|
@ -75,7 +74,7 @@ add_task(async function test_first_write_backup() {
|
|||
await promise_check_exist(Paths.backups, false);
|
||||
|
||||
await File.makeDir(Paths.backups);
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(initial_content), { encoding: "utf-8" });
|
||||
await File.writeAtomic(Paths.clean, JSON.stringify(initial_content), { encoding: "utf-8", compression: "lz4" });
|
||||
await SessionFile.write(new_content);
|
||||
|
||||
do_print("After first write, a few files should have been created");
|
||||
|
@ -96,7 +95,7 @@ add_task(async function test_first_write_backup() {
|
|||
// - $Path.recoveryBackup contains the previous data
|
||||
add_task(async function test_second_write_no_backup() {
|
||||
let new_content = generateFileContents("test_2");
|
||||
let previous_backup_content = await File.read(Paths.recovery, { encoding: "utf-8" });
|
||||
let previous_backup_content = await File.read(Paths.recovery, { encoding: "utf-8", compression: "lz4" });
|
||||
previous_backup_content = JSON.parse(previous_backup_content);
|
||||
|
||||
await OS.File.remove(Paths.cleanBackup);
|
||||
|
|
|
@ -20,10 +20,10 @@ Cu.import("resource:///modules/sessionstore/SessionFile.jsm", this);
|
|||
|
||||
/**
|
||||
* A utility function for resetting the histogram and the contents
|
||||
* of the backup directory.
|
||||
* of the backup directory. This will also compress the file using lz4 compression.
|
||||
*/
|
||||
function reset_session(backups = {}) {
|
||||
|
||||
function promise_reset_session(backups = {}) {
|
||||
return (async function() {
|
||||
// Reset the histogram.
|
||||
Telemetry.getHistogramById(HistogramId).clear();
|
||||
|
||||
|
@ -31,11 +31,13 @@ function reset_session(backups = {}) {
|
|||
OS.File.makeDir(SessionFile.Paths.backups);
|
||||
for (let key of SessionFile.Paths.loadOrder) {
|
||||
if (backups.hasOwnProperty(key)) {
|
||||
OS.File.copy(backups[key], SessionFile.Paths[key]);
|
||||
let s = await OS.File.read(backups[key]);
|
||||
await OS.File.writeAtomic(SessionFile.Paths[key], s, {compression: "lz4"});
|
||||
} else {
|
||||
OS.File.remove(SessionFile.Paths[key]);
|
||||
await OS.File.remove(SessionFile.Paths[key]);
|
||||
}
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -54,7 +56,7 @@ add_task(async function test_ensure_histogram_exists_and_empty() {
|
|||
*/
|
||||
add_task(async function test_no_files_exist() {
|
||||
// No session files are available to SessionFile.
|
||||
reset_session();
|
||||
await promise_reset_session();
|
||||
|
||||
await SessionFile.read();
|
||||
// Checking if the histogram is updated negatively
|
||||
|
@ -72,7 +74,7 @@ add_task(async function test_one_file_valid() {
|
|||
// Corrupting some backup files.
|
||||
let invalidSession = "data/sessionstore_invalid.js";
|
||||
let validSession = "data/sessionstore_valid.js";
|
||||
reset_session({
|
||||
await promise_reset_session({
|
||||
clean: invalidSession,
|
||||
cleanBackup: validSession,
|
||||
recovery: invalidSession,
|
||||
|
@ -94,7 +96,7 @@ add_task(async function test_one_file_valid() {
|
|||
add_task(async function test_all_files_corrupt() {
|
||||
// Corrupting all backup files.
|
||||
let invalidSession = "data/sessionstore_invalid.js";
|
||||
reset_session({
|
||||
await promise_reset_session({
|
||||
clean: invalidSession,
|
||||
cleanBackup: invalidSession,
|
||||
recovery: invalidSession,
|
||||
|
|
|
@ -0,0 +1,132 @@
|
|||
"use strict";
|
||||
|
||||
const {XPCOMUtils} = Cu.import("resource://gre/modules/XPCOMUtils.jsm", {});
|
||||
const {SessionWorker} = Cu.import("resource:///modules/sessionstore/SessionWorker.jsm", {});
|
||||
|
||||
var Paths;
|
||||
var SessionFile;
|
||||
|
||||
// We need a XULAppInfo to initialize SessionFile
|
||||
Cu.import("resource://testing-common/AppInfo.jsm", this);
|
||||
updateAppInfo({
|
||||
name: "SessionRestoreTest",
|
||||
ID: "{230de50e-4cd1-11dc-8314-0800200c9a66}",
|
||||
version: "1",
|
||||
platformVersion: "",
|
||||
});
|
||||
|
||||
function run_test() {
|
||||
run_next_test();
|
||||
}
|
||||
|
||||
function promise_check_exist(path, shouldExist) {
|
||||
return (async function() {
|
||||
do_print("Ensuring that " + path + (shouldExist ? " exists" : " does not exist"));
|
||||
if ((await OS.File.exists(path)) != shouldExist) {
|
||||
throw new Error("File " + path + " should " + (shouldExist ? "exist" : "not exist"));
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
function promise_check_contents(path, expect) {
|
||||
return (async function() {
|
||||
do_print("Checking whether " + path + " has the right contents");
|
||||
let actual = await OS.File.read(path, { encoding: "utf-8", compression: "lz4" });
|
||||
Assert.deepEqual(JSON.parse(actual), expect, `File ${path} contains the expected data.`);
|
||||
})();
|
||||
}
|
||||
|
||||
function generateFileContents(id) {
|
||||
let url = `http://example.com/test_backup_once#${id}_${Math.random()}`;
|
||||
return {windows: [{tabs: [{entries: [{url}], index: 1}]}]}
|
||||
}
|
||||
|
||||
// Check whether the migration from .js to .jslz4 is correct.
|
||||
add_task(async function test_migration() {
|
||||
// Make sure that we have a profile before initializing SessionFile.
|
||||
let profd = do_get_profile();
|
||||
SessionFile = Cu.import("resource:///modules/sessionstore/SessionFile.jsm", {}).SessionFile;
|
||||
Paths = SessionFile.Paths;
|
||||
|
||||
let source = do_get_file("data/sessionstore_valid.js");
|
||||
source.copyTo(profd, "sessionstore.js");
|
||||
|
||||
// Read the content of the session store file.
|
||||
let sessionStoreUncompressed = await OS.File.read(Paths.clean.replace("jsonlz4", "js"), {encoding: "utf-8"});
|
||||
let parsed = JSON.parse(sessionStoreUncompressed);
|
||||
|
||||
// Read the session file with .js extension.
|
||||
let result = await SessionFile.read();
|
||||
|
||||
// Check whether the result is what we wanted.
|
||||
equal(result.origin, "clean");
|
||||
equal(result.useOldExtension, true);
|
||||
Assert.deepEqual(result.parsed, parsed, "result.parsed contains expected data");
|
||||
|
||||
// Initiate a write to ensure we write the compressed version.
|
||||
await SessionFile.write(parsed);
|
||||
await promise_check_exist(Paths.backups, true);
|
||||
await promise_check_exist(Paths.clean, false);
|
||||
await promise_check_exist(Paths.cleanBackup, true);
|
||||
await promise_check_exist(Paths.recovery, true);
|
||||
await promise_check_exist(Paths.recoveryBackup, false);
|
||||
await promise_check_exist(Paths.nextUpgradeBackup, true);
|
||||
// The deprecated $Path.clean should exist.
|
||||
await promise_check_exist(Paths.clean.replace("jsonlz4", "js"), true);
|
||||
|
||||
await promise_check_contents(Paths.recovery, parsed);
|
||||
});
|
||||
|
||||
add_task(async function test_startup_with_compressed_clean() {
|
||||
let state = {windows: []};
|
||||
let stateString = JSON.stringify(state);
|
||||
|
||||
// Mare sure we have an empty profile dir.
|
||||
await SessionFile.wipe();
|
||||
|
||||
// Populate session files to profile dir.
|
||||
await OS.File.writeAtomic(Paths.clean, stateString, {encoding: "utf-8", compression: "lz4"});
|
||||
await OS.File.makeDir(Paths.backups);
|
||||
await OS.File.writeAtomic(Paths.cleanBackup, stateString, {encoding: "utf-8", compression: "lz4"});
|
||||
|
||||
// Initiate a read.
|
||||
let result = await SessionFile.read();
|
||||
|
||||
// Make sure we read correct session file and its content.
|
||||
equal(result.origin, "clean");
|
||||
equal(result.useOldExtension, false);
|
||||
Assert.deepEqual(state, result.parsed, "result.parsed contains expected data");
|
||||
});
|
||||
|
||||
add_task(async function test_empty_profile_dir() {
|
||||
// Make sure that we have empty profile dir.
|
||||
await SessionFile.wipe();
|
||||
await promise_check_exist(Paths.backups, false);
|
||||
await promise_check_exist(Paths.clean, false);
|
||||
await promise_check_exist(Paths.cleanBackup, false);
|
||||
await promise_check_exist(Paths.recovery, false);
|
||||
await promise_check_exist(Paths.recoveryBackup, false);
|
||||
await promise_check_exist(Paths.nextUpgradeBackup, false);
|
||||
await promise_check_exist(Paths.backups.replace("jsonlz4", "js"), false);
|
||||
await promise_check_exist(Paths.clean.replace("jsonlz4", "js"), false);
|
||||
await promise_check_exist(Paths.cleanBackup.replace("lz4", ""), false);
|
||||
await promise_check_exist(Paths.recovery.replace("jsonlz4", "js"), false);
|
||||
await promise_check_exist(Paths.recoveryBackup.replace("jsonlz4", "js"), false);
|
||||
await promise_check_exist(Paths.nextUpgradeBackup.replace("jsonlz4", "js"), false);
|
||||
|
||||
// Initiate a read and make sure that we are in empty state.
|
||||
let result = await SessionFile.read();
|
||||
equal(result.origin, "empty");
|
||||
equal(result.noFilesFound, true);
|
||||
|
||||
// Create a state to store.
|
||||
let state = {windows: []};
|
||||
await SessionWorker.post("write", [state, {isFinalWrite: true}]);
|
||||
|
||||
// Check session files are created, but not deprecated ones.
|
||||
await promise_check_exist(Paths.clean, true);
|
||||
await promise_check_exist(Paths.clean.replace("jsonlz4", "js"), false);
|
||||
|
||||
// Check session file' content is correct.
|
||||
await promise_check_contents(Paths.clean, state);
|
||||
});
|
|
@ -14,7 +14,6 @@ const profd = do_get_profile();
|
|||
const {SessionFile} = Cu.import("resource:///modules/sessionstore/SessionFile.jsm", {});
|
||||
const {Paths} = SessionFile;
|
||||
|
||||
const {OS} = Cu.import("resource://gre/modules/osfile.jsm", {});
|
||||
const {File} = OS;
|
||||
|
||||
const MAX_ENTRIES = 9;
|
||||
|
@ -32,6 +31,7 @@ updateAppInfo({
|
|||
add_task(async function setup() {
|
||||
let source = do_get_file("data/sessionstore_valid.js");
|
||||
source.copyTo(profd, "sessionstore.js");
|
||||
await writeCompressedFile(Paths.clean.replace("jsonlz4", "js"), Paths.clean);
|
||||
|
||||
// Finish SessionFile initialization.
|
||||
await SessionFile.read();
|
||||
|
@ -62,7 +62,7 @@ async function setMaxBackForward(back, fwd) {
|
|||
|
||||
async function writeAndParse(state, path, options = {}) {
|
||||
await SessionWorker.post("write", [state, options]);
|
||||
return JSON.parse(await File.read(path, {encoding: "utf-8"}));
|
||||
return JSON.parse(await File.read(path, {encoding: "utf-8", compression: "lz4"}));
|
||||
}
|
||||
|
||||
add_task(async function test_shistory_cap_none() {
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
|
||||
function run_test() {
|
||||
let profd = do_get_profile();
|
||||
var SessionFile = Cu.import("resource:///modules/sessionstore/SessionFile.jsm", {}).SessionFile;
|
||||
|
||||
let sourceSession = do_get_file("data/sessionstore_invalid.js");
|
||||
sourceSession.copyTo(profd, "sessionstore.js");
|
||||
|
@ -10,7 +11,10 @@ function run_test() {
|
|||
let sourceCheckpoints = do_get_file("data/sessionCheckpoints_all.json");
|
||||
sourceCheckpoints.copyTo(profd, "sessionCheckpoints.json");
|
||||
|
||||
do_test_pending();
|
||||
// Compress sessionstore.js to sessionstore.jsonlz4
|
||||
// and remove sessionstore.js
|
||||
let oldExtSessionFile = SessionFile.Paths.clean.replace("jsonlz4", "js");
|
||||
writeCompressedFile(oldExtSessionFile, SessionFile.Paths.clean).then(() => {
|
||||
let startup = Cc["@mozilla.org/browser/sessionstartup;1"].
|
||||
getService(Ci.nsISessionStartup);
|
||||
|
||||
|
@ -18,4 +22,7 @@ function run_test() {
|
|||
do_check_eq(startup.sessionType, Ci.nsISessionStartup.NO_SESSION);
|
||||
do_test_finished();
|
||||
});
|
||||
});
|
||||
|
||||
do_test_pending();
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
|
||||
function run_test() {
|
||||
let profd = do_get_profile();
|
||||
var SessionFile = Cu.import("resource:///modules/sessionstore/SessionFile.jsm", {}).SessionFile;
|
||||
|
||||
let sourceSession = do_get_file("data/sessionstore_valid.js");
|
||||
sourceSession.copyTo(profd, "sessionstore.js");
|
||||
|
@ -16,7 +17,10 @@ function run_test() {
|
|||
let sourceCheckpoints = do_get_file("data/sessionCheckpoints_all.json");
|
||||
sourceCheckpoints.copyTo(profd, "sessionCheckpoints.json");
|
||||
|
||||
do_test_pending();
|
||||
// Compress sessionstore.js to sessionstore.jsonlz4
|
||||
// and remove sessionstore.js
|
||||
let oldExtSessionFile = SessionFile.Paths.clean.replace("jsonlz4", "js");
|
||||
writeCompressedFile(oldExtSessionFile, SessionFile.Paths.clean).then(() => {
|
||||
let startup = Cc["@mozilla.org/browser/sessionstartup;1"].
|
||||
getService(Ci.nsISessionStartup);
|
||||
|
||||
|
@ -24,4 +28,7 @@ function run_test() {
|
|||
do_check_eq(startup.sessionType, Ci.nsISessionStartup.DEFER_SESSION);
|
||||
do_test_finished();
|
||||
});
|
||||
});
|
||||
|
||||
do_test_pending();
|
||||
}
|
||||
|
|
|
@ -13,3 +13,4 @@ support-files =
|
|||
[test_startup_nosession_async.js]
|
||||
[test_startup_session_async.js]
|
||||
[test_startup_invalid_session.js]
|
||||
[test_migration_lz4compression.js]
|
||||
|
|
|
@ -27,6 +27,7 @@ this.FormAutofillUtils = {
|
|||
"address-level2": "address",
|
||||
"postal-code": "address",
|
||||
"country": "address",
|
||||
"country-name": "address",
|
||||
"tel": "tel",
|
||||
"email": "email",
|
||||
"cc-name": "creditCard",
|
||||
|
|
|
@ -93,18 +93,20 @@ ProfileAutoCompleteResult.prototype = {
|
|||
* @returns {string} The secondary label
|
||||
*/
|
||||
_getSecondaryLabel(focusedFieldName, allFieldNames, profile) {
|
||||
/* TODO: Since "name" is a special case here, so the secondary "name" label
|
||||
will be refined when the handling rule for "name" is ready.
|
||||
*/
|
||||
const possibleNameFields = [
|
||||
// We group similar fields into the same field name so we won't pick another
|
||||
// field in the same group as the secondary label.
|
||||
const GROUP_FIELDS = {
|
||||
"name": [
|
||||
"name",
|
||||
"given-name",
|
||||
"additional-name",
|
||||
"family-name",
|
||||
];
|
||||
|
||||
focusedFieldName = possibleNameFields.includes(focusedFieldName) ?
|
||||
"name" : focusedFieldName;
|
||||
],
|
||||
"country-name": [
|
||||
"country",
|
||||
"country-name",
|
||||
],
|
||||
};
|
||||
|
||||
const secondaryLabelOrder = [
|
||||
"street-address", // Street address
|
||||
|
@ -112,24 +114,27 @@ ProfileAutoCompleteResult.prototype = {
|
|||
"address-level2", // City/Town
|
||||
"organization", // Company or organization name
|
||||
"address-level1", // Province/State (Standardized code if possible)
|
||||
"country", // Country
|
||||
"country-name", // Country name
|
||||
"postal-code", // Postal code
|
||||
"tel", // Phone number
|
||||
"email", // Email address
|
||||
];
|
||||
|
||||
for (let field in GROUP_FIELDS) {
|
||||
if (GROUP_FIELDS[field].includes(focusedFieldName)) {
|
||||
focusedFieldName = field;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (const currentFieldName of secondaryLabelOrder) {
|
||||
if (focusedFieldName == currentFieldName ||
|
||||
!profile[currentFieldName]) {
|
||||
if (focusedFieldName == currentFieldName || !profile[currentFieldName]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let matching;
|
||||
if (currentFieldName == "name") {
|
||||
matching = allFieldNames.some(fieldName => possibleNameFields.includes(fieldName));
|
||||
} else {
|
||||
matching = allFieldNames.includes(currentFieldName);
|
||||
}
|
||||
let matching = GROUP_FIELDS[currentFieldName] ?
|
||||
allFieldNames.some(fieldName => GROUP_FIELDS[currentFieldName].includes(fieldName)) :
|
||||
allFieldNames.includes(currentFieldName);
|
||||
|
||||
if (matching) {
|
||||
return profile[currentFieldName];
|
||||
|
|
|
@ -35,6 +35,7 @@
|
|||
* address-line1,
|
||||
* address-line2,
|
||||
* address-line3,
|
||||
* country-name,
|
||||
*
|
||||
* // metadata
|
||||
* timeCreated, // in ms
|
||||
|
@ -95,6 +96,16 @@ XPCOMUtils.defineLazyServiceGetter(this, "gUUIDGenerator",
|
|||
"@mozilla.org/uuid-generator;1",
|
||||
"nsIUUIDGenerator");
|
||||
|
||||
XPCOMUtils.defineLazyGetter(this, "REGION_NAMES", function() {
|
||||
let regionNames = {};
|
||||
let countries = Services.strings.createBundle("chrome://global/locale/regionNames.properties").getSimpleEnumeration();
|
||||
while (countries.hasMoreElements()) {
|
||||
let country = countries.getNext().QueryInterface(Components.interfaces.nsIPropertyElement);
|
||||
regionNames[country.key.toUpperCase()] = country.value;
|
||||
}
|
||||
return regionNames;
|
||||
});
|
||||
|
||||
const PROFILE_JSON_FILE_NAME = "autofill-profiles.json";
|
||||
|
||||
const STORAGE_SCHEMA_VERSION = 1;
|
||||
|
@ -418,6 +429,20 @@ class Addresses extends AutofillRecords {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compute country name
|
||||
if (profile.country) {
|
||||
if (profile.country == "US") {
|
||||
let countryName = REGION_NAMES[profile.country];
|
||||
if (countryName) {
|
||||
profile["country-name"] = countryName;
|
||||
}
|
||||
} else {
|
||||
// TODO: We only support US in MVP so hide the field if it's not. We
|
||||
// are going to support more countries in bug 1370193.
|
||||
delete profile.country;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_recordWriteProcessor(profile) {
|
||||
|
@ -459,6 +484,25 @@ class Addresses extends AutofillRecords {
|
|||
profile["street-address"] = addressLines.join("\n");
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize country
|
||||
if (profile.country) {
|
||||
let country = profile.country.toUpperCase();
|
||||
// Only values included in the region list will be saved.
|
||||
if (REGION_NAMES[country]) {
|
||||
profile.country = country;
|
||||
} else {
|
||||
delete profile.country;
|
||||
}
|
||||
} else if (profile["country-name"]) {
|
||||
for (let region in REGION_NAMES) {
|
||||
if (REGION_NAMES[region].toLowerCase() == profile["country-name"].toLowerCase()) {
|
||||
profile.country = region;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
delete profile["country-name"];
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -145,7 +145,7 @@ ManageProfileDialog.prototype = {
|
|||
"address-level2", // City/Town
|
||||
"organization", // Company or organization name
|
||||
"address-level1", // Province/State (Standardized code if possible)
|
||||
"country", // Country
|
||||
"country-name", // Country name
|
||||
"postal-code", // Postal code
|
||||
"tel", // Phone number
|
||||
"email", // Email address
|
||||
|
|
|
@ -65,7 +65,7 @@ const ADDRESS_COMPUTE_TESTCASES = [
|
|||
expectedResult: {
|
||||
"street-address": "line1\n\nline3",
|
||||
"address-line1": "line1",
|
||||
"address-line2": "",
|
||||
"address-line2": undefined,
|
||||
"address-line3": "line3",
|
||||
},
|
||||
},
|
||||
|
@ -81,6 +81,18 @@ const ADDRESS_COMPUTE_TESTCASES = [
|
|||
"address-line3": "line3",
|
||||
},
|
||||
},
|
||||
|
||||
// Country
|
||||
{
|
||||
description: "Has \"country\"",
|
||||
address: {
|
||||
"country": "US",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": "US",
|
||||
"country-name": "United States",
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const ADDRESS_NORMALIZE_TESTCASES = [
|
||||
|
@ -178,6 +190,68 @@ const ADDRESS_NORMALIZE_TESTCASES = [
|
|||
"street-address": "street address\nstreet address line 2",
|
||||
},
|
||||
},
|
||||
|
||||
// Country
|
||||
{
|
||||
description: "Has \"country\" in lowercase",
|
||||
address: {
|
||||
"country": "us",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": "US",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Has unknown \"country\"",
|
||||
address: {
|
||||
"country": "AA",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Has \"country-name\"",
|
||||
address: {
|
||||
"country-name": "united states",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": "US",
|
||||
"country-name": undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Has unknown \"country-name\"",
|
||||
address: {
|
||||
"country-name": "unknown country name",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": undefined,
|
||||
"country-name": undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Has \"country\" and unknown \"country-name\"",
|
||||
address: {
|
||||
"country": "us",
|
||||
"country-name": "unknown country name",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": "US",
|
||||
"country-name": undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Has \"country-name\" and unknown \"country\"",
|
||||
address: {
|
||||
"country": "AA",
|
||||
"country-name": "united states",
|
||||
},
|
||||
expectedResult: {
|
||||
"country": undefined,
|
||||
"country-name": undefined,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const CREDIT_CARD_COMPUTE_TESTCASES = [
|
||||
|
@ -241,7 +315,7 @@ const CREDIT_CARD_NORMALIZE_TESTCASES = [
|
|||
|
||||
let do_check_record_matches = (expectedRecord, record) => {
|
||||
for (let key in expectedRecord) {
|
||||
do_check_eq(expectedRecord[key], record[key] || "");
|
||||
do_check_eq(expectedRecord[key], record[key]);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -277,7 +351,7 @@ add_task(async function test_normalizeAddressFields() {
|
|||
profileStorage = new ProfileStorage(path);
|
||||
await profileStorage.initialize();
|
||||
|
||||
let addresses = profileStorage.addresses.getAll();
|
||||
let addresses = profileStorage.addresses.getAll({noComputedFields: true});
|
||||
|
||||
for (let i in addresses) {
|
||||
do_print("Verify testcase: " + ADDRESS_NORMALIZE_TESTCASES[i].description);
|
||||
|
|
|
@ -129,7 +129,7 @@
|
|||
}
|
||||
|
||||
#onboarding-tour-list > li:dir(rtl) {
|
||||
background-position: right 27px center;
|
||||
background-position-x: right 10px;
|
||||
}
|
||||
|
||||
#onboarding-tour-list > li.onboarding-complete::before {
|
||||
|
|
|
@@ -648,7 +648,7 @@ this.UnsubmittedCrashHandler = {

    let reportIDs = [];
    try {
      reportIDs = await CrashSubmit.pendingIDsAsync(dateLimit);
      reportIDs = await CrashSubmit.pendingIDs(dateLimit);
    } catch (e) {
      Cu.reportError(e);
      return null;
@ -628,6 +628,7 @@ groupbox {
|
|||
padding: 0 10px;
|
||||
bottom: 100%;
|
||||
background-color: #ffe900;
|
||||
border: 1px solid #d7b600;
|
||||
}
|
||||
|
||||
.search-tooltip:hover,
|
||||
|
@ -636,6 +637,15 @@ groupbox {
|
|||
}
|
||||
|
||||
.search-tooltip::before {
|
||||
position: absolute;
|
||||
content: "";
|
||||
border: 7px solid transparent;
|
||||
border-top-color: #d7b600;
|
||||
top: 100%;
|
||||
offset-inline-start: calc(50% - 7px);
|
||||
}
|
||||
|
||||
.search-tooltip::after {
|
||||
position: absolute;
|
||||
content: "";
|
||||
border: 6px solid transparent;
|
||||
|
|
|
@@ -71,6 +71,8 @@ this.BrowserToolboxProcess = function BrowserToolboxProcess(onClose, onRun, opti

  this._telemetry = new Telemetry();

  this._onConnectionChange = this._onConnectionChange.bind(this);

  this.close = this.close.bind(this);
  Services.obs.addObserver(this.close, "quit-application");
  this._initServer();

@@ -133,7 +135,7 @@ BrowserToolboxProcess.prototype = {
    dumpn("Created a separate loader instance for the DebuggerServer.");

    // Forward interesting events.
    this.debuggerServer.on("connectionchange", this.emit);
    this.debuggerServer.on("connectionchange", this._onConnectionChange);

    this.debuggerServer.init();
    // We mainly need a root actor and tab actors for opening a toolbox, even

@@ -283,6 +285,19 @@ BrowserToolboxProcess.prototype = {
    });
  },

  /**
   * Called upon receiving the connectionchange event from a debuggerServer.
   *
   * @param {String} what
   *        Type of connection change (can be either 'opened' or 'closed').
   * @param {DebuggerServerConnection} connection
   *        The connection that was opened or closed.
   */
  _onConnectionChange: function (evt, what, connection) {
    let wrappedJSObject = { what, connection };
    Services.obs.notifyObservers({ wrappedJSObject }, "toolbox-connection-change");
  },

  /**
   * Closes the remote debugging server and kills the toolbox process.
   */
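Illustrative only, not part of the patch above: a consumer of the new "toolbox-connection-change" notification would read the same { what, connection } payload from subject.wrappedJSObject, along these lines.

  // Hypothetical listener; the topic and payload shape come from _onConnectionChange above.
  const listener = {
    observe(subject) {
      let { what, connection } = subject.wrappedJSObject;
      // `what` is either "opened" or "closed".
    },
  };
  Services.obs.addObserver(listener, "toolbox-connection-change");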
@@ -299,7 +314,7 @@ BrowserToolboxProcess.prototype = {

    this._telemetry.toolClosed("jsbrowserdebugger");
    if (this.debuggerServer) {
      this.debuggerServer.off("connectionchange", this.emit);
      this.debuggerServer.off("connectionchange", this._onConnectionChange);
      this.debuggerServer.destroy();
      this.debuggerServer = null;
    }

@@ -337,4 +352,9 @@ Services.prefs.addObserver("devtools.debugger.log", {
  }
});

Services.obs.notifyObservers(null, "ToolboxProcessLoaded");
Services.prefs.addObserver("toolbox-update-addon-options", {
  observe: (subject) => {
    let {id, options} = subject.wrappedJSObject;
    BrowserToolboxProcess.setAddonOptions(id, options);
  }
});

@@ -299,7 +299,7 @@ StorageUI.prototype = {
   * @param {Array} editableFields
   *        An array of keys of columns to be made editable
   */
  makeFieldsEditable: function* (editableFields) {
  makeFieldsEditable: function (editableFields) {
    if (editableFields && editableFields.length > 0) {
      this.table.makeFieldsEditable(editableFields);
    } else if (this.table._editableFieldsEngine) {

@@ -318,7 +318,7 @@ StorageUI.prototype = {
   * the table and repopulates the sidebar with that item's data if the item
   * being removed was selected.
   */
  removeItemFromTable: function (name) {
  removeItemFromTable: Task.async(function* (name) {
    if (this.table.isSelected(name) && this.table.items.size > 1) {
      if (this.table.selectedIndex == 0) {
        this.table.selectNextRow();

@@ -328,8 +328,8 @@ StorageUI.prototype = {
    }

    this.table.remove(name);
    this.updateObjectSidebar();
  },
    yield this.updateObjectSidebar();
  }),

  /**
   * Event handler for "stores-cleared" event coming from the storage actor.

@@ -401,30 +401,36 @@ StorageUI.prototype = {
   *        of the changed store objects. This array is empty for deleted object
   *        if the host was completely removed.
   */
  onUpdate: function ({ changed, added, deleted }) {
    if (deleted) {
      this.handleDeletedItems(deleted);
    }

  onUpdate: Task.async(function* ({ changed, added, deleted }) {
    if (added) {
      this.handleAddedItems(added);
      yield this.handleAddedItems(added);
    }

    if (changed) {
      this.handleChangedItems(changed);
      yield this.handleChangedItems(changed);
    }

    // We are dealing with batches of changes here. Deleted **MUST** come last in case it
    // is in the same batch as added and changed events e.g.
    //   - An item is changed then deleted in the same batch: deleted then changed will
    //     display an item that has been deleted.
    //   - An item is added then deleted in the same batch: deleted then added will
    //     display an item that has been deleted.
    if (deleted) {
      this.handleDeletedItems(deleted);
    }

    if (added || deleted || changed) {
      this.emit("store-objects-updated");
    }
  },
  }),

  /**
   * Handle added items received by onUpdate
   *
   * @param {object} See onUpdate docs
   */
  handleAddedItems: function (added) {
  handleAddedItems: Task.async(function* (added) {
    for (let type in added) {
      for (let host in added[type]) {
        this.tree.add([type, {id: host, type: "url"}]);

@@ -437,7 +443,7 @@ StorageUI.prototype = {
          this.tree.add([type, host, ...name]);
          if (!this.tree.selectedItem) {
            this.tree.selectedItem = [type, host, name[0], name[1]];
            this.fetchStorageObjects(type, host, [JSON.stringify(name)],
            yield this.fetchStorageObjects(type, host, [JSON.stringify(name)],
                                     REASON.NEW_ROW);
          }
        } catch (ex) {

@@ -446,19 +452,19 @@ StorageUI.prototype = {
        }

        if (this.tree.isSelected([type, host])) {
          this.fetchStorageObjects(type, host, added[type][host],
          yield this.fetchStorageObjects(type, host, added[type][host],
                                   REASON.NEW_ROW);
        }
      }
    }
  },
  }),

  /**
   * Handle deleted items received by onUpdate
   *
   * @param {object} See onUpdate docs
   */
  handleDeletedItems: function (deleted) {
  handleDeletedItems: Task.async(function* (deleted) {
    for (let type in deleted) {
      for (let host in deleted[type]) {
        if (!deleted[type][host].length) {

@@ -491,26 +497,26 @@ StorageUI.prototype = {
          if (names.length > 0) {
            let tableItemName = names.pop();
            if (this.tree.isSelected([type, host, ...names])) {
              this.removeItemFromTable(tableItemName);
              yield this.removeItemFromTable(tableItemName);
            }
          }
        } catch (ex) {
          if (this.tree.isSelected([type, host])) {
            this.removeItemFromTable(name);
            yield this.removeItemFromTable(name);
          }
        }
      }
    }
  }
  }
  },
  }),

  /**
   * Handle changed items received by onUpdate
   *
   * @param {object} See onUpdate docs
   */
  handleChangedItems: function (changed) {
  handleChangedItems: Task.async(function* (changed) {
    let [type, host, db, objectStore] = this.tree.selectedItem;
    if (!changed[type] || !changed[type][host] ||
        changed[type][host].length == 0) {

@@ -524,11 +530,11 @@ StorageUI.prototype = {
        toUpdate.push(name);
      }
    }
    this.fetchStorageObjects(type, host, toUpdate, REASON.UPDATE);
    yield this.fetchStorageObjects(type, host, toUpdate, REASON.UPDATE);
    } catch (ex) {
    this.fetchStorageObjects(type, host, changed[type][host], REASON.UPDATE);
    yield this.fetchStorageObjects(type, host, changed[type][host], REASON.UPDATE);
    }
  },
  }),

  /**
   * Fetches the storage objects from the storage actor and populates the

@@ -584,9 +590,9 @@ StorageUI.prototype = {

      let {data} = yield storageType.getStoreObjects(host, names, fetchOpts);
      if (data.length) {
        this.populateTable(data, reason);
        yield this.populateTable(data, reason);
      }
      yield this.updateToolbar();
      this.updateToolbar();
      this.emit("store-objects-updated");
    } catch (ex) {
      console.error(ex);

@@ -596,7 +602,7 @@ StorageUI.prototype = {
  /**
   * Updates the toolbar hiding and showing buttons as appropriate.
   */
  updateToolbar: Task.async(function* () {
  updateToolbar: function () {
    let item = this.tree.selectedItem;
    let howManyNodesIn = item ? item.length : 0;

@@ -612,7 +618,7 @@ StorageUI.prototype = {
      this._addButton.hidden = true;
      this._addButton.removeAttribute("tooltiptext");
    }
  }),
  },

  /**
   * Populates the storage tree which displays the list of storages present for

@@ -857,7 +863,7 @@ StorageUI.prototype = {
   *        An array of ids which represent the location of the selected item in
   *        the storage tree
   */
  onHostSelect: function (event, item) {
  onHostSelect: Task.async(function* (event, item) {
    this.table.clear();
    this.hideSidebar();
    this.searchBox.value = "";

@@ -875,9 +881,9 @@ StorageUI.prototype = {
    if (item.length > 2) {
      names = [JSON.stringify(item.slice(2))];
    }
    this.fetchStorageObjects(type, host, names, REASON.POPULATE);
    yield this.fetchStorageObjects(type, host, names, REASON.POPULATE);
    this.itemOffset = 0;
  },
  }),

  /**
   * Resets the column headers in the storage table with the passed object `data`

@@ -891,7 +897,7 @@ StorageUI.prototype = {
   * @param {string} [subType]
   *        The sub type under the given type.
   */
  resetColumns: function* (type, host, subtype) {
  resetColumns: Task.async(function* (type, host, subtype) {
    this.table.host = host;
    this.table.datatype = type;

@@ -940,8 +946,8 @@ StorageUI.prototype = {
    this.table.setColumns(columns, null, hiddenFields, privateFields);
    this.hideSidebar();

    yield this.makeFieldsEditable(editableFields);
  },
    this.makeFieldsEditable(editableFields);
  }),

  /**
   * Populates or updates the rows in the storage table.

@@ -951,7 +957,7 @@ StorageUI.prototype = {
   * @param {Constant} reason
   *        See REASON constant at top of file.
   */
  populateTable: function (data, reason) {
  populateTable: Task.async(function* (data, reason) {
    for (let item of data) {
      if (item.value) {
        item.valueActor = item.value;

@@ -982,14 +988,14 @@ StorageUI.prototype = {
        case REASON.UPDATE:
          this.table.update(item);
          if (item == this.table.selectedRow && !this.sidebar.hidden) {
            this.updateObjectSidebar();
            yield this.updateObjectSidebar();
          }
          break;
      }

      this.shouldLoadMoreItems = true;
    }
  },
  }),

  /**
   * Handles keypress event on the body table to close the sidebar when open

@@ -1019,7 +1025,7 @@ StorageUI.prototype = {
  /**
   * Handles endless scrolling for the table
   */
  handleScrollEnd: function () {
  handleScrollEnd: Task.async(function* () {
    if (!this.shouldLoadMoreItems) {
      return;
    }

@@ -1032,8 +1038,8 @@ StorageUI.prototype = {
    if (item.length > 2) {
      names = [JSON.stringify(item.slice(2))];
    }
    this.fetchStorageObjects(type, host, names, REASON.NEXT_50_ITEMS);
  },
    yield this.fetchStorageObjects(type, host, names, REASON.NEXT_50_ITEMS);
  }),

  /**
   * Fires before a cell context menu with the "Add" or "Delete" action is

@@ -2,14 +2,24 @@

<!-- TODO: this will need to be updated in the future when we move to GitHub -->

## Creating a patch

To create a patch you need to first commit your changes and then export them to a patch file.

With Mercurial:
* `hg commit -m 'your commit message'`
* `hg export > /path/to/your/patch`

With Git, the process is similar, but you first need to add an alias to create Mercurial-style patches. Have a look at [the detailed documentation](https://developer.mozilla.org/en-US/docs/Tools/Contributing#Creating_a_patch_to_check_in).
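As a rough sketch only (the Mercurial-style alias itself is covered in the documentation linked above), the plain Git equivalent looks like:
* `git commit -am 'your commit message'`
* `git format-patch -1 --stdout > /path/to/your/patch`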

## Commit messages

Commit messages should follow the pattern `Bug 1234567 - change description. r=reviewer`

First is the bug number related to the patch. Then the description should explain what the patch changes. The last part is used to keep track of the reviewer for the patch.
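For example (the bug number and reviewer below are placeholders, not a real reference): `Bug 1234567 - Convert the widget update to an async function. r=reviewer`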

## Submitting a patch

Once you have a patch file, add it as an attachment to the Bugzilla ticket you are working on, and set the `feedback?` flag if you just want feedback and confirmation that you are on the right track, or the `review?` flag if you think the patch is ready to land. Read more about [how to submit a patch and the Bugzilla review cycle here](https://developer.mozilla.org/en-US/docs/Developer_Guide/How_to_Submit_a_Patch).

You can also take a look at the [Code Review Checklist](./code-reviews.md) as it contains a list of checks that your reviewer is likely to go over when reviewing your code.

## Posting patches as gists

* Install gist-cli: `npm install -g gist-cli`
* In your bash profile add:
  * git: `alias gist-patch="git diff | gist -o -t patch"`
  * hg: `alias gist-patch="hg diff | gist -o -t patch"`
* Then go to a clean repo, modify the files, and run the command `gist-patch`

|
|
@ -304,6 +304,7 @@ bool nsContentUtils::sLowerNetworkPriority = false;
|
|||
#ifndef RELEASE_OR_BETA
|
||||
bool nsContentUtils::sBypassCSSOMOriginCheck = false;
|
||||
#endif
|
||||
bool nsContentUtils::sIsScopedStyleEnabled = false;
|
||||
|
||||
bool nsContentUtils::sIsBytecodeCacheEnabled = false;
|
||||
int32_t nsContentUtils::sBytecodeCacheStrategy = 0;
|
||||
|
@ -706,6 +707,9 @@ nsContentUtils::Init()
|
|||
sBypassCSSOMOriginCheck = getenv("MOZ_BYPASS_CSSOM_ORIGIN_CHECK");
|
||||
#endif
|
||||
|
||||
Preferences::AddBoolVarCache(&sIsScopedStyleEnabled,
|
||||
"layout.css.scoped-style.enabled", false);
|
||||
|
||||
Preferences::AddBoolVarCache(&sLowerNetworkPriority,
|
||||
"privacy.trackingprotection.lower_network_priority", false);
|
||||
|
||||
|
|
|
@ -2281,6 +2281,14 @@ public:
|
|||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the <style scoped> enabling pref is true.
|
||||
*/
|
||||
static bool IsScopedStylePrefEnabled()
|
||||
{
|
||||
return sIsScopedStyleEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return true if this doc is controlled by a ServiceWorker.
|
||||
*/
|
||||
|
@ -3192,6 +3200,7 @@ private:
|
|||
#ifndef RELEASE_OR_BETA
|
||||
static bool sBypassCSSOMOriginCheck;
|
||||
#endif
|
||||
static bool sIsScopedStyleEnabled;
|
||||
static bool sIsBytecodeCacheEnabled;
|
||||
static int32_t sBytecodeCacheStrategy;
|
||||
static uint32_t sCookiesLifetimePolicy;
|
||||
|
|
|
@ -2600,6 +2600,7 @@ nsDOMWindowUtils::FlushApzRepaints(bool* aOutResult)
|
|||
return NS_ERROR_UNEXPECTED;
|
||||
}
|
||||
wrbc->SendFlushApzRepaints();
|
||||
*aOutResult = true;
|
||||
return NS_OK;
|
||||
}
|
||||
ShadowLayerForwarder* forwarder = manager->AsShadowForwarder();
|
||||
|
|
|
@ -1343,6 +1343,7 @@ nsIDocument::nsIDocument()
|
|||
mFrameRequestCallbacksScheduled(false),
|
||||
mIsTopLevelContentDocument(false),
|
||||
mIsContentDocument(false),
|
||||
mIsScopedStyleEnabled(eScopedStyle_Unknown),
|
||||
mCompatMode(eCompatibility_FullStandards),
|
||||
mReadyState(ReadyState::READYSTATE_UNINITIALIZED),
|
||||
mStyleBackendType(StyleBackendType::None),
|
||||
|
@ -13440,3 +13441,15 @@ nsDocument::IsThirdParty()
|
|||
mIsThirdParty.emplace(false);
|
||||
return mIsThirdParty.value();
|
||||
}
|
||||
|
||||
bool
|
||||
nsIDocument::IsScopedStyleEnabled()
|
||||
{
|
||||
if (mIsScopedStyleEnabled == eScopedStyle_Unknown) {
|
||||
mIsScopedStyleEnabled = nsContentUtils::IsChromeDoc(this) ||
|
||||
nsContentUtils::IsScopedStylePrefEnabled()
|
||||
? eScopedStyle_Enabled
|
||||
: eScopedStyle_Disabled;
|
||||
}
|
||||
return mIsScopedStyleEnabled == eScopedStyle_Enabled;
|
||||
}
|
||||
|
|
|
@ -2948,6 +2948,8 @@ public:
|
|||
virtual mozilla::dom::FlashClassification DocumentFlashClassification() = 0;
|
||||
virtual bool IsThirdParty() = 0;
|
||||
|
||||
bool IsScopedStyleEnabled();
|
||||
|
||||
protected:
|
||||
bool GetUseCounter(mozilla::UseCounter aUseCounter)
|
||||
{
|
||||
|
@ -3271,6 +3273,10 @@ protected:
|
|||
|
||||
bool mIsContentDocument : 1;
|
||||
|
||||
// Whether <style scoped> support is enabled in this document.
|
||||
enum { eScopedStyle_Unknown, eScopedStyle_Disabled, eScopedStyle_Enabled };
|
||||
unsigned int mIsScopedStyleEnabled : 2;
|
||||
|
||||
// Compatibility mode
|
||||
nsCompatibility mCompatMode;
|
||||
|
||||
|
|
|
@ -224,7 +224,8 @@ IsScopedStyleElement(nsIContent* aContent)
|
|||
// if it is scoped.
|
||||
return (aContent->IsHTMLElement(nsGkAtoms::style) ||
|
||||
aContent->IsSVGElement(nsGkAtoms::style)) &&
|
||||
aContent->HasAttr(kNameSpaceID_None, nsGkAtoms::scoped);
|
||||
aContent->HasAttr(kNameSpaceID_None, nsGkAtoms::scoped) &&
|
||||
aContent->OwnerDoc()->IsScopedStyleEnabled();
|
||||
}
|
||||
|
||||
static bool
|
||||
|
|
|
@ -2512,9 +2512,14 @@ nsresult HTMLMediaElement::LoadResource()
|
|||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
ChangeDelayLoadStatus(false);
|
||||
RefPtr<MediaResource> resource =
|
||||
MediaSourceDecoder::CreateResource(mMediaSource->GetPrincipal());
|
||||
return FinishDecoderSetup(decoder, resource, nullptr);
|
||||
nsresult rv = decoder->Load(mMediaSource->GetPrincipal());
|
||||
if (NS_FAILED(rv)) {
|
||||
decoder->Shutdown();
|
||||
LOG(LogLevel::Debug,
|
||||
("%p Failed to load for decoder %p", this, decoder.get()));
|
||||
return rv;
|
||||
}
|
||||
return FinishDecoderSetup(decoder);
|
||||
}
|
||||
|
||||
RefPtr<ChannelLoader> loader = new ChannelLoader;
|
||||
|
@ -4650,16 +4655,15 @@ HTMLMediaElement::InitializeDecoderAsClone(ChannelMediaDecoder* aOriginal)
|
|||
|
||||
LOG(LogLevel::Debug, ("%p Cloned decoder %p from %p", this, decoder.get(), aOriginal));
|
||||
|
||||
RefPtr<MediaResource> resource =
|
||||
originalResource->CloneData(decoder->GetResourceCallback());
|
||||
|
||||
if (!resource) {
|
||||
nsresult rv = decoder->Load(originalResource);
|
||||
if (NS_FAILED(rv)) {
|
||||
decoder->Shutdown();
|
||||
LOG(LogLevel::Debug, ("%p Failed to cloned stream for decoder %p", this, decoder.get()));
|
||||
return NS_ERROR_FAILURE;
|
||||
LOG(LogLevel::Debug,
|
||||
("%p Failed to load for decoder %p", this, decoder.get()));
|
||||
return rv;
|
||||
}
|
||||
|
||||
return FinishDecoderSetup(decoder, resource, nullptr);
|
||||
return FinishDecoderSetup(decoder);
|
||||
}
|
||||
|
||||
nsresult HTMLMediaElement::InitializeDecoderForChannel(nsIChannel* aChannel,
|
||||
|
@ -4700,32 +4704,32 @@ nsresult HTMLMediaElement::InitializeDecoderForChannel(nsIChannel* aChannel,
|
|||
|
||||
LOG(LogLevel::Debug, ("%p Created decoder %p for type %s", this, decoder.get(), mimeType.get()));
|
||||
|
||||
bool isPrivateBrowsing = NodePrincipal()->GetPrivateBrowsingId() > 0;
|
||||
RefPtr<MediaResource> resource = MediaResource::Create(
|
||||
decoder->GetResourceCallback(), aChannel, isPrivateBrowsing);
|
||||
|
||||
if (!resource) {
|
||||
decoder->Shutdown();
|
||||
return NS_ERROR_OUT_OF_MEMORY;
|
||||
}
|
||||
|
||||
if (mChannelLoader) {
|
||||
mChannelLoader->Done();
|
||||
mChannelLoader = nullptr;
|
||||
}
|
||||
|
||||
nsresult rv = FinishDecoderSetup(decoder, resource, aListener);
|
||||
bool isPrivateBrowsing = NodePrincipal()->GetPrivateBrowsingId() > 0;
|
||||
nsresult rv = decoder->Load(aChannel, isPrivateBrowsing, aListener);
|
||||
if (NS_FAILED(rv)) {
|
||||
decoder->Shutdown();
|
||||
LOG(LogLevel::Debug,
|
||||
("%p Failed to load for decoder %p", this, decoder.get()));
|
||||
return rv;
|
||||
}
|
||||
|
||||
rv = FinishDecoderSetup(decoder);
|
||||
if (NS_SUCCEEDED(rv)) {
|
||||
AddMediaElementToURITable();
|
||||
NS_ASSERTION(MediaElementTableCount(this, mLoadingSrc) == 1,
|
||||
"Media element should have single table entry if decode initialized");
|
||||
}
|
||||
|
||||
return rv;
|
||||
}
|
||||
|
||||
nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
MediaResource* aStream,
|
||||
nsIStreamListener** aListener)
|
||||
nsresult
|
||||
HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder)
|
||||
{
|
||||
ChangeNetworkState(nsIDOMHTMLMediaElement::NETWORK_LOADING);
|
||||
|
||||
|
@ -4736,9 +4740,6 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
|||
// here and Load(), they work.
|
||||
SetDecoder(aDecoder);
|
||||
|
||||
// Tell the decoder about its MediaResource now so things like principals are
|
||||
// available immediately.
|
||||
mDecoder->SetResource(aStream);
|
||||
// Notify the decoder of the initial activity status.
|
||||
NotifyDecoderActivityChanges();
|
||||
|
||||
|
@ -4746,13 +4747,6 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
|||
// can affect how we feed data to MediaStreams
|
||||
NotifyDecoderPrincipalChanged();
|
||||
|
||||
nsresult rv = aDecoder->Load(aListener);
|
||||
if (NS_FAILED(rv)) {
|
||||
ShutdownDecoder();
|
||||
LOG(LogLevel::Debug, ("%p Failed to load for decoder %p", this, aDecoder));
|
||||
return rv;
|
||||
}
|
||||
|
||||
for (OutputMediaStream& ms : mOutputStreams) {
|
||||
if (ms.mCapturingMediaStream) {
|
||||
MOZ_ASSERT(!ms.mCapturingDecoder);
|
||||
|
@ -4790,6 +4784,7 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
|
|||
// This will also do an AddRemoveSelfReference.
|
||||
NotifyOwnerDocumentActivityChanged();
|
||||
|
||||
nsresult rv = NS_OK;
|
||||
if (!mPaused) {
|
||||
SetPlayedOrSeeked(true);
|
||||
if (!mPausedForInactiveDocumentOrChannel) {
|
||||
|
|
|
@ -958,9 +958,7 @@ protected:
|
|||
* Finish setting up the decoder after Load() has been called on it.
|
||||
* Called by InitializeDecoderForChannel/InitializeDecoderAsClone.
|
||||
*/
|
||||
nsresult FinishDecoderSetup(MediaDecoder* aDecoder,
|
||||
MediaResource* aStream,
|
||||
nsIStreamListener **aListener);
|
||||
nsresult FinishDecoderSetup(MediaDecoder* aDecoder);
|
||||
|
||||
/**
|
||||
* Call this after setting up mLoadingSrc and mDecoder.
|
||||
|
|
|
@ -179,7 +179,8 @@ HTMLStyleElement::AfterSetAttr(int32_t aNameSpaceID, nsIAtom* aName,
|
|||
aName == nsGkAtoms::media ||
|
||||
aName == nsGkAtoms::type) {
|
||||
UpdateStyleSheetInternal(nullptr, nullptr, true);
|
||||
} else if (aName == nsGkAtoms::scoped) {
|
||||
} else if (aName == nsGkAtoms::scoped &&
|
||||
OwnerDoc()->IsScopedStyleEnabled()) {
|
||||
bool isScoped = aValue;
|
||||
UpdateStyleSheetScopedness(isScoped);
|
||||
}
|
||||
|
@ -242,7 +243,8 @@ HTMLStyleElement::GetStyleSheetInfo(nsAString& aTitle,
|
|||
|
||||
GetAttr(kNameSpaceID_None, nsGkAtoms::type, aType);
|
||||
|
||||
*aIsScoped = HasAttr(kNameSpaceID_None, nsGkAtoms::scoped);
|
||||
*aIsScoped = HasAttr(kNameSpaceID_None, nsGkAtoms::scoped) &&
|
||||
OwnerDoc()->IsScopedStyleEnabled();
|
||||
|
||||
nsAutoString mimeType;
|
||||
nsAutoString notUsed;
|
||||
|
|
|
@ -29,11 +29,13 @@ reflectString({
|
|||
attribute: "type"
|
||||
});
|
||||
|
||||
if (SpecialPowers.getBoolPref("layout.css.scoped-style.enabled")) {
|
||||
// .scoped
|
||||
reflectBoolean({
|
||||
element: e,
|
||||
attribute: "scoped"
|
||||
});
|
||||
}
|
||||
|
||||
</script>
|
||||
</pre>
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
#include "ChannelMediaDecoder.h"
|
||||
#include "MediaResource.h"
|
||||
#include "MediaShutdownManager.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
|
@ -162,4 +163,67 @@ ChannelMediaDecoder::Shutdown()
|
|||
MediaDecoder::Shutdown();
|
||||
}
|
||||
|
||||
nsresult
|
||||
ChannelMediaDecoder::OpenResource(nsIStreamListener** aStreamListener)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
if (aStreamListener) {
|
||||
*aStreamListener = nullptr;
|
||||
}
|
||||
return mResource->Open(aStreamListener);
|
||||
}
|
||||
|
||||
nsresult
|
||||
ChannelMediaDecoder::Load(nsIChannel* aChannel,
|
||||
bool aIsPrivateBrowsing,
|
||||
nsIStreamListener** aStreamListener)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(!mResource);
|
||||
|
||||
mResource =
|
||||
MediaResource::Create(mResourceCallback, aChannel, aIsPrivateBrowsing);
|
||||
if (!mResource) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
nsresult rv = MediaShutdownManager::Instance().Register(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) {
|
||||
return rv;
|
||||
}
|
||||
|
||||
rv = OpenResource(aStreamListener);
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
SetStateMachine(CreateStateMachine());
|
||||
NS_ENSURE_TRUE(GetStateMachine(), NS_ERROR_FAILURE);
|
||||
|
||||
return InitializeStateMachine();
|
||||
}
|
||||
|
||||
nsresult
|
||||
ChannelMediaDecoder::Load(MediaResource* aOriginal)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(!mResource);
|
||||
|
||||
mResource = aOriginal->CloneData(mResourceCallback);
|
||||
if (!mResource) {
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
nsresult rv = MediaShutdownManager::Instance().Register(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) {
|
||||
return rv;
|
||||
}
|
||||
|
||||
rv = OpenResource(nullptr);
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
SetStateMachine(CreateStateMachine());
|
||||
NS_ENSURE_TRUE(GetStateMachine(), NS_ERROR_FAILURE);
|
||||
|
||||
return InitializeStateMachine();
|
||||
}
|
||||
|
||||
} // namespace mozilla
|
||||
|
|
|
@ -10,6 +10,9 @@
|
|||
#include "MediaDecoder.h"
|
||||
#include "MediaResourceCallback.h"
|
||||
|
||||
class nsIChannel;
|
||||
class nsIStreamListener;
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
class ChannelMediaDecoder : public MediaDecoder
|
||||
|
@ -54,18 +57,19 @@ class ChannelMediaDecoder : public MediaDecoder
|
|||
public:
|
||||
explicit ChannelMediaDecoder(MediaDecoderInit& aInit);
|
||||
|
||||
// Return a callback object used to register with MediaResource to receive
|
||||
// notifications.
|
||||
MediaResourceCallback* GetResourceCallback() const
|
||||
{
|
||||
return mResourceCallback;
|
||||
}
|
||||
|
||||
void Shutdown() override;
|
||||
|
||||
// Create a new decoder of the same type as this one.
|
||||
// Subclasses must implement this.
|
||||
virtual ChannelMediaDecoder* Clone(MediaDecoderInit& aInit) = 0;
|
||||
|
||||
nsresult Load(nsIChannel* aChannel,
|
||||
bool aIsPrivateBrowsing,
|
||||
nsIStreamListener** aStreamListener);
|
||||
nsresult Load(MediaResource* aOriginal);
|
||||
|
||||
private:
|
||||
nsresult OpenResource(nsIStreamListener** aStreamListener);
|
||||
};
|
||||
|
||||
} // namespace mozilla
|
||||
|
|
|
@ -452,36 +452,6 @@ MediaDecoder::FinishShutdown()
|
|||
MediaShutdownManager::Instance().Unregister(this);
|
||||
}
|
||||
|
||||
nsresult
|
||||
MediaDecoder::OpenResource(nsIStreamListener** aStreamListener)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
if (aStreamListener) {
|
||||
*aStreamListener = nullptr;
|
||||
}
|
||||
return mResource->Open(aStreamListener);
|
||||
}
|
||||
|
||||
nsresult
|
||||
MediaDecoder::Load(nsIStreamListener** aStreamListener)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(mResource, "Can't load without a MediaResource");
|
||||
|
||||
nsresult rv = MediaShutdownManager::Instance().Register(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) {
|
||||
return rv;
|
||||
}
|
||||
|
||||
rv = OpenResource(aStreamListener);
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
SetStateMachine(CreateStateMachine());
|
||||
NS_ENSURE_TRUE(GetStateMachine(), NS_ERROR_FAILURE);
|
||||
|
||||
return InitializeStateMachine();
|
||||
}
|
||||
|
||||
nsresult
|
||||
MediaDecoder::InitializeStateMachine()
|
||||
{
|
||||
|
|
|
@ -31,7 +31,6 @@
|
|||
#include "nsISupports.h"
|
||||
#include "nsITimer.h"
|
||||
|
||||
class nsIStreamListener;
|
||||
class nsIPrincipal;
|
||||
|
||||
namespace mozilla {
|
||||
|
@ -122,14 +121,6 @@ public:
|
|||
// to prevent further calls into the decoder.
|
||||
void NotifyXPCOMShutdown();
|
||||
|
||||
// Start downloading the media. Decode the downloaded data up to the
|
||||
// point of the first frame of data.
|
||||
// This is called at most once per decoder, after Init().
|
||||
virtual nsresult Load(nsIStreamListener** aListener);
|
||||
|
||||
// Called in |Load| to open mResource.
|
||||
nsresult OpenResource(nsIStreamListener** aStreamListener);
|
||||
|
||||
// Called if the media file encounters a network error.
|
||||
void NetworkError();
|
||||
|
||||
|
@ -145,11 +136,6 @@ public:
|
|||
{
|
||||
return mResource;
|
||||
}
|
||||
void SetResource(MediaResource* aResource)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
mResource = aResource;
|
||||
}
|
||||
|
||||
// Return the principal of the current URI being played or downloaded.
|
||||
virtual already_AddRefed<nsIPrincipal> GetCurrentPrincipal();
|
||||
|
|
|
@ -520,12 +520,16 @@ MP4TrackDemuxer::GetSamples(int32_t aNumSamples)
|
|||
}
|
||||
for (const auto& sample : samples->mSamples) {
|
||||
// Collect telemetry from h264 Annex B SPS.
|
||||
if (mNeedSPSForTelemetry && mp4_demuxer::AnnexB::HasSPS(sample)) {
|
||||
if (mNeedSPSForTelemetry) {
|
||||
RefPtr<MediaByteBuffer> extradata =
|
||||
mp4_demuxer::AnnexB::ExtractExtraData(sample);
|
||||
mp4_demuxer::H264::ExtractExtraData(sample);
|
||||
if (mp4_demuxer::H264::HasSPS(extradata)) {
|
||||
RefPtr<MediaByteBuffer> extradata =
|
||||
mp4_demuxer::H264::ExtractExtraData(sample);
|
||||
mNeedSPSForTelemetry = AccumulateSPSTelemetry(extradata);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (mNextKeyframeTime.isNothing()
|
||||
|| samples->mSamples.LastElement()->mTime
|
||||
|
|
|
@ -939,7 +939,7 @@ ChromiumCDMParent::InitializeVideoDecoder(
|
|||
|
||||
mMaxRefFrames =
|
||||
(aConfig.mCodec() == cdm::VideoDecoderConfig::kCodecH264)
|
||||
? mp4_demuxer::AnnexB::HasSPS(aInfo.mExtraData)
|
||||
? mp4_demuxer::H264::HasSPS(aInfo.mExtraData)
|
||||
? mp4_demuxer::H264::ComputeMaxRefFrames(aInfo.mExtraData)
|
||||
: 16
|
||||
: 0;
|
||||
|
|
|
@ -44,11 +44,13 @@ MediaSourceDecoder::CreateStateMachine()
|
|||
}
|
||||
|
||||
nsresult
|
||||
MediaSourceDecoder::Load(nsIStreamListener**)
|
||||
MediaSourceDecoder::Load(nsIPrincipal* aPrincipal)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(!GetStateMachine());
|
||||
|
||||
mResource = new MediaSourceResource(aPrincipal);
|
||||
|
||||
nsresult rv = MediaShutdownManager::Instance().Register(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) {
|
||||
return rv;
|
||||
|
@ -168,13 +170,6 @@ MediaSourceDecoder::Shutdown()
|
|||
MediaDecoder::Shutdown();
|
||||
}
|
||||
|
||||
/*static*/
|
||||
already_AddRefed<MediaResource>
|
||||
MediaSourceDecoder::CreateResource(nsIPrincipal* aPrincipal)
|
||||
{
|
||||
return RefPtr<MediaResource>(new MediaSourceResource(aPrincipal)).forget();
|
||||
}
|
||||
|
||||
void
|
||||
MediaSourceDecoder::AttachMediaSource(dom::MediaSource* aMediaSource)
|
||||
{
|
||||
|
|
|
@ -38,14 +38,12 @@ public:
|
|||
explicit MediaSourceDecoder(MediaDecoderInit& aInit);
|
||||
|
||||
MediaDecoderStateMachine* CreateStateMachine() override;
|
||||
nsresult Load(nsIStreamListener**) override;
|
||||
nsresult Load(nsIPrincipal* aPrincipal);
|
||||
media::TimeIntervals GetSeekable() override;
|
||||
media::TimeIntervals GetBuffered() override;
|
||||
|
||||
void Shutdown() override;
|
||||
|
||||
static already_AddRefed<MediaResource> CreateResource(nsIPrincipal* aPrincipal = nullptr);
|
||||
|
||||
void AttachMediaSource(dom::MediaSource* aMediaSource);
|
||||
void DetachMediaSource();
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@ DummyMediaDataDecoder::DummyMediaDataDecoder(UniquePtr<DummyDataCreator>&& aCrea
|
|||
, mIsH264(MP4Decoder::IsH264(aParams.mConfig.mMimeType))
|
||||
, mMaxRefFrames(
|
||||
mIsH264
|
||||
? mp4_demuxer::AnnexB::HasSPS(aParams.VideoConfig().mExtraData)
|
||||
? mp4_demuxer::H264::HasSPS(aParams.VideoConfig().mExtraData)
|
||||
? mp4_demuxer::H264::ComputeMaxRefFrames(aParams.VideoConfig().mExtraData)
|
||||
: 16
|
||||
: 0)
|
||||
|
|
|
@ -35,7 +35,7 @@ H264Converter::H264Converter(PlatformDecoderModule* aPDM,
|
|||
{
|
||||
CreateDecoder(mOriginalConfig, aParams.mDiagnostics);
|
||||
if (mDecoder) {
|
||||
MOZ_ASSERT(mp4_demuxer::AnnexB::HasSPS(mOriginalConfig.mExtraData));
|
||||
MOZ_ASSERT(mp4_demuxer::H264::HasSPS(mOriginalConfig.mExtraData));
|
||||
// The video metadata contains out of band SPS/PPS (AVC1) store it.
|
||||
mOriginalExtraData = mOriginalConfig.mExtraData;
|
||||
}
|
||||
|
@ -240,7 +240,7 @@ nsresult
|
|||
H264Converter::CreateDecoder(const VideoInfo& aConfig,
|
||||
DecoderDoctorDiagnostics* aDiagnostics)
|
||||
{
|
||||
if (!mp4_demuxer::AnnexB::HasSPS(aConfig.mExtraData)) {
|
||||
if (!mp4_demuxer::H264::HasSPS(aConfig.mExtraData)) {
|
||||
// nothing found yet, will try again later
|
||||
return NS_ERROR_NOT_INITIALIZED;
|
||||
}
|
||||
|
@ -290,10 +290,10 @@ nsresult
|
|||
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
|
||||
{
|
||||
RefPtr<MediaByteBuffer> extra_data =
|
||||
mp4_demuxer::AnnexB::ExtractExtraData(aSample);
|
||||
bool inbandExtradata = mp4_demuxer::AnnexB::HasSPS(extra_data);
|
||||
mp4_demuxer::H264::ExtractExtraData(aSample);
|
||||
bool inbandExtradata = mp4_demuxer::H264::HasSPS(extra_data);
|
||||
if (!inbandExtradata &&
|
||||
!mp4_demuxer::AnnexB::HasSPS(mCurrentConfig.mExtraData)) {
|
||||
!mp4_demuxer::H264::HasSPS(mCurrentConfig.mExtraData)) {
|
||||
return NS_ERROR_NOT_INITIALIZED;
|
||||
}
|
||||
|
||||
|
@ -393,8 +393,8 @@ nsresult
|
|||
H264Converter::CheckForSPSChange(MediaRawData* aSample)
|
||||
{
|
||||
RefPtr<MediaByteBuffer> extra_data =
|
||||
mp4_demuxer::AnnexB::ExtractExtraData(aSample);
|
||||
if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
|
||||
mp4_demuxer::H264::ExtractExtraData(aSample);
|
||||
if (!mp4_demuxer::H264::HasSPS(extra_data)) {
|
||||
MOZ_ASSERT(mCanRecycleDecoder.isSome());
|
||||
if (!*mCanRecycleDecoder) {
|
||||
// If the decoder can't be recycled, the out of band extradata will never
|
||||
|
@ -406,14 +406,14 @@ H264Converter::CheckForSPSChange(MediaRawData* aSample)
|
|||
// We now check if the out of band one has changed.
|
||||
// This scenario can only occur on Android with devices that can recycle a
|
||||
// decoder.
|
||||
if (!mp4_demuxer::AnnexB::HasSPS(aSample->mExtraData) ||
|
||||
mp4_demuxer::AnnexB::CompareExtraData(aSample->mExtraData,
|
||||
if (!mp4_demuxer::H264::HasSPS(aSample->mExtraData) ||
|
||||
mp4_demuxer::H264::CompareExtraData(aSample->mExtraData,
|
||||
mOriginalExtraData)) {
|
||||
return NS_OK;
|
||||
}
|
||||
extra_data = mOriginalExtraData = aSample->mExtraData;
|
||||
}
|
||||
if (mp4_demuxer::AnnexB::CompareExtraData(extra_data,
|
||||
if (mp4_demuxer::H264::CompareExtraData(extra_data,
|
||||
mCurrentConfig.mExtraData)) {
|
||||
return NS_OK;
|
||||
}
|
||||
|
|
|
@ -104,7 +104,8 @@ SVGStyleElement::SetAttr(int32_t aNameSpaceID, nsIAtom* aName,
|
|||
aName == nsGkAtoms::media ||
|
||||
aName == nsGkAtoms::type) {
|
||||
UpdateStyleSheetInternal(nullptr, nullptr, true);
|
||||
} else if (aName == nsGkAtoms::scoped) {
|
||||
} else if (aName == nsGkAtoms::scoped &&
|
||||
OwnerDoc()->IsScopedStyleEnabled()) {
|
||||
UpdateStyleSheetScopedness(true);
|
||||
}
|
||||
}
|
||||
|
@ -123,7 +124,8 @@ SVGStyleElement::UnsetAttr(int32_t aNameSpaceID, nsIAtom* aAttribute,
|
|||
aAttribute == nsGkAtoms::media ||
|
||||
aAttribute == nsGkAtoms::type) {
|
||||
UpdateStyleSheetInternal(nullptr, nullptr, true);
|
||||
} else if (aAttribute == nsGkAtoms::scoped) {
|
||||
} else if (aAttribute == nsGkAtoms::scoped &&
|
||||
OwnerDoc()->IsScopedStyleEnabled()) {
|
||||
UpdateStyleSheetScopedness(false);
|
||||
}
|
||||
}
|
||||
|
@ -290,7 +292,8 @@ SVGStyleElement::GetStyleSheetInfo(nsAString& aTitle,
|
|||
aType.AssignLiteral("text/css");
|
||||
}
|
||||
|
||||
*aIsScoped = HasAttr(kNameSpaceID_None, nsGkAtoms::scoped);
|
||||
*aIsScoped = HasAttr(kNameSpaceID_None, nsGkAtoms::scoped) &&
|
||||
OwnerDoc()->IsScopedStyleEnabled();
|
||||
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -16,7 +16,7 @@ interface HTMLStyleElement : HTMLElement {
|
|||
attribute DOMString media;
|
||||
[SetterThrows, Pure]
|
||||
attribute DOMString type;
|
||||
[SetterThrows, Pure]
|
||||
[SetterThrows, Pure, Pref="layout.css.scoped-style.enabled"]
|
||||
attribute boolean scoped;
|
||||
};
|
||||
HTMLStyleElement implements LinkStyle;
|
||||
|
|
|
@ -19,7 +19,7 @@ interface SVGStyleElement : SVGElement {
|
|||
attribute DOMString media;
|
||||
[SetterThrows]
|
||||
attribute DOMString title;
|
||||
[SetterThrows]
|
||||
[SetterThrows, Pref="layout.css.scoped-style.enabled"]
|
||||
attribute boolean scoped;
|
||||
};
|
||||
SVGStyleElement implements LinkStyle;
|
||||
|
|
|
@ -96,6 +96,11 @@ public:
|
|||
mIdNamespace = aIdNamespace;
|
||||
}
|
||||
|
||||
WrImageKey GetNextImageKey()
|
||||
{
|
||||
return WrImageKey{ GetNamespace(), GetNextResourceId() };
|
||||
}
|
||||
|
||||
void PushGlyphs(wr::DisplayListBuilder& aBuilder, const nsTArray<GlyphArray>& aGlyphs,
|
||||
gfx::ScaledFont* aFont, const StackingContextHelper& aSc,
|
||||
const LayerRect& aBounds, const LayerRect& aClip);
|
||||
|
|
|
@ -74,6 +74,8 @@ WebRenderLayerManager::Destroy()
|
|||
return;
|
||||
}
|
||||
|
||||
mWidget->CleanupWebRenderWindowOverlay(WrBridge());
|
||||
|
||||
LayerManager::Destroy();
|
||||
DiscardImages();
|
||||
DiscardCompositorAnimations();
|
||||
|
@ -203,6 +205,7 @@ WebRenderLayerManager::EndTransactionInternal(DrawPaintedLayerCallback aCallback
|
|||
WrSize contentSize { (float)size.width, (float)size.height };
|
||||
wr::DisplayListBuilder builder(WrBridge()->GetPipeline(), contentSize);
|
||||
WebRenderLayer::ToWebRenderLayer(mRoot)->RenderLayer(builder, sc);
|
||||
mWidget->AddWindowOverlayWebRenderCommands(WrBridge(), builder);
|
||||
WrBridge()->ClearReadLocks();
|
||||
|
||||
// We can't finish this transaction so return. This usually
|
||||
|
|
|
@ -36,7 +36,7 @@ UNIFIED_SOURCES += [
|
|||
]
|
||||
|
||||
# Build OpenVR on Windows, Linux, and macOS desktop targets
|
||||
if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('cocoa', 'uikit', 'windows', 'gtk2', 'gtk3'):
|
||||
if CONFIG['OS_TARGET'] in ('WINNT', 'Linux', 'Darwin'):
|
||||
DIRS += [
|
||||
'openvr',
|
||||
]
|
||||
|
@ -44,7 +44,7 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('cocoa', 'uikit', 'windows', 'gtk2', 'gtk3')
|
|||
'gfxVROpenVR.cpp',
|
||||
]
|
||||
|
||||
if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows':
|
||||
if CONFIG['OS_TARGET'] == 'WINNT':
|
||||
SOURCES += [
|
||||
'gfxVROculus.cpp',
|
||||
]
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
This directory contains files from the OpenVR SDK, version 1.0.6.
|
||||
This directory contains files from the OpenVR SDK, version 1.0.8.
|
||||
|
||||
This SDK contains the OpenVR API interface headers and functions to load the
|
||||
OpenVR runtime libraries which actually implement the functionality. The
|
||||
|
|
|
@ -27,17 +27,7 @@ struct ID3D12CommandQueue;
|
|||
|
||||
namespace vr
|
||||
{
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// The 32-bit version of gcc has the alignment requirement for uint64 and double set to
|
||||
// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned.
|
||||
// The 64-bit version of gcc has the alignment requirement for these types set to
|
||||
// 8 meaning that unless we use #pragma pack(4) our structures will get bigger.
|
||||
// The 64-bit structure packing has to match the 32-bit structure packing for each platform.
|
||||
#pragma pack( push, 4 )
|
||||
#else
|
||||
#pragma pack( push, 8 )
|
||||
#endif
|
||||
|
||||
typedef void* glSharedTextureHandle_t;
|
||||
typedef int32_t glInt_t;
|
||||
|
@ -120,7 +110,7 @@ enum ETextureType
|
|||
TextureType_DirectX = 0, // Handle is an ID3D11Texture
|
||||
TextureType_OpenGL = 1, // Handle is an OpenGL texture name or an OpenGL render buffer name, depending on submit flags
|
||||
TextureType_Vulkan = 2, // Handle is a pointer to a VRVulkanTextureData_t structure
|
||||
TextureType_IOSurface = 3, // Handle is a macOS cross-process-sharable IOSurface
|
||||
TextureType_IOSurface = 3, // Handle is a macOS cross-process-sharable IOSurfaceRef
|
||||
TextureType_DirectX12 = 4, // Handle is a pointer to a D3D12TextureData_t structure
|
||||
};
|
||||
|
||||
|
@ -153,6 +143,9 @@ enum ETrackingResult
|
|||
TrackingResult_Running_OutOfRange = 201,
|
||||
};
|
||||
|
||||
typedef uint32_t DriverId_t;
|
||||
static const uint32_t k_nDriverNone = 0xFFFFFFFF;
|
||||
|
||||
static const uint32_t k_unMaxDriverDebugResponseSize = 32768;
|
||||
|
||||
/** Used to pass device IDs to API calls */
|
||||
|
@ -170,6 +163,7 @@ enum ETrackedDeviceClass
|
|||
TrackedDeviceClass_Controller = 2, // Tracked controllers
|
||||
TrackedDeviceClass_GenericTracker = 3, // Generic trackers, similar to controllers
|
||||
TrackedDeviceClass_TrackingReference = 4, // Camera and base stations that serve as tracking reference points
|
||||
TrackedDeviceClass_DisplayRedirect = 5, // Accessories that aren't necessarily tracked themselves, but may redirect video output from other tracked devices
|
||||
};
|
||||
|
||||
|
||||
|
@ -272,7 +266,7 @@ enum ETrackedDeviceProperty
|
|||
Prop_Firmware_ForceUpdateRequired_Bool = 1032,
|
||||
Prop_ViveSystemButtonFixRequired_Bool = 1033,
|
||||
Prop_ParentDriver_Uint64 = 1034,
|
||||
|
||||
Prop_ResourceRoot_String = 1035,
|
||||
|
||||
// Properties that are unique to TrackedDeviceClass_HMD
|
||||
Prop_ReportsTimeSinceVSync_Bool = 2000,
|
||||
|
@ -317,7 +311,10 @@ enum ETrackedDeviceProperty
|
|||
Prop_DisplayMCImageHeight_Int32 = 2039,
|
||||
Prop_DisplayMCImageNumChannels_Int32 = 2040,
|
||||
Prop_DisplayMCImageData_Binary = 2041,
|
||||
Prop_UsesDriverDirectMode_Bool = 2042,
|
||||
Prop_SecondsFromPhotonsToVblank_Float = 2042,
|
||||
Prop_DriverDirectModeSendsVsyncEvents_Bool = 2043,
|
||||
Prop_DisplayDebugMode_Bool = 2044,
|
||||
Prop_GraphicsAdapterLuid_Uint64 = 2045,
|
||||
|
||||
// Properties that are unique to TrackedDeviceClass_Controller
|
||||
Prop_AttachedDeviceId_String = 3000,
|
||||
|
@ -339,15 +336,15 @@ enum ETrackedDeviceProperty
|
|||
Prop_ModeLabel_String = 4006,
|
||||
|
||||
// Properties that are used for user interface like icons names
|
||||
Prop_IconPathName_String = 5000, // usually a directory named "icons"
|
||||
Prop_NamedIconPathDeviceOff_String = 5001, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceSearching_String = 5002, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceSearchingAlert_String = 5003, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceReady_String = 5004, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceReadyAlert_String = 5005, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceNotReady_String = 5006, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceStandby_String = 5007, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceAlertLow_String = 5008, // PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_IconPathName_String = 5000, // DEPRECATED. Value not referenced. Now expected to be part of icon path properties.
|
||||
Prop_NamedIconPathDeviceOff_String = 5001, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceSearching_String = 5002, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceSearchingAlert_String = 5003, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceReady_String = 5004, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceReadyAlert_String = 5005, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceNotReady_String = 5006, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceStandby_String = 5007, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
Prop_NamedIconPathDeviceAlertLow_String = 5008, // {driver}/icons/icon_filename - PNG for static icon, or GIF for animation, 50x32 for headsets and 32x32 for others
|
||||
|
||||
// Properties that are used by helpers, but are opaque to applications
|
||||
Prop_DisplayHiddenArea_Binary_Start = 5100,
|
||||
|
@ -356,6 +353,11 @@ enum ETrackedDeviceProperty
|
|||
// Properties that are unique to drivers
|
||||
Prop_UserConfigPath_String = 6000,
|
||||
Prop_InstallPath_String = 6001,
|
||||
Prop_HasDisplayComponent_Bool = 6002,
|
||||
Prop_HasControllerComponent_Bool = 6003,
|
||||
Prop_HasCameraComponent_Bool = 6004,
|
||||
Prop_HasDriverDirectModeComponent_Bool = 6005,
|
||||
Prop_HasVirtualDisplayComponent_Bool = 6006,
|
||||
|
||||
// Vendors are free to expose private debug data in this reserved region
|
||||
Prop_VendorSpecific_Reserved_Start = 10000,
|
||||
|
@ -543,6 +545,7 @@ enum EVREventType
|
|||
VREvent_ModelSkinSettingsHaveChanged = 853,
|
||||
VREvent_EnvironmentSettingsHaveChanged = 854,
|
||||
VREvent_PowerSettingsHaveChanged = 855,
|
||||
VREvent_EnableHomeAppSettingsHaveChanged = 856,
|
||||
|
||||
VREvent_StatusUpdate = 900,
|
||||
|
||||
|
@ -561,6 +564,8 @@ enum EVREventType
|
|||
VREvent_ApplicationListUpdated = 1303,
|
||||
VREvent_ApplicationMimeTypeLoad = 1304,
|
||||
VREvent_ApplicationTransitionNewAppLaunchComplete = 1305,
|
||||
VREvent_ProcessConnected = 1306,
|
||||
VREvent_ProcessDisconnected = 1307,
|
||||
|
||||
VREvent_Compositor_MirrorWindowShown = 1400,
|
||||
VREvent_Compositor_MirrorWindowHidden = 1401,
|
||||
|
@ -586,13 +591,17 @@ enum EVREventType
|
|||
|
||||
|
||||
/** Level of Hmd activity */
|
||||
// UserInteraction_Timeout means the device is in the process of timing out.
|
||||
// InUse = ( k_EDeviceActivityLevel_UserInteraction || k_EDeviceActivityLevel_UserInteraction_Timeout )
|
||||
// VREvent_TrackedDeviceUserInteractionStarted fires when the devices transitions from Standby -> UserInteraction or Idle -> UserInteraction.
|
||||
// VREvent_TrackedDeviceUserInteractionEnded fires when the devices transitions from UserInteraction_Timeout -> Idle
|
||||
enum EDeviceActivityLevel
|
||||
{
|
||||
k_EDeviceActivityLevel_Unknown = -1,
|
||||
k_EDeviceActivityLevel_Idle = 0,
|
||||
k_EDeviceActivityLevel_UserInteraction = 1,
|
||||
k_EDeviceActivityLevel_UserInteraction_Timeout = 2,
|
||||
k_EDeviceActivityLevel_Standby = 3,
|
||||
k_EDeviceActivityLevel_Idle = 0, // No activity for the last 10 seconds
|
||||
k_EDeviceActivityLevel_UserInteraction = 1, // Activity (movement or prox sensor) is happening now
|
||||
k_EDeviceActivityLevel_UserInteraction_Timeout = 2, // No activity for the last 0.5 seconds
|
||||
k_EDeviceActivityLevel_Standby = 3, // Idle for at least 5 seconds (configurable in Settings -> Power Management)
|
||||
};
|
||||
|
||||
|
||||
|
@ -800,6 +809,13 @@ typedef union
|
|||
VREvent_Property_t property;
|
||||
} VREvent_Data_t;
|
||||
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// This structure was originally defined mis-packed on Linux, preserved for
|
||||
// compatibility.
|
||||
#pragma pack( push, 4 )
|
||||
#endif
|
||||
|
||||
/** An event posted by the server to all running applications */
|
||||
struct VREvent_t
|
||||
{
|
||||
|
@ -810,6 +826,9 @@ struct VREvent_t
|
|||
VREvent_Data_t data;
|
||||
};
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
#pragma pack( pop )
|
||||
#endif
|
||||
|
||||
/** The mesh to draw into the stencil (or depth) buffer to perform
|
||||
* early stencil (or depth) kills of pixels that will never appear on the HMD.
|
||||
|
@ -858,6 +877,12 @@ struct VRControllerAxis_t
|
|||
static const uint32_t k_unControllerStateAxisCount = 5;
|
||||
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// This structure was originally defined mis-packed on Linux, preserved for
|
||||
// compatibility.
|
||||
#pragma pack( push, 4 )
|
||||
#endif
|
||||
|
||||
/** Holds all the state of a controller at one moment in time. */
|
||||
struct VRControllerState001_t
|
||||
{
|
||||
|
@ -872,6 +897,9 @@ struct VRControllerState001_t
|
|||
// Axis data for the controller's analog inputs
|
||||
VRControllerAxis_t rAxis[ k_unControllerStateAxisCount ];
|
||||
};
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
#pragma pack( pop )
|
||||
#endif
|
||||
|
||||
|
||||
typedef VRControllerState001_t VRControllerState_t;
|
||||
|
@ -954,6 +982,7 @@ enum EVRApplicationType
|
|||
// interfaces (like IVRSettings and IVRApplications) but not hardware.
|
||||
VRApplication_VRMonitor = 5, // Reserved for vrmonitor
|
||||
VRApplication_SteamWatchdog = 6,// Reserved for Steam
|
||||
VRApplication_Bootstrapper = 7, // Start up SteamVR
|
||||
|
||||
VRApplication_Max
|
||||
};
|
||||
|
@ -1022,6 +1051,8 @@ enum EVRInitError
|
|||
VRInitError_Init_WatchdogDisabledInSettings = 132,
|
||||
VRInitError_Init_VRDashboardNotFound = 133,
|
||||
VRInitError_Init_VRDashboardStartupFailed = 134,
|
||||
VRInitError_Init_VRHomeNotFound = 135,
|
||||
VRInitError_Init_VRHomeStartupFailed = 136,
|
||||
|
||||
VRInitError_Driver_Failed = 200,
|
||||
VRInitError_Driver_Unknown = 201,
|
||||
|
@ -1052,6 +1083,7 @@ enum EVRInitError
|
|||
VRInitError_Compositor_FirmwareRequiresUpdate = 402,
|
||||
VRInitError_Compositor_OverlayInitFailed = 403,
|
||||
VRInitError_Compositor_ScreenshotsInitFailed = 404,
|
||||
VRInitError_Compositor_UnableToCreateDevice = 405,
|
||||
|
||||
VRInitError_VendorSpecific_UnableToConnectToOculusRuntime = 1000,
|
||||
|
||||
|
@ -1250,6 +1282,22 @@ public:
|
|||
*/
|
||||
virtual void GetDXGIOutputInfo( int32_t *pnAdapterIndex ) = 0;
|
||||
|
||||
/**
|
||||
* Returns platform- and texture-type specific adapter identification so that applications and the
|
||||
* compositor are creating textures and swap chains on the same GPU. If an error occurs the device
|
||||
* will be set to 0.
|
||||
* [D3D10/11/12 Only (D3D9 Not Supported)]
|
||||
* Returns the adapter LUID that identifies the GPU attached to the HMD. The user should
|
||||
* enumerate all adapters using IDXGIFactory::EnumAdapters and IDXGIAdapter::GetDesc to find
|
||||
* the adapter with the matching LUID, or use IDXGIFactory4::EnumAdapterByLuid.
|
||||
* The discovered IDXGIAdapter should be used to create the device and swap chain.
|
||||
* [Vulkan Only]
|
||||
* Returns the vk::PhysicalDevice that should be used by the application.
|
||||
* [macOS Only]
|
||||
* Returns an id<MTLDevice> that should be used by the application.
|
||||
*/
|
||||
virtual void GetOutputDevice( uint64_t *pnDevice, ETextureType textureType ) = 0;
|
||||
|
||||
// ------------------------------------
|
||||
// Display Mode methods
|
||||
// ------------------------------------
|
||||
|
@ -1466,7 +1514,7 @@ public:
|
|||
|
||||
};
|
||||
|
||||
static const char * const IVRSystem_Version = "IVRSystem_015";
|
||||
static const char * const IVRSystem_Version = "IVRSystem_016";
|
||||
|
||||
}
|
||||
|
||||
|
@ -1734,7 +1782,6 @@ namespace vr
|
|||
static const char * const k_pch_SteamVR_ForcedHmdKey_String = "forcedHmd";
|
||||
static const char * const k_pch_SteamVR_DisplayDebug_Bool = "displayDebug";
|
||||
static const char * const k_pch_SteamVR_DebugProcessPipe_String = "debugProcessPipe";
|
||||
static const char * const k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion";
|
||||
static const char * const k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX";
|
||||
static const char * const k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY";
|
||||
static const char * const k_pch_SteamVR_SendSystemButtonToAllApps_Bool= "sendSystemButtonToAllApps";
|
||||
|
@ -1755,7 +1802,7 @@ namespace vr
|
|||
static const char * const k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees";
|
||||
static const char * const k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement";
|
||||
static const char * const k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses";
|
||||
static const char * const k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier";
|
||||
static const char * const k_pch_SteamVR_SupersampleScale_Float = "supersampleScale";
|
||||
static const char * const k_pch_SteamVR_AllowAsyncReprojection_Bool = "allowAsyncReprojection";
|
||||
static const char * const k_pch_SteamVR_AllowReprojection_Bool = "allowInterleavedReprojection";
|
||||
static const char * const k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection";
|
||||
|
@ -1768,10 +1815,10 @@ namespace vr
|
|||
static const char * const k_pch_SteamVR_StartDashboardFromAppLaunch_Bool = "startDashboardFromAppLaunch";
|
||||
static const char * const k_pch_SteamVR_StartOverlayAppsFromDashboard_Bool = "startOverlayAppsFromDashboard";
|
||||
static const char * const k_pch_SteamVR_EnableHomeApp = "enableHomeApp";
|
||||
static const char * const k_pch_SteamVR_SetInitialDefaultHomeApp = "setInitialDefaultHomeApp";
|
||||
static const char * const k_pch_SteamVR_CycleBackgroundImageTimeSec_Int32 = "CycleBackgroundImageTimeSec";
|
||||
static const char * const k_pch_SteamVR_RetailDemo_Bool = "retailDemo";
|
||||
static const char * const k_pch_SteamVR_IpdOffset_Float = "ipdOffset";
|
||||
static const char * const k_pch_SteamVR_AllowSupersampleFiltering_Bool = "allowSupersampleFiltering";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// lighthouse keys
|
||||
|
@ -1829,6 +1876,7 @@ namespace vr
|
|||
static const char * const k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore";
|
||||
static const char * const k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit";
|
||||
static const char * const k_pch_Perf_TestData_Float = "perfTestData";
|
||||
static const char * const k_pch_Perf_LinuxGPUProfiling_Bool = "linuxGPUProfiling";
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// collision bounds keys
|
||||
|
@ -1895,16 +1943,7 @@ namespace vr
|
|||
namespace vr
|
||||
{
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// The 32-bit version of gcc has the alignment requirement for uint64 and double set to
|
||||
// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned.
|
||||
// The 64-bit version of gcc has the alignment requirement for these types set to
|
||||
// 8 meaning that unless we use #pragma pack(4) our structures will get bigger.
|
||||
// The 64-bit structure packing has to match the 32-bit structure packing for each platform.
|
||||
#pragma pack( push, 4 )
|
||||
#else
|
||||
#pragma pack( push, 8 )
|
||||
#endif
|
||||
|
||||
enum ChaperoneCalibrationState
|
||||
{
|
||||
|
@ -2065,16 +2104,7 @@ static const char * const IVRChaperoneSetup_Version = "IVRChaperoneSetup_005";
|
|||
namespace vr
|
||||
{
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// The 32-bit version of gcc has the alignment requirement for uint64 and double set to
|
||||
// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned.
|
||||
// The 64-bit version of gcc has the alignment requirement for these types set to
|
||||
// 8 meaning that unless we use #pragma pack(4) our structures will get bigger.
|
||||
// The 64-bit structure packing has to match the 32-bit structure packing for each platform.
|
||||
#pragma pack( push, 4 )
|
||||
#else
|
||||
#pragma pack( push, 8 )
|
||||
#endif
|
||||
|
||||
/** Errors that can occur with the VR compositor */
|
||||
enum EVRCompositorError
|
||||
|
@ -2090,6 +2120,7 @@ enum EVRCompositorError
|
|||
VRCompositorError_SharedTexturesNotSupported = 106,
|
||||
VRCompositorError_IndexOutOfRange = 107,
|
||||
VRCompositorError_AlreadySubmitted = 108,
|
||||
VRCompositorError_InvalidBounds = 109,
|
||||
};
|
||||
|
||||
const uint32_t VRCompositor_ReprojectionReason_Cpu = 0x01;
|
||||
|
@ -2350,16 +2381,7 @@ static const char * const IVRCompositor_Version = "IVRCompositor_020";
|
|||
namespace vr
|
||||
{
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// The 32-bit version of gcc has the alignment requirement for uint64 and double set to
|
||||
// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned.
|
||||
// The 64-bit version of gcc has the alignment requirement for these types set to
|
||||
// 8 meaning that unless we use #pragma pack(4) our structures will get bigger.
|
||||
// The 64-bit structure packing has to match the 32-bit structure packing for each platform.
|
||||
#pragma pack( push, 4 )
|
||||
#else
|
||||
#pragma pack( push, 8 )
|
||||
#endif
|
||||
|
||||
// Used for passing graphic data
|
||||
struct NotificationBitmap_t
|
||||
|
@ -2619,7 +2641,7 @@ namespace vr
|
|||
virtual EVROverlayError FindOverlay( const char *pchOverlayKey, VROverlayHandle_t * pOverlayHandle ) = 0;
|
||||
|
||||
/** Creates a new named overlay. All overlays start hidden and with default settings. */
|
||||
virtual EVROverlayError CreateOverlay( const char *pchOverlayKey, const char *pchOverlayFriendlyName, VROverlayHandle_t * pOverlayHandle ) = 0;
|
||||
virtual EVROverlayError CreateOverlay( const char *pchOverlayKey, const char *pchOverlayName, VROverlayHandle_t * pOverlayHandle ) = 0;
|
||||
|
||||
/** Destroys the specified overlay. When an application calls VR_Shutdown all overlays created by that app are
|
||||
* automatically destroyed. */
|
||||
|
@ -2644,6 +2666,9 @@ namespace vr
|
|||
* the terminating null character. k_unVROverlayMaxNameLength will be enough bytes to fit the string. */
|
||||
virtual uint32_t GetOverlayName( VROverlayHandle_t ulOverlayHandle, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, EVROverlayError *pError = 0L ) = 0;
|
||||
|
||||
/** set the name to use for this overlay */
|
||||
virtual EVROverlayError SetOverlayName( VROverlayHandle_t ulOverlayHandle, const char *pchName ) = 0;
|
||||
|
||||
/** Gets the raw image data from an overlay. Overlay image data is always returned as RGBA data, 4 bytes per pixel. If the buffer is not large enough, width and height
|
||||
* will be set and VROverlayError_ArrayTooSmall is returned. */
|
||||
virtual EVROverlayError GetOverlayImageData( VROverlayHandle_t ulOverlayHandle, void *pvBuffer, uint32_t unBufferSize, uint32_t *punWidth, uint32_t *punHeight ) = 0;
|
||||
|
@ -2652,7 +2677,6 @@ namespace vr
|
|||
* of the error enum value for all valid error codes */
|
||||
virtual const char *GetOverlayErrorNameFromEnum( EVROverlayError error ) = 0;
|
||||
|
||||
|
||||
// ---------------------------------------------
|
||||
// Overlay rendering methods
|
||||
// ---------------------------------------------
|
||||
|
@ -2729,6 +2753,13 @@ namespace vr
|
|||
/** Gets the part of the texture to use for the overlay. UV Min is the upper left corner and UV Max is the lower right corner. */
|
||||
virtual EVROverlayError GetOverlayTextureBounds( VROverlayHandle_t ulOverlayHandle, VRTextureBounds_t *pOverlayTextureBounds ) = 0;
|
||||
|
||||
/** Gets render model to draw behind this overlay */
|
||||
virtual uint32_t GetOverlayRenderModel( vr::VROverlayHandle_t ulOverlayHandle, char *pchValue, uint32_t unBufferSize, HmdColor_t *pColor, vr::EVROverlayError *pError ) = 0;
|
||||
|
||||
/** Sets render model to draw behind this overlay and the vertex color to use, pass null for pColor to match the overlays vertex color.
|
||||
The model is scaled by the same amount as the overlay, with a default of 1m. */
|
||||
virtual vr::EVROverlayError SetOverlayRenderModel( vr::VROverlayHandle_t ulOverlayHandle, const char *pchRenderModel, const HmdColor_t *pColor ) = 0;
|
||||
|
||||
/** Returns the transform type of this overlay. */
|
||||
virtual EVROverlayError GetOverlayTransformType( VROverlayHandle_t ulOverlayHandle, VROverlayTransformType *peTransformType ) = 0;
|
||||
|
||||
|
@ -2751,6 +2782,12 @@ namespace vr
|
|||
/** Gets the transform information when the overlay is rendering on a component. */
|
||||
virtual EVROverlayError GetOverlayTransformTrackedDeviceComponent( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t *punDeviceIndex, char *pchComponentName, uint32_t unComponentNameSize ) = 0;
|
||||
|
||||
/** Gets the transform if it is relative to another overlay. Returns an error if the transform is some other type. */
|
||||
virtual vr::EVROverlayError GetOverlayTransformOverlayRelative( VROverlayHandle_t ulOverlayHandle, VROverlayHandle_t *ulOverlayHandleParent, HmdMatrix34_t *pmatParentOverlayToOverlayTransform ) = 0;
|
||||
|
||||
/** Sets the transform to relative to the transform of the specified overlay. This overlays visibility will also track the parents visibility */
|
||||
virtual vr::EVROverlayError SetOverlayTransformOverlayRelative( VROverlayHandle_t ulOverlayHandle, VROverlayHandle_t ulOverlayHandleParent, const HmdMatrix34_t *pmatParentOverlayToOverlayTransform ) = 0;
|
||||
|
||||
/** Shows the VR overlay. For dashboard overlays, only the Dashboard Manager is allowed to call this. */
|
||||
virtual EVROverlayError ShowOverlay( VROverlayHandle_t ulOverlayHandle ) = 0;
|
||||
|
||||
|
@ -2921,7 +2958,7 @@ namespace vr
|
|||
virtual VRMessageOverlayResponse ShowMessageOverlay( const char* pchText, const char* pchCaption, const char* pchButton0Text, const char* pchButton1Text = nullptr, const char* pchButton2Text = nullptr, const char* pchButton3Text = nullptr ) = 0;
|
||||
};
|
||||
|
||||
static const char * const IVROverlay_Version = "IVROverlay_014";
|
||||
static const char * const IVROverlay_Version = "IVROverlay_016";
|
||||
|
||||
} // namespace vr
|
||||
|
||||
|
@ -2935,16 +2972,7 @@ static const char * const k_pch_Controller_Component_Tip = "tip"; // F
|
|||
static const char * const k_pch_Controller_Component_HandGrip = "handgrip"; // Neutral, ambidextrous hand-pose when holding controller. On plane between neutrally posed index finger and thumb
|
||||
static const char * const k_pch_Controller_Component_Status = "status"; // 1:1 aspect ratio status area, with canonical [0,1] uv mapping
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// The 32-bit version of gcc has the alignment requirement for uint64 and double set to
|
||||
// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned.
|
||||
// The 64-bit version of gcc has the alignment requirement for these types set to
|
||||
// 8 meaning that unless we use #pragma pack(4) our structures will get bigger.
|
||||
// The 64-bit structure packing has to match the 32-bit structure packing for each platform.
|
||||
#pragma pack( push, 4 )
|
||||
#else
|
||||
#pragma pack( push, 8 )
|
||||
#endif
|
||||
|
||||
/** Errors that can occur with the VR compositor */
|
||||
enum EVRRenderModelError
|
||||
|
@ -2993,11 +3021,20 @@ struct RenderModel_Vertex_t
|
|||
};
|
||||
|
||||
/** A texture map for use on a render model */
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// This structure was originally defined mis-packed on Linux, preserved for
|
||||
// compatibility.
|
||||
#pragma pack( push, 4 )
|
||||
#endif
|
||||
|
||||
struct RenderModel_TextureMap_t
|
||||
{
|
||||
uint16_t unWidth, unHeight; // width and height of the texture map in pixels
|
||||
const uint8_t *rubTextureMapData; // Map texture data. All textures are RGBA with 8 bits per channel per pixel. Data size is width * height * 4ub
|
||||
};
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
#pragma pack( pop )
|
||||
#endif
|
||||
|
||||
/** Session unique texture identifier. Rendermodels which share the same texture will have the same id.
|
||||
IDs <0 denote the texture is not present */
|
||||
|
@ -3006,6 +3043,12 @@ typedef int32_t TextureID_t;
|
|||
|
||||
const TextureID_t INVALID_TEXTURE_ID = -1;
|
||||
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
// This structure was originally defined mis-packed on Linux, preserved for
|
||||
// compatibility.
|
||||
#pragma pack( push, 4 )
|
||||
#endif
|
||||
|
||||
struct RenderModel_t
|
||||
{
|
||||
const RenderModel_Vertex_t *rVertexData; // Vertex data for the mesh
|
||||
|
@ -3014,6 +3057,10 @@ struct RenderModel_t
|
|||
uint32_t unTriangleCount; // Number of triangles in the mesh. Index count is 3 * TriangleCount
|
||||
TextureID_t diffuseTextureId; // Session unique texture identifier. Rendermodels which share the same texture will have the same id. <0 == texture not present
|
||||
};
|
||||
#if defined(__linux__) || defined(__APPLE__)
|
||||
#pragma pack( pop )
|
||||
#endif
|
||||
|
||||
|
||||
struct RenderModel_ControllerMode_State_t
|
||||
{
|
||||
|
@ -3250,7 +3297,7 @@ public:
|
|||
* once SubmitScreenshot() is called.
|
||||
* If Steam is not running, the paths will be in the user's
|
||||
* documents folder under Documents\SteamVR\Screenshots.
|
||||
* Other VR applications can call this to initate a
|
||||
* Other VR applications can call this to initiate a
|
||||
* screenshot outside of user control.
|
||||
* The destination file names do not need an extension,
|
||||
* will be replaced with the correct one for the format
|
||||
|
@ -3288,7 +3335,7 @@ public:
|
|||
* submitted scene textures of the running application and
|
||||
* write them into the preview image and a side-by-side file
|
||||
* for the VR image.
|
||||
* This is similiar to request screenshot, but doesn't ever
|
||||
* This is similar to request screenshot, but doesn't ever
|
||||
* talk to the application, just takes the shot and submits. */
|
||||
virtual vr::EVRScreenshotError TakeStereoScreenshot( vr::ScreenshotHandle_t *pOutScreenshotHandle, const char *pchPreviewFilename, const char *pchVRFilename ) = 0;
|
||||
|
||||
|
@ -3299,8 +3346,7 @@ public:
|
|||
* function will display a notification to the user that the
|
||||
* screenshot was taken. The paths should be full paths with
|
||||
* extensions.
|
||||
* File paths should be absolute including
|
||||
* exntensions.
|
||||
* File paths should be absolute including extensions.
|
||||
* screenshotHandle can be k_unScreenshotHandleInvalid if this
|
||||
* was a new shot taking by the app to be saved and not
|
||||
* initiated by a user (achievement earned or something) */
|
||||
|
@ -3338,10 +3384,30 @@ public:
|
|||
static const char * const IVRResources_Version = "IVRResources_001";
|
||||
|
||||
|
||||
}// End
|
||||
}
|
||||
// ivrdrivermanager.h
|
||||
namespace vr
|
||||
{
|
||||
|
||||
class IVRDriverManager
|
||||
{
|
||||
public:
|
||||
virtual uint32_t GetDriverCount() const = 0;
|
||||
|
||||
/** Returns the length of the number of bytes necessary to hold this string including the trailing null. */
|
||||
virtual uint32_t GetDriverName( vr::DriverId_t nDriver, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize ) = 0;
|
||||
};
|
||||
|
||||
static const char * const IVRDriverManager_Version = "IVRDriverManager_001";
|
||||
|
||||
} // namespace vr
|
||||
|
||||
|
||||
// End
|
||||
|
||||
#endif // _OPENVR_API
|
||||
|
||||
|
||||
namespace vr
|
||||
{
|
||||
/** Finds the active installation of the VR API and initializes it. The provided path must be absolute
|
||||
|
@ -3372,7 +3438,7 @@ namespace vr
|
|||
/** Returns the name of the enum value for an EVRInitError. This function may be called outside of VR_Init()/VR_Shutdown(). */
|
||||
VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsSymbol( EVRInitError error );
|
||||
|
||||
/** Returns an english string for an EVRInitError. Applications should call VR_GetVRInitErrorAsSymbol instead and
|
||||
/** Returns an English string for an EVRInitError. Applications should call VR_GetVRInitErrorAsSymbol instead and
|
||||
* use that as a key to look up their own localized error message. This function may be called outside of VR_Init()/VR_Shutdown(). */
|
||||
VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsEnglishDescription( EVRInitError error );
|
||||
|
||||
|
@ -3558,6 +3624,17 @@ namespace vr
|
|||
return m_pVRTrackedCamera;
|
||||
}
|
||||
|
||||
IVRDriverManager *VRDriverManager()
|
||||
{
|
||||
CheckClear();
|
||||
if ( !m_pVRDriverManager )
|
||||
{
|
||||
EVRInitError eError;
|
||||
m_pVRDriverManager = ( IVRDriverManager * )VR_GetGenericInterface( IVRDriverManager_Version, &eError );
|
||||
}
|
||||
return m_pVRDriverManager;
|
||||
}
|
||||
|
||||
private:
|
||||
IVRSystem *m_pVRSystem;
|
||||
IVRChaperone *m_pVRChaperone;
|
||||
|
@ -3571,6 +3648,7 @@ namespace vr
|
|||
IVRApplications *m_pVRApplications;
|
||||
IVRTrackedCamera *m_pVRTrackedCamera;
|
||||
IVRScreenshots *m_pVRScreenshots;
|
||||
IVRDriverManager *m_pVRDriverManager;
|
||||
};
|
||||
|
||||
inline COpenVRContext &OpenVRInternal_ModuleContext()
|
||||
|
@ -3591,6 +3669,7 @@ namespace vr
|
|||
inline IVRResources *VR_CALLTYPE VRResources() { return OpenVRInternal_ModuleContext().VRResources(); }
|
||||
inline IVRExtendedDisplay *VR_CALLTYPE VRExtendedDisplay() { return OpenVRInternal_ModuleContext().VRExtendedDisplay(); }
|
||||
inline IVRTrackedCamera *VR_CALLTYPE VRTrackedCamera() { return OpenVRInternal_ModuleContext().VRTrackedCamera(); }
|
||||
inline IVRDriverManager *VR_CALLTYPE VRDriverManager() { return OpenVRInternal_ModuleContext().VRDriverManager(); }
|
||||
|
||||
inline void COpenVRContext::Clear()
|
||||
{
|
||||
|
@ -3606,6 +3685,7 @@ namespace vr
|
|||
m_pVRTrackedCamera = nullptr;
|
||||
m_pVRResources = nullptr;
|
||||
m_pVRScreenshots = nullptr;
|
||||
m_pVRDriverManager = nullptr;
|
||||
}
|
||||
|
||||
VR_INTERFACE uint32_t VR_CALLTYPE VR_InitInternal( EVRInitError *peError, EVRApplicationType eApplicationType );

@@ -3,9 +3,9 @@ OpenVR applications to the SteamVR runtime, taking into account the version
of the OpenVR interface they were compiled against.

The client binding library - openvr_api.dll on Windows, openvr_api.so on
Linux, and openvr_api.dylib on macOS - knows how to find and read the
SteamVR runtime installation information which allows it to find and
dynamically connect to the installed runtime. In combination with the
Linux, and openvr_api.dylib or OpenVR.framework on macOS - knows how to find
and read the SteamVR runtime installation information which allows it to
find and dynamically connect to the installed runtime. In combination with the
interface version identifiers from /include/openvr.h which are baked
into applications at the time they are built, the OpenVR API client
binding library captures and conveys to the SteamVR runtime the version

@@ -32,4 +32,8 @@ for your platform. For example, to build on a POSIX system simply perform

cd src; mkdir _build; cd _build; cmake ..; make

and you will end up with the static library /src/_build/libopenvr_api.a
and you will end up with the static library /src/bin/<arch>/libopenvr_api.a

To build a shared library, pass -DBUILD_SHARED=1 to cmake.
To build as a framework on apple platforms, pass -DBUILD_FRAMEWORK=1 to cmake.
To see a complete list of configurable build options, use `cmake -LAH`

@@ -3,7 +3,7 @@
#include <stdlib.h>

#if defined(_WIN32)
#include <windows.h>
#include <Windows.h>

#undef GetEnvironmentVariable
#undef SetEnvironmentVariable

@ -42,6 +42,16 @@ const char *GetEnglishStringForHmdError( vr::EVRInitError eError )
|
|||
case VRInitError_Init_Internal: return "vrserver internal error (124)";
|
||||
case VRInitError_Init_HmdDriverIdIsNone: return "Hmd DriverId is invalid (125)";
|
||||
case VRInitError_Init_HmdNotFoundPresenceFailed: return "Hmd Not Found Presence Failed (126)";
|
||||
case VRInitError_Init_VRMonitorNotFound: return "VR Monitor Not Found (127)";
|
||||
case VRInitError_Init_VRMonitorStartupFailed: return "VR Monitor startup failed (128)";
|
||||
case VRInitError_Init_LowPowerWatchdogNotSupported: return "Low Power Watchdog Not Supported (129)";
|
||||
case VRInitError_Init_InvalidApplicationType: return "Invalid Application Type (130)";
|
||||
case VRInitError_Init_NotAvailableToWatchdogApps: return "Not available to watchdog apps (131)";
|
||||
case VRInitError_Init_WatchdogDisabledInSettings: return "Watchdog disabled in settings (132)";
|
||||
case VRInitError_Init_VRDashboardNotFound: return "VR Dashboard Not Found (133)";
|
||||
case VRInitError_Init_VRDashboardStartupFailed: return "VR Dashboard startup failed (134)";
|
||||
case VRInitError_Init_VRHomeNotFound: return "VR Home Not Found (135)";
|
||||
case VRInitError_Init_VRHomeStartupFailed: return "VR home startup failed (136)";
|
||||
|
||||
case VRInitError_Driver_Failed: return "Driver Failed (200)";
|
||||
case VRInitError_Driver_Unknown: return "Driver Not Known (201)";
|
||||
|
@ -71,6 +81,8 @@ const char *GetEnglishStringForHmdError( vr::EVRInitError eError )
|
|||
case VRInitError_Compositor_D3D11HardwareRequired: return "Compositor failed to find DX11 hardware (401)";
|
||||
case VRInitError_Compositor_FirmwareRequiresUpdate: return "Compositor requires mandatory firmware update (402)";
|
||||
case VRInitError_Compositor_OverlayInitFailed: return "Compositor initialization succeeded, but overlay init failed (403)";
|
||||
case VRInitError_Compositor_ScreenshotsInitFailed: return "Compositor initialization succeeded, but screenshot init failed (404)";
|
||||
case VRInitError_Compositor_UnableToCreateDevice: return "Compositor unable to create graphics device (405)";
|
||||
|
||||
// Oculus
|
||||
case VRInitError_VendorSpecific_UnableToConnectToOculusRuntime: return "Unable to connect to Oculus Runtime (1000)";
|
||||
|
@ -141,6 +153,10 @@ const char *GetIDForVRInitError( vr::EVRInitError eError )
|
|||
RETURN_ENUM_AS_STRING( VRInitError_Init_InvalidApplicationType );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_NotAvailableToWatchdogApps );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_WatchdogDisabledInSettings );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_VRDashboardNotFound );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_VRDashboardStartupFailed );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_VRHomeNotFound );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_VRHomeStartupFailed );
|
||||
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_HmdDriverIdIsNone );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Init_HmdNotFoundPresenceFailed );
|
||||
|
@ -173,6 +189,8 @@ const char *GetIDForVRInitError( vr::EVRInitError eError )
|
|||
RETURN_ENUM_AS_STRING( VRInitError_Compositor_D3D11HardwareRequired );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Compositor_FirmwareRequiresUpdate );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Compositor_OverlayInitFailed );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Compositor_ScreenshotsInitFailed );
|
||||
RETURN_ENUM_AS_STRING( VRInitError_Compositor_UnableToCreateDevice );
|
||||
|
||||
// Oculus
|
||||
RETURN_ENUM_AS_STRING( VRInitError_VendorSpecific_UnableToConnectToOculusRuntime);
|
||||

@@ -22,7 +22,7 @@ public:
/** Returns true if any driver has an HMD attached. Can be called outside of Init/Cleanup */
virtual bool BIsHmdPresent() = 0;

/** Returns an english error string from inside vrclient.dll which might be newer than the API DLL */
/** Returns an English error string from inside vrclient.dll which might be newer than the API DLL */
virtual const char *GetEnglishStringForHmdError( vr::EVRInitError eError ) = 0;

/** Returns an error symbol from inside vrclient.dll which might be newer than the API DLL */

@@ -3,12 +3,11 @@
#include "pathtools_public.h"

#if defined( _WIN32)
#include <windows.h>
#include <Windows.h>
#include <direct.h>
#include <shobjidl.h>
#include <knownfolders.h>
#include <shlobj.h>
#include <share.h>
#include <Shobjidl.h>
#include <KnownFolders.h>
#include <Shlobj.h>

#undef GetEnvironmentVariable
#else

@@ -108,6 +108,7 @@ std::string GetUserDocumentsPath();
//-----------------------------------------------------------------------------
#if defined(WIN32)
#define DYNAMIC_LIB_EXT ".dll"
#define PROGRAM_EXT ".exe"
#ifdef _WIN64
#define PLATSUBDIR "win64"
#else

@@ -116,8 +117,10 @@ std::string GetUserDocumentsPath();
#elif defined(OSX)
#define DYNAMIC_LIB_EXT ".dylib"
#define PLATSUBDIR "osx32"
#define PROGRAM_EXT ""
#elif defined(LINUX)
#define DYNAMIC_LIB_EXT ".so"
#define PROGRAM_EXT ""
#if defined( LINUX32 )
#define PLATSUBDIR "linux32"
#else

@@ -3,7 +3,7 @@
#include <string.h>

#if defined(_WIN32)
#include <windows.h>
#include <Windows.h>
#endif

#if defined(POSIX)

@@ -22,7 +22,7 @@ SharedLibHandle SharedLib_Load( const char *pchPath )
void *SharedLib_GetFunction( SharedLibHandle lib, const char *pchFunctionName)
{
#if defined( _WIN32)
return (void*)GetProcAddress( (HMODULE)lib, pchFunctionName );
return GetProcAddress( (HMODULE)lib, pchFunctionName );
#elif defined(POSIX)
return dlsym( lib, pchFunctionName );
#endif

@@ -31,6 +31,8 @@ void *SharedLib_GetFunction( SharedLibHandle lib, const char *pchFunctionName)

void SharedLib_Unload( SharedLibHandle lib )
{
if ( !lib )
return;
#if defined( _WIN32)
FreeLibrary( (HMODULE)lib );
#elif defined(POSIX)

@@ -9,7 +9,7 @@

#if defined( WIN32 )
#include <windows.h>
#include <shlobj.h>
#include <Shlobj.h>

#undef GetEnvironmentVariable
#elif defined OSX

@@ -23,9 +23,7 @@
#include <algorithm>

#ifndef VRLog
#if defined( __MINGW32__ )
#define VRLog(args...) fprintf(stderr, args)
#elif defined( WIN32 )
#if defined( WIN32 )
#define VRLog(fmt, ...) fprintf(stderr, fmt, __VA_ARGS__)
#else
#define VRLog(args...) fprintf(stderr, args)

@@ -192,8 +192,7 @@ impl ClipScrollNode {

pub fn finalize(&mut self, new_scrolling: &ScrollingState) {
match self.node_type {
NodeType::ReferenceFrame(_) | NodeType::Clip(_) =>
warn!("Tried to scroll a non-scroll node."),
NodeType::ReferenceFrame(_) | NodeType::Clip(_) => (),
NodeType::ScrollFrame(ref mut scrolling) => *scrolling = *new_scrolling,
}
}

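The hunk above stops warn!-ing when finalize() reaches a ReferenceFrame or Clip node and simply ignores those variants, since finalize() is legitimately called on every node in the tree. A stripped-down, self-contained Rust sketch of that shape, using simplified stand-in types rather than the actual webrender definitions:

#[derive(Clone, Copy, PartialEq, Debug)]
struct ScrollingState { offset: (f32, f32) }

enum NodeType {
    ReferenceFrame,
    Clip,
    ScrollFrame(ScrollingState),
}

impl NodeType {
    // Non-scroll variants are deliberately ignored instead of logged,
    // mirroring the change from warn!(...) to ().
    fn finalize(&mut self, new_scrolling: &ScrollingState) {
        match self {
            NodeType::ReferenceFrame | NodeType::Clip => (),
            NodeType::ScrollFrame(scrolling) => *scrolling = *new_scrolling,
        }
    }
}

fn main() {
    let mut node = NodeType::ScrollFrame(ScrollingState { offset: (0.0, 0.0) });
    node.finalize(&ScrollingState { offset: (0.0, 42.0) });
    if let NodeType::ScrollFrame(s) = node {
        assert_eq!(s.offset, (0.0, 42.0));
    }
}
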
@@ -121,14 +121,11 @@ RendererOGL::Render()
return false;
}
// XXX set clear color if MOZ_WIDGET_ANDROID is defined.
// XXX pass the actual render bounds instead of an empty rect.
mWidget->DrawWindowUnderlay(&widgetContext, LayoutDeviceIntRect());

auto size = mWidget->GetClientSize();
wr_renderer_render(mWrRenderer, size.width, size.height);

mGL->SwapBuffers();
mWidget->DrawWindowOverlay(&widgetContext, LayoutDeviceIntRect());
mWidget->PostRender(&widgetContext);

// TODO: Flush pending actions such as texture deletions/unlocks and

@ -7,9 +7,9 @@ fuzzy-if(Android,8,604) == display-contents-acid-dyn-2.html display-contents-aci
|
|||
fuzzy-if(Android,8,604) == display-contents-acid-dyn-3.html display-contents-acid-ref.html
|
||||
== display-contents-generated-content.html display-contents-generated-content-ref.html
|
||||
== display-contents-generated-content-2.html display-contents-generated-content-ref.html
|
||||
fails-if(styloVsGecko||stylo) == display-contents-style-inheritance-1.html display-contents-style-inheritance-1-ref.html
|
||||
fails-if(styloVsGecko||stylo) == display-contents-style-inheritance-1-stylechange.html display-contents-style-inheritance-1-ref.html
|
||||
fuzzy-if(winWidget,12,100) skip-if(styloVsGecko||stylo) == display-contents-style-inheritance-1-dom-mutations.html display-contents-style-inheritance-1-ref.html
|
||||
fails-if(styloVsGecko||stylo) pref(layout.css.scoped-style.enabled,true) == display-contents-style-inheritance-1.html display-contents-style-inheritance-1-ref.html
|
||||
fails-if(styloVsGecko||stylo) pref(layout.css.scoped-style.enabled,true) == display-contents-style-inheritance-1-stylechange.html display-contents-style-inheritance-1-ref.html
|
||||
fuzzy-if(winWidget,12,100) skip-if(styloVsGecko||stylo) pref(layout.css.scoped-style.enabled,true) == display-contents-style-inheritance-1-dom-mutations.html display-contents-style-inheritance-1-ref.html
|
||||
== display-contents-tables.xhtml display-contents-tables-ref.xhtml
|
||||
== display-contents-tables-2.xhtml display-contents-tables-ref.xhtml
|
||||
== display-contents-tables-3.xhtml display-contents-tables-3-ref.xhtml
|
||||
|
@ -17,7 +17,7 @@ fuzzy-if(winWidget,12,100) skip-if(styloVsGecko||stylo) == display-contents-styl
|
|||
== display-contents-visibility-hidden-2.html display-contents-visibility-hidden-ref.html
|
||||
== display-contents-495385-2d.html display-contents-495385-2d-ref.html
|
||||
fuzzy-if(Android,7,3935) == display-contents-xbl.xhtml display-contents-xbl-ref.html
|
||||
fuzzy-if(Android,7,1186) pref(dom.webcomponents.enabled,true) fails-if(stylo||styloVsGecko) == display-contents-shadow-dom-1.html display-contents-shadow-dom-1-ref.html
|
||||
fuzzy-if(Android,7,1186) fails-if(stylo||styloVsGecko) pref(dom.webcomponents.enabled,true) pref(layout.css.scoped-style.enabled,true) == display-contents-shadow-dom-1.html display-contents-shadow-dom-1-ref.html
|
||||
== display-contents-xbl-2.xul display-contents-xbl-2-ref.xul
|
||||
asserts(1) asserts-if(styloVsGecko,2) == display-contents-xbl-3.xul display-contents-xbl-3-ref.xul # bug 1089223
|
||||
skip == display-contents-xbl-4.xul display-contents-xbl-4-ref.xul # fails (not just asserts) due to bug 1089223
|
||||
|
|
|
@ -15,5 +15,5 @@ fuzzy-if(Android,8,454) == mq_print_maxheight.xhtml mq_print-ref.xhtml
|
|||
== mq_print_minheight_updown.xhtml mq_print-ref.xhtml
|
||||
== mq_print_minwidth_updown.xhtml mq_print-ref.xhtml
|
||||
|
||||
== scoped-mq-update.html scoped-mq-update-ref.html
|
||||
pref(layout.css.scoped-style.enabled,true) == scoped-mq-update.html scoped-mq-update-ref.html
|
||||
fails-if(styloVsGecko||stylo) == system-metrics-1.html system-metrics-1-ref.html
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
default-preferences pref(layout.css.scoped-style.enabled,true)
|
||||
|
||||
fails-if(styloVsGecko||stylo) == scoped-style-001.html scoped-style-001-ref.html
|
||||
fails-if(styloVsGecko||stylo) == scoped-style-002.html scoped-style-002-ref.html
|
||||
fails-if(styloVsGecko||stylo) == scoped-style-003.html scoped-style-003-ref.html
|
||||
|
|
|
@ -10,17 +10,17 @@ random-if(Android) fuzzy-if(/^Windows\x20NT\x2010\.0/.test(http.oscpu),1,12) ==
|
|||
random-if(Android) HTTP == image-1.html image-1.html?ref
|
||||
random-if(Android) HTTP == opacity-mixed-scrolling-1.html opacity-mixed-scrolling-1.html?ref # bug 760269
|
||||
random-if(cocoaWidget) HTTP == opacity-mixed-scrolling-2.html opacity-mixed-scrolling-2.html?ref # see bug 625357
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-1.html scroll-behavior-1.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-2.html scroll-behavior-2.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-3.html scroll-behavior-3.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-4.html scroll-behavior-4.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-5.html scroll-behavior-5.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-6.html scroll-behavior-6.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-7.html scroll-behavior-7.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-8.html scroll-behavior-8.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-9.html scroll-behavior-9.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-10.html scroll-behavior-10.html?ref # see bug 1041833
|
||||
skip-if(Android) pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-textarea.html scroll-behavior-textarea.html?ref # see bug 1320200
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-1.html scroll-behavior-1.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-2.html scroll-behavior-2.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-3.html scroll-behavior-3.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-4.html scroll-behavior-4.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-5.html scroll-behavior-5.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-6.html scroll-behavior-6.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-7.html scroll-behavior-7.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-8.html scroll-behavior-8.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-9.html scroll-behavior-9.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-10.html scroll-behavior-10.html?ref
|
||||
pref(layout.css.scroll-behavior.enabled,true) pref(layout.css.scroll-behavior.property-enabled,true) == scroll-behavior-textarea.html scroll-behavior-textarea.html?ref
|
||||
HTTP == simple-1.html simple-1.html?ref
|
||||
skip-if(styloVsGecko) HTTP == subpixel-1.html#d subpixel-1-ref.html#d # bug 1354406
|
||||
fuzzy-if(Android,4,120) HTTP == text-1.html text-1.html?ref
|
||||
|
|
|
@ -29,3 +29,4 @@ fuzzy-if(skiaContent,1,11) fuzzy-if(webrender,1,30) == calc-transform-origin-1.h
|
|||
== calc-width-block-intrinsic-1.html calc-width-block-intrinsic-1-ref.html
|
||||
== calc-width-table-auto-1.html calc-width-table-auto-1-ref.html
|
||||
== calc-width-table-fixed-1.html calc-width-table-fixed-1-ref.html
|
||||
== rem-root-font-size-restyle-1.html rem-root-font-size-restyle-1-ref.html
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<title>CSS Test reference</title>
|
||||
<link rel="author" title="Emilio Cobos Álvarez" href="mailto:emilio@crisal.io">
|
||||
<style>
|
||||
:root {
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
div {
|
||||
width: 10rem;
|
||||
height: 10rem;
|
||||
background: green;
|
||||
}
|
||||
</style>
|
||||
<div></div>
|
|
@ -0,0 +1,22 @@
|
|||
<!doctype html>
|
||||
<title>CSS Test: Test for rem units on the root element</title>
|
||||
<link rel="author" title="Emilio Cobos Álvarez" href="mailto:emilio@crisal.io">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-values/#rem">
|
||||
<link rel="match" href="rem-root-font-size-restyle-1-ref.html">
|
||||
<style>
|
||||
:root {
|
||||
font-size: 2rem;
|
||||
}
|
||||
|
||||
div {
|
||||
width: 10rem;
|
||||
height: 10rem;
|
||||
background: green;
|
||||
}
|
||||
</style>
|
||||
<div></div>
|
||||
<script>
|
||||
document.documentElement.offsetTop;
|
||||
// Force a style recalc.
|
||||
document.documentElement.style.color = "green";
|
||||
</script>
|
|
@ -58,25 +58,6 @@ AnimationCollection<AnimationType>::GetAnimationCollection(
|
|||
GetProperty(propName));
|
||||
}
|
||||
|
||||
template <class AnimationType>
|
||||
/* static */ AnimationCollection<AnimationType>*
|
||||
AnimationCollection<AnimationType>::GetAnimationCollection(
|
||||
const dom::Element *aElement,
|
||||
nsIAtom* aPseudoTagOrNull)
|
||||
{
|
||||
MOZ_ASSERT(!aPseudoTagOrNull ||
|
||||
aPseudoTagOrNull == nsCSSPseudoElements::before ||
|
||||
aPseudoTagOrNull == nsCSSPseudoElements::after);
|
||||
|
||||
CSSPseudoElementType pseudoType = CSSPseudoElementType::NotPseudo;
|
||||
if (aPseudoTagOrNull) {
|
||||
pseudoType = aPseudoTagOrNull == nsCSSPseudoElements::before
|
||||
? CSSPseudoElementType::before
|
||||
: CSSPseudoElementType::after;
|
||||
}
|
||||
return GetAnimationCollection(aElement, pseudoType);
|
||||
}
|
||||
|
||||
template <class AnimationType>
|
||||
/* static */ AnimationCollection<AnimationType>*
|
||||
AnimationCollection<AnimationType>::GetAnimationCollection(
|
||||
|
|
|
@ -70,12 +70,6 @@ public:
|
|||
GetAnimationCollection(const dom::Element* aElement,
|
||||
CSSPseudoElementType aPseudoType);
|
||||
|
||||
// Get the collection of animations for the given |aElement| and
|
||||
// |aPseudoTagOrNull|.
|
||||
static AnimationCollection<AnimationType>*
|
||||
GetAnimationCollection(const dom::Element* aElement,
|
||||
nsIAtom* aPseudoTagOrNull);
|
||||
|
||||
// Given the frame |aFrame| with possibly animated content, finds its
|
||||
// associated collection of animations. If |aFrame| is a generated content
|
||||
// frame, this function may examine the parent frame to search for such
|
||||
|
|
|
@ -510,19 +510,19 @@ Gecko_GetActiveLinkAttrDeclarationBlock(RawGeckoElementBorrowed aElement)
|
|||
return AsRefRawStrong(sheet->GetServoActiveLinkDecl());
|
||||
}
|
||||
|
||||
static nsIAtom*
|
||||
PseudoTagAndCorrectElementForAnimation(const Element*& aElementOrPseudo) {
|
||||
static CSSPseudoElementType
|
||||
GetPseudoTypeFromElementForAnimation(const Element*& aElementOrPseudo) {
|
||||
if (aElementOrPseudo->IsGeneratedContentContainerForBefore()) {
|
||||
aElementOrPseudo = aElementOrPseudo->GetParent()->AsElement();
|
||||
return nsCSSPseudoElements::before;
|
||||
return CSSPseudoElementType::before;
|
||||
}
|
||||
|
||||
if (aElementOrPseudo->IsGeneratedContentContainerForAfter()) {
|
||||
aElementOrPseudo = aElementOrPseudo->GetParent()->AsElement();
|
||||
return nsCSSPseudoElements::after;
|
||||
return CSSPseudoElementType::after;
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
return CSSPseudoElementType::NotPseudo;
|
||||
}
|
||||
|
||||
bool
|
||||
|
@ -542,12 +542,8 @@ Gecko_GetAnimationRule(RawGeckoElementBorrowed aElement,
|
|||
return false;
|
||||
}
|
||||
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
|
||||
CSSPseudoElementType pseudoType =
|
||||
nsCSSPseudoElements::GetPseudoType(
|
||||
pseudoTag,
|
||||
nsCSSProps::EnabledState::eIgnoreEnabledState);
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
|
||||
return presContext->EffectCompositor()
|
||||
->GetServoAnimationRule(aElement,
|
||||
|
@ -581,10 +577,8 @@ Gecko_UpdateAnimations(RawGeckoElementBorrowed aElement,
|
|||
return;
|
||||
}
|
||||
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
nsCSSPseudoElements::GetPseudoType(pseudoTag,
|
||||
CSSEnabledState::eForAllContent);
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
|
||||
if (aTasks & UpdateAnimationsTasks::CSSAnimations) {
|
||||
presContext->AnimationManager()->
|
||||
|
@ -629,10 +623,8 @@ Gecko_UpdateAnimations(RawGeckoElementBorrowed aElement,
|
|||
bool
|
||||
Gecko_ElementHasAnimations(RawGeckoElementBorrowed aElement)
|
||||
{
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
nsCSSPseudoElements::GetPseudoType(pseudoTag,
|
||||
CSSEnabledState::eForAllContent);
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
|
||||
return !!EffectSet::GetEffectSet(aElement, pseudoType);
|
||||
}
|
||||
|
@ -640,10 +632,11 @@ Gecko_ElementHasAnimations(RawGeckoElementBorrowed aElement)
|
|||
bool
|
||||
Gecko_ElementHasCSSAnimations(RawGeckoElementBorrowed aElement)
|
||||
{
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
nsAnimationManager::CSSAnimationCollection* collection =
|
||||
nsAnimationManager::CSSAnimationCollection
|
||||
::GetAnimationCollection(aElement, pseudoTag);
|
||||
::GetAnimationCollection(aElement, pseudoType);
|
||||
|
||||
return collection && !collection->mAnimations.IsEmpty();
|
||||
}
|
||||
|
@ -651,10 +644,11 @@ Gecko_ElementHasCSSAnimations(RawGeckoElementBorrowed aElement)
|
|||
bool
|
||||
Gecko_ElementHasCSSTransitions(RawGeckoElementBorrowed aElement)
|
||||
{
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
nsTransitionManager::CSSTransitionCollection* collection =
|
||||
nsTransitionManager::CSSTransitionCollection
|
||||
::GetAnimationCollection(aElement, pseudoTag);
|
||||
::GetAnimationCollection(aElement, pseudoType);
|
||||
|
||||
return collection && !collection->mAnimations.IsEmpty();
|
||||
}
|
||||
|
@ -662,10 +656,11 @@ Gecko_ElementHasCSSTransitions(RawGeckoElementBorrowed aElement)
|
|||
size_t
|
||||
Gecko_ElementTransitions_Length(RawGeckoElementBorrowed aElement)
|
||||
{
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
nsTransitionManager::CSSTransitionCollection* collection =
|
||||
nsTransitionManager::CSSTransitionCollection
|
||||
::GetAnimationCollection(aElement, pseudoTag);
|
||||
::GetAnimationCollection(aElement, pseudoType);
|
||||
|
||||
return collection ? collection->mAnimations.Length() : 0;
|
||||
}
|
||||
|
@ -673,10 +668,11 @@ Gecko_ElementTransitions_Length(RawGeckoElementBorrowed aElement)
|
|||
static CSSTransition*
|
||||
GetCurrentTransitionAt(RawGeckoElementBorrowed aElement, size_t aIndex)
|
||||
{
|
||||
nsIAtom* pseudoTag = PseudoTagAndCorrectElementForAnimation(aElement);
|
||||
CSSPseudoElementType pseudoType =
|
||||
GetPseudoTypeFromElementForAnimation(aElement);
|
||||
nsTransitionManager::CSSTransitionCollection* collection =
|
||||
nsTransitionManager::CSSTransitionCollection
|
||||
::GetAnimationCollection(aElement, pseudoTag);
|
||||
::GetAnimationCollection(aElement, pseudoType);
|
||||
if (!collection) {
|
||||
return nullptr;
|
||||
}
|
||||
|
|
|
@ -914,13 +914,9 @@ ServoStyleSet::AddDocStyleSheet(ServoStyleSheet* aSheet,
|
|||
already_AddRefed<nsStyleContext>
|
||||
ServoStyleSet::ProbePseudoElementStyle(Element* aOriginatingElement,
|
||||
CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
Element* aPseudoElement)
|
||||
nsStyleContext* aParentContext)
|
||||
{
|
||||
UpdateStylistIfNeeded();
|
||||
if (aPseudoElement) {
|
||||
NS_ERROR("stylo: We don't support CSS_PSEUDO_ELEMENT_SUPPORTS_USER_ACTION_STATE yet");
|
||||
}
|
||||
|
||||
// NB: We ignore aParentContext, on the assumption that pseudo element styles
|
||||
// should just inherit from aOriginatingElement's primary style, which Servo
|
||||
|
|
|
@ -268,8 +268,7 @@ public:
|
|||
already_AddRefed<nsStyleContext>
|
||||
ProbePseudoElementStyle(dom::Element* aOriginatingElement,
|
||||
mozilla::CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
dom::Element* aPseudoElement = nullptr);
|
||||
nsStyleContext* aParentContext);
|
||||
|
||||
// Test if style is dependent on content state
|
||||
nsRestyleHint HasStateDependentStyle(dom::Element* aElement,
|
||||
|
|
|
@ -167,8 +167,7 @@ public:
|
|||
ProbePseudoElementStyle(dom::Element* aParentElement,
|
||||
mozilla::CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
TreeMatchContext* aTreeMatchContext,
|
||||
dom::Element* aPseudoElement = nullptr);
|
||||
TreeMatchContext* aTreeMatchContext);
|
||||
inline nsRestyleHint HasStateDependentStyle(dom::Element* aElement,
|
||||
EventStates aStateMask);
|
||||
inline nsRestyleHint HasStateDependentStyle(
|
||||
|
|
|
@ -263,17 +263,14 @@ already_AddRefed<nsStyleContext>
|
|||
StyleSetHandle::Ptr::ProbePseudoElementStyle(dom::Element* aParentElement,
|
||||
CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
TreeMatchContext* aTreeMatchContext,
|
||||
dom::Element* aPseudoElement)
|
||||
TreeMatchContext* aTreeMatchContext)
|
||||
{
|
||||
if (IsGecko()) {
|
||||
MOZ_ASSERT(aTreeMatchContext);
|
||||
return AsGecko()->ProbePseudoElementStyle(aParentElement, aType, aParentContext,
|
||||
*aTreeMatchContext, aPseudoElement);
|
||||
} else {
|
||||
return AsServo()->ProbePseudoElementStyle(aParentElement, aType, aParentContext,
|
||||
aPseudoElement);
|
||||
*aTreeMatchContext);
|
||||
}
|
||||
return AsServo()->ProbePseudoElementStyle(aParentElement, aType, aParentContext);
|
||||
}
|
||||
|
||||
nsRestyleHint
|
||||
|
|
|
@ -2010,8 +2010,7 @@ already_AddRefed<nsStyleContext>
|
|||
nsStyleSet::ProbePseudoElementStyle(Element* aParentElement,
|
||||
CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
TreeMatchContext& aTreeMatchContext,
|
||||
Element* aPseudoElement)
|
||||
TreeMatchContext& aTreeMatchContext)
|
||||
{
|
||||
NS_ENSURE_FALSE(mInShutdown, nullptr);
|
||||
|
||||
|
@ -2024,7 +2023,7 @@ nsStyleSet::ProbePseudoElementStyle(Element* aParentElement,
|
|||
aTreeMatchContext.ResetForUnvisitedMatching();
|
||||
PseudoElementRuleProcessorData data(PresContext(), aParentElement,
|
||||
&ruleWalker, aType, aTreeMatchContext,
|
||||
aPseudoElement);
|
||||
/* aPseudoElement = */ nullptr);
|
||||
WalkRestrictionRule(aType, &ruleWalker);
|
||||
// not the root if there was a restriction rule
|
||||
nsRuleNode *adjustedRoot = ruleWalker.CurrentNode();
|
||||
|
|
|
@ -259,8 +259,7 @@ class nsStyleSet final
|
|||
ProbePseudoElementStyle(mozilla::dom::Element* aParentElement,
|
||||
mozilla::CSSPseudoElementType aType,
|
||||
nsStyleContext* aParentContext,
|
||||
TreeMatchContext& aTreeMatchContext,
|
||||
mozilla::dom::Element* aPseudoElement = nullptr);
|
||||
TreeMatchContext& aTreeMatchContext);
|
||||
|
||||
/**
|
||||
* Bit-flags that can be passed to GetContext in its parameter 'aFlags'.
|
||||
|
|
|
@ -447,6 +447,7 @@ function WaitForTestEnd(contentRootElement, inPrintMode, spellCheckedElements) {
|
|||
var state = STATE_WAITING_TO_FIRE_INVALIDATE_EVENT;
|
||||
|
||||
function AfterPaintListener(event) {
|
||||
dump("AfterPaintListener\n");
|
||||
LogInfo("AfterPaintListener in " + event.target.document.location.href);
|
||||
if (event.target.document != currentDoc) {
|
||||
// ignore paint events for subframes or old documents in the window.
|
||||

@@ -98,7 +98,7 @@ if CONFIG['CLANG_CL'] or not CONFIG['_MSC_VER']:
elif f.endswith('avx2.c'):
SOURCES[f].flags += ['-mavx2']

if CONFIG['_MSC_VER'] and not config['CLANG_CL']:
if CONFIG['_MSC_VER'] and not CONFIG['CLANG_CL']:
for f in SOURCES:
if f.endswith('avx.c'):
SOURCES[f].flags += ['-arch:AVX']

@@ -411,10 +411,10 @@ bitflags! {
impl SinkFlags {
pub fn try_from(x: ffi::pa_sink_flags_t) -> Option<SinkFlags> {
if (x &
!(ffi::PA_SOURCE_NOFLAGS | ffi::PA_SOURCE_HW_VOLUME_CTRL | ffi::PA_SOURCE_LATENCY |
ffi::PA_SOURCE_HARDWARE | ffi::PA_SOURCE_NETWORK | ffi::PA_SOURCE_HW_MUTE_CTRL |
ffi::PA_SOURCE_DECIBEL_VOLUME |
ffi::PA_SOURCE_DYNAMIC_LATENCY | ffi::PA_SOURCE_FLAT_VOLUME)) == 0 {
!(ffi::PA_SINK_NOFLAGS | ffi::PA_SINK_HW_VOLUME_CTRL | ffi::PA_SINK_LATENCY |
ffi::PA_SINK_HARDWARE | ffi::PA_SINK_NETWORK | ffi::PA_SINK_HW_MUTE_CTRL |
ffi::PA_SINK_DECIBEL_VOLUME | ffi::PA_SINK_DYNAMIC_LATENCY |
ffi::PA_SINK_FLAT_VOLUME | ffi::PA_SINK_SET_FORMATS)) == 0 {
Some(unsafe { ::std::mem::transmute(x) })
} else {
None

@@ -120,7 +120,7 @@ impl Context {
let mut ctx = unsafe { &mut *(u as *mut Context) };
if eol == 0 {
let info = unsafe { &*i };
let flags = pulse::SinkFlags::try_from(info.flags).expect("SinkInfo contains invalid flags");
let flags = pulse::SinkFlags::from_bits_truncate(info.flags);
ctx.default_sink_info = Some(DefaultInfo {
sample_spec: info.sample_spec,
channel_map: info.channel_map,

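The two cubeb-pulse hunks above replace the strict SinkFlags::try_from validation (which made the call site .expect() on any unrecognized bit) with bitflags' from_bits_truncate, which simply discards unknown bits. A minimal Rust sketch of that difference, using hypothetical flag values rather than the real ffi::PA_SINK_* constants:

use bitflags::bitflags;

bitflags! {
    // Hypothetical values for illustration only; the real constants live in
    // the cubeb-pulse-rs FFI bindings.
    pub struct SinkFlags: u32 {
        const HW_VOLUME_CTRL = 0x1;
        const LATENCY        = 0x2;
        const HARDWARE       = 0x4;
    }
}

fn main() {
    let raw: u32 = 0x5 | 0x80; // 0x80 is not a defined flag in this sketch

    // from_bits rejects unknown bits, forcing callers to unwrap/expect.
    assert_eq!(SinkFlags::from_bits(raw), None);

    // from_bits_truncate drops the unknown bits, so newer server-side flags
    // no longer make the callback panic.
    let flags = SinkFlags::from_bits_truncate(raw);
    assert_eq!(flags, SinkFlags::HW_VOLUME_CTRL | SinkFlags::HARDWARE);
}
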
@ -271,108 +271,6 @@ AnnexB::ConvertSampleToAVCC(mozilla::MediaRawData* aSample)
|
|||
return true;
|
||||
}
|
||||
|
||||
already_AddRefed<mozilla::MediaByteBuffer>
|
||||
AnnexB::ExtractExtraData(const mozilla::MediaRawData* aSample)
|
||||
{
|
||||
MOZ_ASSERT(IsAVCC(aSample));
|
||||
|
||||
RefPtr<mozilla::MediaByteBuffer> extradata = new mozilla::MediaByteBuffer;
|
||||
|
||||
// SPS content
|
||||
nsTArray<uint8_t> sps;
|
||||
ByteWriter spsw(sps);
|
||||
int numSps = 0;
|
||||
// PPS content
|
||||
nsTArray<uint8_t> pps;
|
||||
ByteWriter ppsw(pps);
|
||||
int numPps = 0;
|
||||
|
||||
int nalLenSize = ((*aSample->mExtraData)[4] & 3) + 1;
|
||||
|
||||
size_t sampleSize = aSample->Size();
|
||||
if (aSample->mCrypto.mValid) {
|
||||
// The content is encrypted, we can only parse the non-encrypted data.
|
||||
MOZ_ASSERT(aSample->mCrypto.mPlainSizes.Length() > 0);
|
||||
if (aSample->mCrypto.mPlainSizes.Length() == 0 ||
|
||||
aSample->mCrypto.mPlainSizes[0] > sampleSize) {
|
||||
// This is invalid content.
|
||||
return nullptr;
|
||||
}
|
||||
sampleSize = aSample->mCrypto.mPlainSizes[0];
|
||||
}
|
||||
|
||||
ByteReader reader(aSample->Data(), sampleSize);
|
||||
|
||||
// Find SPS and PPS NALUs in AVCC data
|
||||
while (reader.Remaining() > nalLenSize) {
|
||||
uint32_t nalLen;
|
||||
switch (nalLenSize) {
|
||||
case 1: nalLen = reader.ReadU8(); break;
|
||||
case 2: nalLen = reader.ReadU16(); break;
|
||||
case 3: nalLen = reader.ReadU24(); break;
|
||||
case 4: nalLen = reader.ReadU32(); break;
|
||||
}
|
||||
uint8_t nalType = reader.PeekU8() & 0x1f;
|
||||
const uint8_t* p = reader.Read(nalLen);
|
||||
if (!p) {
|
||||
return extradata.forget();
|
||||
}
|
||||
|
||||
if (nalType == 0x7) { /* SPS */
|
||||
numSps++;
|
||||
if (!spsw.WriteU16(nalLen)
|
||||
|| !spsw.Write(p, nalLen)) {
|
||||
return extradata.forget();
|
||||
}
|
||||
} else if (nalType == 0x8) { /* PPS */
|
||||
numPps++;
|
||||
if (!ppsw.WriteU16(nalLen)
|
||||
|| !ppsw.Write(p, nalLen)) {
|
||||
return extradata.forget();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (numSps && sps.Length() > 5) {
|
||||
extradata->AppendElement(1); // version
|
||||
extradata->AppendElement(sps[3]); // profile
|
||||
extradata->AppendElement(sps[4]); // profile compat
|
||||
extradata->AppendElement(sps[5]); // level
|
||||
extradata->AppendElement(0xfc | 3); // nal size - 1
|
||||
extradata->AppendElement(0xe0 | numSps);
|
||||
extradata->AppendElements(sps.Elements(), sps.Length());
|
||||
extradata->AppendElement(numPps);
|
||||
if (numPps) {
|
||||
extradata->AppendElements(pps.Elements(), pps.Length());
|
||||
}
|
||||
}
|
||||
|
||||
return extradata.forget();
|
||||
}
|
||||
|
||||
bool
|
||||
AnnexB::HasSPS(const mozilla::MediaRawData* aSample)
|
||||
{
|
||||
return HasSPS(aSample->mExtraData);
|
||||
}
|
||||
|
||||
bool
|
||||
AnnexB::HasSPS(const mozilla::MediaByteBuffer* aExtraData)
|
||||
{
|
||||
if (!aExtraData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ByteReader reader(aExtraData);
|
||||
const uint8_t* ptr = reader.Read(5);
|
||||
if (!ptr || !reader.CanRead8()) {
|
||||
return false;
|
||||
}
|
||||
uint8_t numSps = reader.ReadU8() & 0x1f;
|
||||
|
||||
return numSps > 0;
|
||||
}
|
||||
|
||||
bool
|
||||
AnnexB::ConvertSampleTo4BytesAVCC(mozilla::MediaRawData* aSample)
|
||||
{
|
||||
|
@ -424,12 +322,4 @@ AnnexB::IsAnnexB(const mozilla::MediaRawData* aSample)
|
|||
return header == 0x00000001 || (header >> 8) == 0x000001;
|
||||
}
|
||||
|
||||
bool
|
||||
AnnexB::CompareExtraData(const mozilla::MediaByteBuffer* aExtraData1,
|
||||
const mozilla::MediaByteBuffer* aExtraData2)
|
||||
{
|
||||
// Very crude comparison.
|
||||
return aExtraData1 == aExtraData2 || *aExtraData1 == *aExtraData2;
|
||||
}
|
||||
|
||||
} // namespace mp4_demuxer
|
||||
|
|
|
@ -181,17 +181,160 @@ SPSData::SPSData()
|
|||
memset(scaling_matrix8x8, 16, sizeof(scaling_matrix8x8));
|
||||
}
|
||||
|
||||
bool
|
||||
SPSData::operator==(const SPSData& aOther) const
|
||||
{
|
||||
return this->valid && aOther.valid &&
|
||||
!memcmp(this, &aOther, sizeof(SPSData));
|
||||
}
|
||||
|
||||
bool
|
||||
SPSData::operator!=(const SPSData& aOther) const
|
||||
{
|
||||
return !(operator==(aOther));
|
||||
}
|
||||
|
||||
// SPSNAL and SPSNALIterator do not own their data.
|
||||
class SPSNAL
|
||||
{
|
||||
public:
|
||||
SPSNAL(const uint8_t* aPtr, size_t aLength)
|
||||
{
|
||||
MOZ_ASSERT(aPtr);
|
||||
|
||||
if (aLength == 0 || (*aPtr & 0x1f) != H264_NAL_SPS) {
|
||||
return;
|
||||
}
|
||||
mDecodedNAL = H264::DecodeNALUnit(aPtr, aLength);
|
||||
if (mDecodedNAL) {
|
||||
mLength = GetBitLength(mDecodedNAL);
|
||||
}
|
||||
}
|
||||
|
||||
SPSNAL() { }
|
||||
|
||||
bool IsValid() const { return mDecodedNAL; }
|
||||
|
||||
bool operator==(const SPSNAL& aOther) const
|
||||
{
|
||||
if (!mDecodedNAL || !aOther.mDecodedNAL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
SPSData decodedSPS1;
|
||||
SPSData decodedSPS2;
|
||||
if (!GetSPSData(decodedSPS1) || !aOther.GetSPSData(decodedSPS2)) {
|
||||
// Couldn't decode one SPS, perform a binary comparison
|
||||
if (mLength != aOther.mLength) {
|
||||
return false;
|
||||
}
|
||||
MOZ_ASSERT(mLength / 8 <= mDecodedNAL->Length());
|
||||
|
||||
if (memcmp(mDecodedNAL->Elements(),
|
||||
aOther.mDecodedNAL->Elements(),
|
||||
mLength / 8)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t remaining = mLength - (mLength & ~7);
|
||||
|
||||
BitReader b1(mDecodedNAL->Elements() + mLength / 8, remaining);
|
||||
BitReader b2(aOther.mDecodedNAL->Elements() + mLength / 8, remaining);
|
||||
for (uint32_t i = 0; i < remaining; i++) {
|
||||
if (b1.ReadBit() != b2.ReadBit()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return decodedSPS1 == decodedSPS2;
|
||||
}
|
||||
|
||||
bool operator!=(const SPSNAL& aOther) const
|
||||
{
|
||||
return !(operator==(aOther));
|
||||
}
|
||||
|
||||
bool GetSPSData(SPSData& aDest) const
|
||||
{
|
||||
return H264::DecodeSPS(mDecodedNAL, aDest);
|
||||
}
|
||||
|
||||
private:
|
||||
RefPtr<mozilla::MediaByteBuffer> mDecodedNAL;
|
||||
uint32_t mLength = 0;
|
||||
};
|
||||
|
||||
class SPSNALIterator
|
||||
{
|
||||
public:
|
||||
explicit SPSNALIterator(const mozilla::MediaByteBuffer* aExtraData)
|
||||
: mExtraDataPtr(aExtraData->Elements())
|
||||
, mReader(aExtraData)
|
||||
{
|
||||
if (!mReader.Read(5)) {
|
||||
return;
|
||||
}
|
||||
|
||||
mNumSPS = mReader.ReadU8() & 0x1f;
|
||||
if (mNumSPS == 0) {
|
||||
return;
|
||||
}
|
||||
mValid = true;
|
||||
}
|
||||
|
||||
SPSNALIterator& operator++()
|
||||
{
|
||||
if (mEOS || !mValid) {
|
||||
return *this;
|
||||
}
|
||||
if (--mNumSPS == 0) {
|
||||
mEOS = true;
|
||||
}
|
||||
uint16_t length = mReader.ReadU16();
|
||||
if (length == 0 || !mReader.Read(length)) {
|
||||
mEOS = true;
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
|
||||
explicit operator bool() const
|
||||
{
|
||||
return mValid && !mEOS;
|
||||
}
|
||||
|
||||
SPSNAL operator*() const
|
||||
{
|
||||
MOZ_ASSERT(bool(*this));
|
||||
ByteReader reader(mExtraDataPtr + mReader.Offset(), mReader.Remaining());
|
||||
uint16_t length = reader.ReadU16();
|
||||
const uint8_t* ptr = reader.Read(length);
|
||||
if (!ptr) {
|
||||
return SPSNAL();
|
||||
}
|
||||
return SPSNAL(ptr, length);
|
||||
}
|
||||
|
||||
private:
|
||||
const uint8_t* mExtraDataPtr;
|
||||
ByteReader mReader;
|
||||
bool mValid = false;
|
||||
bool mEOS = false;
|
||||
uint8_t mNumSPS = 0;
|
||||
};
|
||||
|
||||
/* static */ already_AddRefed<mozilla::MediaByteBuffer>
|
||||
H264::DecodeNALUnit(const mozilla::MediaByteBuffer* aNAL)
|
||||
H264::DecodeNALUnit(const uint8_t* aNAL, size_t aLength)
|
||||
{
|
||||
MOZ_ASSERT(aNAL);
|
||||
|
||||
if (aNAL->Length() < 4) {
|
||||
if (aLength < 4) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
RefPtr<mozilla::MediaByteBuffer> rbsp = new mozilla::MediaByteBuffer;
|
||||
ByteReader reader(aNAL);
|
||||
ByteReader reader(aNAL, aLength);
|
||||
uint8_t nal_unit_type = reader.ReadU8() & 0x1f;
|
||||
uint32_t nalUnitHeaderBytes = 1;
|
||||
if (nal_unit_type == H264_NAL_PREFIX ||
|
||||
|
@ -400,6 +543,8 @@ H264::DecodeSPS(const mozilla::MediaByteBuffer* aSPS, SPSData& aDest)
|
|||
ConditionDimension(aDest.pic_height / aDest.sample_ratio);
|
||||
}
|
||||
|
||||
aDest.valid = true;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -573,11 +718,11 @@ H264::vui_parameters(BitReader& aBr, SPSData& aDest)
|
|||
READUE(chroma_sample_loc_type_bottom_field, 5);
|
||||
}
|
||||
|
||||
aDest.timing_info_present_flag = aBr.ReadBit();
|
||||
if (aDest.timing_info_present_flag) {
|
||||
aDest.num_units_in_tick = aBr.ReadBits(32);
|
||||
aDest.time_scale = aBr.ReadBits(32);
|
||||
aDest.fixed_frame_rate_flag = aBr.ReadBit();
|
||||
bool timing_info_present_flag = aBr.ReadBit();
|
||||
if (timing_info_present_flag) {
|
||||
aBr.ReadBits(32); // num_units_in_tick
|
||||
aBr.ReadBits(32); // time_scale
|
||||
aBr.ReadBit(); // fixed_frame_rate_flag
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -586,46 +731,11 @@ H264::vui_parameters(BitReader& aBr, SPSData& aDest)
|
|||
H264::DecodeSPSFromExtraData(const mozilla::MediaByteBuffer* aExtraData,
|
||||
SPSData& aDest)
|
||||
{
|
||||
if (!AnnexB::HasSPS(aExtraData)) {
|
||||
SPSNALIterator it(aExtraData);
|
||||
if (!it) {
|
||||
return false;
|
||||
}
|
||||
ByteReader reader(aExtraData);
|
||||
|
||||
if (!reader.Read(5)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint8_t numSps = reader.ReadU8() & 0x1f;
|
||||
if (!numSps) {
|
||||
// No SPS.
|
||||
return false;
|
||||
}
|
||||
|
||||
if (numSps > 1) {
|
||||
NS_WARNING("Multiple SPS, only decoding the first one");
|
||||
}
|
||||
|
||||
uint16_t length = reader.ReadU16();
|
||||
|
||||
if ((reader.PeekU8() & 0x1f) != H264_NAL_SPS) {
|
||||
// Not a SPS NAL type.
|
||||
return false;
|
||||
}
|
||||
const uint8_t* ptr = reader.Read(length);
|
||||
if (!ptr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
RefPtr<mozilla::MediaByteBuffer> rawNAL = new mozilla::MediaByteBuffer;
|
||||
rawNAL->AppendElements(ptr, length);
|
||||
|
||||
RefPtr<mozilla::MediaByteBuffer> sps = DecodeNALUnit(rawNAL);
|
||||
|
||||
if (!sps) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return DecodeSPS(sps, aDest);
|
||||
return (*it).GetSPSData(aDest);
|
||||
}
|
||||
|
||||
/* static */ bool
|
||||
|
@@ -704,6 +814,162 @@ H264::GetFrameType(const mozilla::MediaRawData* aSample)
  return FrameType::OTHER;
}

/* static */ already_AddRefed<mozilla::MediaByteBuffer>
H264::ExtractExtraData(const mozilla::MediaRawData* aSample)
{
  MOZ_ASSERT(AnnexB::IsAVCC(aSample));

  RefPtr<mozilla::MediaByteBuffer> extradata = new mozilla::MediaByteBuffer;

  // SPS content
  nsTArray<uint8_t> sps;
  ByteWriter spsw(sps);
  int numSps = 0;
  // PPS content
  nsTArray<uint8_t> pps;
  ByteWriter ppsw(pps);
  int numPps = 0;

  int nalLenSize = ((*aSample->mExtraData)[4] & 3) + 1;

  size_t sampleSize = aSample->Size();
  if (aSample->mCrypto.mValid) {
    // The content is encrypted, we can only parse the non-encrypted data.
    MOZ_ASSERT(aSample->mCrypto.mPlainSizes.Length() > 0);
    if (aSample->mCrypto.mPlainSizes.Length() == 0 ||
        aSample->mCrypto.mPlainSizes[0] > sampleSize) {
      // This is invalid content.
      return nullptr;
    }
    sampleSize = aSample->mCrypto.mPlainSizes[0];
  }

  ByteReader reader(aSample->Data(), sampleSize);

  nsTArray<SPSData> SPSTable(MAX_SPS_COUNT);
  SPSTable.SetLength(MAX_SPS_COUNT);
  // If we encounter an SPS with the same id but different content, we will
  // stop attempting to detect duplicates.
  bool checkDuplicate = true;

  // Find SPS and PPS NALUs in AVCC data
  while (reader.Remaining() > nalLenSize) {
    uint32_t nalLen;
    switch (nalLenSize) {
      case 1: nalLen = reader.ReadU8(); break;
      case 2: nalLen = reader.ReadU16(); break;
      case 3: nalLen = reader.ReadU24(); break;
      case 4: nalLen = reader.ReadU32(); break;
    }
    const uint8_t* p = reader.Read(nalLen);
    if (!p) {
      return extradata.forget();
    }
    uint8_t nalType = *p & 0x1f;

    if (nalType == H264_NAL_SPS) {
      RefPtr<mozilla::MediaByteBuffer> sps = DecodeNALUnit(p, nalLen);
      SPSData data;
      if (!DecodeSPS(sps, data)) {
        // Invalid SPS, ignore.
        continue;
      }
      uint8_t spsId = data.seq_parameter_set_id;
      if (checkDuplicate && SPSTable[spsId].valid && SPSTable[spsId] == data) {
        // Duplicate, ignore.
        continue;
      }
      if (SPSTable[spsId].valid) {
        // We have already detected an SPS with this id. Just to be safe we
        // disable SPS duplicate detection.
        checkDuplicate = false;
      } else {
        SPSTable[spsId] = data;
      }
      numSps++;
      if (!spsw.WriteU16(nalLen)
          || !spsw.Write(p, nalLen)) {
        return extradata.forget();
      }
    } else if (nalType == H264_NAL_PPS) {
      numPps++;
      if (!ppsw.WriteU16(nalLen)
          || !ppsw.Write(p, nalLen)) {
        return extradata.forget();
      }
    }
  }

  // We ignore PPS data if we didn't find an SPS as we would be unable to
  // decode it anyway.
  numPps = numSps ? numPps : 0;

  if (numSps && sps.Length() > 5) {
    extradata->AppendElement(1); // version
    extradata->AppendElement(sps[3]); // profile
    extradata->AppendElement(sps[4]); // profile compat
    extradata->AppendElement(sps[5]); // level
    extradata->AppendElement(0xfc | 3); // nal size - 1
    extradata->AppendElement(0xe0 | numSps);
    extradata->AppendElements(sps.Elements(), sps.Length());
    extradata->AppendElement(numPps);
    if (numPps) {
      extradata->AppendElements(pps.Elements(), pps.Length());
    }
  }

  return extradata.forget();
}
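The block above rebuilds an AVCDecoderConfigurationRecord (the "avcC" box payload) from the SPS/PPS NALs it finds in the sample. A rough, illustrative map of the bytes it appends follows; field names are taken from ISO/IEC 14496-15 and this struct is not a Gecko type (the real record is variable length, so it cannot be read or written as a plain struct):

#include <cstdint>

struct AvcCHeaderSketch {
  uint8_t configurationVersion;       // AppendElement(1)
  uint8_t AVCProfileIndication;       // sps[3]: profile_idc
  uint8_t profile_compatibility;      // sps[4]: constraint flags
  uint8_t AVCLevelIndication;         // sps[5]: level_idc
  uint8_t lengthSizeMinusOne;         // 0xfc | 3 -> 4-byte NAL length fields
  uint8_t numOfSequenceParameterSets; // 0xe0 | numSps
  // Followed by numSps pairs of (uint16_t length, SPS NAL bytes),
  // one uint8_t PPS count, and numPps pairs of (uint16_t length, PPS NAL bytes).
};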

/* static */ bool
H264::HasSPS(const mozilla::MediaByteBuffer* aExtraData)
{
  return NumSPS(aExtraData) > 0;
}

/* static */ uint8_t
H264::NumSPS(const mozilla::MediaByteBuffer* aExtraData)
{
  if (!aExtraData) {
    return 0;
  }

  ByteReader reader(aExtraData);
  const uint8_t* ptr = reader.Read(5);
  if (!ptr || !reader.CanRead8()) {
    return 0;
  }
  return reader.ReadU8() & 0x1f;
}
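NumSPS relies on the record layout sketched above: the SPS count sits in the low five bits of the sixth byte of the extradata. A minimal stand-alone equivalent for raw bytes (hypothetical helper, not Gecko code):

#include <cstddef>
#include <cstdint>

// Skip the 5-byte avcC header, then read the SPS count from the low 5 bits.
static uint8_t NumSpsInAvcC(const uint8_t* aData, size_t aLength)
{
  if (!aData || aLength < 6) {
    return 0; // too short to contain the header plus the count byte
  }
  return aData[5] & 0x1f;
}
// For example, extradata starting 01 64 00 1f ff e1 ... yields 0xe1 & 0x1f == 1.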

/* static */ bool
H264::CompareExtraData(const mozilla::MediaByteBuffer* aExtraData1,
                       const mozilla::MediaByteBuffer* aExtraData2)
{
  if (aExtraData1 == aExtraData2) {
    return true;
  }
  uint8_t numSPS = NumSPS(aExtraData1);
  if (numSPS == 0 || numSPS != NumSPS(aExtraData2)) {
    return false;
  }

  // We only compare the SPS, as the various H264 decoders can deal with an
  // in-band change of PPS.

  SPSNALIterator it1(aExtraData1);
  SPSNALIterator it2(aExtraData2);

  while (it1 && it2) {
    if (*it1 != *it2) {
      return false;
    }
    ++it1;
    ++it2;
  }
  return true;
}

#undef READUE
#undef READSE

@@ -29,19 +29,10 @@ public:
  // Parse an AVCC extradata and construct the Annex B sample header.
  static already_AddRefed<mozilla::MediaByteBuffer> ConvertExtraDataToAnnexB(
    const mozilla::MediaByteBuffer* aExtraData);
  // Extract SPS and PPS NALs from aSample by looking into each NAL.
  // aSample must be in AVCC format.
  static already_AddRefed<mozilla::MediaByteBuffer> ExtractExtraData(
    const mozilla::MediaRawData* aSample);
  static bool HasSPS(const mozilla::MediaRawData* aSample);
  static bool HasSPS(const mozilla::MediaByteBuffer* aExtraData);
  // Returns true if format is AVCC and sample has valid extradata.
  static bool IsAVCC(const mozilla::MediaRawData* aSample);
  // Returns true if format is AnnexB.
  static bool IsAnnexB(const mozilla::MediaRawData* aSample);
  // Return true if both extradata are equal.
  static bool CompareExtraData(const mozilla::MediaByteBuffer* aExtraData1,
                               const mozilla::MediaByteBuffer* aExtraData2);

private:
  // AVCC box parser helper.

@@ -50,14 +50,14 @@ public:
  {
  }

  size_t Offset()
  size_t Offset() const
  {
    return mLength - mRemaining;
  }

  size_t Remaining() const { return mRemaining; }

  bool CanRead8() { return mRemaining >= 1; }
  bool CanRead8() const { return mRemaining >= 1; }

  uint8_t ReadU8()
  {

@@ -188,7 +188,7 @@ public:
    return mPtr;
  }

  uint8_t PeekU8()
  uint8_t PeekU8() const
  {
    auto ptr = Peek(1);
    if (!ptr) {

@@ -198,7 +198,7 @@ public:
    return *ptr;
  }

  uint16_t PeekU16()
  uint16_t PeekU16() const
  {
    auto ptr = Peek(2);
    if (!ptr) {

@@ -208,7 +208,7 @@ public:
    return mozilla::BigEndian::readUint16(ptr);
  }

  uint32_t PeekU24()
  uint32_t PeekU24() const
  {
    auto ptr = Peek(3);
    if (!ptr) {

@@ -218,12 +218,12 @@ public:
    return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
  }

  uint32_t Peek24()
  uint32_t Peek24() const
  {
    return (uint32_t)PeekU24();
  }

  uint32_t PeekU32()
  uint32_t PeekU32() const
  {
    auto ptr = Peek(4);
    if (!ptr) {

@@ -233,7 +233,7 @@ public:
    return mozilla::BigEndian::readUint32(ptr);
  }

  int32_t Peek32()
  int32_t Peek32() const
  {
    auto ptr = Peek(4);
    if (!ptr) {

@@ -243,7 +243,7 @@ public:
    return mozilla::BigEndian::readInt32(ptr);
  }

  uint64_t PeekU64()
  uint64_t PeekU64() const
  {
    auto ptr = Peek(8);
    if (!ptr) {

@@ -253,7 +253,7 @@ public:
    return mozilla::BigEndian::readUint64(ptr);
  }

  int64_t Peek64()
  int64_t Peek64() const
  {
    auto ptr = Peek(8);
    if (!ptr) {

@@ -263,7 +263,7 @@ public:
    return mozilla::BigEndian::readInt64(ptr);
  }

  const uint8_t* Peek(size_t aCount)
  const uint8_t* Peek(size_t aCount) const
  {
    if (aCount > mRemaining) {
      return nullptr;

@@ -290,12 +290,12 @@ public:
    return mPtr;
  }

  uint32_t Align()
  uint32_t Align() const
  {
    return 4 - ((intptr_t)mPtr & 3);
  }

  template <typename T> bool CanReadType() { return mRemaining >= sizeof(T); }
  template <typename T> bool CanReadType() const { return mRemaining >= sizeof(T); }

  template <typename T> T ReadType()
  {

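The hunks above only add const qualifiers to ByteReader's read-only accessors; the payoff is that inspection code can now take the reader by const reference. A minimal sketch under that assumption (the helper name is hypothetical, and ByteReader refers to the class declared above):

// Peek at the next four bytes without consuming them; compiles against a
// const reference now that Remaining() and Peek32() are const.
static bool StartsWithAnnexBStartCode(const ByteReader& aReader)
{
  return aReader.Remaining() >= 4 && aReader.Peek32() == 0x00000001;
}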
@@ -38,6 +38,11 @@ class BitReader;

struct SPSData
{
  bool operator==(const SPSData& aOther) const;
  bool operator!=(const SPSData& aOther) const;

  bool valid;

  /* Decoded Members */
  /*
    pic_width is the decoded width according to:

@@ -391,10 +396,6 @@ struct SPSData
   */
  uint8_t chroma_sample_loc_type_top_field;
  uint8_t chroma_sample_loc_type_bottom_field;
  bool timing_info_present_flag;
  uint32_t num_units_in_tick;
  uint32_t time_scale;
  bool fixed_frame_rate_flag;

  bool scaling_matrix_present;
  uint8_t scaling_matrix4x4[6][16];

@@ -406,12 +407,15 @@ struct SPSData
class H264
{
public:
  /* Extract RAW BYTE SEQUENCE PAYLOAD from NAL content.
     Returns nullptr if invalid content.
     This is compliant to ITU H.264 7.3.1 Syntax in tabular form NAL unit syntax
   */
  static already_AddRefed<mozilla::MediaByteBuffer> DecodeNALUnit(
    const mozilla::MediaByteBuffer* aNAL);
  /* Check if out of band extradata contains a SPS NAL */
  static bool HasSPS(const mozilla::MediaByteBuffer* aExtraData);
  // Extract SPS and PPS NALs from aSample by looking into each NAL.
  // aSample must be in AVCC format.
  static already_AddRefed<mozilla::MediaByteBuffer> ExtractExtraData(
    const mozilla::MediaRawData* aSample);
  // Return true if both extradata are equal.
  static bool CompareExtraData(const mozilla::MediaByteBuffer* aExtraData1,
                               const mozilla::MediaByteBuffer* aExtraData2);

  // Ensure that SPS data makes sense. Return true if the SPS data was valid,
  // and false otherwise. If false, then content will be adjusted accordingly.

@@ -437,11 +441,19 @@ public:
  static FrameType GetFrameType(const mozilla::MediaRawData* aSample);

private:
  friend class SPSNAL;
  /* Extract RAW BYTE SEQUENCE PAYLOAD from NAL content.
     Returns nullptr if invalid content.
     This is compliant to ITU H.264 7.3.1 Syntax in tabular form NAL unit syntax
   */
  static already_AddRefed<mozilla::MediaByteBuffer> DecodeNALUnit(
    const uint8_t* aNAL, size_t aLength);
  /* Decode SPS NAL RBSP and fill SPSData structure */
  static bool DecodeSPS(const mozilla::MediaByteBuffer* aSPS, SPSData& aDest);
  static bool vui_parameters(BitReader& aBr, SPSData& aDest);
  // Read HRD parameters, all data is ignored.
  static void hrd_parameters(BitReader& aBr);
  static uint8_t NumSPS(const mozilla::MediaByteBuffer* aExtraData);
};

} // namespace mp4_demuxer

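With ExtractExtraData, HasSPS and CompareExtraData now public on H264, a demuxer-side caller can detect in-band SPS changes. A hypothetical sketch of that flow (the helper, its include path, and the caller-owned cache are assumptions, not Gecko code):

#include "mp4_demuxer/H264.h"  // assumed include path for the header above

// Returns true and updates the cached extradata when an AVCC sample carries a
// genuinely new SPS; PPS-only differences are deliberately ignored.
static bool UpdateExtraDataIfChanged(const mozilla::MediaRawData* aSample,
                                     RefPtr<mozilla::MediaByteBuffer>& aCurrent)
{
  RefPtr<mozilla::MediaByteBuffer> fresh =
    mp4_demuxer::H264::ExtractExtraData(aSample);
  if (!mp4_demuxer::H264::HasSPS(fresh) ||
      mp4_demuxer::H264::CompareExtraData(fresh, aCurrent)) {
    return false;
  }
  aCurrent = fresh;
  return true;
}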
@@ -97,7 +97,7 @@ RTPSender::RTPSender(
      payload_type_(-1),
      payload_type_map_(),
      rtp_header_extension_map_(),
      rid_(NULL),
      rid_{0},
      packet_history_(clock),
      flexfec_packet_history_(clock),
      // Statistics

@@ -166,10 +166,6 @@ RTPSender::~RTPSender() {
    delete it->second;
    payload_type_map_.erase(it);
  }

  if (rid_) {
    delete[] rid_;
  }
}

uint16_t RTPSender::ActualSendBitrateKbit() const {

@@ -200,13 +196,12 @@ uint32_t RTPSender::NackOverheadRate() const {

int32_t RTPSender::SetRID(const char* rid) {
  rtc::CritScope lock(&send_critsect_);
  // TODO(jesup) avoid allocations
  if (!rid_ || strlen(rid_) < strlen(rid)) {
    // rid rarely changes length....
    delete [] rid_;
    rid_ = new char[strlen(rid)+1];
  const size_t len = rid ? strlen(rid) : 0;
  if (!len || len >= sizeof(rid_)) {
    rid_[0] = '\0';
  } else {
    memmove(&rid_[0], rid, len + 1);
  }
  strcpy(rid_, rid);
  return 0;
}
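The rewrite replaces the heap-allocated rid_ string with a fixed-size member array (see the rtp_sender.h hunk below), so the copy can be bounded by sizeof(rid_) instead of reallocating. A self-contained sketch of the same pattern (kRIDSize value and the type name are made up for illustration):

#include <cstddef>
#include <cstring>

struct RidHolderSketch {
  static constexpr size_t kRIDSize = 32;  // assumed; the real constant lives elsewhere
  char rid_[kRIDSize + 1] = {0};

  void Set(const char* rid) {
    const size_t len = rid ? strlen(rid) : 0;
    if (!len || len >= sizeof(rid_)) {
      rid_[0] = '\0';                     // empty or too long: store nothing
    } else {
      memmove(rid_, rid, len + 1);        // copy including the NUL terminator
    }
  }
};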

@@ -455,7 +450,7 @@ bool RTPSender::SendOutgoingData(FrameType frame_type,
    result = video_->SendVideo(video_type, frame_type, payload_type,
                               rtp_timestamp, capture_time_ms, payload_data,
                               payload_size, fragmentation, rtp_header,
                               rid_);
                               &rid_[0]);
  }

  rtc::CritScope cs(&statistics_crit_);

@@ -280,7 +280,7 @@ class RTPSender {

  RtpHeaderExtensionMap rtp_header_extension_map_ GUARDED_BY(send_critsect_);

  char* rid_;
  char rid_[kRIDSize + 1] GUARDED_BY(send_critsect_);

  // Tracks the current request for playout delay limits from application
  // and decides whether the current RTP frame should include the playout

@@ -17,6 +17,7 @@
#include <vector>
#include <utility>

#include "webrtc/common_types.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"

@@ -324,8 +325,11 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
    rtp_header->SetExtension<VideoOrientation>(current_rotation);
    last_rotation_ = current_rotation;
  }
  if (rid) {
    rtp_header->SetExtensionWithLength<StreamId>(strlen(rid)-1, rid);
  if (rid && rid[0]) {
    const size_t len = strlen(rid);
    if (len) {
      rtp_header->SetExtensionWithLength<StreamId>(len - 1, rid);
    }
  }

  // FEC settings.

@@ -60,7 +60,8 @@
        <ignore path="**/mobile/android/base/java/org/mozilla/gecko/IntentHelper.java"/>
        <ignore path="**/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java"/>
        <ignore path="**/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java"/>
        <ignore path="src/main/res/values/styles.xml"/>
        <ignore path="src/australis/res/values/styles.xml"/>
        <ignore path="src/photon/res/values/styles.xml"/>
    </issue>

    <!-- We fixed all "Registered" lint errors. However the current gradle plugin has a bug where

@@ -220,6 +221,8 @@
         preprocessed_resources res/raw folder. Lint reports it as unused. We should get
         rid of the file eventually. See bug 1268948. -->
        <ignore path="**/raw/.mkdir.done" />
        <!-- bug 1375351 - To clean unused color codes -->
        <ignore path="src/photon/res/values/colors.xml"/>
        <ignore path="src/australis/res/values/skin_strings.xml"/>
        <ignore path="src/photon/res/values/skin_strings.xml"/>
    </issue>
Some files were not shown because too many files changed in this diff.