зеркало из https://github.com/mozilla/gecko-dev.git
Merge mozilla-central to mozilla-inbound
This commit is contained in:
Коммит
40e583eb67
|
@ -59,7 +59,11 @@ let RemoteDebugger = {
|
|||
this._handleAllowResult = detail => {
|
||||
this._handleAllowResult = null;
|
||||
this._promptingForAllow = null;
|
||||
if (detail.value) {
|
||||
// Newer Gaia supplies |authResult|, which is one of the
|
||||
// AuthenticationResult values.
|
||||
if (detail.authResult) {
|
||||
resolve(detail.authResult);
|
||||
} else if (detail.value) {
|
||||
resolve(DebuggerServer.AuthenticationResult.ALLOW);
|
||||
} else {
|
||||
resolve(DebuggerServer.AuthenticationResult.DENY);
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
<project name="platform_build" path="build" remote="b2g" revision="ef937d1aca7c4cf89ecb5cc43ae8c21c2000a9db">
|
||||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
|
||||
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="rilproxy" path="rilproxy" remote="b2g" revision="5ef30994f4778b4052e58a4383dbe7890048c87e"/>
|
||||
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="93f9ba577f68d772093987c2f1c0a4ae293e1802"/>
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
</project>
|
||||
<project name="rilproxy" path="rilproxy" remote="b2g" revision="5ef30994f4778b4052e58a4383dbe7890048c87e"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="moztt" path="external/moztt" remote="b2g" revision="ed2cf97a6c37a4bbd0bbbbffe06ec7136d8c79ff"/>
|
||||
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="1a1e326a7804a62de8f61fab705f7e1974cad818"/>
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
<project name="platform_build" path="build" remote="b2g" revision="ef937d1aca7c4cf89ecb5cc43ae8c21c2000a9db">
|
||||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
<project name="platform_build" path="build" remote="b2g" revision="52775e03a2d8532429dff579cb2cd56718e488c3">
|
||||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="fake-dalvik" path="dalvik" remote="b2g" revision="ca1f327d5acc198bb4be62fa51db2c039032c9ce"/>
|
||||
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia.git" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="rilproxy" path="rilproxy" remote="b2g" revision="5ef30994f4778b4052e58a4383dbe7890048c87e"/>
|
||||
<project name="platform_hardware_ril" path="hardware/ril" remote="b2g" revision="93f9ba577f68d772093987c2f1c0a4ae293e1802"/>
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
<project name="platform_build" path="build" remote="b2g" revision="ef937d1aca7c4cf89ecb5cc43ae8c21c2000a9db">
|
||||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
</project>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="moztt" path="external/moztt" remote="b2g" revision="ed2cf97a6c37a4bbd0bbbbffe06ec7136d8c79ff"/>
|
||||
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="1a1e326a7804a62de8f61fab705f7e1974cad818"/>
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
{
|
||||
"git": {
|
||||
"git_revision": "eabe35cf054d47087b37c1ca7db8143717fbd7f3",
|
||||
"git_revision": "10dcd335d588997fc12845e9197de89228664f95",
|
||||
"remote": "https://git.mozilla.org/releases/gaia.git",
|
||||
"branch": ""
|
||||
},
|
||||
"revision": "9139624122bb1343e99250a9643bca07f57579f7",
|
||||
"revision": "f378c0a3ef97b44ddb7f71a7a65faddc391c4e0e",
|
||||
"repo_path": "integration/gaia-central"
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
</project>
|
||||
<project name="rilproxy" path="rilproxy" remote="b2g" revision="5ef30994f4778b4052e58a4383dbe7890048c87e"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="moztt" path="external/moztt" remote="b2g" revision="ed2cf97a6c37a4bbd0bbbbffe06ec7136d8c79ff"/>
|
||||
<project name="apitrace" path="external/apitrace" remote="apitrace" revision="1a1e326a7804a62de8f61fab705f7e1974cad818"/>
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
<project name="platform_build" path="build" remote="b2g" revision="52775e03a2d8532429dff579cb2cd56718e488c3">
|
||||
<copyfile dest="Makefile" src="core/root.mk"/>
|
||||
</project>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="eabe35cf054d47087b37c1ca7db8143717fbd7f3"/>
|
||||
<project name="gaia" path="gaia" remote="mozillaorg" revision="10dcd335d588997fc12845e9197de89228664f95"/>
|
||||
<project name="fake-libdvm" path="dalvik" remote="b2g" revision="d50ae982b19f42f0b66d08b9eb306be81687869f"/>
|
||||
<project name="gonk-misc" path="gonk-misc" remote="b2g" revision="c82a532ee1f14b9733214022b1e2d55a0b030be8"/>
|
||||
<project name="librecovery" path="librecovery" remote="b2g" revision="1b3591a50ed352fc6ddb77462b7b35d0bfa555a3"/>
|
||||
|
|
|
@ -1498,7 +1498,7 @@
|
|||
aBrowser.setAttribute("remote", aShouldBeRemote ? "true" : "false");
|
||||
// Tearing down the browser gives a new permanentKey but we want to
|
||||
// keep the old one. Re-set it explicitly after unbinding from DOM.
|
||||
aBrowser.permanentKey = permanentKey;
|
||||
aBrowser._permanentKey = permanentKey;
|
||||
parent.appendChild(aBrowser);
|
||||
|
||||
// Restore the progress listener.
|
||||
|
|
|
@ -410,6 +410,7 @@ skip-if = buildapp == 'mulet' || e10s
|
|||
[browser_tabMatchesInAwesomebar.js]
|
||||
[browser_tabMatchesInAwesomebar_perwindowpb.js]
|
||||
skip-if = e10s || os == 'linux' # Bug 1093373, bug 1104755
|
||||
[browser_tab_detach_restore.js]
|
||||
[browser_tab_drag_drop_perwindow.js]
|
||||
skip-if = buildapp == 'mulet'
|
||||
[browser_tab_dragdrop.js]
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
"use strict";
|
||||
|
||||
add_task(function*() {
|
||||
let uri = "http://example.com/browser/browser/base/content/test/general/dummy_page.html";
|
||||
|
||||
// Clear out the closed windows set to start
|
||||
while (SessionStore.getClosedWindowCount() > 0)
|
||||
SessionStore.forgetClosedWindow(0);
|
||||
|
||||
let tab = gBrowser.addTab();
|
||||
tab.linkedBrowser.loadURI(uri);
|
||||
yield BrowserTestUtils.browserLoaded(tab.linkedBrowser);
|
||||
|
||||
let key = tab.linkedBrowser.permanentKey;
|
||||
let win = gBrowser.replaceTabWithWindow(tab);
|
||||
yield new Promise(resolve => whenDelayedStartupFinished(win, resolve));
|
||||
|
||||
is(win.gBrowser.selectedBrowser.permanentKey, key, "Should have properly copied the permanentKey");
|
||||
yield promiseWindowClosed(win);
|
||||
|
||||
is(SessionStore.getClosedWindowCount(), 1, "Should have restore data for the closed window");
|
||||
|
||||
win = SessionStore.undoCloseWindow(0);
|
||||
yield BrowserTestUtils.waitForEvent(win, "load");
|
||||
yield BrowserTestUtils.waitForEvent(win.gBrowser.tabs[0], "SSTabRestored");
|
||||
|
||||
is(win.gBrowser.tabs.length, 1, "Should have restored one tab");
|
||||
is(win.gBrowser.selectedBrowser.currentURI.spec, uri, "Should have restored the right page");
|
||||
|
||||
yield promiseWindowClosed(win);
|
||||
});
|
|
@ -194,6 +194,8 @@ ReadingListImpl.prototype = {
|
|||
this._invalidateIterators();
|
||||
let item = this._itemFromObject(obj);
|
||||
this._callListeners("onItemAdded", item);
|
||||
let mm = Cc["@mozilla.org/globalmessagemanager;1"].getService(Ci.nsIMessageListenerManager);
|
||||
mm.broadcastAsyncMessage("Reader:Added", item);
|
||||
return item;
|
||||
}),
|
||||
|
||||
|
@ -234,9 +236,23 @@ ReadingListImpl.prototype = {
|
|||
item.list = null;
|
||||
this._itemsByURL.delete(item.url);
|
||||
this._invalidateIterators();
|
||||
let mm = Cc["@mozilla.org/globalmessagemanager;1"].getService(Ci.nsIMessageListenerManager);
|
||||
mm.broadcastAsyncMessage("Reader:Removed", item);
|
||||
this._callListeners("onItemDeleted", item);
|
||||
}),
|
||||
|
||||
/**
|
||||
* Find any item that matches a given URL - either the item's URL, or its
|
||||
* resolved URL.
|
||||
*
|
||||
* @param {String/nsIURI} uri - URI to match against. This will be normalized.
|
||||
*/
|
||||
getItemForURL: Task.async(function* (uri) {
|
||||
let url = this._normalizeURI(uri).spec;
|
||||
let [item] = yield this.iterator({url: url}, {resolvedURL: url}).items(1);
|
||||
return item;
|
||||
}),
|
||||
|
||||
/**
|
||||
* Adds a listener that will be notified when the list changes. Listeners
|
||||
* are objects with the following optional methods:
|
||||
|
@ -288,6 +304,22 @@ ReadingListImpl.prototype = {
|
|||
// A Set containing listener objects.
|
||||
_listeners: null,
|
||||
|
||||
/**
|
||||
* Normalize a URI, stripping away extraneous parts we don't want to store
|
||||
* or compare against.
|
||||
*
|
||||
* @param {nsIURI/String} uri - URI to normalize.
|
||||
* @returns {nsIURI} Cloned and normalized version of the input URI.
|
||||
*/
|
||||
_normalizeURI(uri) {
|
||||
if (typeof uri == "string") {
|
||||
uri = Services.io.newURI(uri, "", null);
|
||||
}
|
||||
uri = uri.cloneIgnoringRef();
|
||||
uri.userPass = "";
|
||||
return uri;
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the ReadingListItem represented by the given simple object. If
|
||||
* the item doesn't exist yet, it's created first.
|
||||
|
@ -349,6 +381,8 @@ ReadingListImpl.prototype = {
|
|||
},
|
||||
};
|
||||
|
||||
let _unserializable = () => {}; // See comments in the ReadingListItem ctor.
|
||||
|
||||
/**
|
||||
* An item in a reading list.
|
||||
*
|
||||
|
@ -359,6 +393,18 @@ ReadingListImpl.prototype = {
|
|||
*/
|
||||
function ReadingListItem(props={}) {
|
||||
this._properties = {};
|
||||
|
||||
// |this._unserializable| works around a problem when sending one of these
|
||||
// items via a message manager. If |this.list| is set, the item can't be
|
||||
// transferred directly, so .toJSON is implicitly called and the object
|
||||
// returned via that is sent. However, once the item is deleted and |this.list|
|
||||
// is null, the item *can* be directly serialized - so the message handler
|
||||
// sees the "raw" object - ie, it sees "_properties" etc.
|
||||
// We work around this problem by *always* having an unserializable property
|
||||
// on the object - this way the implicit .toJSON call is always made, even
|
||||
// when |this.list| is null.
|
||||
this._unserializable = _unserializable;
|
||||
|
||||
this.setProperties(props, false);
|
||||
}
|
||||
|
||||
|
@ -830,7 +876,7 @@ function hash(str) {
|
|||
hasher.updateFromStream(stream, -1);
|
||||
let binaryStr = hasher.finish(false);
|
||||
let hexStr =
|
||||
[("0" + binaryStr.charCodeAt(i).toString(16)).slice(-2) for (i in hash)].
|
||||
[("0" + binaryStr.charCodeAt(i).toString(16)).slice(-2) for (i in binaryStr)].
|
||||
join("");
|
||||
return hexStr;
|
||||
}
|
||||
|
|
|
@ -697,7 +697,7 @@ function hash(str) {
|
|||
hasher.updateFromStream(stream, -1);
|
||||
let binaryStr = hasher.finish(false);
|
||||
let hexStr =
|
||||
[("0" + binaryStr.charCodeAt(i).toString(16)).slice(-2) for (i in hash)].
|
||||
[("0" + binaryStr.charCodeAt(i).toString(16)).slice(-2) for (i in binaryStr)].
|
||||
join("");
|
||||
return hexStr;
|
||||
}
|
||||
|
|
|
@ -79,15 +79,15 @@ function testSetBreakpointBlankLine() {
|
|||
let sourceForm = getSourceForm(gSources, COFFEE_URL);
|
||||
|
||||
let source = gDebugger.gThreadClient.source(sourceForm);
|
||||
source.setBreakpoint({ line: 3 }, aResponse => {
|
||||
source.setBreakpoint({ line: 7 }, aResponse => {
|
||||
ok(!aResponse.error,
|
||||
"Should be able to set a breakpoint in a coffee source file on a blank line.");
|
||||
ok(aResponse.actualLocation,
|
||||
"Because 3 is empty, we should have an actualLocation.");
|
||||
is(aResponse.actualLocation.source.url, COFFEE_URL,
|
||||
"actualLocation.actor should be source mapped to the coffee file.");
|
||||
is(aResponse.actualLocation.line, 2,
|
||||
"actualLocation.line should be source mapped back to 2.");
|
||||
is(aResponse.actualLocation.line, 8,
|
||||
"actualLocation.line should be source mapped back to 8.");
|
||||
|
||||
deferred.resolve();
|
||||
});
|
||||
|
@ -147,7 +147,7 @@ function testStepping() {
|
|||
is(aPacket.frame.environment.bindings.variables.start.value, 0,
|
||||
"'start' is 0.");
|
||||
is(aPacket.frame.environment.bindings.variables.stop.value, 5,
|
||||
"'stop' hasn't been assigned to yet.");
|
||||
"'stop' is 5.");
|
||||
is(aPacket.frame.environment.bindings.variables.pivot.value.type, "undefined",
|
||||
"'pivot' hasn't been assigned to yet.");
|
||||
|
||||
|
|
|
@ -45,11 +45,11 @@ function testSetBreakpoint() {
|
|||
let sourceForm = getSourceForm(gSources, JS_URL);
|
||||
let source = gDebugger.gThreadClient.source(sourceForm);
|
||||
|
||||
source.setBreakpoint({ line: 30, column: 21 }, aResponse => {
|
||||
source.setBreakpoint({ line: 30 }, aResponse => {
|
||||
ok(!aResponse.error,
|
||||
"Should be able to set a breakpoint in a js file.");
|
||||
ok(!aResponse.actualLocation,
|
||||
"Should be able to set a breakpoint on line 30 and column 10.");
|
||||
"Should be able to set a breakpoint on line 30.");
|
||||
|
||||
gDebugger.gClient.addOneTimeListener("resumed", () => {
|
||||
waitForCaretAndScopes(gPanel, 30).then(() => {
|
||||
|
|
|
@ -352,8 +352,12 @@ PerformanceFront.prototype = {
|
|||
* them to consumers.
|
||||
*/
|
||||
_pullAllocationSites: Task.async(function *() {
|
||||
let isDetached = (yield this._request("memory", "getState")) !== "attached";
|
||||
if (isDetached) {
|
||||
return;
|
||||
}
|
||||
|
||||
let memoryData = yield this._request("memory", "getAllocations");
|
||||
let isStillAttached = yield this._request("memory", "getState") == "attached";
|
||||
|
||||
this.emit("allocations", {
|
||||
sites: memoryData.allocations,
|
||||
|
@ -362,10 +366,8 @@ PerformanceFront.prototype = {
|
|||
counts: memoryData.counts
|
||||
});
|
||||
|
||||
if (isStillAttached) {
|
||||
let delay = DEFAULT_ALLOCATION_SITES_PULL_TIMEOUT;
|
||||
this._sitesPullTimeout = setTimeout(this._pullAllocationSites, delay);
|
||||
}
|
||||
let delay = DEFAULT_ALLOCATION_SITES_PULL_TIMEOUT;
|
||||
this._sitesPullTimeout = setTimeout(this._pullAllocationSites, delay);
|
||||
}),
|
||||
|
||||
/**
|
||||
|
|
|
@ -197,34 +197,14 @@
|
|||
|
||||
<vbox id="memory-calltree-view" flex="1">
|
||||
<hbox class="call-tree-headers-container">
|
||||
<label class="plain call-tree-header"
|
||||
type="duration"
|
||||
crop="end"
|
||||
value="&profilerUI.table.totalDuration2;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="percentage"
|
||||
crop="end"
|
||||
value="&profilerUI.table.totalPercentage;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="allocations"
|
||||
crop="end"
|
||||
value="&profilerUI.table.totalAlloc;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="self-duration"
|
||||
crop="end"
|
||||
value="&profilerUI.table.selfDuration2;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="self-percentage"
|
||||
crop="end"
|
||||
value="&profilerUI.table.selfPercentage;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="self-allocations"
|
||||
crop="end"
|
||||
value="&profilerUI.table.selfAlloc;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="samples"
|
||||
crop="end"
|
||||
value="&profilerUI.table.samples;"/>
|
||||
<label class="plain call-tree-header"
|
||||
type="function"
|
||||
crop="end"
|
||||
|
|
|
@ -16,6 +16,8 @@ support-files =
|
|||
[browser_perf-compatibility-04.js]
|
||||
[browser_perf-clear-01.js]
|
||||
[browser_perf-clear-02.js]
|
||||
[browser_perf-columns-js-calltree.js]
|
||||
[browser_perf-columns-memory-calltree.js]
|
||||
[browser_perf-data-massaging-01.js]
|
||||
[browser_perf-data-samples.js]
|
||||
[browser_perf-details-calltree-render.js]
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
/**
|
||||
* Tests that the js call tree view renders the correct columns.
|
||||
*/
|
||||
function spawnTest () {
|
||||
let { panel } = yield initPerformance(SIMPLE_URL);
|
||||
let { EVENTS, $, $$, DetailsView, JsCallTreeView } = panel.panelWin;
|
||||
|
||||
// Enable platform data to show the `busyWait` function in the tree.
|
||||
Services.prefs.setBoolPref(PLATFORM_DATA_PREF, true);
|
||||
|
||||
yield DetailsView.selectView("js-calltree");
|
||||
ok(DetailsView.isViewSelected(JsCallTreeView), "The call tree is now selected.");
|
||||
|
||||
yield startRecording(panel);
|
||||
yield busyWait(1000);
|
||||
|
||||
let rendered = once(JsCallTreeView, EVENTS.JS_CALL_TREE_RENDERED);
|
||||
yield stopRecording(panel);
|
||||
yield rendered;
|
||||
|
||||
testCells($, $$, {
|
||||
"duration": true,
|
||||
"percentage": true,
|
||||
"allocations": false,
|
||||
"self-duration": true,
|
||||
"self-percentage": true,
|
||||
"self-allocations": false,
|
||||
"samples": true,
|
||||
"function": true
|
||||
});
|
||||
|
||||
yield teardown(panel);
|
||||
finish();
|
||||
}
|
||||
|
||||
function testCells($, $$, visibleCells) {
|
||||
for (let cell in visibleCells) {
|
||||
if (visibleCells[cell]) {
|
||||
ok($(`.call-tree-cell[type=${cell}]`),
|
||||
`At least one ${cell} column was visible in the tree.`);
|
||||
} else {
|
||||
ok(!$(`.call-tree-cell[type=${cell}]`),
|
||||
`No ${cell} columns were visible in the tree.`);
|
||||
}
|
||||
}
|
||||
|
||||
is($$(".call-tree-cell", $(".call-tree-item")).length,
|
||||
Object.keys(visibleCells).filter(e => visibleCells[e]).length,
|
||||
"The correct number of cells were found in the tree.");
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
/**
|
||||
* Tests that the memory call tree view renders the correct columns.
|
||||
*/
|
||||
function spawnTest () {
|
||||
let { panel } = yield initPerformance(SIMPLE_URL);
|
||||
let { EVENTS, $, $$, DetailsView, MemoryCallTreeView } = panel.panelWin;
|
||||
|
||||
// Enable memory to test.
|
||||
Services.prefs.setBoolPref(MEMORY_PREF, true);
|
||||
|
||||
yield DetailsView.selectView("memory-calltree");
|
||||
ok(DetailsView.isViewSelected(MemoryCallTreeView), "The call tree is now selected.");
|
||||
|
||||
yield startRecording(panel);
|
||||
yield busyWait(1000);
|
||||
|
||||
let rendered = once(MemoryCallTreeView, EVENTS.MEMORY_CALL_TREE_RENDERED);
|
||||
yield stopRecording(panel);
|
||||
yield rendered;
|
||||
|
||||
testCells($, $$, {
|
||||
"duration": false,
|
||||
"percentage": false,
|
||||
"allocations": true,
|
||||
"self-duration": false,
|
||||
"self-percentage": false,
|
||||
"self-allocations": true,
|
||||
"samples": false,
|
||||
"function": true
|
||||
});
|
||||
|
||||
yield teardown(panel);
|
||||
finish();
|
||||
}
|
||||
|
||||
function testCells($, $$, visibleCells) {
|
||||
for (let cell in visibleCells) {
|
||||
if (visibleCells[cell]) {
|
||||
ok($(`.call-tree-cell[type=${cell}]`),
|
||||
`At least one ${cell} column was visible in the tree.`);
|
||||
} else {
|
||||
ok(!$(`.call-tree-cell[type=${cell}]`),
|
||||
`No ${cell} columns were visible in the tree.`);
|
||||
}
|
||||
}
|
||||
|
||||
is($$(".call-tree-cell", $(".call-tree-item")).length,
|
||||
Object.keys(visibleCells).filter(e => visibleCells[e]).length,
|
||||
"The correct number of cells were found in the tree.");
|
||||
}
|
|
@ -10,6 +10,9 @@ let WAIT_TIME = 1000;
|
|||
function spawnTest () {
|
||||
let { target, front } = yield initBackend(SIMPLE_URL);
|
||||
|
||||
let count = 0;
|
||||
let counter = () => count++;
|
||||
|
||||
let {
|
||||
profilerStartTime,
|
||||
timelineStartTime,
|
||||
|
@ -25,7 +28,14 @@ function spawnTest () {
|
|||
ok(typeof memoryStartTime === "number",
|
||||
"The front.startRecording() emits a memory start time.");
|
||||
|
||||
yield busyWait(WAIT_TIME);
|
||||
// Record allocation events to ensure it's called more than once
|
||||
// so we know it's polling
|
||||
front.on("allocations", counter);
|
||||
|
||||
yield Promise.all([
|
||||
busyWait(WAIT_TIME),
|
||||
waitUntil(() => count > 1)
|
||||
]);
|
||||
|
||||
let {
|
||||
profilerEndTime,
|
||||
|
@ -35,6 +45,8 @@ function spawnTest () {
|
|||
withAllocations: true
|
||||
});
|
||||
|
||||
front.off("allocations", counter);
|
||||
|
||||
ok(typeof profilerEndTime === "number",
|
||||
"The front.stopRecording() emits a profiler end time.");
|
||||
ok(typeof timelineEndTime === "number",
|
||||
|
@ -49,6 +61,9 @@ function spawnTest () {
|
|||
ok(memoryEndTime > memoryStartTime,
|
||||
"The memoryEndTime is after memoryStartTime.");
|
||||
|
||||
is((yield front._request("memory", "getState")), "detached",
|
||||
"memory actor is detached when stopping recording with allocations");
|
||||
|
||||
yield removeTab(target.tab);
|
||||
finish();
|
||||
}
|
||||
|
|
|
@ -22,10 +22,10 @@ function test() {
|
|||
is(container.childNodes[0].className, "call-tree-item",
|
||||
"The root node in the tree has the correct class name.");
|
||||
|
||||
is(container.childNodes[0].childNodes.length, 8,
|
||||
is(container.childNodes[0].childNodes.length, 6,
|
||||
"The root node in the tree has the correct number of children.");
|
||||
is(container.childNodes[0].querySelectorAll(".call-tree-cell").length, 8,
|
||||
"The root node in the tree has only 'call-tree-cell' children.");
|
||||
is(container.childNodes[0].querySelectorAll(".call-tree-cell").length, 6,
|
||||
"The root node in the tree has only 6 'call-tree-cell' children.");
|
||||
|
||||
is(container.childNodes[0].childNodes[0].getAttribute("type"), "duration",
|
||||
"The root node in the tree has a duration cell.");
|
||||
|
@ -37,34 +37,24 @@ function test() {
|
|||
is(container.childNodes[0].childNodes[1].getAttribute("value"), "100%",
|
||||
"The root node in the tree has the correct percentage cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[2].getAttribute("type"), "allocations",
|
||||
is(container.childNodes[0].childNodes[2].getAttribute("type"), "self-duration",
|
||||
"The root node in the tree has a self-duration cell.");
|
||||
is(container.childNodes[0].childNodes[2].getAttribute("value"), "0",
|
||||
is(container.childNodes[0].childNodes[2].getAttribute("value"), "0 ms",
|
||||
"The root node in the tree has the correct self-duration cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[3].getAttribute("type"), "self-duration",
|
||||
"The root node in the tree has a self-duration cell.");
|
||||
is(container.childNodes[0].childNodes[3].getAttribute("value"), "0 ms",
|
||||
"The root node in the tree has the correct self-duration cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[4].getAttribute("type"), "self-percentage",
|
||||
is(container.childNodes[0].childNodes[3].getAttribute("type"), "self-percentage",
|
||||
"The root node in the tree has a self-percentage cell.");
|
||||
is(container.childNodes[0].childNodes[4].getAttribute("value"), "0%",
|
||||
is(container.childNodes[0].childNodes[3].getAttribute("value"), "0%",
|
||||
"The root node in the tree has the correct self-percentage cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[5].getAttribute("type"), "self-allocations",
|
||||
"The root node in the tree has a self-percentage cell.");
|
||||
is(container.childNodes[0].childNodes[5].getAttribute("value"), "0",
|
||||
"The root node in the tree has the correct self-percentage cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[6].getAttribute("type"), "samples",
|
||||
is(container.childNodes[0].childNodes[4].getAttribute("type"), "samples",
|
||||
"The root node in the tree has an samples cell.");
|
||||
is(container.childNodes[0].childNodes[6].getAttribute("value"), "4",
|
||||
is(container.childNodes[0].childNodes[4].getAttribute("value"), "4",
|
||||
"The root node in the tree has the correct samples cell value.");
|
||||
|
||||
is(container.childNodes[0].childNodes[7].getAttribute("type"), "function",
|
||||
is(container.childNodes[0].childNodes[5].getAttribute("type"), "function",
|
||||
"The root node in the tree has a function cell.");
|
||||
is(container.childNodes[0].childNodes[7].style.MozMarginStart, "0px",
|
||||
is(container.childNodes[0].childNodes[5].style.MozMarginStart, "0px",
|
||||
"The root node in the tree has the correct indentation.");
|
||||
|
||||
finish();
|
||||
|
|
|
@ -44,26 +44,22 @@ function test() {
|
|||
ok(!A.target.querySelector(".call-tree-category").hidden,
|
||||
"The .A.B.D node's category label cell should not be hidden.");
|
||||
|
||||
is(D.target.childNodes.length, 8,
|
||||
is(D.target.childNodes.length, 6,
|
||||
"The number of columns displayed for tree items is correct.");
|
||||
is(D.target.childNodes[0].getAttribute("type"), "duration",
|
||||
"The first column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[1].getAttribute("type"), "percentage",
|
||||
"The third column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[2].getAttribute("type"), "allocations",
|
||||
is(D.target.childNodes[2].getAttribute("type"), "self-duration",
|
||||
"The second column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[3].getAttribute("type"), "self-duration",
|
||||
"The second column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[4].getAttribute("type"), "self-percentage",
|
||||
is(D.target.childNodes[3].getAttribute("type"), "self-percentage",
|
||||
"The fourth column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[5].getAttribute("type"), "self-allocations",
|
||||
"The fourth column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[6].getAttribute("type"), "samples",
|
||||
is(D.target.childNodes[4].getAttribute("type"), "samples",
|
||||
"The fifth column displayed for tree items is correct.");
|
||||
is(D.target.childNodes[7].getAttribute("type"), "function",
|
||||
is(D.target.childNodes[5].getAttribute("type"), "function",
|
||||
"The sixth column displayed for tree items is correct.");
|
||||
|
||||
let functionCell = D.target.childNodes[7];
|
||||
let functionCell = D.target.childNodes[5];
|
||||
|
||||
is(functionCell.childNodes.length, 9,
|
||||
"The number of columns displayed for function cells is correct.");
|
||||
|
|
|
@ -103,9 +103,6 @@ let JsCallTreeView = Heritage.extend(DetailsSubview, {
|
|||
container.innerHTML = "";
|
||||
root.attachTo(container);
|
||||
|
||||
// Profiler data does not contain memory allocations information.
|
||||
root.toggleAllocations(false);
|
||||
|
||||
// When platform data isn't shown, hide the cateogry labels, since they're
|
||||
// only available for C++ frames.
|
||||
let contentOnly = !PerformanceController.getOption("show-platform-data");
|
||||
|
|
|
@ -89,6 +89,13 @@ let MemoryCallTreeView = Heritage.extend(DetailsSubview, {
|
|||
// Call trees should only auto-expand when not inverted. Passing undefined
|
||||
// will default to the CALL_TREE_AUTO_EXPAND depth.
|
||||
autoExpandDepth: options.inverted ? 0 : undefined,
|
||||
// Some cells like the time duration and cost percentage don't make sense
|
||||
// for a memory allocations call tree.
|
||||
visibleCells: {
|
||||
allocations: true,
|
||||
selfAllocations: true,
|
||||
function: true
|
||||
}
|
||||
});
|
||||
|
||||
// Bind events.
|
||||
|
|
|
@ -509,7 +509,6 @@ let ProfileView = {
|
|||
|
||||
let contentOnly = !Prefs.showPlatformData;
|
||||
callTreeRoot.toggleCategories(!contentOnly);
|
||||
callTreeRoot.toggleAllocations(false);
|
||||
|
||||
this._callTreeRootByPanel.set(panel, callTreeRoot);
|
||||
},
|
||||
|
|
|
@ -17,9 +17,20 @@ const MILLISECOND_UNITS = L10N.getStr("table.ms");
|
|||
const PERCENTAGE_UNITS = L10N.getStr("table.percentage");
|
||||
const URL_LABEL_TOOLTIP = L10N.getStr("table.url.tooltiptext");
|
||||
const ZOOM_BUTTON_TOOLTIP = L10N.getStr("table.zoom.tooltiptext");
|
||||
const CALL_TREE_AUTO_EXPAND = 3; // depth
|
||||
const CALL_TREE_INDENTATION = 16; // px
|
||||
|
||||
const DEFAULT_SORTING_PREDICATE = (a, b) => a.frame.samples < b.frame.samples ? 1 : -1;
|
||||
const DEFAULT_AUTO_EXPAND_DEPTH = 3; // depth
|
||||
const DEFAULT_VISIBLE_CELLS = {
|
||||
duration: true,
|
||||
percentage: true,
|
||||
allocations: false,
|
||||
selfDuration: true,
|
||||
selfPercentage: true,
|
||||
selfAllocations: false,
|
||||
samples: true,
|
||||
function: true
|
||||
};
|
||||
|
||||
const clamp = (val, min, max) => Math.max(min, Math.min(max, val));
|
||||
const sum = vals => vals.reduce((a, b) => a + b, 0);
|
||||
|
@ -55,23 +66,25 @@ exports.CallView = CallView;
|
|||
* top-down). Defaults to false.
|
||||
* @param function sortingPredicate [optional]
|
||||
* The predicate used to sort the tree items when created. Defaults to
|
||||
* the caller's sortingPredicate if a caller exists, otherwise defaults
|
||||
* the caller's `sortingPredicate` if a caller exists, otherwise defaults
|
||||
* to DEFAULT_SORTING_PREDICATE. The two passed arguments are FrameNodes.
|
||||
* @param number autoExpandDepth [optional]
|
||||
* The depth to which the tree should automatically expand. Defualts to
|
||||
* the caller's `autoExpandDepth` if a caller exists, otherwise defaults
|
||||
* to CALL_TREE_AUTO_EXPAND.
|
||||
* to DEFAULT_AUTO_EXPAND_DEPTH.
|
||||
* @param object visibleCells
|
||||
* An object specifying which cells are visible in the tree. Defaults to
|
||||
* the caller's `visibleCells` if a caller exists, otherwise defaults
|
||||
* to DEFAULT_VISIBLE_CELLS.
|
||||
*/
|
||||
function CallView({ caller, frame, level, hidden, inverted, sortingPredicate, autoExpandDepth }) {
|
||||
// Assume no indentation if this tree item's level is not specified.
|
||||
level = level || 0;
|
||||
|
||||
// Don't increase indentation if this tree item is hidden.
|
||||
if (hidden) {
|
||||
level--;
|
||||
}
|
||||
|
||||
AbstractTreeItem.call(this, { parent: caller, level });
|
||||
function CallView({
|
||||
caller, frame, level, hidden, inverted,
|
||||
sortingPredicate, autoExpandDepth, visibleCells
|
||||
}) {
|
||||
AbstractTreeItem.call(this, {
|
||||
parent: caller,
|
||||
level: level|0 - (hidden ? 1 : 0)
|
||||
});
|
||||
|
||||
this.sortingPredicate = sortingPredicate != null
|
||||
? sortingPredicate
|
||||
|
@ -81,7 +94,12 @@ function CallView({ caller, frame, level, hidden, inverted, sortingPredicate, au
|
|||
this.autoExpandDepth = autoExpandDepth != null
|
||||
? autoExpandDepth
|
||||
: caller ? caller.autoExpandDepth
|
||||
: CALL_TREE_AUTO_EXPAND;
|
||||
: DEFAULT_AUTO_EXPAND_DEPTH;
|
||||
|
||||
this.visibleCells = visibleCells != null
|
||||
? visibleCells
|
||||
: caller ? caller.visibleCells
|
||||
: Object.create(DEFAULT_VISIBLE_CELLS);
|
||||
|
||||
this.caller = caller;
|
||||
this.frame = frame;
|
||||
|
@ -110,35 +128,60 @@ CallView.prototype = Heritage.extend(AbstractTreeItem.prototype, {
|
|||
let totalAllocations;
|
||||
|
||||
if (!this._getChildCalls().length) {
|
||||
selfPercentage = framePercentage;
|
||||
selfDuration = this.frame.duration;
|
||||
totalAllocations = this.frame.allocations;
|
||||
if (this.visibleCells.selfPercentage) {
|
||||
selfPercentage = framePercentage;
|
||||
}
|
||||
if (this.visibleCells.selfDuration) {
|
||||
selfDuration = this.frame.duration;
|
||||
}
|
||||
if (this.visibleCells.allocations) {
|
||||
totalAllocations = this.frame.allocations;
|
||||
}
|
||||
} else {
|
||||
let childrenPercentage = sum(
|
||||
[this._getPercentage(c.samples) for (c of this._getChildCalls())]);
|
||||
let childrenDuration = sum(
|
||||
[c.duration for (c of this._getChildCalls())]);
|
||||
let childrenAllocations = sum(
|
||||
[c.allocations for (c of this._getChildCalls())]);
|
||||
|
||||
selfPercentage = clamp(framePercentage - childrenPercentage, 0, 100);
|
||||
selfDuration = this.frame.duration - childrenDuration;
|
||||
totalAllocations = this.frame.allocations + childrenAllocations;
|
||||
|
||||
// Avoid performing costly computations if the respective columns
|
||||
// won't be shown anyway.
|
||||
if (this.visibleCells.selfPercentage) {
|
||||
let childrenPercentage = sum([this._getPercentage(c.samples) for (c of this._getChildCalls())]);
|
||||
selfPercentage = clamp(framePercentage - childrenPercentage, 0, 100);
|
||||
}
|
||||
if (this.visibleCells.selfDuration) {
|
||||
let childrenDuration = sum([c.duration for (c of this._getChildCalls())]);
|
||||
selfDuration = this.frame.duration - childrenDuration;
|
||||
}
|
||||
if (this.visibleCells.allocations) {
|
||||
let childrenAllocations = sum([c.allocations for (c of this._getChildCalls())]);
|
||||
totalAllocations = this.frame.allocations + childrenAllocations;
|
||||
}
|
||||
if (this.inverted) {
|
||||
selfPercentage = framePercentage - selfPercentage;
|
||||
selfDuration = this.frame.duration - selfDuration;
|
||||
}
|
||||
}
|
||||
|
||||
let durationCell = this._createTimeCell(this.frame.duration);
|
||||
let selfDurationCell = this._createTimeCell(selfDuration, true);
|
||||
let percentageCell = this._createExecutionCell(framePercentage);
|
||||
let selfPercentageCell = this._createExecutionCell(selfPercentage, true);
|
||||
let allocationsCell = this._createAllocationsCell(totalAllocations);
|
||||
let selfAllocationsCell = this._createAllocationsCell(this.frame.allocations, true);
|
||||
let samplesCell = this._createSamplesCell(this.frame.samples);
|
||||
let functionCell = this._createFunctionCell(arrowNode, frameInfo, this.level);
|
||||
if (this.visibleCells.duration) {
|
||||
var durationCell = this._createTimeCell(this.frame.duration);
|
||||
}
|
||||
if (this.visibleCells.selfDuration) {
|
||||
var selfDurationCell = this._createTimeCell(selfDuration, true);
|
||||
}
|
||||
if (this.visibleCells.percentage) {
|
||||
var percentageCell = this._createExecutionCell(framePercentage);
|
||||
}
|
||||
if (this.visibleCells.selfPercentage) {
|
||||
var selfPercentageCell = this._createExecutionCell(selfPercentage, true);
|
||||
}
|
||||
if (this.visibleCells.allocations) {
|
||||
var allocationsCell = this._createAllocationsCell(totalAllocations);
|
||||
}
|
||||
if (this.visibleCells.selfAllocations) {
|
||||
var selfAllocationsCell = this._createAllocationsCell(this.frame.allocations, true);
|
||||
}
|
||||
if (this.visibleCells.samples) {
|
||||
var samplesCell = this._createSamplesCell(this.frame.samples);
|
||||
}
|
||||
if (this.visibleCells.function) {
|
||||
var functionCell = this._createFunctionCell(arrowNode, frameInfo, this.level);
|
||||
}
|
||||
|
||||
let targetNode = document.createElement("hbox");
|
||||
targetNode.className = "call-tree-item";
|
||||
|
@ -155,14 +198,30 @@ CallView.prototype = Heritage.extend(AbstractTreeItem.prototype, {
|
|||
functionCell.querySelector(".call-tree-category").hidden = true;
|
||||
}
|
||||
|
||||
targetNode.appendChild(durationCell);
|
||||
targetNode.appendChild(percentageCell);
|
||||
targetNode.appendChild(allocationsCell);
|
||||
targetNode.appendChild(selfDurationCell);
|
||||
targetNode.appendChild(selfPercentageCell);
|
||||
targetNode.appendChild(selfAllocationsCell);
|
||||
targetNode.appendChild(samplesCell);
|
||||
targetNode.appendChild(functionCell);
|
||||
if (this.visibleCells.duration) {
|
||||
targetNode.appendChild(durationCell);
|
||||
}
|
||||
if (this.visibleCells.percentage) {
|
||||
targetNode.appendChild(percentageCell);
|
||||
}
|
||||
if (this.visibleCells.allocations) {
|
||||
targetNode.appendChild(allocationsCell);
|
||||
}
|
||||
if (this.visibleCells.selfDuration) {
|
||||
targetNode.appendChild(selfDurationCell);
|
||||
}
|
||||
if (this.visibleCells.selfPercentage) {
|
||||
targetNode.appendChild(selfPercentageCell);
|
||||
}
|
||||
if (this.visibleCells.selfAllocations) {
|
||||
targetNode.appendChild(selfAllocationsCell);
|
||||
}
|
||||
if (this.visibleCells.samples) {
|
||||
targetNode.appendChild(samplesCell);
|
||||
}
|
||||
if (this.visibleCells.function) {
|
||||
targetNode.appendChild(functionCell);
|
||||
}
|
||||
|
||||
return targetNode;
|
||||
},
|
||||
|
@ -301,18 +360,6 @@ CallView.prototype = Heritage.extend(AbstractTreeItem.prototype, {
|
|||
return cell;
|
||||
},
|
||||
|
||||
/**
|
||||
* Toggles the allocations information hidden or visible.
|
||||
* @param boolean visible
|
||||
*/
|
||||
toggleAllocations: function(visible) {
|
||||
if (!visible) {
|
||||
this.container.setAttribute("allocations-hidden", "");
|
||||
} else {
|
||||
this.container.removeAttribute("allocations-hidden");
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Toggles the category information hidden or visible.
|
||||
* @param boolean visible
|
||||
|
|
|
@ -14,6 +14,7 @@ Cu.import("resource://gre/modules/Services.jsm");
|
|||
Cu.import("resource://gre/modules/Task.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "ReaderMode", "resource://gre/modules/ReaderMode.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "ReadingList", "resource:///modules/readinglist/ReadingList.jsm");
|
||||
|
||||
const gStringBundle = Services.strings.createBundle("chrome://global/locale/aboutReader.properties");
|
||||
|
||||
|
@ -42,7 +43,7 @@ let ReaderParent = {
|
|||
receiveMessage: function(message) {
|
||||
switch (message.name) {
|
||||
case "Reader:AddToList":
|
||||
// XXX: To implement.
|
||||
ReadingList.addItem(message.data.article);
|
||||
break;
|
||||
|
||||
case "Reader:ArticleGet":
|
||||
|
@ -59,11 +60,21 @@ let ReaderParent = {
|
|||
break;
|
||||
}
|
||||
case "Reader:ListStatusRequest":
|
||||
// XXX: To implement.
|
||||
ReadingList.count(message.data).then(count => {
|
||||
let mm = message.target.messageManager
|
||||
// Make sure the target browser is still alive before trying to send data back.
|
||||
if (mm) {
|
||||
mm.sendAsyncMessage("Reader:ListStatusData",
|
||||
{ inReadingList: !!count, url: message.data.url });
|
||||
}
|
||||
});
|
||||
break;
|
||||
|
||||
case "Reader:RemoveFromList":
|
||||
// XXX: To implement.
|
||||
// We need to get the "real" item to delete it.
|
||||
ReadingList.getItemForURL(message.data.url).then(item => {
|
||||
ReadingList.deleteItem(item)
|
||||
});
|
||||
break;
|
||||
|
||||
case "Reader:Share":
|
||||
|
|
|
@ -124,8 +124,6 @@
|
|||
overflow: auto;
|
||||
}
|
||||
|
||||
.call-tree-cells-container[allocations-hidden] .call-tree-cell[type="allocations"],
|
||||
.call-tree-cells-container[allocations-hidden] .call-tree-cell[type="self-allocations"],
|
||||
.call-tree-cells-container[categories-hidden] .call-tree-category {
|
||||
display: none;
|
||||
}
|
||||
|
|
|
@ -220,8 +220,6 @@
|
|||
overflow: auto;
|
||||
}
|
||||
|
||||
.call-tree-cells-container[allocations-hidden] .call-tree-cell[type="allocations"],
|
||||
.call-tree-cells-container[allocations-hidden] .call-tree-cell[type="self-allocations"],
|
||||
.call-tree-cells-container[categories-hidden] .call-tree-category {
|
||||
display: none;
|
||||
}
|
||||
|
|
|
@ -1161,7 +1161,27 @@ sync_java_files = [
|
|||
'tokenserver/TokenServerToken.java',
|
||||
]
|
||||
reading_list_service_java_files = [
|
||||
'reading/ClientMetadata.java',
|
||||
'reading/ClientReadingListRecord.java',
|
||||
'reading/FetchSpec.java',
|
||||
'reading/LocalReadingListStorage.java',
|
||||
'reading/ReadingListChangeAccumulator.java',
|
||||
'reading/ReadingListClient.java',
|
||||
'reading/ReadingListClientContentValuesFactory.java',
|
||||
'reading/ReadingListClientRecordFactory.java',
|
||||
'reading/ReadingListConstants.java',
|
||||
'reading/ReadingListDeleteDelegate.java',
|
||||
'reading/ReadingListRecord.java',
|
||||
'reading/ReadingListRecordDelegate.java',
|
||||
'reading/ReadingListRecordResponse.java',
|
||||
'reading/ReadingListRecordUploadDelegate.java',
|
||||
'reading/ReadingListResponse.java',
|
||||
'reading/ReadingListStorage.java',
|
||||
'reading/ReadingListStorageResponse.java',
|
||||
'reading/ReadingListSyncAdapter.java',
|
||||
'reading/ReadingListSynchronizer.java',
|
||||
'reading/ReadingListSynchronizerDelegate.java',
|
||||
'reading/ReadingListSyncService.java',
|
||||
'reading/ReadingListWipeDelegate.java',
|
||||
'reading/ServerReadingListRecord.java',
|
||||
]
|
||||
|
|
|
@ -540,6 +540,7 @@ public class AndroidFxAccount {
|
|||
|
||||
try {
|
||||
StateLabel stateLabel = StateLabel.valueOf(stateLabelString);
|
||||
Logger.debug(LOG_TAG, "Account is in state " + stateLabel);
|
||||
return StateFactory.fromJSONObject(stateLabel, new ExtendedJSONObject(stateString));
|
||||
} catch (Exception e) {
|
||||
throw new IllegalStateException("could not get state", e);
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
public class ClientMetadata {
|
||||
public final long id; // A client numeric ID. We don't always have a GUID.
|
||||
public final long lastModified; // A client timestamp.
|
||||
public final boolean isDeleted;
|
||||
public final boolean isArchived;
|
||||
|
||||
public ClientMetadata(final long id, final long lastModified, final boolean isDeleted, final boolean isArchived) {
|
||||
this.id = id;
|
||||
this.lastModified = lastModified;
|
||||
this.isDeleted = isDeleted;
|
||||
this.isArchived = isArchived;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
|
||||
public class ClientReadingListRecord extends ReadingListRecord {
|
||||
final ExtendedJSONObject fields;
|
||||
public ClientMetadata clientMetadata;
|
||||
|
||||
private String getDefaultAddedBy() {
|
||||
return "Test Device"; // TODO
|
||||
}
|
||||
|
||||
/**
|
||||
* Provided `fields` are *not copied*.
|
||||
*/
|
||||
public ClientReadingListRecord(final ServerMetadata serverMetadata, final ClientMetadata clientMetadata, final ExtendedJSONObject fields) {
|
||||
super(serverMetadata);
|
||||
this.clientMetadata = clientMetadata == null ? new ClientMetadata(-1L, -1L, false, false) : clientMetadata;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
public ClientReadingListRecord(String url, String title, String addedBy) {
|
||||
this(url, title, addedBy, System.currentTimeMillis(), false, false);
|
||||
}
|
||||
|
||||
public ClientReadingListRecord(String url, String title, String addedBy, long lastModified, boolean isDeleted, boolean isArchived) {
|
||||
super(null);
|
||||
|
||||
// Required.
|
||||
if (url == null) {
|
||||
throw new IllegalArgumentException("url must be provided.");
|
||||
}
|
||||
|
||||
final ExtendedJSONObject f = new ExtendedJSONObject();
|
||||
f.put("url", url);
|
||||
f.put("title", title == null ? "" : title);
|
||||
f.put("added_by", addedBy == null ? getDefaultAddedBy() : addedBy);
|
||||
|
||||
this.fields = f;
|
||||
this.clientMetadata = new ClientMetadata(-1L, lastModified, isDeleted, isArchived);
|
||||
}
|
||||
|
||||
public ExtendedJSONObject toJSON() {
|
||||
final ExtendedJSONObject object = this.fields.deepCopy();
|
||||
final String guid = getGUID();
|
||||
|
||||
if (guid != null) {
|
||||
object.put("id", guid);
|
||||
}
|
||||
return object;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAddedBy() {
|
||||
return this.fields.getString("added_by");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getURL() {
|
||||
return this.fields.getString("url"); // TODO: resolved_url
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTitle() {
|
||||
return this.fields.getString("title"); // TODO: resolved_title
|
||||
}
|
||||
|
||||
/**
|
||||
* Produce a record just like the server record, but with the
|
||||
* appropriate additional metadata, such as the local numeric ID.
|
||||
*/
|
||||
public ClientReadingListRecord givenServerRecord(ServerReadingListRecord down) {
|
||||
return new ClientReadingListRecord(down.serverMetadata, this.clientMetadata, down.fields);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
|
||||
import ch.boye.httpclientandroidlib.client.utils.URIBuilder;
|
||||
|
||||
/**
|
||||
* Defines the parameters that can be added to a reading list fetch URI.
|
||||
*/
|
||||
public class FetchSpec {
|
||||
private final String queryString;
|
||||
|
||||
private FetchSpec(final String q) {
|
||||
this.queryString = q;
|
||||
}
|
||||
|
||||
public URI getURI(final URI serviceURI) throws URISyntaxException {
|
||||
return new URIBuilder(serviceURI).setCustomQuery(queryString).build();
|
||||
}
|
||||
|
||||
public URI getURI(final URI serviceURI, final String path) throws URISyntaxException {
|
||||
final String currentPath = serviceURI.getPath();
|
||||
final String newPath = (currentPath == null ? "" : currentPath) + path;
|
||||
return new URIBuilder(serviceURI).setPath(newPath)
|
||||
.setCustomQuery(queryString)
|
||||
.build();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
final StringBuilder b = new StringBuilder();
|
||||
boolean first = true;
|
||||
|
||||
public FetchSpec build() {
|
||||
return new FetchSpec(b.toString());
|
||||
}
|
||||
|
||||
private void ampersand() {
|
||||
if (first) {
|
||||
first = false;
|
||||
return;
|
||||
}
|
||||
b.append('&');
|
||||
}
|
||||
|
||||
public Builder setUnread(boolean unread) {
|
||||
ampersand();
|
||||
b.append("unread=");
|
||||
b.append(unread);
|
||||
return this;
|
||||
}
|
||||
|
||||
private void qualifyAttribute(String qual, String attr) {
|
||||
ampersand();
|
||||
b.append(qual);
|
||||
b.append(attr);
|
||||
b.append('=');
|
||||
}
|
||||
|
||||
public Builder setMinAttribute(String attr, int val) {
|
||||
qualifyAttribute("min_", attr);
|
||||
b.append(val);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setMaxAttribute(String attr, int val) {
|
||||
qualifyAttribute("max_", attr);
|
||||
b.append(val);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setNotAttribute(String attr, String val) {
|
||||
qualifyAttribute("not_", attr);
|
||||
b.append(val);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setSince(long since) {
|
||||
if (since == -1L) {
|
||||
return this;
|
||||
}
|
||||
|
||||
ampersand();
|
||||
b.append("_since=");
|
||||
b.append(since);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setExcludeDeleted() {
|
||||
ampersand();
|
||||
b.append("not_deleted=true");
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,410 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_CHANGE_FAVORITE_CHANGED;
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_CHANGE_FLAGS;
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_CHANGE_UNREAD_CHANGED;
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_STATUS;
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_STATUS_MODIFIED;
|
||||
import static org.mozilla.gecko.db.BrowserContract.ReadingListItems.SYNC_STATUS_NEW;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.ConcurrentLinkedQueue;
|
||||
|
||||
import org.mozilla.gecko.background.common.log.Logger;
|
||||
import org.mozilla.gecko.db.BrowserContract;
|
||||
import org.mozilla.gecko.db.BrowserContract.ReadingListItems;
|
||||
import org.mozilla.gecko.sync.repositories.android.RepoUtils;
|
||||
|
||||
import android.content.ContentProviderClient;
|
||||
import android.content.ContentProviderOperation;
|
||||
import android.content.ContentProviderResult;
|
||||
import android.content.ContentValues;
|
||||
import android.content.OperationApplicationException;
|
||||
import android.database.Cursor;
|
||||
import android.net.Uri;
|
||||
import android.os.RemoteException;
|
||||
|
||||
public class LocalReadingListStorage implements ReadingListStorage {
|
||||
|
||||
private static final String WHERE_STATUS_NEW = "(" + SYNC_STATUS + " = " + SYNC_STATUS_NEW + ")";
|
||||
|
||||
final class LocalReadingListChangeAccumulator implements ReadingListChangeAccumulator {
|
||||
private static final String LOG_TAG = "RLChanges";
|
||||
|
||||
/**
|
||||
* These are changes that result from uploading new or changed records to the server.
|
||||
* They always correspond to local records.
|
||||
*/
|
||||
private final Queue<ClientReadingListRecord> changes;
|
||||
|
||||
/**
|
||||
* These are deletions that result from uploading new or changed records to the server.
|
||||
* They should always correspond to local records.
|
||||
* These are not common: they should only occur if a conflict occurs.
|
||||
*/
|
||||
private final Queue<ClientReadingListRecord> deletions;
|
||||
|
||||
/**
|
||||
* These are additions or changes fetched from the server.
|
||||
* At the point of collection we don't know if they're records
|
||||
* that exist locally.
|
||||
*
|
||||
* Batching these here, rather than in the client or the synchronizer,
|
||||
* puts the storage implementation in control of when batches are flushed,
|
||||
* or if batches are used at all.
|
||||
*/
|
||||
private final Queue<ServerReadingListRecord> additionsOrChanges;
|
||||
|
||||
LocalReadingListChangeAccumulator() {
|
||||
this.changes = new ConcurrentLinkedQueue<>();
|
||||
this.deletions = new ConcurrentLinkedQueue<>();
|
||||
this.additionsOrChanges = new ConcurrentLinkedQueue<>();
|
||||
}
|
||||
|
||||
public boolean flushDeletions() throws RemoteException {
|
||||
if (deletions.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
long[] ids = new long[deletions.size()];
|
||||
String[] guids = new String[deletions.size()];
|
||||
int iID = 0;
|
||||
int iGUID = 0;
|
||||
for (ClientReadingListRecord record : deletions) {
|
||||
if (record.clientMetadata.id > -1L) {
|
||||
ids[iID++] = record.clientMetadata.id;
|
||||
} else {
|
||||
final String guid = record.getGUID();
|
||||
if (guid == null) {
|
||||
continue;
|
||||
}
|
||||
guids[iGUID++] = guid;
|
||||
}
|
||||
}
|
||||
|
||||
if (iID > 0) {
|
||||
client.delete(URI_WITH_DELETED, RepoUtils.computeSQLLongInClause(ids, ReadingListItems._ID), null);
|
||||
}
|
||||
|
||||
if (iGUID > 0) {
|
||||
client.delete(URI_WITH_DELETED, RepoUtils.computeSQLInClause(iGUID, ReadingListItems.GUID), guids);
|
||||
}
|
||||
|
||||
deletions.clear();
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean flushRecordChanges() throws RemoteException {
|
||||
if (changes.isEmpty() && additionsOrChanges.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// For each returned record, apply it to the local store and clear all sync flags.
|
||||
// We can do this because the server always returns the entire record.
|
||||
//
|
||||
// <https://github.com/mozilla-services/readinglist/issues/138> tracks not doing so
|
||||
// for certain patches, which allows us to optimize here.
|
||||
ArrayList<ContentProviderOperation> operations = new ArrayList<>(changes.size() + additionsOrChanges.size());
|
||||
for (ClientReadingListRecord rec : changes) {
|
||||
operations.add(makeUpdateOp(rec));
|
||||
}
|
||||
|
||||
for (ServerReadingListRecord rec : additionsOrChanges) {
|
||||
// TODO: skip records for which the local copy of the server timestamp
|
||||
// matches the timestamp in the incoming record.
|
||||
// TODO: we can do this by maintaining a lookup table, rather
|
||||
// than hitting the DB. When we do an insert after an upload, say, we
|
||||
// can make a note of it so the next download flush doesn't apply it twice.
|
||||
operations.add(makeUpdateOrInsertOp(rec));
|
||||
}
|
||||
|
||||
// TODO: tell delegate of success or failure.
|
||||
try {
|
||||
Logger.debug(LOG_TAG, "Applying " + operations.size() + " operations.");
|
||||
ContentProviderResult[] results = client.applyBatch(operations);
|
||||
} catch (OperationApplicationException e) {
|
||||
// Oops.
|
||||
Logger.warn(LOG_TAG, "Applying operations failed.", e);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private ContentProviderOperation makeUpdateOrInsertOp(ServerReadingListRecord rec) throws RemoteException {
|
||||
final ClientReadingListRecord clientRec = new ClientReadingListRecord(rec.serverMetadata, null, rec.fields);
|
||||
|
||||
// TODO: use UPDATE OR INSERT equivalent, rather than querying here.
|
||||
if (hasGUID(rec.serverMetadata.guid)) {
|
||||
return makeUpdateOp(clientRec);
|
||||
}
|
||||
|
||||
final ContentValues values = ReadingListClientContentValuesFactory.fromClientRecord(clientRec);
|
||||
return ContentProviderOperation.newInsert(URI_WITHOUT_DELETED)
|
||||
.withValues(values)
|
||||
.build();
|
||||
}
|
||||
|
||||
private ContentProviderOperation makeUpdateOp(ClientReadingListRecord rec) {
|
||||
// We can't use SQLiteQueryBuilder, because it can't do UPDATE,
|
||||
// nor can it give us a WHERE clause.
|
||||
final StringBuilder selection = new StringBuilder();
|
||||
final String[] selectionArgs;
|
||||
|
||||
// We don't apply changes that we've already applied.
|
||||
// We know they've already been applied because our local copy of the
|
||||
// server's version code/timestamp matches the value in the incoming record.
|
||||
long serverLastModified = rec.getServerLastModified();
|
||||
if (serverLastModified != -1L) {
|
||||
// This should always be the case here.
|
||||
selection.append("(" + ReadingListItems.SERVER_LAST_MODIFIED + " IS NOT ");
|
||||
selection.append(serverLastModified);
|
||||
selection.append(") AND ");
|
||||
}
|
||||
|
||||
if (rec.clientMetadata.id > -1L) {
|
||||
selection.append("(");
|
||||
selection.append(ReadingListItems._ID + " = ");
|
||||
selection.append(rec.clientMetadata.id);
|
||||
selection.append(")");
|
||||
selectionArgs = null;
|
||||
} else if (rec.serverMetadata.guid != null) {
|
||||
selection.append("(" + ReadingListItems.GUID + " = ?)");
|
||||
selectionArgs = new String[] { rec.serverMetadata.guid };
|
||||
} else {
|
||||
final String url = rec.fields.getString("url");
|
||||
final String resolvedURL = rec.fields.getString("resolved_url");
|
||||
|
||||
if (url == null && resolvedURL == null) {
|
||||
// We're outta luck.
|
||||
return null;
|
||||
}
|
||||
|
||||
selection.append("((" + ReadingListItems.URL + " = ?) OR (" + ReadingListItems.RESOLVED_URL + " = ?))");
|
||||
if (url != null && resolvedURL != null) {
|
||||
selectionArgs = new String[] { url, resolvedURL };
|
||||
} else {
|
||||
final String arg = url == null ? resolvedURL : url;
|
||||
selectionArgs = new String[] { arg, arg };
|
||||
}
|
||||
}
|
||||
|
||||
final ContentValues values = ReadingListClientContentValuesFactory.fromClientRecord(rec);
|
||||
return ContentProviderOperation.newUpdate(URI_WITHOUT_DELETED)
|
||||
.withSelection(selection.toString(), selectionArgs)
|
||||
.withValues(values)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void finish() throws Exception {
|
||||
flushDeletions();
|
||||
flushRecordChanges();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDeletion(ClientReadingListRecord record) {
|
||||
deletions.add(record);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addChangedRecord(ClientReadingListRecord record) {
|
||||
changes.add(record);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addUploadedRecord(ClientReadingListRecord up,
|
||||
ServerReadingListRecord down) {
|
||||
// TODO
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDownloadedRecord(ServerReadingListRecord down) {
|
||||
additionsOrChanges.add(down);
|
||||
}
|
||||
}
|
||||
|
||||
private final ContentProviderClient client;
|
||||
private final Uri URI_WITHOUT_DELETED = BrowserContract.READING_LIST_AUTHORITY_URI
|
||||
.buildUpon()
|
||||
.appendPath("items")
|
||||
.appendQueryParameter(BrowserContract.PARAM_IS_SYNC, "1")
|
||||
.appendQueryParameter(BrowserContract.PARAM_SHOW_DELETED, "0")
|
||||
.build();
|
||||
|
||||
private final Uri URI_WITH_DELETED = BrowserContract.READING_LIST_AUTHORITY_URI
|
||||
.buildUpon()
|
||||
.appendPath("items")
|
||||
.appendQueryParameter(BrowserContract.PARAM_IS_SYNC, "1")
|
||||
.appendQueryParameter(BrowserContract.PARAM_SHOW_DELETED, "1")
|
||||
.build();
|
||||
|
||||
public LocalReadingListStorage(final ContentProviderClient client) {
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
public boolean hasGUID(String guid) throws RemoteException {
|
||||
final String[] projection = new String[] { ReadingListItems.GUID };
|
||||
final String selection = ReadingListItems.GUID + " = ?";
|
||||
final String[] selectionArgs = new String[] { guid };
|
||||
final Cursor cursor = this.client.query(URI_WITHOUT_DELETED, projection, selection, selectionArgs, null);
|
||||
try {
|
||||
return cursor.moveToFirst();
|
||||
} finally {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
|
||||
public Cursor getModifiedWithSelection(final String selection) {
|
||||
final String[] projection = new String[] {
|
||||
ReadingListItems.GUID,
|
||||
ReadingListItems.IS_FAVORITE,
|
||||
ReadingListItems.RESOLVED_TITLE,
|
||||
ReadingListItems.RESOLVED_URL,
|
||||
ReadingListItems.EXCERPT,
|
||||
};
|
||||
|
||||
|
||||
try {
|
||||
return client.query(URI_WITHOUT_DELETED, projection, selection, null, null);
|
||||
} catch (RemoteException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
}
|
||||
@Override
|
||||
public Cursor getModified() {
|
||||
final String selection = ReadingListItems.SYNC_STATUS + " = " + ReadingListItems.SYNC_STATUS_MODIFIED;
|
||||
return getModifiedWithSelection(selection);
|
||||
}
|
||||
|
||||
// Return changed items that aren't just status changes.
|
||||
// This isn't necessary because we insist on processing status changes before modified items.
|
||||
// Currently we only need this for tests...
|
||||
public Cursor getNonStatusModified() {
|
||||
final String selection = ReadingListItems.SYNC_STATUS + " = " + ReadingListItems.SYNC_STATUS_MODIFIED +
|
||||
" AND ((" + ReadingListItems.SYNC_CHANGE_FLAGS + " & " + ReadingListItems.SYNC_CHANGE_RESOLVED + ") > 0)";
|
||||
|
||||
return getModifiedWithSelection(selection);
|
||||
}
|
||||
|
||||
// These will never conflict (in the case of unread status changes), or
|
||||
// we don't care if they overwrite the server value (in the case of favorite changes).
|
||||
// N.B., don't actually send each field if the appropriate change flag isn't set!
|
||||
@Override
|
||||
public Cursor getStatusChanges() {
|
||||
final String[] projection = new String[] {
|
||||
ReadingListItems.GUID,
|
||||
ReadingListItems.IS_FAVORITE,
|
||||
ReadingListItems.IS_UNREAD,
|
||||
ReadingListItems.MARKED_READ_BY,
|
||||
ReadingListItems.MARKED_READ_ON,
|
||||
ReadingListItems.SYNC_CHANGE_FLAGS,
|
||||
};
|
||||
|
||||
final String selection =
|
||||
SYNC_STATUS + " = " + SYNC_STATUS_MODIFIED + " AND " +
|
||||
"((" + SYNC_CHANGE_FLAGS + " & (" + SYNC_CHANGE_UNREAD_CHANGED + " | " + SYNC_CHANGE_FAVORITE_CHANGED + ")) > 0)";
|
||||
|
||||
try {
|
||||
return client.query(URI_WITHOUT_DELETED, projection, selection, null, null);
|
||||
} catch (RemoteException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Cursor getNew() {
|
||||
// N.B., query for items that have no GUID, regardless of status.
|
||||
// They should all be marked as NEW, but belt and braces.
|
||||
final String selection = WHERE_STATUS_NEW + " OR (" + ReadingListItems.GUID + " IS NULL)";
|
||||
|
||||
try {
|
||||
return client.query(URI_WITHOUT_DELETED, null, selection, null, null);
|
||||
} catch (RemoteException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Every non-deleted reading list item, all columns, no filtering.
 * The caller is responsible for closing the returned cursor.
 *
 * @throws IllegalStateException if the content provider call fails.
 */
@Override
public Cursor getAll() {
  try {
    return client.query(URI_WITHOUT_DELETED, null, null, null, null);
  } catch (RemoteException e) {
    throw new IllegalStateException(e);
  }
}
|
||||
|
||||
private ContentProviderOperation updateAddedByNames(final String local) {
|
||||
String[] selectionArgs = new String[] {"$local"};
|
||||
String selection = WHERE_STATUS_NEW + " AND (" + ReadingListItems.ADDED_BY + " = ?)";
|
||||
return ContentProviderOperation.newUpdate(URI_WITHOUT_DELETED)
|
||||
.withValue(ReadingListItems.ADDED_BY, local)
|
||||
.withSelection(selection, selectionArgs)
|
||||
.build();
|
||||
}
|
||||
|
||||
private ContentProviderOperation updateMarkedReadByNames(final String local) {
|
||||
String[] selectionArgs = new String[] {"$local"};
|
||||
String selection = ReadingListItems.MARKED_READ_BY + " = ?";
|
||||
return ContentProviderOperation.newUpdate(URI_WITHOUT_DELETED)
|
||||
.withValue(ReadingListItems.MARKED_READ_BY, local)
|
||||
.withSelection(selection, selectionArgs)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
 * Consumers of the reading list provider don't know the device name.
 * Rather than smearing that logic into callers, or requiring the database
 * to be able to figure out the name of the device, we have the SyncAdapter
 * do it.
 *
 * After all, the SyncAdapter knows everything -- prefs, channels, profiles,
 * Firefox Account details, etc.
 *
 * To allow this, the CP writes the magic string "$local" wherever a device
 * name is needed. Here in storage, we run a quick UPDATE pass prior to
 * synchronizing, so the device name is 'calcified' at the time of the first
 * sync of that record. The SyncAdapter calls this prior to invoking the
 * synchronizer.
 *
 * @param local the device name to substitute for the "$local" placeholder.
 */
public void updateLocalNames(final String local) {
  ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>(2);
  ops.add(updateAddedByNames(local));
  ops.add(updateMarkedReadByNames(local));

  try {
    client.applyBatch(ops);
  } catch (RemoteException e) {
    // Best-effort: if the batch fails, placeholders survive until the next
    // sync attempt calls this again.
    return;
  } catch (OperationApplicationException e) {
    return;
  }
}
|
||||
|
||||
/**
 * @return a fresh accumulator that batches record changes and flushes them
 *         to the local database when finished.
 */
@Override
public ReadingListChangeAccumulator getChangeAccumulator() {
  return new LocalReadingListChangeAccumulator();
}
|
||||
|
||||
/**
|
||||
* Unused: we implicitly do this when we apply the server record.
|
||||
*/
|
||||
/*
|
||||
public void markStatusChangedItemsAsSynced(Collection<String> uploaded) {
|
||||
ContentValues values = new ContentValues();
|
||||
values.put(ReadingListItems.SYNC_CHANGE_FLAGS, ReadingListItems.SYNC_CHANGE_NONE);
|
||||
values.put(ReadingListItems.SYNC_STATUS, ReadingListItems.SYNC_STATUS_SYNCED);
|
||||
final String where = RepoUtils.computeSQLInClause(uploaded.size(), ReadingListItems.GUID);
|
||||
final String[] args = uploaded.toArray(new String[uploaded.size()]);
|
||||
try {
|
||||
client.update(URI_WITHOUT_DELETED, values, where, args);
|
||||
} catch (RemoteException e) {
|
||||
// Nothing we can do.
|
||||
}
|
||||
}
|
||||
*/
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
|
||||
/**
 * Grab one of these, then you can add records to it by parsing
 * server responses. Finishing it will flush those changes (e.g.,
 * via UPDATE) to the DB.
 */
public interface ReadingListChangeAccumulator {
  // Add a deletion to be applied when finish() runs.
  void addDeletion(ClientReadingListRecord record);
  // Add a modified record to be applied when finish() runs.
  void addChangedRecord(ClientReadingListRecord record);
  // Record that `up` was uploaded and the server replied with `down`.
  void addUploadedRecord(ClientReadingListRecord up, ServerReadingListRecord down);
  // Add a record fetched from the server to be applied locally.
  void addDownloadedRecord(ServerReadingListRecord down);
  // Flush all accumulated changes to the database.
  void finish() throws Exception;
}
|
|
@ -0,0 +1,607 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
import org.mozilla.gecko.background.common.log.Logger;
|
||||
import org.mozilla.gecko.reading.ReadingListResponse.ResponseFactory;
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
import org.mozilla.gecko.sync.net.AuthHeaderProvider;
|
||||
import org.mozilla.gecko.sync.net.BaseResource;
|
||||
import org.mozilla.gecko.sync.net.BaseResourceDelegate;
|
||||
import org.mozilla.gecko.sync.net.BasicAuthHeaderProvider;
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
import org.mozilla.gecko.sync.net.Resource;
|
||||
|
||||
import ch.boye.httpclientandroidlib.HttpResponse;
|
||||
import ch.boye.httpclientandroidlib.client.ClientProtocolException;
|
||||
import ch.boye.httpclientandroidlib.client.methods.HttpRequestBase;
|
||||
import ch.boye.httpclientandroidlib.impl.client.DefaultHttpClient;
|
||||
|
||||
/**
|
||||
* This client exposes an API for the reading list service, documented at
|
||||
* https://github.com/mozilla-services/readinglist/
|
||||
*/
|
||||
public class ReadingListClient {
|
||||
static final String LOG_TAG = ReadingListClient.class.getSimpleName();
|
||||
private final AuthHeaderProvider auth;
|
||||
|
||||
private final URI articlesURI; // .../articles
|
||||
private final URI articlesBaseURI; // .../articles/
|
||||
|
||||
/**
 * Use a {@link BasicAuthHeaderProvider} for testing, and an FxA OAuth provider for the real service.
 *
 * @param serviceURI base URI of the reading list service.
 *        NOTE(review): URI.resolve is used below, so this presumably must end
 *        with a trailing slash; without one the last path segment would be
 *        replaced rather than appended — confirm against callers.
 * @param auth provider for the Authorization header attached to every request.
 */
public ReadingListClient(final URI serviceURI, final AuthHeaderProvider auth) {
  this.articlesURI = serviceURI.resolve("articles");
  this.articlesBaseURI = serviceURI.resolve("articles/");
  this.auth = auth;
}

// Build a resource for .../articles/<rel>, e.g. a single record by GUID.
private BaseResource getRelativeArticleResource(final String rel) {
  return new BaseResource(this.articlesBaseURI.resolve(rel));
}
|
||||
|
||||
/**
 * Upload delegate that translates HTTP-level outcomes into calls on a
 * {@link ReadingListRecordUploadDelegate}, pairing each outcome with the
 * client record that was uploaded.
 */
private static final class DelegatingUploadResourceDelegate extends UploadResourceDelegate<ReadingListRecordResponse> {
  // The record we sent upstream; handed back to the delegate with each outcome.
  private final ClientReadingListRecord up;
  private final ReadingListRecordUploadDelegate uploadDelegate;

  DelegatingUploadResourceDelegate(Resource resource,
                                   AuthHeaderProvider auth,
                                   ResponseFactory<ReadingListRecordResponse> factory,
                                   ClientReadingListRecord up,
                                   ReadingListRecordUploadDelegate uploadDelegate) {
    super(resource, auth, factory);
    this.up = up;
    this.uploadDelegate = uploadDelegate;
  }

  @Override
  void onFailure(MozResponse response) {
    Logger.warn(LOG_TAG, "Upload got failure response " + response.httpResponse().getStatusLine());
    response.logResponseBody(LOG_TAG);
    if (response.getStatusCode() == 400) {
      // Error response.
      uploadDelegate.onBadRequest(up, response);
    } else {
      uploadDelegate.onFailure(up, response);
    }
  }

  @Override
  void onFailure(Exception ex) {
    Logger.warn(LOG_TAG, "Upload failed.", ex);
    uploadDelegate.onFailure(up, ex);
  }

  @Override
  void onSuccess(ReadingListRecordResponse response) {
    Logger.debug(LOG_TAG, "Upload: onSuccess: " + response.httpResponse().getStatusLine());
    final ServerReadingListRecord down;
    try {
      down = response.getRecord();
      Logger.debug(LOG_TAG, "Upload succeeded. Got GUID " + down.getGUID());
    } catch (Exception e) {
      // The server replied 2xx but the record body didn't parse; surface as failure.
      uploadDelegate.onFailure(up, e);
      return;
    }

    uploadDelegate.onSuccess(up, response, down);
  }

  @Override
  void onSeeOther(ReadingListRecordResponse response) {
    // 303 See Other from an upload is treated as a conflict.
    uploadDelegate.onConflict(up, response);
  }
}
|
||||
|
||||
/**
 * Base delegate for all reading list requests: parses the raw
 * {@link HttpResponse} with the supplied factory, dispatches on HTTP success
 * vs. non-success, and funnels every transport-level exception into
 * onFailure(Exception). Also supplies the service User-Agent and auth header.
 */
private static abstract class ReadingListResourceDelegate<T extends ReadingListResponse> extends BaseResourceDelegate {
  private final ReadingListResponse.ResponseFactory<T> factory;
  private final AuthHeaderProvider auth;

  public ReadingListResourceDelegate(Resource resource, AuthHeaderProvider auth, ReadingListResponse.ResponseFactory<T> factory) {
    super(resource);
    this.auth = auth;
    this.factory = factory;
  }

  // Called for responses that wasSuccessful() reports as successful.
  abstract void onSuccess(T response);
  // Called for parsed but unsuccessful responses; subclasses dispatch on status code.
  abstract void onNonSuccess(T response);
  abstract void onFailure(MozResponse response);
  abstract void onFailure(Exception ex);

  @Override
  public void handleHttpResponse(HttpResponse response) {
    final T resp = factory.getResponse(response);
    if (resp.wasSuccessful()) {
      onSuccess(resp);
    } else {
      onNonSuccess(resp);
    }
  }

  @Override
  public void handleTransportException(GeneralSecurityException e) {
    onFailure(e);
  }

  @Override
  public void handleHttpProtocolException(ClientProtocolException e) {
    onFailure(e);
  }

  @Override
  public void handleHttpIOException(IOException e) {
    onFailure(e);
  }

  @Override
  public String getUserAgent() {
    return ReadingListConstants.USER_AGENT;
  }

  @Override
  public AuthHeaderProvider getAuthHeaderProvider() {
    return auth;
  }

  @Override
  public void addHeaders(HttpRequestBase request, DefaultHttpClient client) {
    // No extra headers by default; subclasses add conditional headers.
  }
}
|
||||
|
||||
/**
 * An intermediate delegate class that handles all of the shared storage behavior,
 * such as handling If-Modified-Since.
 */
private static abstract class StorageResourceDelegate<T extends ReadingListResponse> extends ReadingListResourceDelegate<T> {
  // Server timestamp to condition the request on; -1 means unconditional.
  private final long ifModifiedSince;

  public StorageResourceDelegate(Resource resource,
                                 AuthHeaderProvider auth,
                                 ReadingListResponse.ResponseFactory<T> factory,
                                 long ifModifiedSince) {
    super(resource, auth, factory);
    this.ifModifiedSince = ifModifiedSince;
  }

  @Override
  public void addHeaders(HttpRequestBase request, DefaultHttpClient client) {
    if (ifModifiedSince != -1L) {
      // TODO: format?
      // NOTE(review): this sends a raw numeric timestamp, not an HTTP-date;
      // presumably the reading list service expects its own timestamp format
      // here — confirm against the service API.
      request.addHeader("If-Modified-Since", "" + ifModifiedSince);
    }
    super.addHeaders(request, client);
  }
}
|
||||
|
||||
/**
 * Wraps the {@link ReadingListRecordDelegate} interface to yield a {@link StorageResourceDelegate}.
 */
private static abstract class RecordResourceDelegate<T extends ReadingListResponse> extends StorageResourceDelegate<T> {
  protected final ReadingListRecordDelegate recordDelegate;

  public RecordResourceDelegate(Resource resource,
                                AuthHeaderProvider auth,
                                ReadingListRecordDelegate recordDelegate,
                                ReadingListResponse.ResponseFactory<T> factory,
                                long ifModifiedSince) {
    super(resource, auth, factory, ifModifiedSince);
    this.recordDelegate = recordDelegate;
  }

  // 404: subclasses decide whether a missing resource is expected or fatal.
  abstract void onNotFound(ReadingListResponse resp);

  @Override
  void onNonSuccess(T resp) {
    Logger.debug(LOG_TAG, "Got non-success record response " + resp.getStatusCode());
    resp.logResponseBody(LOG_TAG);

    switch (resp.getStatusCode()) {
    case 304:
      onNotModified(resp);
      break;
    case 404:
      onNotFound(resp);
      break;
    default:
      onFailure(resp);
    }
  }

  @Override
  void onFailure(MozResponse response) {
    recordDelegate.onFailure(response);
  }

  @Override
  void onFailure(Exception ex) {
    recordDelegate.onFailure(ex);
  }

  // 304 Not Modified: nothing changed since ifModifiedSince; we're done.
  void onNotModified(T resp) {
    recordDelegate.onComplete(resp);
  }
}
|
||||
|
||||
/**
 * Handles a fetch of one record by GUID: delivers the parsed record (or a
 * missing/deleted notification for a 404) to the record delegate.
 */
private static final class SingleRecordResourceDelegate extends RecordResourceDelegate<ReadingListRecordResponse> {
  // GUID being fetched; reported back on 404.
  private final String guid;

  SingleRecordResourceDelegate(Resource resource,
                               AuthHeaderProvider auth,
                               ReadingListRecordDelegate recordDelegate,
                               ResponseFactory<ReadingListRecordResponse> factory,
                               long ifModifiedSince, String guid) {
    super(resource, auth, recordDelegate, factory, ifModifiedSince);
    this.guid = guid;
  }

  @Override
  void onSuccess(ReadingListRecordResponse response) {
    final ServerReadingListRecord record;
    try {
      record = response.getRecord();
    } catch (Exception e) {
      // 2xx but the body didn't parse as a record.
      recordDelegate.onFailure(e);
      return;
    }

    recordDelegate.onRecordReceived(record);
    recordDelegate.onComplete(response);
  }

  @Override
  void onNotFound(ReadingListResponse resp) {
    recordDelegate.onRecordMissingOrDeleted(guid, resp);
  }
}

/**
 * Handles a fetch of many records: delivers each parsed record in turn,
 * then signals completion.
 */
private static final class MultipleRecordResourceDelegate extends RecordResourceDelegate<ReadingListStorageResponse> {
  MultipleRecordResourceDelegate(Resource resource,
                                 AuthHeaderProvider auth,
                                 ReadingListRecordDelegate recordDelegate,
                                 ResponseFactory<ReadingListStorageResponse> factory,
                                 long ifModifiedSince) {
    super(resource, auth, recordDelegate, factory, ifModifiedSince);
  }

  @Override
  void onSuccess(ReadingListStorageResponse response) {
    try {
      final Iterable<ServerReadingListRecord> records = response.getRecords();
      for (ServerReadingListRecord readingListRecord : records) {
        recordDelegate.onRecordReceived(readingListRecord);
      }
    } catch (Exception e) {
      // Note: records delivered before the parse failure have already been
      // passed to the delegate; onComplete is simply never called.
      recordDelegate.onFailure(e);
      return;
    }

    recordDelegate.onComplete(response);
  }

  @Override
  void onNotFound(ReadingListResponse resp) {
    // Should not occur against articlesURI root.
    recordDelegate.onFailure(resp);
  }
}
|
||||
|
||||
/**
 * Storage delegate for uploads. Adds 303 See Other handling on top of the
 * shared storage behavior; everything else non-successful is a failure.
 */
private static abstract class UploadResourceDelegate<T extends ReadingListResponse> extends StorageResourceDelegate<T> {
  public UploadResourceDelegate(Resource resource,
                                AuthHeaderProvider auth,
                                ReadingListResponse.ResponseFactory<T> factory,
                                long ifModifiedSince) {
    super(resource, auth, factory, ifModifiedSince);
  }

  // Unconditional upload: no If-Modified-Since header.
  public UploadResourceDelegate(Resource resource,
                                AuthHeaderProvider auth,
                                ReadingListResponse.ResponseFactory<T> factory) {
    this(resource, auth, factory, -1L);
  }

  @Override
  void onNonSuccess(T resp) {
    if (resp.getStatusCode() == 303) {
      onSeeOther(resp);
      return;
    }
    onFailure(resp);
  }

  abstract void onSeeOther(T resp);
}
|
||||
|
||||
|
||||
/**
 * Recursively calls `patch` with items from the queue, delivering callbacks
 * to the provided delegate. Calls `onBatchDone` when the queue is exhausted.
 *
 * Uses the provided executor to flatten the recursive call stack.
 */
private abstract class BatchingUploadDelegate implements ReadingListRecordUploadDelegate {
  private final Queue<ClientReadingListRecord> queue;
  // Downstream delegate notified of each per-record outcome.
  private final ReadingListRecordUploadDelegate batchUploadDelegate;
  private final Executor executor;

  BatchingUploadDelegate(Queue<ClientReadingListRecord> queue,
                         ReadingListRecordUploadDelegate batchUploadDelegate,
                         Executor executor) {
    this.queue = queue;
    this.batchUploadDelegate = batchUploadDelegate;
    this.executor = executor;
  }

  // Issue the request for the next record; implemented as add() or patch().
  abstract void again(ClientReadingListRecord record);

  // Pop the next record and process it on the executor; signal onBatchDone
  // when the queue is empty.
  void next() {
    final ClientReadingListRecord record = queue.poll();
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (record == null) {
          batchUploadDelegate.onBatchDone();
          return;
        }

        again(record);
      }
    });
  }

  // Every per-record outcome below passes through to the downstream delegate,
  // then advances the queue.

  @Override
  public void onSuccess(ClientReadingListRecord up,
                        ReadingListRecordResponse response,
                        ServerReadingListRecord down) {
    batchUploadDelegate.onSuccess(up, response, down);
    next();
  }

  @Override
  public void onInvalidUpload(ClientReadingListRecord up,
                              ReadingListResponse response) {
    batchUploadDelegate.onInvalidUpload(up, response);
    next();
  }

  @Override
  public void onFailure(ClientReadingListRecord up, MozResponse response) {
    batchUploadDelegate.onFailure(up, response);
    next();
  }

  @Override
  public void onFailure(ClientReadingListRecord up, Exception ex) {
    batchUploadDelegate.onFailure(up, ex);
    next();
  }

  @Override
  public void onConflict(ClientReadingListRecord up,
                         ReadingListResponse response) {
    batchUploadDelegate.onConflict(up, response);
    next();
  }

  @Override
  public void onBadRequest(ClientReadingListRecord up, MozResponse response) {
    batchUploadDelegate.onBadRequest(up, response);
    next();
  }

  @Override
  public void onBatchDone() {
    // This should never occur, but if it does, pass through.
    batchUploadDelegate.onBatchDone();
  }
}
|
||||
|
||||
// Batching delegate that uploads each queued record as a new record (POST).
private class PostBatchingUploadDelegate extends BatchingUploadDelegate {
  PostBatchingUploadDelegate(Queue<ClientReadingListRecord> queue,
                             ReadingListRecordUploadDelegate batchUploadDelegate,
                             Executor executor) {
    super(queue, batchUploadDelegate, executor);
  }

  @Override
  void again(ClientReadingListRecord record) {
    add(record, PostBatchingUploadDelegate.this);
  }
}

// Batching delegate that uploads each queued record as a modification (patch).
private class PatchBatchingUploadDelegate extends BatchingUploadDelegate {
  PatchBatchingUploadDelegate(Queue<ClientReadingListRecord> queue,
                              ReadingListRecordUploadDelegate batchUploadDelegate,
                              Executor executor) {
    super(queue, batchUploadDelegate, executor);
  }

  @Override
  void again(ClientReadingListRecord record) {
    patch(record, PatchBatchingUploadDelegate.this);
  }
}
|
||||
|
||||
// Deliberately declare `delegate` non-final so we can't capture it below. We prefer
// to use `recordDelegate` explicitly.
/**
 * Fetch a single record by GUID, optionally conditioned on If-Modified-Since.
 * Outcomes are delivered asynchronously via the delegate.
 */
public void getOne(final String guid, ReadingListRecordDelegate delegate, final long ifModifiedSince) {
  final BaseResource r = getRelativeArticleResource(guid);
  r.delegate = new SingleRecordResourceDelegate(r, auth, delegate, ReadingListRecordResponse.FACTORY, ifModifiedSince, guid);
  if (ReadingListConstants.DEBUG) {
    Logger.info(LOG_TAG, "Getting record " + guid);
  }
  r.get();
}

// Deliberately declare `delegate` non-final so we can't capture it below. We prefer
// to use `recordDelegate` explicitly.
/**
 * Fetch all records matching the fetch spec, optionally conditioned on
 * If-Modified-Since. Each record is delivered via the delegate in turn.
 *
 * @throws URISyntaxException if the spec cannot be turned into a query URI.
 */
public void getAll(final FetchSpec spec, ReadingListRecordDelegate delegate, final long ifModifiedSince) throws URISyntaxException {
  final BaseResource r = new BaseResource(spec.getURI(this.articlesURI));
  r.delegate = new MultipleRecordResourceDelegate(r, auth, delegate, ReadingListStorageResponse.FACTORY, ifModifiedSince);
  if (ReadingListConstants.DEBUG) {
    Logger.info(LOG_TAG, "Getting all records from " + r.getURIString());
  }
  r.get();
}
|
||||
|
||||
/**
 * Upload each queued modified record in turn, delivering per-record outcomes
 * and a final onBatchDone to the delegate.
 *
 * Mutates the provided queue.
 */
public void patch(final Queue<ClientReadingListRecord> queue, final Executor executor, final ReadingListRecordUploadDelegate batchUploadDelegate) {
  if (queue.isEmpty()) {
    batchUploadDelegate.onBatchDone();
    return;
  }

  // The batching delegate calls patch() again for each subsequent item.
  final ReadingListRecordUploadDelegate uploadDelegate = new PatchBatchingUploadDelegate(queue, batchUploadDelegate, executor);

  patch(queue.poll(), uploadDelegate);
}

/**
 * Upload a modification of an existing record (requires a GUID).
 *
 * NOTE(review): despite the name, this issues an HTTP POST to the record's
 * URI (see `r.post(body)` below) — presumably because the HTTP client lacks
 * PATCH support; confirm the server treats POST-to-record as a partial update.
 */
public void patch(final ClientReadingListRecord up, final ReadingListRecordUploadDelegate uploadDelegate) {
  final String guid = up.getGUID();
  if (guid == null) {
    uploadDelegate.onFailure(up, new IllegalArgumentException("Supplied record must have a GUID."));
    return;
  }

  final BaseResource r = getRelativeArticleResource(guid);
  r.delegate = new DelegatingUploadResourceDelegate(r, auth, ReadingListRecordResponse.FACTORY, up,
                                                    uploadDelegate);

  final ExtendedJSONObject body = up.toJSON();
  if (ReadingListConstants.DEBUG) {
    Logger.info(LOG_TAG, "Patching record " + guid + ": " + body.toJSONString());
  }
  r.post(body);
}
|
||||
|
||||
/**
 * Upload each queued new record in turn, delivering per-record outcomes and
 * a final onBatchDone to the delegate.
 *
 * Mutates the provided queue.
 */
public void add(final Queue<ClientReadingListRecord> queue, final Executor executor, final ReadingListRecordUploadDelegate batchUploadDelegate) {
  if (queue.isEmpty()) {
    batchUploadDelegate.onBatchDone();
    return;
  }

  // The batching delegate calls add() again for each subsequent item.
  final ReadingListRecordUploadDelegate uploadDelegate = new PostBatchingUploadDelegate(queue, batchUploadDelegate, executor);

  add(queue.poll(), uploadDelegate);
}

/**
 * Upload a single new record by POSTing it to the articles collection.
 * Outcomes are delivered asynchronously via the delegate.
 */
public void add(final ClientReadingListRecord up, final ReadingListRecordUploadDelegate uploadDelegate) {
  final BaseResource r = new BaseResource(this.articlesURI);
  r.delegate = new DelegatingUploadResourceDelegate(r, auth, ReadingListRecordResponse.FACTORY, up,
                                                    uploadDelegate);

  final ExtendedJSONObject body = up.toJSON();
  if (ReadingListConstants.DEBUG) {
    Logger.info(LOG_TAG, "Uploading new record: " + body.toJSONString());
  }
  r.post(body);
}
|
||||
|
||||
public void delete(final String guid, final ReadingListDeleteDelegate delegate, final long ifUnmodifiedSince) {
|
||||
final BaseResource r = getRelativeArticleResource(guid);
|
||||
|
||||
// If If-Unmodified-Since is provided, and the record has been modified,
|
||||
// we'll receive a 412 Precondition Failed.
|
||||
// If the record is missing or already deleted, a 404 will be returned.
|
||||
// Otherwise, the response will be the deleted record.
|
||||
r.delegate = new ReadingListResourceDelegate<ReadingListRecordResponse>(r, auth, ReadingListRecordResponse.FACTORY) {
|
||||
@Override
|
||||
public void addHeaders(HttpRequestBase request, DefaultHttpClient client) {
|
||||
if (ifUnmodifiedSince != -1) {
|
||||
request.addHeader("If-Unmodified-Since", "" + ifUnmodifiedSince);
|
||||
}
|
||||
super.addHeaders(request, client);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onFailure(MozResponse response) {
|
||||
switch (response.getStatusCode()) {
|
||||
case 412:
|
||||
delegate.onPreconditionFailed(guid, response);
|
||||
return;
|
||||
}
|
||||
delegate.onFailure(response);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onSuccess(ReadingListRecordResponse response) {
|
||||
final ReadingListRecord record;
|
||||
try {
|
||||
record = response.getRecord();
|
||||
} catch (Exception e) {
|
||||
delegate.onFailure(e);
|
||||
return;
|
||||
}
|
||||
|
||||
delegate.onSuccess(response, record);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onFailure(Exception ex) {
|
||||
delegate.onFailure(ex);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onNonSuccess(ReadingListRecordResponse response) {
|
||||
if (response.getStatusCode() == 404) {
|
||||
// Already deleted!
|
||||
delegate.onRecordMissingOrDeleted(guid, response);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (ReadingListConstants.DEBUG) {
|
||||
Logger.debug(LOG_TAG, "Deleting " + r.getURIString());
|
||||
}
|
||||
r.delete();
|
||||
}
|
||||
|
||||
// TODO: modified times etc.
|
||||
public void wipe(final ReadingListWipeDelegate delegate) {
|
||||
Logger.info(LOG_TAG, "Wiping server.");
|
||||
final BaseResource r = new BaseResource(this.articlesURI);
|
||||
|
||||
r.delegate = new ReadingListResourceDelegate<ReadingListStorageResponse>(r, auth, ReadingListStorageResponse.FACTORY) {
|
||||
|
||||
@Override
|
||||
void onSuccess(ReadingListStorageResponse response) {
|
||||
Logger.info(LOG_TAG, "Wipe succeded.");
|
||||
delegate.onSuccess(response);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onNonSuccess(ReadingListStorageResponse response) {
|
||||
Logger.warn(LOG_TAG, "Wipe failed: " + response.getStatusCode());
|
||||
onFailure(response);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onFailure(MozResponse response) {
|
||||
Logger.warn(LOG_TAG, "Wipe failed: " + response.getStatusCode());
|
||||
delegate.onFailure(response);
|
||||
}
|
||||
|
||||
@Override
|
||||
void onFailure(Exception ex) {
|
||||
Logger.warn(LOG_TAG, "Wipe failed.", ex);
|
||||
delegate.onFailure(ex);
|
||||
}
|
||||
};
|
||||
|
||||
r.delete();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,94 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.mozilla.gecko.db.BrowserContract.ReadingListItems;
|
||||
import org.mozilla.gecko.reading.ReadingListRecord.ServerMetadata;
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
|
||||
import android.content.ContentValues;
|
||||
|
||||
/**
 * Converts a {@link ClientReadingListRecord} — wire-format field names and
 * JSON-typed values — into a {@link ContentValues} suitable for writing to
 * the local reading list table.
 */
public class ReadingListClientContentValuesFactory {
  /**
   * @param record the client record to convert; its server metadata supplies
   *        GUID and last-modified, its fields supply the column values.
   * @return ContentValues with translated column names, booleans stored as
   *         0/1, and sync status/flags reset to SYNCED/NONE.
   * @throws IllegalArgumentException for field values of an unhandled type.
   */
  public static ContentValues fromClientRecord(ClientReadingListRecord record) {
    // Do each of these.
    ExtendedJSONObject fields = record.fields;
    ServerMetadata sm = record.serverMetadata;

    final ContentValues values = new ContentValues();

    if (sm.guid != null) {
      values.put(ReadingListItems.GUID, sm.guid);
    }

    if (sm.lastModified > -1L) {
      values.put(ReadingListItems.SERVER_LAST_MODIFIED, sm.lastModified);
    }

    final Set<Entry<String, Object>> entries = fields.entrySet();

    for (Entry<String,Object> entry : entries) {
      final String key = entry.getKey();
      final String field = mapField(key);
      if (field == null) {
        // Unknown wire field: skip rather than write a bogus column.
        continue;
      }

      final Object v = entry.getValue();
      // The instanceof chain selects the matching ContentValues.put overload;
      // JSON booleans are stored as 0/1 integers.
      if (v == null) {
        values.putNull(field);
      } else if (v instanceof Boolean) {
        values.put(field, ((Boolean) v) ? 1 : 0);
      } else if (v instanceof Long) {
        values.put(field, (Long) v);
      } else if (v instanceof Integer) {
        values.put(field, (Integer) v);
      } else if (v instanceof String) {
        values.put(field, (String) v);
      } else if (v instanceof Double) {
        values.put(field, (Double) v);
      } else {
        throw new IllegalArgumentException("Unknown value " + v + " of type " + v.getClass().getSimpleName());
      }
    }

    // Clear the sync flags.
    values.put(ReadingListItems.SYNC_STATUS, ReadingListItems.SYNC_STATUS_SYNCED);
    values.put(ReadingListItems.SYNC_CHANGE_FLAGS, ReadingListItems.SYNC_CHANGE_NONE);

    return values;
  }

  /**
   * Only returns valid columns.
   */
  // Maps wire-protocol boolean names to their "is_" column names; any other
  // key is accepted only if it is a known ReadingListItems column.
  private static String mapField(String key) {
    if (key == null) {
      return null;
    }

    switch (key) {
    case "unread":
      return "is_unread";
    case "favorite":
      return "is_favorite";
    case "archived":
      return "is_archived";
    case "deleted":
      return "is_deleted";
    }

    // Validation.
    for (int i = 0; i < ReadingListItems.ALL_FIELDS.length; ++i) {
      if (key.equals(ReadingListItems.ALL_FIELDS[i])) {
        return key;
      }
    }

    return null;
  }
}
|
|
@ -0,0 +1,221 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.AppConstants.Versions;
|
||||
import org.mozilla.gecko.db.BrowserContract.ReadingListItems;
|
||||
import org.mozilla.gecko.reading.ReadingListRecord.ServerMetadata;
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.database.Cursor;
|
||||
import android.database.CursorWindow;
|
||||
import android.database.sqlite.SQLiteCursor;
|
||||
import android.os.Build;
|
||||
|
||||
/**
|
||||
* This class converts database rows into {@link ClientReadingListRecord}s.
|
||||
*
|
||||
* In doing so it has to:
|
||||
*
|
||||
* * Translate column names.
|
||||
* * Convert INTEGER columns into booleans.
|
||||
* * Eliminate fields that aren't present in the wire format.
|
||||
* * Extract fields that are part of {@link ClientMetadata} instances.
|
||||
*
|
||||
* The caller is responsible for closing the cursor.
|
||||
*/
|
||||
public class ReadingListClientRecordFactory {
|
||||
public static final int MAX_SERVER_STRING_CHARS = 1024;
|
||||
|
||||
private final Cursor cursor;
|
||||
|
||||
private final String[] fields;
|
||||
private final int[] columns;
|
||||
|
||||
/**
 * @param cursor source rows; the caller retains ownership and must close it.
 * @param fields column names to serialize, in order. Columns absent from the
 *        cursor are skipped.
 * @throws IllegalArgumentException declared for callers; not thrown here.
 */
public ReadingListClientRecordFactory(final Cursor cursor, final String[] fields) throws IllegalArgumentException {
  this.cursor = cursor;

  // Does this cursor have an _ID?
  final int idIndex = cursor.getColumnIndex(ReadingListItems._ID);
  final int extra = (idIndex != -1) ? 1 : 0;
  final int cols = fields.length + extra;

  this.fields = new String[cols];
  this.columns = new int[cols];

  for (int i = 0; i < fields.length; ++i) {
    final int index = cursor.getColumnIndex(fields[i]);
    if (index == -1) {
      // Column not present: this.fields[i] stays null, which downstream
      // fill logic treats as "skip this slot".
      continue;
    }
    this.fields[i] = mapColumn(fields[i]);
    this.columns[i] = index;
  }

  if (idIndex != -1) {
    // Append _id as an extra trailing column.
    this.fields[fields.length] = "_id";
    this.columns[fields.length] = idIndex;
  }
}
|
||||
|
||||
public ReadingListClientRecordFactory(final Cursor cursor) {
|
||||
this(cursor, ReadingListItems.ALL_FIELDS);
|
||||
}
|
||||
|
||||
private void putNull(ExtendedJSONObject o, String field) {
|
||||
o.put(field, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map column names to protocol field names.
|
||||
*/
|
||||
private static String mapColumn(final String column) {
|
||||
switch (column) {
|
||||
case "is_unread":
|
||||
return "unread";
|
||||
case "is_favorite":
|
||||
return "favorite";
|
||||
case "is_archived":
|
||||
return "archived";
|
||||
}
|
||||
return column;
|
||||
}
|
||||
|
||||
private void put(ExtendedJSONObject o, String field, String value) {
|
||||
// All server strings are a max of 1024 characters.
|
||||
o.put(field, value.length() > MAX_SERVER_STRING_CHARS ? value.substring(0, MAX_SERVER_STRING_CHARS - 1) + "…" : value);
|
||||
}
|
||||
|
||||
private void put(ExtendedJSONObject o, String field, long value) {
|
||||
// Convert to boolean.
|
||||
switch (field) {
|
||||
case "unread":
|
||||
case "favorite":
|
||||
case "archived":
|
||||
case "is_article":
|
||||
o.put(field, value == 1);
|
||||
return;
|
||||
}
|
||||
o.put(field, value);
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
|
||||
private final void fillHoneycomb(ExtendedJSONObject o, Cursor c, String f, int i) {
|
||||
if (f == null) {
|
||||
return;
|
||||
}
|
||||
switch (c.getType(i)) {
|
||||
case Cursor.FIELD_TYPE_NULL:
|
||||
putNull(o, f);
|
||||
return;
|
||||
case Cursor.FIELD_TYPE_STRING:
|
||||
put(o, f, c.getString(i));
|
||||
return;
|
||||
case Cursor.FIELD_TYPE_INTEGER:
|
||||
put(o, f, c.getLong(i));
|
||||
return;
|
||||
case Cursor.FIELD_TYPE_FLOAT:
|
||||
o.put(f, c.getDouble(i));
|
||||
return;
|
||||
case Cursor.FIELD_TYPE_BLOB:
|
||||
// TODO: this probably doesn't serialize correctly.
|
||||
o.put(f, c.getBlob(i));
|
||||
return;
|
||||
default:
|
||||
// Do nothing.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
private final void fillGingerbread(ExtendedJSONObject o, Cursor c, String f, int i) {
|
||||
if (!(c instanceof SQLiteCursor)) {
|
||||
throw new IllegalStateException("Unable to handle cursors that don't have a CursorWindow!");
|
||||
}
|
||||
|
||||
final SQLiteCursor sqc = (SQLiteCursor) c;
|
||||
final CursorWindow w = sqc.getWindow();
|
||||
final int pos = c.getPosition();
|
||||
if (w.isNull(pos, i)) {
|
||||
putNull(o, f);
|
||||
} else if (w.isString(pos, i)) {
|
||||
put(o, f, c.getString(i));
|
||||
} else if (w.isLong(pos, i)) {
|
||||
put(o, f, c.getLong(i));
|
||||
} else if (w.isFloat(pos, i)) {
|
||||
o.put(f, c.getDouble(i));
|
||||
} else if (w.isBlob(pos, i)) {
|
||||
// TODO: this probably doesn't serialize correctly.
|
||||
o.put(f, c.getBlob(i));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: optionally produce a partial record by examining SYNC_CHANGE_FLAGS/SYNC_STATUS.
|
||||
*/
|
||||
public ClientReadingListRecord fromCursorRow() {
|
||||
final ExtendedJSONObject object = new ExtendedJSONObject();
|
||||
for (int i = 0; i < this.fields.length; ++i) {
|
||||
final String field = fields[i];
|
||||
if (field == null) {
|
||||
continue;
|
||||
}
|
||||
final int column = this.columns[i];
|
||||
if (Versions.feature11Plus) {
|
||||
fillHoneycomb(object, this.cursor, field, column);
|
||||
} else {
|
||||
fillGingerbread(object, this.cursor, field, column);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply cross-field constraints.
|
||||
if (object.containsKey("unread") && object.getBoolean("unread")) {
|
||||
object.remove("marked_read_by");
|
||||
object.remove("marked_read_on");
|
||||
}
|
||||
|
||||
// Construct server metadata and client metadata from the object.
|
||||
final long serverLastModified = object.getLong("last_modified", -1L);
|
||||
final String guid = object.containsKey("guid") ? object.getString("guid") : null;
|
||||
final ServerMetadata sm = new ServerMetadata(guid, serverLastModified);
|
||||
|
||||
final long clientLastModified = object.getLong("client_last_modified", -1L);
|
||||
|
||||
// This has already been translated...
|
||||
final boolean isArchived = object.getBoolean("archived");
|
||||
|
||||
// ... but this is a client-only field, so it needs to be converted.
|
||||
final boolean isDeleted = object.getLong("is_deleted", 0L) == 1L;
|
||||
final long localID = object.getLong("_id", -1L);
|
||||
final ClientMetadata cm = new ClientMetadata(localID, clientLastModified, isDeleted, isArchived);
|
||||
|
||||
// Remove things that aren't part of the spec.
|
||||
object.remove("last_modified");
|
||||
object.remove("guid");
|
||||
object.remove("client_last_modified");
|
||||
object.remove("is_deleted");
|
||||
|
||||
object.remove(ReadingListItems.CONTENT_STATUS);
|
||||
object.remove(ReadingListItems.SYNC_STATUS);
|
||||
object.remove(ReadingListItems.SYNC_CHANGE_FLAGS);
|
||||
object.remove(ReadingListItems.CLIENT_LAST_MODIFIED);
|
||||
|
||||
return new ClientReadingListRecord(sm, cm, object);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a record from a cursor.
|
||||
* Make sure that the columns you specify in the constructor are a subset
|
||||
* of the columns in the cursor, or you'll have a bad time.
|
||||
*/
|
||||
public ClientReadingListRecord getNext() {
|
||||
if (!cursor.moveToNext()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return fromCursorRow();
|
||||
}
|
||||
}
|
|
@ -11,4 +11,8 @@ public class ReadingListConstants {
|
|||
public static final String USER_AGENT = "Firefox-Android-FxReader/" + AppConstants.MOZ_APP_VERSION + " (" + AppConstants.MOZ_APP_DISPLAYNAME + ")";
|
||||
public static final String DEFAULT_DEV_ENDPOINT = "https://readinglist.dev.mozaws.net/v1/";
|
||||
public static final String DEFAULT_PROD_ENDPOINT = null; // TODO
|
||||
|
||||
public static final String OAUTH_ENDPOINT_PROD = "https://oauth.accounts.firefox.com/v1";
|
||||
|
||||
public static boolean DEBUG = false;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
|
||||
/**
 * Response delegate for a server DELETE.
 * Only one of these methods will be called, and it will be called precisely once.
 */
public interface ReadingListDeleteDelegate {
  // The DELETE succeeded; 'record' is the record that was deleted.
  void onSuccess(ReadingListRecordResponse response, ReadingListRecord record);

  // The server rejected the DELETE because a precondition failed — presumably
  // an If-Unmodified-Since-style check; confirm against ReadingListClient.
  void onPreconditionFailed(String guid, MozResponse response);

  // The record identified by 'guid' does not exist on the server, or was
  // already deleted there.
  void onRecordMissingOrDeleted(String guid, MozResponse response);

  // A local exception occurred before a usable response was received.
  void onFailure(Exception e);

  // The server returned a failure response not covered by the cases above.
  void onFailure(MozResponse response);
}
|
|
@ -0,0 +1,55 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
|
||||
/**
|
||||
* This models the wire protocol format, not database contents.
|
||||
*/
|
||||
public abstract class ReadingListRecord {
|
||||
public static class ServerMetadata {
|
||||
public final String guid; // Null if not yet uploaded successfully.
|
||||
public final long lastModified; // A server timestamp.
|
||||
|
||||
public ServerMetadata(String guid, long lastModified) {
|
||||
this.guid = guid;
|
||||
this.lastModified = lastModified;
|
||||
}
|
||||
|
||||
/**
|
||||
* From server record.
|
||||
*/
|
||||
public ServerMetadata(ExtendedJSONObject obj) {
|
||||
this(obj.getString("id"), obj.getLong("last_modified"));
|
||||
}
|
||||
}
|
||||
|
||||
public final ServerMetadata serverMetadata;
|
||||
|
||||
public String getGUID() {
|
||||
if (serverMetadata == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return serverMetadata.guid;
|
||||
}
|
||||
|
||||
public long getServerLastModified() {
|
||||
if (serverMetadata == null) {
|
||||
return -1L;
|
||||
}
|
||||
|
||||
return serverMetadata.lastModified;
|
||||
}
|
||||
|
||||
protected ReadingListRecord(final ServerMetadata serverMetadata) {
|
||||
this.serverMetadata = serverMetadata;
|
||||
}
|
||||
|
||||
public abstract String getURL();
|
||||
public abstract String getTitle();
|
||||
public abstract String getAddedBy();
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
|
||||
/**
 * Delegate for downloading records.
 *
 * onRecordReceived will be called at most once per record.
 * onComplete will be called at the end of a successful download.
 *
 * Otherwise, one of the failure methods will be called.
 *
 * onRecordMissingOrDeleted will only be called when fetching a single
 * record by ID.
 */
public interface ReadingListRecordDelegate {
  // Called at most once per downloaded record.
  void onRecordReceived(ServerReadingListRecord record);

  // Called once, after every record has been delivered via onRecordReceived.
  void onComplete(ReadingListResponse response);

  // The server returned a failure response.
  void onFailure(MozResponse response);

  // A local exception occurred during the fetch.
  void onFailure(Exception error);

  // Single-record fetch only: the requested record is absent or deleted.
  void onRecordMissingOrDeleted(String guid, ReadingListResponse resp);
}
|
|
@ -0,0 +1,41 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.json.simple.parser.ParseException;
|
||||
import org.mozilla.gecko.sync.NonObjectJSONException;
|
||||
|
||||
import ch.boye.httpclientandroidlib.HttpResponse;
|
||||
|
||||
/**
|
||||
* A storage response that contains a single record.
|
||||
*/
|
||||
public class ReadingListRecordResponse extends ReadingListResponse {
|
||||
@Override
|
||||
public boolean wasSuccessful() {
|
||||
final int code = getStatusCode();
|
||||
if (code == 200 || code == 201 || code == 204) {
|
||||
return true;
|
||||
}
|
||||
return super.wasSuccessful();
|
||||
}
|
||||
|
||||
public static final ReadingListResponse.ResponseFactory<ReadingListRecordResponse> FACTORY = new ReadingListResponse.ResponseFactory<ReadingListRecordResponse>() {
|
||||
@Override
|
||||
public ReadingListRecordResponse getResponse(HttpResponse r) {
|
||||
return new ReadingListRecordResponse(r);
|
||||
}
|
||||
};
|
||||
|
||||
public ReadingListRecordResponse(HttpResponse res) {
|
||||
super(res);
|
||||
}
|
||||
|
||||
public ServerReadingListRecord getRecord() throws IllegalStateException, NonObjectJSONException, IOException, ParseException {
|
||||
return new ServerReadingListRecord(jsonObjectBody());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
|
||||
/**
 * Delegate for uploading reading-list records. Exactly one of the per-record
 * callbacks fires for each uploaded record, and onBatchDone fires once per
 * batch.
 */
public interface ReadingListRecordUploadDelegate {
  // Called once per batch.
  public void onBatchDone();

  // One of these is called once per record.

  // The upload succeeded; 'down' is the server's resulting record.
  public void onSuccess(ClientReadingListRecord up, ReadingListRecordResponse response, ServerReadingListRecord down);

  // The server reported a conflict with an existing record.
  public void onConflict(ClientReadingListRecord up, ReadingListResponse response);

  // The server rejected the uploaded record contents as invalid.
  public void onInvalidUpload(ClientReadingListRecord up, ReadingListResponse response);

  // The server rejected the request itself as malformed.
  public void onBadRequest(ClientReadingListRecord up, MozResponse response);

  // A local exception occurred while uploading 'up'.
  public void onFailure(ClientReadingListRecord up, Exception ex);

  // The server returned a failure response not covered above.
  public void onFailure(ClientReadingListRecord up, MozResponse response);
}
|
|
@ -0,0 +1,26 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
|
||||
import ch.boye.httpclientandroidlib.HttpResponse;
|
||||
|
||||
/**
 * A MozResponse that knows about all of the general RL-related headers, like Last-Modified.
 */
public abstract class ReadingListResponse extends MozResponse {
  // Factory indirection: lets callers choose which concrete response
  // subclass wraps a raw HttpResponse.
  static interface ResponseFactory<T extends ReadingListResponse> {
    public T getResponse(HttpResponse r);
  }

  public ReadingListResponse(HttpResponse res) {
    super(res);
  }

  /**
   * @return the Last-Modified response header parsed as a long via
   *         MozResponse.getLongHeader — presumably a server timestamp
   *         rather than an HTTP date; confirm against the RL protocol.
   */
  public long getLastModified() {
    return getLongHeader("Last-Modified");
  }
}
|
|
@ -0,0 +1,15 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import android.database.Cursor;
|
||||
|
||||
/**
 * Storage abstraction for the local reading list.
 *
 * NOTE(review): the comments below are inferred from method names and from
 * how ReadingListSynchronizer consumes this interface; cursor ownership and
 * exact query semantics should be confirmed against LocalReadingListStorage.
 */
public interface ReadingListStorage {
  // Items with local modifications to upload.
  Cursor getModified();

  // Items with only status-field changes (e.g., read state).
  Cursor getStatusChanges();

  // Locally added items not yet uploaded to the server.
  Cursor getNew();

  // Every item in local storage.
  Cursor getAll();

  // An accumulator for applying a batch of changes back to storage.
  ReadingListChangeAccumulator getChangeAccumulator();
}
|
|
@ -0,0 +1,75 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.json.simple.JSONArray;
|
||||
import org.json.simple.JSONObject;
|
||||
import org.json.simple.parser.ParseException;
|
||||
import org.mozilla.gecko.background.common.log.Logger;
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
import org.mozilla.gecko.sync.UnexpectedJSONException;
|
||||
|
||||
import ch.boye.httpclientandroidlib.HttpResponse;
|
||||
|
||||
/**
|
||||
* A storage response that contains multiple records.
|
||||
*/
|
||||
public class ReadingListStorageResponse extends ReadingListResponse {
|
||||
public static final ReadingListResponse.ResponseFactory<ReadingListStorageResponse> FACTORY = new ReadingListResponse.ResponseFactory<ReadingListStorageResponse>() {
|
||||
@Override
|
||||
public ReadingListStorageResponse getResponse(HttpResponse r) {
|
||||
return new ReadingListStorageResponse(r);
|
||||
}
|
||||
};
|
||||
|
||||
private static final String LOG_TAG = "StorageResponse";
|
||||
|
||||
public ReadingListStorageResponse(HttpResponse res) {
|
||||
super(res);
|
||||
}
|
||||
|
||||
public Iterable<ServerReadingListRecord> getRecords() throws IOException, ParseException, UnexpectedJSONException {
|
||||
final ExtendedJSONObject body = jsonObjectBody();
|
||||
final JSONArray items = body.getArray("items");
|
||||
|
||||
final int expected = getTotalRecords();
|
||||
final int actual = items.size();
|
||||
if (actual < expected) {
|
||||
Logger.warn(LOG_TAG, "Unexpected number of records. Got " + actual + ", expected " + expected);
|
||||
}
|
||||
|
||||
return new Iterable<ServerReadingListRecord>() {
|
||||
@Override
|
||||
public Iterator<ServerReadingListRecord> iterator() {
|
||||
return new Iterator<ServerReadingListRecord>() {
|
||||
int position = 0;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return position < actual;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ServerReadingListRecord next() {
|
||||
final Object o = items.get(position++);
|
||||
return new ServerReadingListRecord(new ExtendedJSONObject((JSONObject) o));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new RuntimeException("Cannot remove from iterator.");
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public int getTotalRecords() {
|
||||
return getIntegerHeader("Total-Records");
|
||||
}
|
||||
}
|
|
@ -4,22 +4,291 @@
|
|||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Collection;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.mozilla.gecko.background.common.PrefsBranch;
|
||||
import org.mozilla.gecko.background.common.log.Logger;
|
||||
import org.mozilla.gecko.background.fxa.FxAccountClient;
|
||||
import org.mozilla.gecko.background.fxa.FxAccountClient20;
|
||||
import org.mozilla.gecko.background.fxa.FxAccountUtils;
|
||||
import org.mozilla.gecko.background.fxa.oauth.FxAccountAbstractClient.RequestDelegate;
|
||||
import org.mozilla.gecko.background.fxa.oauth.FxAccountAbstractClientException.FxAccountAbstractClientRemoteException;
|
||||
import org.mozilla.gecko.background.fxa.oauth.FxAccountOAuthClient10;
|
||||
import org.mozilla.gecko.background.fxa.oauth.FxAccountOAuthClient10.AuthorizationResponse;
|
||||
import org.mozilla.gecko.browserid.BrowserIDKeyPair;
|
||||
import org.mozilla.gecko.browserid.JSONWebTokenUtils;
|
||||
import org.mozilla.gecko.db.BrowserContract.ReadingListItems;
|
||||
import org.mozilla.gecko.fxa.authenticator.AndroidFxAccount;
|
||||
import org.mozilla.gecko.fxa.login.FxAccountLoginStateMachine;
|
||||
import org.mozilla.gecko.fxa.login.FxAccountLoginStateMachine.LoginStateMachineDelegate;
|
||||
import org.mozilla.gecko.fxa.login.FxAccountLoginTransition.Transition;
|
||||
import org.mozilla.gecko.fxa.login.Married;
|
||||
import org.mozilla.gecko.fxa.login.State;
|
||||
import org.mozilla.gecko.fxa.login.State.StateLabel;
|
||||
import org.mozilla.gecko.fxa.login.StateFactory;
|
||||
import org.mozilla.gecko.fxa.sync.FxAccountSyncDelegate;
|
||||
import org.mozilla.gecko.sync.net.AuthHeaderProvider;
|
||||
import org.mozilla.gecko.sync.net.BearerAuthHeaderProvider;
|
||||
|
||||
import android.accounts.Account;
|
||||
import android.content.AbstractThreadedSyncAdapter;
|
||||
import android.content.ContentProviderClient;
|
||||
import android.content.ContentResolver;
|
||||
import android.content.Context;
|
||||
import android.content.SharedPreferences;
|
||||
import android.content.SyncResult;
|
||||
import android.os.Bundle;
|
||||
|
||||
public class ReadingListSyncAdapter extends AbstractThreadedSyncAdapter {
|
||||
public ReadingListSyncAdapter(Context context, boolean autoInitialize) {
|
||||
super(context, autoInitialize);
|
||||
public static final String PREF_LOCAL_NAME = "device.localname";
|
||||
public static final String OAUTH_CLIENT_ID_FENNEC = "3332a18d142636cb";
|
||||
public static final String OAUTH_SCOPE_READINGLIST = "readinglist";
|
||||
|
||||
private static final String LOG_TAG = ReadingListSyncAdapter.class.getSimpleName();
|
||||
private static final long TIMEOUT_SECONDS = 60;
|
||||
protected final ExecutorService executor;
|
||||
|
||||
public ReadingListSyncAdapter(Context context, boolean autoInitialize) {
|
||||
super(context, autoInitialize);
|
||||
this.executor = Executors.newSingleThreadExecutor();
|
||||
}
|
||||
|
||||
|
||||
static final class SyncAdapterSynchronizerDelegate implements ReadingListSynchronizerDelegate {
|
||||
private final FxAccountSyncDelegate syncDelegate;
|
||||
private final ContentProviderClient cpc;
|
||||
private final SyncResult result;
|
||||
|
||||
SyncAdapterSynchronizerDelegate(FxAccountSyncDelegate syncDelegate,
|
||||
ContentProviderClient cpc,
|
||||
SyncResult result) {
|
||||
this.syncDelegate = syncDelegate;
|
||||
this.cpc = cpc;
|
||||
this.result = result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider, SyncResult syncResult) {
|
||||
final AndroidFxAccount fxAccount = new AndroidFxAccount(getContext(), account);
|
||||
public void onUnableToSync(Exception e) {
|
||||
Logger.warn(LOG_TAG, "Unable to sync.", e);
|
||||
cpc.release();
|
||||
syncDelegate.handleError(e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStatusUploadComplete(Collection<String> uploaded,
|
||||
Collection<String> failed) {
|
||||
Logger.debug(LOG_TAG, "Step: onStatusUploadComplete");
|
||||
this.result.stats.numEntries += 1; // TODO: Bug 1140809.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNewItemUploadComplete(Collection<String> uploaded,
|
||||
Collection<String> failed) {
|
||||
Logger.debug(LOG_TAG, "Step: onNewItemUploadComplete");
|
||||
this.result.stats.numEntries += 1; // TODO: Bug 1140809.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onModifiedUploadComplete() {
|
||||
Logger.debug(LOG_TAG, "Step: onModifiedUploadComplete");
|
||||
this.result.stats.numEntries += 1; // TODO: Bug 1140809.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDownloadComplete() {
|
||||
Logger.debug(LOG_TAG, "Step: onDownloadComplete");
|
||||
this.result.stats.numInserts += 1; // TODO: Bug 1140809.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onComplete() {
|
||||
Logger.info(LOG_TAG, "Reading list synchronization complete.");
|
||||
cpc.release();
|
||||
syncDelegate.handleSuccess();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPerformSync(final Account account, final Bundle extras, final String authority, final ContentProviderClient provider, final SyncResult syncResult) {
|
||||
Logger.setThreadLogTag(ReadingListConstants.GLOBAL_LOG_TAG);
|
||||
Logger.resetLogging();
|
||||
|
||||
final Context context = getContext();
|
||||
final AndroidFxAccount fxAccount = new AndroidFxAccount(context, account);
|
||||
|
||||
// If this sync was triggered by user action, this will be true.
|
||||
final boolean isImmediate = (extras != null) &&
|
||||
(extras.getBoolean(ContentResolver.SYNC_EXTRAS_UPLOAD, false) ||
|
||||
extras.getBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, false));
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
final FxAccountSyncDelegate syncDelegate = new FxAccountSyncDelegate(latch, syncResult, fxAccount);
|
||||
try {
|
||||
final State state;
|
||||
try {
|
||||
state = fxAccount.getState();
|
||||
} catch (Exception e) {
|
||||
Logger.error(LOG_TAG, "Unable to sync.", e);
|
||||
return;
|
||||
}
|
||||
|
||||
final String oauthServerUri = ReadingListConstants.OAUTH_ENDPOINT_PROD;
|
||||
final String authServerEndpoint = fxAccount.getAccountServerURI();
|
||||
final String audience = FxAccountUtils.getAudienceForURL(oauthServerUri); // The assertion gets traded in for an oauth bearer token.
|
||||
|
||||
final SharedPreferences sharedPrefs = fxAccount.getReadingListPrefs();
|
||||
final FxAccountClient client = new FxAccountClient20(authServerEndpoint, executor);
|
||||
final FxAccountLoginStateMachine stateMachine = new FxAccountLoginStateMachine();
|
||||
|
||||
stateMachine.advance(state, StateLabel.Married, new LoginStateMachineDelegate() {
|
||||
@Override
|
||||
public FxAccountClient getClient() {
|
||||
return client;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getCertificateDurationInMilliseconds() {
|
||||
return 12 * 60 * 60 * 1000;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getAssertionDurationInMilliseconds() {
|
||||
return 15 * 60 * 1000;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BrowserIDKeyPair generateKeyPair() throws NoSuchAlgorithmException {
|
||||
return StateFactory.generateKeyPair();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleTransition(Transition transition, State state) {
|
||||
Logger.info(LOG_TAG, "handleTransition: " + transition + " to " + state.getStateLabel());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleFinal(State state) {
|
||||
Logger.info(LOG_TAG, "handleFinal: in " + state.getStateLabel());
|
||||
fxAccount.setState(state);
|
||||
|
||||
// TODO: scheduling, notifications.
|
||||
try {
|
||||
if (state.getStateLabel() != StateLabel.Married) {
|
||||
syncDelegate.handleCannotSync(state);
|
||||
return;
|
||||
}
|
||||
|
||||
final Married married = (Married) state;
|
||||
final String assertion = married.generateAssertion(audience, JSONWebTokenUtils.DEFAULT_ASSERTION_ISSUER);
|
||||
JSONWebTokenUtils.dumpAssertion(assertion);
|
||||
|
||||
final String clientID = OAUTH_CLIENT_ID_FENNEC;
|
||||
final String scope = OAUTH_SCOPE_READINGLIST;
|
||||
syncWithAssertion(clientID, scope, assertion, sharedPrefs, extras);
|
||||
} catch (Exception e) {
|
||||
syncDelegate.handleError(e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private void syncWithAssertion(final String client_id, final String scope, final String assertion,
|
||||
final SharedPreferences sharedPrefs, final Bundle extras) {
|
||||
final FxAccountOAuthClient10 oauthClient = new FxAccountOAuthClient10(oauthServerUri, executor);
|
||||
Logger.debug(LOG_TAG, "OAuth fetch.");
|
||||
oauthClient.authorization(client_id, assertion, null, scope, new RequestDelegate<FxAccountOAuthClient10.AuthorizationResponse>() {
|
||||
@Override
|
||||
public void handleSuccess(AuthorizationResponse result) {
|
||||
Logger.debug(LOG_TAG, "OAuth success.");
|
||||
syncWithAuthorization(result, sharedPrefs, extras);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleFailure(FxAccountAbstractClientRemoteException e) {
|
||||
Logger.error(LOG_TAG, "OAuth failure.", e);
|
||||
syncDelegate.handleError(e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleError(Exception e) {
|
||||
Logger.error(LOG_TAG, "OAuth error.", e);
|
||||
syncDelegate.handleError(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void syncWithAuthorization(AuthorizationResponse authResponse,
|
||||
SharedPreferences sharedPrefs,
|
||||
Bundle extras) {
|
||||
final AuthHeaderProvider auth = new BearerAuthHeaderProvider(authResponse.access_token);
|
||||
|
||||
final String endpointString = ReadingListConstants.DEFAULT_DEV_ENDPOINT;
|
||||
final URI endpoint;
|
||||
Logger.info(LOG_TAG, "XXX Syncing to " + endpointString);
|
||||
try {
|
||||
endpoint = new URI(endpointString);
|
||||
} catch (URISyntaxException e) {
|
||||
// Should never happen.
|
||||
Logger.error(LOG_TAG, "Unexpected malformed URI for reading list service: " + endpointString);
|
||||
syncDelegate.handleError(e);
|
||||
return;
|
||||
}
|
||||
|
||||
final PrefsBranch branch = new PrefsBranch(sharedPrefs, "readinglist.");
|
||||
final ReadingListClient remote = new ReadingListClient(endpoint, auth);
|
||||
final ContentProviderClient cpc = getContentProviderClient(context); // TODO: make sure I'm always released!
|
||||
|
||||
final LocalReadingListStorage local = new LocalReadingListStorage(cpc);
|
||||
String localName = branch.getString(PREF_LOCAL_NAME, null);
|
||||
if (localName == null) {
|
||||
localName = FxAccountUtils.defaultClientName(context);
|
||||
}
|
||||
|
||||
// Make sure DB rows don't refer to placeholder values.
|
||||
local.updateLocalNames(localName);
|
||||
|
||||
final ReadingListSynchronizer synchronizer = new ReadingListSynchronizer(branch, remote, local);
|
||||
|
||||
synchronizer.syncAll(new SyncAdapterSynchronizerDelegate(syncDelegate, cpc, syncResult));
|
||||
// TODO: backoffs, and everything else handled by a SessionCallback.
|
||||
}
|
||||
});
|
||||
|
||||
latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS);
|
||||
Logger.info(LOG_TAG, "Reading list sync done.");
|
||||
|
||||
} catch (Exception e) {
|
||||
Logger.error(LOG_TAG, "Got error syncing.", e);
|
||||
syncDelegate.handleError(e);
|
||||
}
|
||||
/*
|
||||
* TODO:
|
||||
* * Account error notifications. How do we avoid these overlapping with Sync?
|
||||
* * Pickling. How do we avoid pickling twice if you use both Sync and RL?
|
||||
*/
|
||||
|
||||
/*
|
||||
* TODO:
|
||||
* * Auth.
|
||||
* * Server URI lookup.
|
||||
* * Syncing.
|
||||
* * Error handling.
|
||||
* * Backoff and retry-after.
|
||||
* * Sync scheduling.
|
||||
* * Forcing syncs/interactive use.
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
private ContentProviderClient getContentProviderClient(Context context) {
|
||||
final ContentResolver contentResolver = context.getContentResolver();
|
||||
final ContentProviderClient client = contentResolver.acquireContentProviderClient(ReadingListItems.CONTENT_URI);
|
||||
return client;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,645 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
import org.json.simple.parser.ParseException;
|
||||
import org.mozilla.gecko.background.common.PrefsBranch;
|
||||
import org.mozilla.gecko.background.common.log.Logger;
|
||||
import org.mozilla.gecko.db.BrowserContract.ReadingListItems;
|
||||
import org.mozilla.gecko.reading.ReadingListRecord.ServerMetadata;
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
import org.mozilla.gecko.sync.NonObjectJSONException;
|
||||
import org.mozilla.gecko.sync.net.MozResponse;
|
||||
|
||||
import android.database.Cursor;
|
||||
import android.text.TextUtils;
|
||||
|
||||
/**
|
||||
* This class implements the multi-phase synchronizing approach described
|
||||
* at <https://github.com/mozilla-services/readinglist/wiki/Client-phases>.
|
||||
*
|
||||
* This is also where delegate-based control flow comes to die.
|
||||
*/
|
||||
public class ReadingListSynchronizer {
|
||||
public static final String LOG_TAG = ReadingListSynchronizer.class.getSimpleName();
|
||||
|
||||
public static final String PREF_LAST_MODIFIED = "download.serverlastmodified";
|
||||
|
||||
private final PrefsBranch prefs;
|
||||
private final ReadingListClient remote;
|
||||
private final ReadingListStorage local;
|
||||
private final Executor executor;
|
||||
|
||||
private interface StageDelegate {
|
||||
void next();
|
||||
void fail();
|
||||
void fail(Exception e);
|
||||
}
|
||||
|
||||
private abstract static class NextDelegate implements StageDelegate {
|
||||
private final Executor executor;
|
||||
NextDelegate(final Executor executor) {
|
||||
this.executor = executor;
|
||||
}
|
||||
|
||||
abstract void doNext();
|
||||
abstract void doFail(Exception e);
|
||||
|
||||
@Override
|
||||
public void next() {
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
doNext();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fail() {
|
||||
fail(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fail(final Exception e) {
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
doFail(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Convenience constructor: runs all stages on a fresh single-threaded
   * executor, serializing stage transitions.
   */
  public ReadingListSynchronizer(final PrefsBranch prefs, final ReadingListClient remote, final ReadingListStorage local) {
    this(prefs, remote, local, Executors.newSingleThreadExecutor());
  }

  /**
   * @param prefs    branch holding sync state (e.g. PREF_LAST_MODIFIED).
   * @param remote   client for the reading-list service.
   * @param local    local record storage.
   * @param executor executor on which all stage transitions run.
   */
  public ReadingListSynchronizer(final PrefsBranch prefs, final ReadingListClient remote, final ReadingListStorage local, Executor executor) {
    this.prefs = prefs;
    this.remote = remote;
    this.local = local;
    this.executor = executor;
  }
|
||||
|
||||
  /**
   * Upload delegate for brand-new local items. On success the server's
   * version of each record is folded into the accumulator; on conflict the
   * local copy is deleted; failures are tallied so onBatchDone can decide
   * whether to advance or fail the stage.
   */
  private static final class NewItemUploadDelegate implements ReadingListRecordUploadDelegate {
    // Count of records that failed to upload in this batch.
    public volatile int failures = 0;
    private final ReadingListChangeAccumulator acc;
    private final StageDelegate next;

    NewItemUploadDelegate(ReadingListChangeAccumulator acc, StageDelegate next) {
      this.acc = acc;
      this.next = next;
    }

    @Override
    public void onSuccess(ClientReadingListRecord up,
                          ReadingListRecordResponse response,
                          ServerReadingListRecord down) {
      // Apply the resulting record. The server will have populated some fields.
      acc.addChangedRecord(up.givenServerRecord(down));
    }

    @Override
    public void onConflict(ClientReadingListRecord up, ReadingListResponse response) {
      ExtendedJSONObject body;
      try {
        body = response.jsonObjectBody();
        String conflicting = body.getString("id");
        Logger.warn(LOG_TAG, "Conflict detected: remote ID is " + conflicting);

        // TODO: When an operation implies that a server record is a replacement
        // of what we uploaded, we should ensure that we have a local copy of
        // that server record!
      } catch (IllegalStateException | NonObjectJSONException | IOException |
               ParseException e) {
        // Oops.
        // But our workaround is the same either way.
      }

      // Either the record exists locally, in which case we need to merge,
      // or it doesn't, and we'll download it shortly.
      // The simplest thing to do in both cases is to simply delete the local
      // record we tried to upload. Yes, we might lose some annotations, but
      // we can leave doing better to a follow-up.
      // Issues here are so unlikely that we don't do anything sophisticated
      // (like moving the record to a holding area) -- just delete it ASAP.
      acc.addDeletion(up);
    }

    @Override
    public void onInvalidUpload(ClientReadingListRecord up, ReadingListResponse response) {
      recordFailed(up);
    }

    @Override
    public void onFailure(ClientReadingListRecord up, MozResponse response) {
      recordFailed(up);
    }

    @Override
    public void onFailure(ClientReadingListRecord up, Exception ex) {
      recordFailed(up);
    }

    @Override
    public void onBadRequest(ClientReadingListRecord up, MozResponse response) {
      recordFailed(up);
    }

    // All failure paths funnel here; we only tally, leaving policy to onBatchDone.
    private void recordFailed(ClientReadingListRecord up) {
      ++failures;
    }

    @Override
    public void onBatchDone() {
      // We mark uploaded records as synced when we apply the server record with the
      // GUID -- we don't know the GUID yet!
      if (failures == 0) {
        try {
          next.next();
        } catch (Exception e) {
          next.fail(e);
        }
        return;
      }
      next.fail();
    }
  }
|
||||
|
||||
private static class StatusUploadDelegate implements ReadingListRecordUploadDelegate {
|
||||
private final ReadingListChangeAccumulator acc;
|
||||
|
||||
public volatile int failures = 0;
|
||||
private final StageDelegate next;
|
||||
|
||||
StatusUploadDelegate(ReadingListChangeAccumulator acc, StageDelegate next) {
|
||||
this.acc = acc;
|
||||
this.next = next;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onInvalidUpload(ClientReadingListRecord up,
|
||||
ReadingListResponse response) {
|
||||
recordFailed(up);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConflict(ClientReadingListRecord up,
|
||||
ReadingListResponse response) {
|
||||
// This should never happen for a status-only change.
|
||||
// TODO: mark this record as requiring a full upload or download.
|
||||
failures++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess(ClientReadingListRecord up,
|
||||
ReadingListRecordResponse response,
|
||||
ServerReadingListRecord down) {
|
||||
if (!TextUtils.equals(up.getGUID(), down.getGUID())) {
|
||||
// Uh oh!
|
||||
// This should never occur. We should get an onConflict instead,
|
||||
// so this would imply a server bug, or something like a truncated
|
||||
// over-long GUID string.
|
||||
//
|
||||
// Should we wish to recover from this case, probably the right approach
|
||||
// is to ensure that the GUID is overwritten locally (given that we know
|
||||
// the numeric ID).
|
||||
}
|
||||
|
||||
acc.addChangedRecord(up.givenServerRecord(down));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBadRequest(ClientReadingListRecord up, MozResponse response) {
|
||||
recordFailed(up);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(ClientReadingListRecord up, Exception ex) {
|
||||
recordFailed(up);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(ClientReadingListRecord up, MozResponse response) {
|
||||
recordFailed(up);
|
||||
}
|
||||
|
||||
private void recordFailed(ClientReadingListRecord up) {
|
||||
++failures;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBatchDone() {
|
||||
try {
|
||||
acc.finish();
|
||||
} catch (Exception e) {
|
||||
next.fail(e);
|
||||
return;
|
||||
}
|
||||
|
||||
if (failures == 0) {
|
||||
try {
|
||||
next.next();
|
||||
} catch (Exception e) {
|
||||
}
|
||||
}
|
||||
next.fail();
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Convert a cursor over locally status-changed items into a queue of
   * minimal patch records carrying only the id plus whichever of the
   * unread/favorite fields actually changed. Always closes the cursor.
   */
  private Queue<ClientReadingListRecord> collectStatusChangesFromCursor(final Cursor cursor) {
    try {
      final Queue<ClientReadingListRecord> toUpload = new LinkedList<>();

      // The columns should come in this order, FWIW.
      final int columnGUID = cursor.getColumnIndexOrThrow(ReadingListItems.GUID);
      final int columnIsUnread = cursor.getColumnIndexOrThrow(ReadingListItems.IS_UNREAD);
      final int columnIsFavorite = cursor.getColumnIndexOrThrow(ReadingListItems.IS_FAVORITE);
      final int columnMarkedReadBy = cursor.getColumnIndexOrThrow(ReadingListItems.MARKED_READ_BY);
      final int columnMarkedReadOn = cursor.getColumnIndexOrThrow(ReadingListItems.MARKED_READ_ON);
      final int columnChangeFlags = cursor.getColumnIndexOrThrow(ReadingListItems.SYNC_CHANGE_FLAGS);

      while (cursor.moveToNext()) {
        final String guid = cursor.getString(columnGUID);
        if (guid == null) {
          // Nothing we can do here.
          continue;
        }

        final ExtendedJSONObject o = new ExtendedJSONObject();
        o.put("id", guid);

        // Only include fields whose change flag is set, so we send the
        // smallest possible patch.
        final int changeFlags = cursor.getInt(columnChangeFlags);
        if ((changeFlags & ReadingListItems.SYNC_CHANGE_FAVORITE_CHANGED) > 0) {
          o.put("favorite", cursor.getInt(columnIsFavorite) == 1);
        }

        if ((changeFlags & ReadingListItems.SYNC_CHANGE_UNREAD_CHANGED) > 0) {
          final boolean isUnread = cursor.getInt(columnIsUnread) == 1;
          o.put("unread", isUnread);
          if (!isUnread) {
            // Only meaningful when the item has been marked read.
            o.put("marked_read_by", cursor.getString(columnMarkedReadBy));
            o.put("marked_read_on", cursor.getLong(columnMarkedReadOn));
          }
        }

        // No client metadata; server metadata carries the GUID with an
        // unknown (-1) last-modified timestamp.
        final ClientMetadata cm = null;
        final ServerMetadata sm = new ServerMetadata(guid, -1L);
        final ClientReadingListRecord record = new ClientReadingListRecord(sm, cm, o);
        toUpload.add(record);
      }

      return toUpload;
    } finally {
      cursor.close();
    }
  }
|
||||
|
||||
private Queue<ClientReadingListRecord> accumulateNewItems(Cursor cursor) {
|
||||
try {
|
||||
final Queue<ClientReadingListRecord> toUpload = new LinkedList<>();
|
||||
final ReadingListClientRecordFactory factory = new ReadingListClientRecordFactory(cursor);
|
||||
|
||||
ClientReadingListRecord record;
|
||||
while ((record = factory.getNext()) != null) {
|
||||
toUpload.add(record);
|
||||
}
|
||||
return toUpload;
|
||||
} finally {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
|
||||
  // N.B., status changes for items that haven't been uploaded yet are dealt with in
  // uploadNewItems.
  /**
   * Upload unread/favorite status changes for already-synced items, then
   * advance the delegate. Fails the stage if the status-change cursor is
   * unavailable or any exception escapes.
   */
  protected void uploadUnreadChanges(final StageDelegate delegate) {
    try {
      final Cursor cursor = local.getStatusChanges();

      if (cursor == null) {
        delegate.fail(new RuntimeException("Unable to get unread item cursor."));
        return;
      }

      final Queue<ClientReadingListRecord> toUpload = collectStatusChangesFromCursor(cursor);

      // Nothing to do.
      if (toUpload.isEmpty()) {
        delegate.next();
        return;
      }

      // Upload each record. This looks like batching, but it's really chained serial requests.
      final ReadingListChangeAccumulator acc = this.local.getChangeAccumulator();
      final StatusUploadDelegate uploadDelegate = new StatusUploadDelegate(acc, delegate);

      // Don't send I-U-S; in the case of favorites we're
      // happy to overwrite the server value, and in the case of unread status
      // the server will reconcile for us.
      this.remote.patch(toUpload, executor, uploadDelegate);
    } catch (Exception e) {
      delegate.fail(e);
    }
  }
|
||||
|
||||
  /**
   * Upload brand-new local items. Whether the batch succeeds or fails, the
   * accumulated post-upload changes are flushed before the outer delegate
   * is notified, so partial progress is persisted.
   */
  protected void uploadNewItems(final StageDelegate delegate) {
    try {
      final Cursor cursor = this.local.getNew();

      if (cursor == null) {
        delegate.fail(new RuntimeException("Unable to get new item cursor."));
        return;
      }

      Queue<ClientReadingListRecord> toUpload = accumulateNewItems(cursor);

      // Nothing to do.
      if (toUpload.isEmpty()) {
        Logger.debug(LOG_TAG, "No new items to upload. Skipping.");
        delegate.next();
        return;
      }

      final ReadingListChangeAccumulator acc = this.local.getChangeAccumulator();
      final NewItemUploadDelegate uploadDelegate = new NewItemUploadDelegate(acc, new StageDelegate() {
        // Flush accumulated changes; on flush failure, fail the outer stage
        // and suppress the would-be next()/fail() call.
        private boolean tryFlushChanges() {
          Logger.debug(LOG_TAG, "Flushing post-upload changes.");
          try {
            acc.finish();
            return true;
          } catch (Exception e) {
            Logger.warn(LOG_TAG, "Flushing changes failed! This sync went wrong.", e);
            delegate.fail(e);
            return false;
          }
        }

        @Override
        public void next() {
          Logger.debug(LOG_TAG, "New items uploaded successfully.");

          if (tryFlushChanges()) {
            delegate.next();
          }
        }

        @Override
        public void fail() {
          Logger.warn(LOG_TAG, "Couldn't upload new items.");
          if (tryFlushChanges()) {
            delegate.fail();
          }
        }

        @Override
        public void fail(Exception e) {
          Logger.warn(LOG_TAG, "Couldn't upload new items.", e);
          if (tryFlushChanges()) {
            delegate.fail(e);
          }
        }
      });

      // Handle 201 for success, 400 for invalid, 303 for redirect.
      // TODO: 200 == "was already on the server, we didn't touch it, here it is."
      // ... we need to apply it locally.
      this.remote.add(toUpload, executor, uploadDelegate);
    } catch (Exception e) {
      delegate.fail(e);
      return;
    }
  }
|
||||
|
||||
  /**
   * Upload substantially modified records. Not yet implemented; advances
   * straight to the next stage.
   */
  private void uploadModified(final StageDelegate delegate) {
    // TODO
    delegate.next();
  }
|
||||
|
||||
  /**
   * Fetch server records modified since `since` and apply them locally.
   * The persisted last-modified timestamp is advanced only after the local
   * write succeeds, so a failed write causes a re-download next sync.
   */
  private void downloadIncoming(final long since, final StageDelegate delegate) {
    final ReadingListChangeAccumulator postDownload = this.local.getChangeAccumulator();

    final FetchSpec spec = new FetchSpec.Builder().setSince(since).build();

    // TODO: should we flush the accumulator if we get a failure?
    ReadingListRecordDelegate recordDelegate = new ReadingListRecordDelegate() {
      @Override
      public void onRecordReceived(ServerReadingListRecord record) {
        // Buffer each incoming record; applied in one go in onComplete.
        postDownload.addDownloadedRecord(record);
      }

      @Override
      public void onRecordMissingOrDeleted(String guid, ReadingListResponse resp) {
        // Should never occur. Deleted records will be processed by onRecordReceived.
      }

      @Override
      public void onFailure(Exception error) {
        Logger.error(LOG_TAG, "Download failed. since = " + since + ".", error);
        delegate.fail(error);
      }

      @Override
      public void onFailure(MozResponse response) {
        final int statusCode = response.getStatusCode();
        Logger.error(LOG_TAG, "Download failed. since = " + since + ". Response: " + statusCode);
        response.logResponseBody(LOG_TAG);
        delegate.fail();
      }

      @Override
      public void onComplete(ReadingListResponse response) {
        long lastModified = response.getLastModified();
        Logger.info(LOG_TAG, "Server last modified: " + lastModified);
        try {
          postDownload.finish();

          // Yay. We do this here so that if writing changes fails, we don't advance.
          advanceLastModified(lastModified);
          delegate.next();
        } catch (Exception e) {
          delegate.fail(e);
        }
      }
    };

    try {
      remote.getAll(spec, recordDelegate, since);
    } catch (URISyntaxException e) {
      delegate.fail(e);
    }
  }
|
||||
|
||||
  /**
   * Upload unread changes, then upload new items, then call `done`.
   * Substantially modified records are uploaded last.
   *
   * @param syncDelegate only used for status callbacks.
   * @param done         continuation fired when both upload phases finish
   *                     (or as soon as either fails).
   */
  private void syncUp(final ReadingListSynchronizerDelegate syncDelegate, final StageDelegate done) {
    // Second.
    final StageDelegate onNewItemsUploaded = new NextDelegate(executor) {
      @Override
      public void doNext() {
        syncDelegate.onNewItemUploadComplete(null, null);
        done.next();
      }

      @Override
      public void doFail(Exception e) {
        done.fail(e);
      }
    };

    // First.
    final StageDelegate onUnreadChangesUploaded = new NextDelegate(executor) {
      @Override
      public void doNext() {
        syncDelegate.onStatusUploadComplete(null, null);
        uploadNewItems(onNewItemsUploaded);
      }

      @Override
      public void doFail(Exception e) {
        Logger.warn(LOG_TAG, "Uploading unread changes failed.", e);
        done.fail(e);
      }
    };

    try {
      uploadUnreadChanges(onUnreadChangesUploaded);
    } catch (Exception ee) {
      done.fail(ee);
    }
  }
|
||||
|
||||
|
||||
/**
|
||||
* Do an upload-only sync.
|
||||
* By "upload-only" we mean status-only changes and new items.
|
||||
* To upload modifications, use syncAll.
|
||||
*/
|
||||
/*
|
||||
// Not yet used
|
||||
public void syncUp(final ReadingListSynchronizerDelegate syncDelegate) {
|
||||
final StageDelegate onUploadCompleted = new StageDelegate() {
|
||||
@Override
|
||||
public void next() {
|
||||
// TODO
|
||||
syncDelegate.onNewItemUploadComplete(null, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fail(Exception e) {
|
||||
syncDelegate.onUnableToSync(e);
|
||||
}
|
||||
};
|
||||
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
syncUp(onUploadCompleted);
|
||||
} catch (Exception e) {
|
||||
syncDelegate.onUnableToSync(e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
*/
|
||||
|
||||
  /**
   * Do a bidirectional sync.
   */
  public void syncAll(final ReadingListSynchronizerDelegate syncDelegate) {
    syncAll(getLastModified(), syncDelegate);
  }

  /**
   * Do a bidirectional sync starting from the given server timestamp:
   * upload status changes and new items, download incoming changes since
   * `since`, then upload modified records, reporting each phase via the
   * delegate. The stages are declared below in reverse order so each can
   * reference its successor.
   */
  public void syncAll(final long since, final ReadingListSynchronizerDelegate syncDelegate) {
    // Fourth: call back to the synchronizer delegate.
    final StageDelegate onModifiedUploadComplete = new NextDelegate(executor) {
      @Override
      public void doNext() {
        syncDelegate.onModifiedUploadComplete();
        syncDelegate.onComplete();
      }

      @Override
      public void doFail(Exception e) {
        syncDelegate.onUnableToSync(e);
      }
    };

    // Third: upload modified records.
    final StageDelegate onDownloadCompleted = new NextDelegate(executor) { // TODO: since.
      @Override
      public void doNext() {
        // TODO: save prefs.
        syncDelegate.onDownloadComplete();
        uploadModified(onModifiedUploadComplete);
      }

      @Override
      public void doFail(Exception e) {
        Logger.warn(LOG_TAG, "Download failed.", e);
        syncDelegate.onUnableToSync(e);
      }
    };

    // Second: download incoming changes.
    final StageDelegate onUploadCompleted = new NextDelegate(executor) {
      @Override
      public void doNext() {
        // N.B., we apply the downloaded versions of all uploaded records.
        // That means the DB server timestamp matches the server's current
        // timestamp when we do a fetch; we skip records in this way.
        // We can also optimize by keeping the (guid, server timestamp) pair
        // in memory, but of course this runs into invalidation issues if
        // concurrent writes are occurring.
        downloadIncoming(since, onDownloadCompleted);
      }

      @Override
      public void doFail(Exception e) {
        Logger.warn(LOG_TAG, "Upload failed.", e);
        syncDelegate.onUnableToSync(e);
      }
    };

    // First: upload changes and new items.
    executor.execute(new Runnable() {
      @Override
      public void run() {
        try {
          syncUp(syncDelegate, onUploadCompleted);
        } catch (Exception e) {
          syncDelegate.onUnableToSync(e);
          return;
        }
      }
    });

    // TODO: ensure that records we identified as conflicts have been downloaded.
  }
|
||||
|
||||
  /**
   * @return the persisted server last-modified timestamp, or -1L if no
   *         download has completed yet.
   */
  protected long getLastModified() {
    return prefs.getLong(PREF_LAST_MODIFIED, -1L);
  }

  /**
   * Persist a new server last-modified timestamp. Timestamps only move
   * forward: a value older than the stored one is ignored.
   */
  protected void advanceLastModified(final long lastModified) {
    if (getLastModified() > lastModified) {
      return;
    }
    prefs.edit().putLong(PREF_LAST_MODIFIED, lastModified).apply();
  }
}
|
|
@ -0,0 +1,22 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

package org.mozilla.gecko.reading;

import java.util.Collection;

/**
 * Status callbacks fired as each phase of a reading-list sync completes.
 */
public interface ReadingListSynchronizerDelegate {
  // Called on failure.
  void onUnableToSync(Exception e);

  // These are called sequentially, or not at all
  // if a failure occurs.
  void onStatusUploadComplete(Collection<String> uploaded, Collection<String> failed);
  void onNewItemUploadComplete(Collection<String> uploaded, Collection<String> failed);
  void onDownloadComplete();
  void onModifiedUploadComplete();

  // If no failure occurred, called at the end.
  void onComplete();
}
|
|
@ -0,0 +1,14 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

package org.mozilla.gecko.reading;

import org.mozilla.gecko.sync.net.MozResponse;

/**
 * Callbacks for a server-side reading-list wipe operation.
 */
public interface ReadingListWipeDelegate {
  void onSuccess(ReadingListStorageResponse response);
  // The server rejected the wipe because a precondition check failed.
  void onPreconditionFailed(MozResponse response);
  void onFailure(Exception e);
  void onFailure(MozResponse response);
}
|
|
@ -0,0 +1,31 @@
|
|||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.reading;
|
||||
|
||||
import org.mozilla.gecko.sync.ExtendedJSONObject;
|
||||
|
||||
public class ServerReadingListRecord extends ReadingListRecord {
|
||||
final ExtendedJSONObject fields;
|
||||
|
||||
public ServerReadingListRecord(ExtendedJSONObject obj) {
|
||||
super(new ServerMetadata(obj));
|
||||
this.fields = obj.deepCopy();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getURL() {
|
||||
return this.fields.getString("url"); // TODO: resolved_url
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTitle() {
|
||||
return this.fields.getString("title"); // TODO: resolved_title
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAddedBy() {
|
||||
return this.fields.getString("added_by");
|
||||
}
|
||||
}
|
|
@ -178,4 +178,15 @@ public class MozResponse {
|
|||
public int backoffInSeconds() throws NumberFormatException {
|
||||
return this.getIntegerHeader("x-backoff");
|
||||
}
|
||||
|
||||
public void logResponseBody(final String logTag) {
|
||||
if (!Logger.LOG_PERSONAL_INFORMATION) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
Logger.pii(logTag, "Response body: " + body());
|
||||
} catch (Throwable e) {
|
||||
Logger.debug(logTag, "No response body.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -526,6 +526,19 @@ var Addons = {
|
|||
let element = this._getElementForAddon(aAddon.id);
|
||||
if (!element) {
|
||||
element = this._createItemForAddon(aAddon);
|
||||
|
||||
// Themes aren't considered active on install, so set existing as disabled, and new one enabled.
|
||||
if (aAddon.type == "theme") {
|
||||
let item = list.firstElementChild;
|
||||
while (item) {
|
||||
if (item.addon && (item.addon.type == "theme")) {
|
||||
item.setAttribute("isDisabled", true);
|
||||
}
|
||||
item = item.nextSibling;
|
||||
}
|
||||
element.setAttribute("isDisabled", false);
|
||||
}
|
||||
|
||||
list.insertBefore(element, list.firstElementChild);
|
||||
}
|
||||
},
|
||||
|
|
|
@ -16,6 +16,9 @@ XPCOMUtils.defineLazyModuleGetter(this, "Notifications", "resource://gre/modules
|
|||
XPCOMUtils.defineLazyModuleGetter(this, "OS", "resource://gre/modules/osfile.jsm");
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "Services", "resource://gre/modules/Services.jsm");
|
||||
|
||||
XPCOMUtils.defineLazyServiceGetter(this, "ParentalControls",
|
||||
"@mozilla.org/parental-controls-service;1", "nsIParentalControlsService");
|
||||
|
||||
let Log = Cu.import("resource://gre/modules/AndroidLog.jsm", {}).AndroidLog.i.bind(null, "DownloadNotifications");
|
||||
|
||||
XPCOMUtils.defineLazyGetter(this, "strings",
|
||||
|
@ -58,6 +61,13 @@ var DownloadNotifications = {
|
|||
return;
|
||||
}
|
||||
|
||||
if (!ParentalControls.isAllowed(ParentalControls.DOWNLOAD)) {
|
||||
download.cancel().catch(Cu.reportError);
|
||||
download.removePartialData().catch(Cu.reportError);
|
||||
window.NativeWindow.toast.show(strings.GetStringFromName("downloads.disabledInGuest"), "long");
|
||||
return;
|
||||
}
|
||||
|
||||
let notification = new DownloadNotification(download);
|
||||
notifications.set(download, notification);
|
||||
notification.showOrUpdate();
|
||||
|
|
|
@ -280,15 +280,12 @@ body {
|
|||
/*======= Controls toolbar =======*/
|
||||
|
||||
.toolbar {
|
||||
font-family: "Clear Sans",sans-serif;
|
||||
transition-property: visibility, opacity;
|
||||
transition-duration: 0.7s;
|
||||
visibility: visible;
|
||||
opacity: 1.0;
|
||||
font-family: sans-serif;
|
||||
transition-property: bottom;
|
||||
transition-duration: 0.3s;
|
||||
position: fixed;
|
||||
width: 100%;
|
||||
bottom: 0px;
|
||||
left: 0px;
|
||||
left: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
|
@ -296,11 +293,8 @@ body {
|
|||
border-top: 1px solid #D7D9DB;
|
||||
}
|
||||
|
||||
.toolbar-hidden {
|
||||
transition-property: visibility, opacity;
|
||||
transition-duration: 0.7s;
|
||||
visibility: hidden;
|
||||
opacity: 0.0;
|
||||
.toolbar[visible] {
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
.toolbar > * {
|
||||
|
@ -590,12 +584,20 @@ body {
|
|||
.button {
|
||||
height: 56px;
|
||||
}
|
||||
|
||||
.toolbar {
|
||||
bottom: -57px;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (orientation: landscape) {
|
||||
.button {
|
||||
height: 40px;
|
||||
}
|
||||
|
||||
.toolbar {
|
||||
bottom: -41px;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 960px) {
|
||||
|
|
|
@ -21,7 +21,7 @@ RUN git config --global user.email "mozilla@example.com" && \
|
|||
git config --global user.name "mozilla"
|
||||
|
||||
# VCS Tools
|
||||
RUN npm install -g taskcluster-vcs@2.3.0
|
||||
RUN npm install -g taskcluster-vcs@2.3.1
|
||||
|
||||
# TODO enable worker
|
||||
# TODO volume mount permissions will be an issue
|
||||
|
|
|
@ -1 +1 @@
|
|||
0.5.1
|
||||
0.5.2
|
||||
|
|
|
@ -4,5 +4,5 @@ MAINTAINER Jonas Finnemann Jensen <jopsen@gmail.com>
|
|||
ENV PATH /home/worker/bin/:$PATH
|
||||
|
||||
# Add utilities and configuration
|
||||
RUN npm install -g taskcluster-vcs@0.0.2
|
||||
RUN npm install -g taskcluster-vcs@2.3.1
|
||||
ADD bin /home/worker/bin
|
||||
|
|
|
@ -1 +1 @@
|
|||
0.0.3
|
||||
0.0.4
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM quay.io/mozilla/builder:0.5.0
|
||||
FROM quay.io/mozilla/builder:0.5.2
|
||||
MAINTAINER Wander Lairson Costa <wcosta@mozilla.com>
|
||||
|
||||
# Add utilities and configuration
|
||||
|
|
|
@ -1 +1 @@
|
|||
0.0.9
|
||||
0.0.10
|
||||
|
|
|
@ -16,7 +16,7 @@ RUN chmod u+x /home/worker/bin/buildbot_step
|
|||
RUN pip install virtualenv;
|
||||
RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
|
||||
RUN chown -R worker:worker /home/worker/* /home/worker/.*
|
||||
RUN npm install -g taskcluster-vcs@2.2.0
|
||||
RUN npm install -g taskcluster-vcs@2.3.1
|
||||
ENV PATH $PATH:/home/worker/bin
|
||||
|
||||
# TODO Re-enable worker when bug 1093833 lands
|
||||
|
|
|
@ -1 +1 @@
|
|||
0.0.14
|
||||
0.0.15
|
||||
|
|
|
@ -208,7 +208,7 @@ class Graph(object):
|
|||
default=os.environ.get('GECKO_HEAD_REV'),
|
||||
help='Commit revision to use from head repository')
|
||||
@CommandArgument('--mozharness-rev',
|
||||
default='emulator-perf',
|
||||
default='default',
|
||||
help='Commit revision to use from mozharness repository')
|
||||
@CommandArgument('--message',
|
||||
help='Commit message to be parsed. Example: "try: -b do -p all -u all"')
|
||||
|
|
|
@ -45,8 +45,6 @@ let AboutReader = function(mm, win) {
|
|||
this._toolbarElementRef = Cu.getWeakReference(doc.getElementById("reader-toolbar"));
|
||||
this._messageElementRef = Cu.getWeakReference(doc.getElementById("reader-message"));
|
||||
|
||||
this._toolbarEnabled = false;
|
||||
|
||||
this._scrollOffset = win.pageYOffset;
|
||||
|
||||
doc.getElementById("container").addEventListener("click", this, false);
|
||||
|
@ -256,7 +254,8 @@ AboutReader.prototype = {
|
|||
|
||||
// Display the toolbar when all its initial component states are known
|
||||
if (isInitialStateChange) {
|
||||
this._setToolbarVisibility(true);
|
||||
// Hacks! Delay showing the toolbar to avoid position: fixed; jankiness. See bug 975533.
|
||||
this._win.setTimeout(() => this._setToolbarVisibility(true), 500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -524,24 +523,22 @@ AboutReader.prototype = {
|
|||
},
|
||||
|
||||
_getToolbarVisibility: function Reader_getToolbarVisibility() {
|
||||
return !this._toolbarElement.classList.contains("toolbar-hidden");
|
||||
return this._toolbarElement.hasAttribute("visible");
|
||||
},
|
||||
|
||||
_setToolbarVisibility: function Reader_setToolbarVisibility(visible) {
|
||||
let dropdown = this._doc.getElementById("style-dropdown");
|
||||
dropdown.classList.remove("open");
|
||||
|
||||
if (!this._toolbarEnabled)
|
||||
if (this._getToolbarVisibility() === visible) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Don't allow visible toolbar until banner state is known
|
||||
if (this._isReadingListItem == -1)
|
||||
return;
|
||||
|
||||
if (this._getToolbarVisibility() === visible)
|
||||
return;
|
||||
|
||||
this._toolbarElement.classList.toggle("toolbar-hidden");
|
||||
if (visible) {
|
||||
this._toolbarElement.setAttribute("visible", true);
|
||||
} else {
|
||||
this._toolbarElement.removeAttribute("visible");
|
||||
}
|
||||
this._setSystemUIVisibility(visible);
|
||||
|
||||
if (!visible) {
|
||||
|
@ -711,9 +708,6 @@ AboutReader.prototype = {
|
|||
this._updateImageMargins();
|
||||
this._requestReadingListStatus();
|
||||
|
||||
this._toolbarEnabled = true;
|
||||
this._setToolbarVisibility(true);
|
||||
|
||||
this._requestFavicon();
|
||||
},
|
||||
|
||||
|
|
|
@ -219,7 +219,12 @@ this.ReaderMode = {
|
|||
createInstance(Ci.nsIDOMSerializer);
|
||||
let serializedDoc = yield Promise.resolve(serializer.serializeToString(doc));
|
||||
|
||||
let article = yield ReaderWorker.post("parseDocument", [uriParam, serializedDoc]);
|
||||
let article = null;
|
||||
try {
|
||||
article = yield ReaderWorker.post("parseDocument", [uriParam, serializedDoc]);
|
||||
} catch (e) {
|
||||
Cu.reportError("Error in ReaderWorker: " + e);
|
||||
}
|
||||
|
||||
if (!article) {
|
||||
this.log("Worker did not return an article");
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<ul id="reader-toolbar" class="toolbar toolbar-hidden">
|
||||
<ul id="reader-toolbar" class="toolbar">
|
||||
<li><button id="close-button" class="button close-button"/></li>
|
||||
<li><button id="share-button" class="button share-button"/></li>
|
||||
<ul id="style-dropdown" class="dropdown">
|
||||
|
|
|
@ -388,9 +388,8 @@
|
|||
|
||||
<field name="_permanentKey">({})</field>
|
||||
|
||||
<property name="permanentKey"
|
||||
onget="return this._permanentKey;"
|
||||
onset="this._permanentKey = val;"/>
|
||||
<property name="permanentKey" readonly="true"
|
||||
onget="return this._permanentKey;"/>
|
||||
|
||||
<property name="outerWindowID" readonly="true">
|
||||
<getter><![CDATA[
|
||||
|
|
|
@ -15,6 +15,8 @@ loader.lazyRequireGetter(this, "prompt",
|
|||
"devtools/toolkit/security/prompt");
|
||||
loader.lazyRequireGetter(this, "cert",
|
||||
"devtools/toolkit/security/cert");
|
||||
loader.lazyRequireGetter(this, "asyncStorage",
|
||||
"devtools/toolkit/shared/async-storage");
|
||||
DevToolsUtils.defineLazyModuleGetter(this, "Task",
|
||||
"resource://gre/modules/Task.jsm");
|
||||
|
||||
|
@ -341,7 +343,6 @@ OOBCert.Client.prototype = {
|
|||
});
|
||||
break;
|
||||
case AuthenticationResult.ALLOW:
|
||||
case AuthenticationResult.ALLOW_PERSIST:
|
||||
// Step B.12
|
||||
// Client verifies received value matches K
|
||||
if (packet.k != oobData.k) {
|
||||
|
@ -353,6 +354,13 @@ OOBCert.Client.prototype = {
|
|||
transport.hooks = null;
|
||||
deferred.resolve(transport);
|
||||
break;
|
||||
case AuthenticationResult.ALLOW_PERSIST:
|
||||
// Server previously persisted Client as allowed
|
||||
// Step C.5
|
||||
// Debugging begins
|
||||
transport.hooks = null;
|
||||
deferred.resolve(transport);
|
||||
break;
|
||||
default:
|
||||
transport.close(new Error("Invalid auth result: " + authResult));
|
||||
return;
|
||||
|
@ -488,7 +496,18 @@ OOBCert.Server.prototype = {
|
|||
authenticate: Task.async(function*({ client, server, transport }) {
|
||||
// Step B.3 / C.3
|
||||
// TLS connection established, authentication begins
|
||||
// TODO: Bug 1032128: Consult a list of persisted, approved clients
|
||||
const storageKey = `devtools.auth.${this.mode}.approved-clients`;
|
||||
let approvedClients = (yield asyncStorage.getItem(storageKey)) || {};
|
||||
// Step C.4
|
||||
// Server sees that ClientCert is from a known client via hash(ClientCert)
|
||||
if (approvedClients[client.cert.sha256]) {
|
||||
let authResult = AuthenticationResult.ALLOW_PERSIST;
|
||||
transport.send({ authResult });
|
||||
// Step C.5
|
||||
// Debugging begins
|
||||
return authResult;
|
||||
}
|
||||
|
||||
// Step B.4
|
||||
// Server sees that ClientCert is from a unknown client
|
||||
// Tell client they are unknown and should display OOB client UX
|
||||
|
@ -499,19 +518,18 @@ OOBCert.Server.prototype = {
|
|||
// Step B.5
|
||||
// User is shown a Allow / Deny / Always Allow prompt on the Server
|
||||
// with Client name and hash(ClientCert)
|
||||
let result = yield this.allowConnection({
|
||||
let authResult = yield this.allowConnection({
|
||||
authentication: this.mode,
|
||||
client,
|
||||
server
|
||||
});
|
||||
|
||||
switch (result) {
|
||||
switch (authResult) {
|
||||
case AuthenticationResult.ALLOW_PERSIST:
|
||||
// TODO: Bug 1032128: Persist the client
|
||||
case AuthenticationResult.ALLOW:
|
||||
break; // Further processing
|
||||
default:
|
||||
return result; // Abort for any negative results
|
||||
return authResult; // Abort for any negative results
|
||||
}
|
||||
|
||||
// Examine additional data for authentication
|
||||
|
@ -540,14 +558,20 @@ OOBCert.Server.prototype = {
|
|||
|
||||
// Step B.11
|
||||
// Server sends K to Client over TLS connection
|
||||
transport.send({ authResult: result, k });
|
||||
transport.send({ authResult, k });
|
||||
|
||||
// Persist Client if we want to always allow in the future
|
||||
if (authResult === AuthenticationResult.ALLOW_PERSIST) {
|
||||
approvedClients[client.cert.sha256] = true;
|
||||
yield asyncStorage.setItem(storageKey, approvedClients);
|
||||
}
|
||||
|
||||
// Client may decide to abort if K does not match.
|
||||
// Server's portion of authentication is now complete.
|
||||
|
||||
// Step B.13
|
||||
// Debugging begins
|
||||
return result;
|
||||
return authResult;
|
||||
}),
|
||||
|
||||
/**
|
||||
|
|
|
@ -2593,7 +2593,7 @@ SourceActor.prototype = {
|
|||
*/
|
||||
_invertSourceMap: function ({ code, mappings }) {
|
||||
const generator = new SourceMapGenerator({ file: this.url });
|
||||
return DevToolsUtils.yieldingEach(mappings, m => {
|
||||
return DevToolsUtils.yieldingEach(mappings._array, m => {
|
||||
let mapping = {
|
||||
generated: {
|
||||
line: m.generatedLine,
|
||||
|
@ -5719,7 +5719,8 @@ ThreadSources.prototype = {
|
|||
} = map.generatedPositionFor({
|
||||
source: originalSourceActor.url,
|
||||
line: originalLine,
|
||||
column: originalColumn == null ? Infinity : originalColumn
|
||||
column: originalColumn == null ? 0 : originalColumn,
|
||||
bias: SourceMapConsumer.LEAST_UPPER_BOUND
|
||||
});
|
||||
|
||||
return new GeneratedLocation(
|
||||
|
|
|
@ -9,69 +9,69 @@
|
|||
|
||||
const asyncStorage = require("devtools/toolkit/shared/async-storage");
|
||||
add_task(function*() {
|
||||
is(typeof asyncStorage.length, 'function', "API exists.");
|
||||
is(typeof asyncStorage.key, 'function', "API exists.");
|
||||
is(typeof asyncStorage.getItem, 'function', "API exists.");
|
||||
is(typeof asyncStorage.setItem, 'function', "API exists.");
|
||||
is(typeof asyncStorage.removeItem, 'function', "API exists.");
|
||||
is(typeof asyncStorage.clear, 'function', "API exists.");
|
||||
is(typeof asyncStorage.length, "function", "API exists.");
|
||||
is(typeof asyncStorage.key, "function", "API exists.");
|
||||
is(typeof asyncStorage.getItem, "function", "API exists.");
|
||||
is(typeof asyncStorage.setItem, "function", "API exists.");
|
||||
is(typeof asyncStorage.removeItem, "function", "API exists.");
|
||||
is(typeof asyncStorage.clear, "function", "API exists.");
|
||||
});
|
||||
|
||||
add_task(function*() {
|
||||
yield asyncStorage.setItem('foo', 'bar');
|
||||
let value = yield asyncStorage.getItem('foo');
|
||||
is(value, 'bar', 'value is correct');
|
||||
yield asyncStorage.setItem('foo', 'overwritten');
|
||||
value = yield asyncStorage.getItem('foo');
|
||||
is(value, 'overwritten', 'value is correct');
|
||||
yield asyncStorage.removeItem('foo');
|
||||
value = yield asyncStorage.getItem('foo');
|
||||
is(value, null, 'value is correct');
|
||||
yield asyncStorage.setItem("foo", "bar");
|
||||
let value = yield asyncStorage.getItem("foo");
|
||||
is(value, "bar", "value is correct");
|
||||
yield asyncStorage.setItem("foo", "overwritten");
|
||||
value = yield asyncStorage.getItem("foo");
|
||||
is(value, "overwritten", "value is correct");
|
||||
yield asyncStorage.removeItem("foo");
|
||||
value = yield asyncStorage.getItem("foo");
|
||||
is(value, null, "value is correct");
|
||||
});
|
||||
|
||||
add_task(function*() {
|
||||
var object = {
|
||||
x: 1,
|
||||
y: 'foo',
|
||||
y: "foo",
|
||||
z: true
|
||||
};
|
||||
|
||||
yield asyncStorage.setItem('myobj', object);
|
||||
let value = yield asyncStorage.getItem('myobj');
|
||||
is(object.x, value.x, 'value is correct');
|
||||
is(object.y, value.y, 'value is correct');
|
||||
is(object.z, value.z, 'value is correct');
|
||||
yield asyncStorage.removeItem('myobj');
|
||||
value = yield asyncStorage.getItem('myobj');
|
||||
is(value, null, 'value is correct');
|
||||
yield asyncStorage.setItem("myobj", object);
|
||||
let value = yield asyncStorage.getItem("myobj");
|
||||
is(object.x, value.x, "value is correct");
|
||||
is(object.y, value.y, "value is correct");
|
||||
is(object.z, value.z, "value is correct");
|
||||
yield asyncStorage.removeItem("myobj");
|
||||
value = yield asyncStorage.getItem("myobj");
|
||||
is(value, null, "value is correct");
|
||||
});
|
||||
|
||||
add_task(function*() {
|
||||
yield asyncStorage.clear();
|
||||
let len = yield asyncStorage.length();
|
||||
is(len, 0, 'length is correct');
|
||||
yield asyncStorage.setItem('key1', 'value1');
|
||||
is(len, 0, "length is correct");
|
||||
yield asyncStorage.setItem("key1", "value1");
|
||||
len = yield asyncStorage.length();
|
||||
is(len, 1, 'length is correct');
|
||||
yield asyncStorage.setItem('key2', 'value2');
|
||||
is(len, 1, "length is correct");
|
||||
yield asyncStorage.setItem("key2", "value2");
|
||||
len = yield asyncStorage.length();
|
||||
is(len, 2, 'length is correct');
|
||||
yield asyncStorage.setItem('key3', 'value3');
|
||||
is(len, 2, "length is correct");
|
||||
yield asyncStorage.setItem("key3", "value3");
|
||||
len = yield asyncStorage.length();
|
||||
is(len, 3, 'length is correct');
|
||||
is(len, 3, "length is correct");
|
||||
|
||||
let key = yield asyncStorage.key(0);
|
||||
is(key, 'key1', 'key is correct');
|
||||
is(key, "key1", "key is correct");
|
||||
key = yield asyncStorage.key(1);
|
||||
is(key, 'key2', 'key is correct');
|
||||
is(key, "key2", "key is correct");
|
||||
key = yield asyncStorage.key(2);
|
||||
is(key, 'key3', 'key is correct');
|
||||
is(key, "key3", "key is correct");
|
||||
key = yield asyncStorage.key(3);
|
||||
is(key, null, 'key is correct');
|
||||
is(key, null, "key is correct");
|
||||
yield asyncStorage.clear();
|
||||
key = yield asyncStorage.key(0);
|
||||
is(key, null, 'key is correct');
|
||||
is(key, null, "key is correct");
|
||||
|
||||
len = yield asyncStorage.length();
|
||||
is(len, 0, 'length is correct');
|
||||
is(len, 0, "length is correct");
|
||||
});
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -126,6 +126,113 @@ define('test/source-map/util', ['require', 'exports', 'module' , 'lib/source-ma
|
|||
sourceRoot: '',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
// This mapping is identical to above, but uses the indexed format instead.
|
||||
exports.indexedTestMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
sections: [
|
||||
{
|
||||
offset: {
|
||||
line: 0,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"one.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
],
|
||||
names: [
|
||||
"bar",
|
||||
"baz"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
line: 1,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"two.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
names: [
|
||||
"n"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
exports.indexedTestMapDifferentSourceRoots = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
sections: [
|
||||
{
|
||||
offset: {
|
||||
line: 0,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"one.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
],
|
||||
names: [
|
||||
"bar",
|
||||
"baz"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
line: 1,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"two.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
names: [
|
||||
"n"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||
file: "min.js",
|
||||
sourceRoot: "/different/root"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
exports.testMapWithSourcesContent = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
|
@ -168,12 +275,13 @@ define('test/source-map/util', ['require', 'exports', 'module' , 'lib/source-ma
|
|||
|
||||
|
||||
function assertMapping(generatedLine, generatedColumn, originalSource,
|
||||
originalLine, originalColumn, name, map, assert,
|
||||
originalLine, originalColumn, name, bias, map, assert,
|
||||
dontTestGenerated, dontTestOriginal) {
|
||||
if (!dontTestOriginal) {
|
||||
var origMapping = map.originalPositionFor({
|
||||
line: generatedLine,
|
||||
column: generatedColumn
|
||||
column: generatedColumn,
|
||||
bias: bias
|
||||
});
|
||||
assert.equal(origMapping.name, name,
|
||||
'Incorrect name, expected ' + JSON.stringify(name)
|
||||
|
@ -206,7 +314,8 @@ define('test/source-map/util', ['require', 'exports', 'module' , 'lib/source-ma
|
|||
var genMapping = map.generatedPositionFor({
|
||||
source: originalSource,
|
||||
line: originalLine,
|
||||
column: originalColumn
|
||||
column: originalColumn,
|
||||
bias: bias
|
||||
});
|
||||
assert.equal(genMapping.line, generatedLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(generatedLine)
|
||||
|
@ -521,7 +630,7 @@ define('lib/source-map/util', ['require', 'exports', 'module' , ], function(requ
|
|||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.name, mappingB.name);
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
@ -531,7 +640,7 @@ define('lib/source-map/util', ['require', 'exports', 'module' , ], function(requ
|
|||
return cmp;
|
||||
}
|
||||
|
||||
return mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
};
|
||||
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||
|
||||
|
|
|
@ -19,9 +19,10 @@ define("test/source-map/test-base64-vlq", ["require", "exports", "module"], func
|
|||
exports['test normal encoding and decoding'] = function (assert, util) {
|
||||
var result = {};
|
||||
for (var i = -255; i < 256; i++) {
|
||||
base64VLQ.decode(base64VLQ.encode(i), result);
|
||||
var str = base64VLQ.encode(i);
|
||||
base64VLQ.decode(str, 0, result);
|
||||
assert.equal(result.value, i);
|
||||
assert.equal(result.rest, "");
|
||||
assert.equal(result.rest, str.length);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ define("test/source-map/test-binary-search", ["require", "exports", "module"], f
|
|||
return a - b;
|
||||
}
|
||||
|
||||
exports['test too high'] = function (assert, util) {
|
||||
exports['test too high with default (glb) bias'] = function (assert, util) {
|
||||
var needle = 30;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
|
@ -31,7 +31,7 @@ define("test/source-map/test-binary-search", ["require", "exports", "module"], f
|
|||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 20);
|
||||
};
|
||||
|
||||
exports['test too low'] = function (assert, util) {
|
||||
exports['test too low with default (glb) bias'] = function (assert, util) {
|
||||
var needle = 1;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
|
@ -42,6 +42,30 @@ define("test/source-map/test-binary-search", ["require", "exports", "module"], f
|
|||
assert.equal(binarySearch.search(needle, haystack, numberCompare), -1);
|
||||
};
|
||||
|
||||
exports['test too high with lub bias'] = function (assert, util) {
|
||||
var needle = 30;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
binarySearch.search(needle, haystack, numberCompare);
|
||||
});
|
||||
|
||||
assert.equal(binarySearch.search(needle, haystack, numberCompare,
|
||||
binarySearch.LEAST_UPPER_BOUND), -1);
|
||||
};
|
||||
|
||||
exports['test too low with lub bias'] = function (assert, util) {
|
||||
var needle = 1;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
binarySearch.search(needle, haystack, numberCompare);
|
||||
});
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
|
||||
binarySearch.LEAST_UPPER_BOUND)], 2);
|
||||
};
|
||||
|
||||
exports['test exact search'] = function (assert, util) {
|
||||
var needle = 4;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
@ -49,13 +73,37 @@ define("test/source-map/test-binary-search", ["require", "exports", "module"], f
|
|||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 4);
|
||||
};
|
||||
|
||||
exports['test fuzzy search'] = function (assert, util) {
|
||||
exports['test fuzzy search with default (glb) bias'] = function (assert, util) {
|
||||
var needle = 19;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 18);
|
||||
};
|
||||
|
||||
exports['test fuzzy search with lub bias'] = function (assert, util) {
|
||||
var needle = 19;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
|
||||
binarySearch.LEAST_UPPER_BOUND)], 20);
|
||||
};
|
||||
|
||||
exports['test multiple matches'] = function (assert, util) {
|
||||
var needle = 5;
|
||||
var haystack = [1, 1, 2, 5, 5, 5, 13, 21];
|
||||
|
||||
assert.equal(binarySearch.search(needle, haystack, numberCompare,
|
||||
binarySearch.LEAST_UPPER_BOUND), 3);
|
||||
};
|
||||
|
||||
exports['test multiple matches at the beginning'] = function (assert, util) {
|
||||
var needle = 1;
|
||||
var haystack = [1, 1, 2, 5, 5, 5, 13, 21];
|
||||
|
||||
assert.equal(binarySearch.search(needle, haystack, numberCompare,
|
||||
binarySearch.LEAST_UPPER_BOUND), 0);
|
||||
};
|
||||
|
||||
});
|
||||
function run_test() {
|
||||
runSourceMapTests('test/source-map/test-binary-search', do_throw);
|
||||
|
|
|
@ -56,34 +56,55 @@ define("test/source-map/test-dog-fooding", ["require", "exports", "module"], fun
|
|||
var smc = new SourceMapConsumer(smg.toString());
|
||||
|
||||
// Exact
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert);
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, null, smc, assert);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, null, smc, assert);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, null, smc, assert);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, null, smc, assert);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, null, smc, assert);
|
||||
|
||||
// Fuzzy
|
||||
|
||||
// Generated to original
|
||||
util.assertMapping(2, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
|
||||
util.assertMapping(3, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
|
||||
util.assertMapping(4, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
|
||||
util.assertMapping(5, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
|
||||
util.assertMapping(6, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 9, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 13, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
|
||||
// Generated to original with default (glb) bias.
|
||||
util.assertMapping(2, 0, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, null, smc, assert, true);
|
||||
util.assertMapping(3, 0, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, null, smc, assert, true);
|
||||
util.assertMapping(4, 0, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, null, smc, assert, true);
|
||||
util.assertMapping(5, 0, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 0, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 9, null, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 13, '/wu/tang/gza.coffee', 5, 10, null, null, smc, assert, true);
|
||||
|
||||
// Original to generated
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true);
|
||||
// Generated to original with lub bias.
|
||||
util.assertMapping(2, 0, '/wu/tang/gza.coffee', 1, 0, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(2, 9, null, null, null, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(3, 0, '/wu/tang/gza.coffee', 2, 0, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(3, 9, null, null, null, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(4, 0, '/wu/tang/gza.coffee', 3, 0, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(4, 9, null, null, null, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(5, 0, '/wu/tang/gza.coffee', 4, 0, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(5, 9, null, null, null, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(6, 0, '/wu/tang/gza.coffee', 5, 10, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(6, 9, '/wu/tang/gza.coffee', 5, 10, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
util.assertMapping(6, 13, null, null, null, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, true);
|
||||
|
||||
// Original to generated with default (glb) bias
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, null, smc, assert, null, true);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, null, smc, assert, null, true);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 5, 9, null, null, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 6, 19, null, null, smc, assert, null, true);
|
||||
|
||||
// Original to generated with lub bias.
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 1, 1, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 2, 3, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 3, 6, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 4, 9, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 9, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
util.assertMapping(null, null, '/wu/tang/gza.coffee', 6, 19, null, SourceMapConsumer.LEAST_UPPER_BOUND, smc, assert, null, true);
|
||||
};
|
||||
|
||||
});
|
||||
|
|
|
@ -15,6 +15,8 @@ Components.utils.import('resource://test/Utils.jsm');
|
|||
define("test/source-map/test-source-map-consumer", ["require", "exports", "module"], function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
|
||||
var IndexedSourceMapConsumer = require('source-map/source-map-consumer').IndexedSourceMapConsumer;
|
||||
var BasicSourceMapConsumer = require('source-map/source-map-consumer').BasicSourceMapConsumer;
|
||||
var SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test that we can instantiate with a string or an object'] = function (assert, util) {
|
||||
|
@ -26,6 +28,18 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
});
|
||||
};
|
||||
|
||||
exports['test that the object returned from new SourceMapConsumer inherits from SourceMapConsumer'] = function (assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.testMap) instanceof SourceMapConsumer);
|
||||
}
|
||||
|
||||
exports['test that a BasicSourceMapConsumer is returned for sourcemaps without sections'] = function(assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.testMap) instanceof BasicSourceMapConsumer);
|
||||
};
|
||||
|
||||
exports['test that an IndexedSourceMapConsumer is returned for sourcemaps with sections'] = function(assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.indexedTestMap) instanceof IndexedSourceMapConsumer);
|
||||
};
|
||||
|
||||
exports['test that the `sources` field has the original sources'] = function (assert, util) {
|
||||
var map;
|
||||
var sources;
|
||||
|
@ -36,6 +50,18 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.indexedTestMap);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.indexedTestMapDifferentSourceRoots);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/different/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], 'one.js');
|
||||
|
@ -101,34 +127,107 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', null, map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', null, map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', null, map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', null, map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', null, map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens back exactly in indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', null, map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', null, map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', null, map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', null, map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', null, map, assert);
|
||||
};
|
||||
|
||||
|
||||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', null, map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', null, map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', null, map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', null, map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', null, map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
// Finding original positions
|
||||
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||
// Finding original positions with default (glb) bias.
|
||||
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', null, map, assert, true);
|
||||
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', null, map, assert, true);
|
||||
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, null, map, assert, true);
|
||||
|
||||
// Finding generated positions
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true);
|
||||
// Finding original positions with lub bias.
|
||||
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
util.assertMapping(1, 26, '/the/root/one.js', 2, 10, 'baz', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
|
||||
// Finding generated positions with default (glb) bias.
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', null, map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', null, map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, null, map, assert, null, true);
|
||||
|
||||
// Finding generated positions with lub bias.
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy in indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
|
||||
// Finding original positions with default (glb) bias.
|
||||
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', null, map, assert, true);
|
||||
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', null, map, assert, true);
|
||||
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, null, map, assert, true);
|
||||
|
||||
// Finding original positions with lub bias.
|
||||
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
util.assertMapping(1, 26, '/the/root/one.js', 2, 10, 'baz', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, true);
|
||||
|
||||
// Finding generated positions with default (glb) bias.
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', null, map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', null, map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, null, map, assert, null, true);
|
||||
|
||||
// Finding generated positions with lub bias.
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mappings and end of lines'] = function (assert, util) {
|
||||
|
@ -145,14 +244,22 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'baz.js'
|
||||
});
|
||||
|
||||
var map = SourceMapConsumer.fromSourceMap(smg);
|
||||
|
||||
// When finding original positions, mappings end at the end of the line.
|
||||
util.assertMapping(2, 1, null, null, null, null, map, assert, true)
|
||||
util.assertMapping(2, 1, null, null, null, null, null, map, assert, true)
|
||||
|
||||
// When finding generated positions, mappings do not end at the end of the line.
|
||||
util.assertMapping(1, 1, 'bar.js', 2, 1, null, map, assert, null, true);
|
||||
util.assertMapping(1, 1, 'bar.js', 2, 1, null, null, map, assert, null, true);
|
||||
|
||||
// When finding generated positions with, mappings end at the end of the source.
|
||||
util.assertMapping(null, null, 'bar.js', 3, 1, null, SourceMapConsumer.LEAST_UPPER_BOUND, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
|
||||
|
@ -193,6 +300,29 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
});
|
||||
};
|
||||
|
||||
exports['test eachMapping for indexed source maps'] = function(assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.generatedLine >= previousLine);
|
||||
|
||||
if (mapping.source) {
|
||||
assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
|
||||
}
|
||||
|
||||
if (mapping.generatedLine === previousLine) {
|
||||
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||
previousColumn = mapping.generatedColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.generatedLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
exports['test iterating over mappings in a different order'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
|
@ -221,6 +351,34 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test iterating over mappings in a different order in indexed source maps'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
var previousSource = "";
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source >= previousSource);
|
||||
|
||||
if (mapping.source === previousSource) {
|
||||
assert.ok(mapping.originalLine >= previousLine);
|
||||
|
||||
if (mapping.originalLine === previousLine) {
|
||||
assert.ok(mapping.originalColumn >= previousColumn);
|
||||
previousColumn = mapping.originalColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.originalLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}
|
||||
else {
|
||||
previousSource = mapping.source;
|
||||
previousLine = -Infinity;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var context = {};
|
||||
|
@ -229,6 +387,14 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
}, context);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping in indexed source maps'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var context = {};
|
||||
map.eachMapping(function () {
|
||||
assert.equal(this, context);
|
||||
}, context);
|
||||
};
|
||||
|
||||
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sourcesContent = map.sourcesContent;
|
||||
|
@ -276,6 +442,26 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
}, Error);
|
||||
};
|
||||
|
||||
exports['test that we can get the original source content for the sources on an indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
|
||||
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
|
@ -314,7 +500,7 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
assert.equal(pos.column, 2);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor'] = function (assert, util) {
|
||||
exports['test allGeneratedPositionsFor for line'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
|
@ -357,7 +543,7 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
assert.equal(mappings[1].column, 3);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor for line with no mappings'] = function (assert, util) {
|
||||
exports['test allGeneratedPositionsFor for line fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
|
@ -383,10 +569,12 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 0);
|
||||
assert.equal(mappings.length, 1);
|
||||
assert.equal(mappings[0].line, 4);
|
||||
assert.equal(mappings[0].column, 2);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor source map with no mappings'] = function (assert, util) {
|
||||
exports['test allGeneratedPositionsFor for empty source map'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
|
@ -400,6 +588,64 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
assert.equal(mappings.length, 0);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor for column'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 3 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].line, 1);
|
||||
assert.equal(mappings[0].column, 2);
|
||||
assert.equal(mappings[1].line, 1);
|
||||
assert.equal(mappings[1].column, 3);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor for column fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 3 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 1,
|
||||
column: 0,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].line, 1);
|
||||
assert.equal(mappings[0].column, 2);
|
||||
assert.equal(mappings[1].line, 1);
|
||||
assert.equal(mappings[1].column, 3);
|
||||
};
|
||||
|
||||
exports['test computeColumnSpans'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
|
@ -544,6 +790,20 @@ define("test/source-map/test-source-map-consumer", ["require", "exports", "modul
|
|||
'Source should be relative the host of the source root.');
|
||||
};
|
||||
|
||||
exports['test indexed source map errors when sections are out of order by line'] = function(assert, util) {
|
||||
// Make a deep copy of the indexedTestMap
|
||||
var misorderedIndexedTestMap = JSON.parse(JSON.stringify(util.indexedTestMap));
|
||||
|
||||
misorderedIndexedTestMap.sections[0].offset = {
|
||||
line: 2,
|
||||
column: 0
|
||||
};
|
||||
|
||||
assert.throws(function() {
|
||||
new SourceMapConsumer(misorderedIndexedTestMap);
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test github issue #64'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
|
|
|
@ -103,6 +103,27 @@ define("test/source-map/test-source-map-generator", ["require", "exports", "modu
|
|||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings with skipValidation'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.',
|
||||
skipValidation: true
|
||||
});
|
||||
|
||||
// Not enough info, caught by `util.getArgs`
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source. Not checked.
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the correct mappings are being generated'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
|
@ -660,6 +681,48 @@ define("test/source-map/test-source-map-generator", ["require", "exports", "modu
|
|||
});
|
||||
};
|
||||
|
||||
exports['test applySourceMap with unexact match'] = function (assert, util) {
|
||||
var map1 = new SourceMapGenerator({
|
||||
file: 'bundled-source'
|
||||
});
|
||||
map1.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 4 },
|
||||
source: 'transformed-source'
|
||||
});
|
||||
map1.addMapping({
|
||||
generated: { line: 2, column: 4 },
|
||||
original: { line: 2, column: 4 },
|
||||
source: 'transformed-source'
|
||||
});
|
||||
|
||||
var map2 = new SourceMapGenerator({
|
||||
file: 'transformed-source'
|
||||
});
|
||||
map2.addMapping({
|
||||
generated: { line: 2, column: 0 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'original-source'
|
||||
});
|
||||
|
||||
var expectedMap = new SourceMapGenerator({
|
||||
file: 'bundled-source'
|
||||
});
|
||||
expectedMap.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 4 },
|
||||
source: 'transformed-source'
|
||||
});
|
||||
expectedMap.addMapping({
|
||||
generated: { line: 2, column: 4 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'original-source'
|
||||
});
|
||||
|
||||
map1.applySourceMap(new SourceMapConsumer(map2.toJSON()));
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), expectedMap.toJSON());
|
||||
};
|
||||
});
|
||||
function run_test() {
|
||||
runSourceMapTests('test/source-map/test-source-map-generator', do_throw);
|
||||
|
|
Загрузка…
Ссылка в новой задаче