Bug 1403052 - Limit tab sync max_record_payload_size to 512k. r=markh

Also fixes an issue where we wouldn't encode to UTF-8 when comparing the
actual size to the limit after the first check.

MozReview-Commit-ID: Cf3byjI1FTZ

--HG--
extra : rebase_source : 272ec3b3ad85f8b44c4d69950be83419054abdab
Thom Chiovoloni 2017-10-02 19:27:54 -04:00
Parent: 3ffa8c0b61
Commit: d39e820403
2 changed files with 19 additions and 4 deletions
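The UTF-8 note in the commit message refers to the trim loop in createRecord: the first size measurement already used TextEncoder, but the loop that pops tabs compared JSON.stringify(tabs).length (UTF-16 code units) against a byte limit. A minimal standalone sketch, with illustrative data that is not taken from the patch, of how the two measures diverge:

  // Standalone sketch: string length counts UTF-16 code units, while the
  // payload limit applies to the serialized UTF-8 bytes.
  const encoder = new TextEncoder();

  // Non-ASCII titles and URLs are common in real tab data.
  const tabs = [{ title: "Кафе Zürich", urlHistory: ["https://example.com/메뉴"] }];
  const json = JSON.stringify(tabs);

  console.log(json.length);                     // UTF-16 code units
  console.log(encoder.encode(json).byteLength); // UTF-8 bytes, what the limit constrains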

View File

@@ -205,6 +205,13 @@ TabStore.prototype = {
     return allTabs;
   },
 
+  getMaxRecordPayloadSize() {
+    // Tabs have a different max size due to being stored using memcached on the
+    // server (See bug 1403052), but we still check the server config to make
+    // sure we respect the global limits it sets.
+    return Math.min(512 * 1024, this.engine.service.getMaxRecordPayloadSize());
+  },
+
   async createRecord(id, collection) {
     let record = new TabSetRecord(collection, id);
     record.clientName = this.engine.service.clientsEngine.localName;
@@ -213,12 +220,12 @@ TabStore.prototype = {
     let tabs = this.getAllTabs(true).sort(function(a, b) {
       return b.lastUsed - a.lastUsed;
     });
-
+    let encoder = new TextEncoder("utf-8");
     // Figure out how many tabs we can pack into a payload.
     // We use byteLength here because the data is not encrypted in ascii yet.
-    let size = new TextEncoder("utf-8").encode(JSON.stringify(tabs)).byteLength;
+    let size = encoder.encode(JSON.stringify(tabs)).byteLength;
     let origLength = tabs.length;
-    const maxPayloadSize = this.engine.service.getMaxRecordPayloadSize();
+    const maxPayloadSize = this.getMaxRecordPayloadSize();
     // See bug 535326 comment 8 for an explanation of the estimation
     const MAX_TAB_SIZE = maxPayloadSize / 4 * 3 - 1500;
     if (size > MAX_TAB_SIZE) {
@@ -227,7 +234,7 @@ TabStore.prototype = {
       tabs = tabs.slice(0, cutoff + 1);
       // Keep dropping off the last entry until the data fits
-      while (JSON.stringify(tabs).length > MAX_TAB_SIZE)
+      while (encoder.encode(JSON.stringify(tabs)).byteLength > MAX_TAB_SIZE)
         tabs.pop();
     }
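The MAX_TAB_SIZE line is a budget rather than the raw limit. As I read the estimation (the rationale lives in bug 535326 comment 8, referenced above, not in this diff), the cleartext tab JSON is later encrypted and base64-encoded, which inflates it by roughly 4/3, and the record envelope is budgeted another 1500 bytes. A rough worked example under that reading, using the new 512 KiB tab cap:

  // Sketch of the size budget under the reading above (assumption: 4/3 is the
  // base64 inflation factor and 1500 bytes covers the record envelope).
  const maxPayloadSize = 512 * 1024;                  // 524288 bytes, the tab cap
  const MAX_TAB_SIZE = maxPayloadSize / 4 * 3 - 1500; // 391716 bytes of cleartext JSON

  // Inverting the estimate: 391716 * 4 / 3 + 1500 = 523788 bytes on the wire,
  // which lands just under the 524288-byte cap.
  console.log(MAX_TAB_SIZE);                // 391716
  console.log(MAX_TAB_SIZE * 4 / 3 + 1500); // 523788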

View File

@@ -108,4 +108,12 @@ add_task(async function test_createRecord() {
   record = await store.createRecord("fake-guid");
   ok(record instanceof TabSetRecord);
   equal(record.tabs.length, 2501);
+
+  store.getMaxRecordPayloadSize = () => 512 * 1024;
+  numtabs = 5200;
+  _("Modify the max record payload size and create a big record");
+  store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, numtabs);
+  record = await store.createRecord("fake-guid");
+  ok(record instanceof TabSetRecord);
+  equal(record.tabs.length, 5021);
 });
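The new test block only asserts the number of tabs that survive the trim (5021). A hypothetical extra assertion, not part of the patch and reusing the engine's own estimation plus the test's existing record and ok helpers, could check the byte budget directly:

  // Hypothetical extra check: the truncated record's tab JSON, measured in
  // UTF-8 bytes, should fit the cleartext budget derived from the 512 KiB cap.
  const encoder = new TextEncoder();
  const MAX_TAB_SIZE = (512 * 1024) / 4 * 3 - 1500;
  ok(encoder.encode(JSON.stringify(record.tabs)).byteLength <= MAX_TAB_SIZE);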