Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1203167 - compress the cache file, r=adw.

Parent: 0c67d18c28
Commit: 5c435de429
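In short, this commit switches the search service cache from a plain-text search.json to an lz4-compressed search.json.mozlz4. The async path relies on OS.File's built-in lz4 support; the synchronous fallback decompresses by hand via the Lz4 module. A minimal sketch of the round-trip pattern the patch adopts (illustrative only, not part of the commit; the sample object is a placeholder):

    Components.utils.import("resource://gre/modules/osfile.jsm"); // OS.File, OS.Path
    Components.utils.import("resource://gre/modules/Task.jsm");

    Task.spawn(function* () {
      let path = OS.Path.join(OS.Constants.Path.profileDir, "search.json.mozlz4");
      let data = new TextEncoder().encode(JSON.stringify({version: 1}));
      // {compression: "lz4"} makes writeAtomic emit the mozLz4 container...
      yield OS.File.writeAtomic(path, data, {compression: "lz4", tmpPath: path + ".tmp"});
      // ...and makes read decompress it transparently.
      let bytes = yield OS.File.read(path, {compression: "lz4"});
      let cache = JSON.parse(new TextDecoder().decode(bytes));
    });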
@@ -29,6 +29,8 @@ XPCOMUtils.defineLazyModuleGetter(this, "setTimeout",
                                   "resource://gre/modules/Timer.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "clearTimeout",
                                   "resource://gre/modules/Timer.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "Lz4",
+                                  "resource://gre/modules/lz4.js");
 
 XPCOMUtils.defineLazyServiceGetter(this, "gTextToSubURI",
                                    "@mozilla.org/intl/texttosuburi;1",
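For context (not part of the patch): defineLazyModuleGetter installs an accessor that imports the target module only on first use, so lz4.js is loaded only when the cache is actually read or written synchronously. Roughly:

    Components.utils.import("resource://gre/modules/XPCOMUtils.jsm");
    // Defines a lazy `Lz4` property on `this`; no import happens yet.
    XPCOMUtils.defineLazyModuleGetter(this, "Lz4", "resource://gre/modules/lz4.js");
    // The first access triggers the import, e.g.:
    // Lz4.compressFileContent(aUint8Array) / Lz4.decompressFileContent(aUint8Array)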
@@ -95,7 +97,9 @@ const CACHE_INVALIDATION_DELAY = 1000;
 
 // Current cache version. This should be incremented if the format of the cache
 // file is modified.
-const CACHE_VERSION = 7;
+const CACHE_VERSION = 1;
+
+const CACHE_FILENAME = "search.json.mozlz4";
 
 const ICON_DATAURL_PREFIX = "data:image/x-icon;base64,";
 
@@ -2659,6 +2663,13 @@ SearchService.prototype = {
   // The boolean indicates that the initialization has started or not.
   _initStarted: null,
 
+  // Reading the JSON cache file is the first thing done during initialization.
+  // During the async init, we save it in a field so that if we have to do a
+  // sync init before the async init finishes, we can avoid reading the cache
+  // with sync disk I/O and handling lz4 decompression synchronously.
+  // This is set back to null as soon as the initialization is finished.
+  _cacheFileJSON: null,
+
   // If initialization has not been completed yet, perform synchronous
   // initialization.
   // Throws in case of initialization error.
@@ -2705,6 +2716,7 @@ SearchService.prototype = {
     this._addObservers();
 
     gInitialized = true;
+    this._cacheFileJSON = null;
 
     this._initObservers.resolve(this._initRV);
 
@@ -2748,6 +2760,7 @@ SearchService.prototype = {
       }
       this._addObservers();
       gInitialized = true;
+      this._cacheFileJSON = null;
       this._initObservers.resolve(this._initRV);
       Services.obs.notifyObservers(null, SEARCH_SERVICE_TOPIC, "init-complete");
       Services.telemetry.getHistogramById("SEARCH_SERVICE_INIT_SYNC").add(false);
@@ -2838,9 +2851,10 @@ SearchService.prototype = {
 
     try {
      LOG("_buildCache: Writing to cache file.");
-      let path = OS.Path.join(OS.Constants.Path.profileDir, "search.json");
+      let path = OS.Path.join(OS.Constants.Path.profileDir, CACHE_FILENAME);
       let data = gEncoder.encode(JSON.stringify(cache));
-      let promise = OS.File.writeAtomic(path, data, { tmpPath: path + ".tmp"});
+      let promise = OS.File.writeAtomic(path, data, {compression: "lz4",
+                                                     tmpPath: path + ".tmp"});
 
       promise.then(
         function onSuccess() {
@@ -3086,6 +3100,7 @@ SearchService.prototype = {
     this._defaultEngine = null;
     this._visibleDefaultEngines = [];
     this._metaData = {};
+    this._cacheFileJSON = null;
 
     Task.spawn(function* () {
       try {
@@ -3102,27 +3117,47 @@ SearchService.prototype = {
 
       // Typically we'll re-init as a result of a pref observer,
       // so signal to 'callers' that we're done.
-      Services.obs.notifyObservers(null, SEARCH_SERVICE_TOPIC, "reinit-complete");
+      Services.obs.notifyObservers(null, SEARCH_SERVICE_TOPIC, "init-complete");
       gInitialized = true;
     } catch (err) {
       LOG("Reinit failed: " + err);
       Services.obs.notifyObservers(null, SEARCH_SERVICE_TOPIC, "reinit-failed");
+    } finally {
+      Services.obs.notifyObservers(null, SEARCH_SERVICE_TOPIC, "reinit-complete");
     }
   }.bind(this));
 },

  /**
   * Read the cache file synchronously. This also imports data from the old
   * search-metadata.json file if needed.
   *
   * @returns A JS object containing the cached data.
   */
  _readCacheFile: function SRCH_SVC__readCacheFile() {
-    let cacheFile = getDir(NS_APP_USER_PROFILE_50_DIR);
-    cacheFile.append("search.json");
+    if (this._cacheFileJSON) {
+      return this._cacheFileJSON;
+    }
 
     let json = Cc["@mozilla.org/dom/json;1"].createInstance(Ci.nsIJSON);
+    let cacheFile = getDir(NS_APP_USER_PROFILE_50_DIR);
+    cacheFile.append(CACHE_FILENAME);
 
     let stream;
     try {
       stream = Cc["@mozilla.org/network/file-input-stream;1"].
                  createInstance(Ci.nsIFileInputStream);
       stream.init(cacheFile, MODE_RDONLY, FileUtils.PERMS_FILE, 0);
-      return json.decodeFromStream(stream, stream.available());
+
+      let bis = Cc["@mozilla.org/binaryinputstream;1"]
+                  .createInstance(Ci.nsIBinaryInputStream);
+      bis.setInputStream(stream);
+
+      let count = stream.available();
+      let array = new Uint8Array(count);
+      bis.readArrayBuffer(count, array.buffer);
+
+      let bytes = Lz4.decompressFileContent(array);
+      return JSON.parse(new TextDecoder().decode(bytes));
     } catch(ex) {
       LOG("_readCacheFile: Error reading cache file: " + ex);
     } finally {
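The synchronous fallback above cannot use OS.File's async compressed read, so it pulls the raw bytes through nsIBinaryInputStream and inflates them with Lz4.decompressFileContent. As far as I know, the mozLz4 container those helpers use is the 8-byte magic "mozLz40\0" followed by the 32-bit little-endian decompressed size and a single raw LZ4 block; a hypothetical header check, not part of the patch:

    function checkMozLz4Header(bytes) { // bytes: Uint8Array holding the whole file
      let magic = "mozLz40\0";
      for (let i = 0; i < magic.length; i++) {
        if (bytes[i] != magic.charCodeAt(i))
          throw new Error("not a mozLz4 file");
      }
      // Bytes 8..11 hold the decompressed size, little-endian.
      return new DataView(bytes.buffer, bytes.byteOffset).getUint32(8, true);
    }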
@@ -3131,7 +3166,7 @@ SearchService.prototype = {
 
     try {
       cacheFile.leafName = "search-metadata.json";
-      let stream = Cc["@mozilla.org/network/file-input-stream;1"].
+      stream = Cc["@mozilla.org/network/file-input-stream;1"].
                  createInstance(Ci.nsIFileInputStream);
       stream.init(cacheFile, MODE_RDONLY, FileUtils.PERMS_FILE, 0);
       let metadata = json.decodeFromStream(stream, stream.available());
@@ -3162,19 +3197,20 @@ SearchService.prototype = {
   },
 
   /**
-   * Read from a given cache file asynchronously.
+   * Read the cache file asynchronously. This also imports data from the old
+   * search-metadata.json file if needed.
    *
    * @returns {Promise} A promise, resolved successfully if retrieveing data
   * succeeds.
   */
  _asyncReadCacheFile: function SRCH_SVC__asyncReadCacheFile() {
-    let cacheFilePath = OS.Path.join(OS.Constants.Path.profileDir, "search.json");
-
     return Task.spawn(function() {
       let json;
       try {
-        let bytes = yield OS.File.read(cacheFilePath);
+        let cacheFilePath = OS.Path.join(OS.Constants.Path.profileDir, CACHE_FILENAME);
+        let bytes = yield OS.File.read(cacheFilePath, {compression: "lz4"});
         json = JSON.parse(new TextDecoder().decode(bytes));
+        this._cacheFileJSON = json;
       } catch (ex) {
         LOG("_asyncReadCacheFile: Error reading cache file: " + ex);
         json = {};
@@ -3202,7 +3238,7 @@ SearchService.prototype = {
       } catch (ex) {}
     }
     throw new Task.Result(json);
-    });
+    }.bind(this));
   },
 
   _batchTask: null,
@@ -1,5 +1,5 @@
 {
-  "version": 7,
+  "version": 1,
   "buildID": "20121106",
   "locale": "en-US",
   "metaData": {},
@@ -21,6 +21,8 @@ const MODE_WRONLY = FileUtils.MODE_WRONLY;
 const MODE_CREATE = FileUtils.MODE_CREATE;
 const MODE_TRUNCATE = FileUtils.MODE_TRUNCATE;
 
+const CACHE_FILENAME = "search.json.mozlz4";
+
 // nsSearchService.js uses Services.appinfo.name to build a salt for a hash.
 var XULRuntime = Components.classesByID["{95d89e3e-a169-41a3-8e56-719978e15b12}"]
                            .getService(Ci.nsIXULRuntime);
@@ -207,8 +209,8 @@ function getSearchMetadata()
 
 function promiseCacheData() {
   return new Promise(resolve => Task.spawn(function* () {
-    let path = OS.Path.join(OS.Constants.Path.profileDir, "search.json");
-    let bytes = yield OS.File.read(path);
+    let path = OS.Path.join(OS.Constants.Path.profileDir, CACHE_FILENAME);
+    let bytes = yield OS.File.read(path, {compression: "lz4"});
     resolve(JSON.parse(new TextDecoder().decode(bytes)));
   }));
 }
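A typical use of the rewritten helper in one of these xpcshell tests might look like this (illustrative; the assertions are placeholders):

    add_task(function* () {
      let cache = yield promiseCacheData(); // parsed contents of search.json.mozlz4
      do_check_eq(cache.version, 1);
      do_check_true(Array.isArray(cache.engines));
    });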
@@ -233,12 +235,11 @@ function promiseGlobalMetadata() {
 
 function promiseSaveGlobalMetadata(globalData) {
   return new Promise(resolve => Task.spawn(function* () {
-    let path = OS.Path.join(OS.Constants.Path.profileDir, "search.json");
-    let bytes = yield OS.File.read(path);
-    let data = JSON.parse(new TextDecoder().decode(bytes));
+    let data = yield promiseCacheData();
     data.metaData = globalData;
-    yield OS.File.writeAtomic(path,
-                              new TextEncoder().encode(JSON.stringify(data)));
+    yield OS.File.writeAtomic(OS.Path.join(OS.Constants.Path.profileDir, CACHE_FILENAME),
+                              new TextEncoder().encode(JSON.stringify(data)),
+                              {compression: "lz4"});
     resolve();
   }));
 }
@@ -257,7 +258,7 @@ var forceExpiration = Task.async(function* () {
 function removeCacheFile()
 {
   let file = gProfD.clone();
-  file.append("search.json");
+  file.append(CACHE_FILENAME);
   if (file.exists()) {
     file.remove(false);
   }
@@ -7,9 +7,6 @@
 
 "use strict";
 
-// Metadata to write to search-metadata.json for the test.
-var gMetadata = {"[profile]/test-search-engine.xml":{"used":true}};
-
 /**
  * Gets a directory from the directory service.
  * @param aKey
@@ -35,7 +32,7 @@ function makeURI(uri) {
 var cacheTemplate, appPluginsPath, profPlugins;
 
 /**
- * Test reading from search.json
+ * Test reading from search.json.mozlz4
 */
 function run_test() {
   removeMetadata();
@@ -81,41 +78,10 @@ function run_test() {
 }
 
 add_test(function prepare_test_data() {
-
-  let ostream = Cc["@mozilla.org/network/file-output-stream;1"].
-                createInstance(Ci.nsIFileOutputStream);
-  let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"].
-                  createInstance(Ci.nsIScriptableUnicodeConverter);
-
-  // Write the modified cache template to the profile directory.
-  let cacheFile = gProfD.clone();
-  cacheFile.append("search.json");
-  ostream.init(cacheFile, (MODE_WRONLY | MODE_CREATE | MODE_TRUNCATE), FileUtils.PERMS_FILE,
-               ostream.DEFER_OPEN);
-  converter.charset = "UTF-8";
-  let data = converter.convertToInputStream(JSON.stringify(cacheTemplate));
-
-  // Write to the cache and metadata files asynchronously before starting the search service.
-  NetUtil.asyncCopy(data, ostream, function afterMetadataCopy(aResult) {
-    do_check_true(Components.isSuccessCode(aResult));
-    let metadataFile = gProfD.clone();
-    metadataFile.append("search-metadata.json");
-
-    let ostream = Cc["@mozilla.org/network/file-output-stream;1"].
-                  createInstance(Ci.nsIFileOutputStream);
-    let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"].
-                    createInstance(Ci.nsIScriptableUnicodeConverter);
-
-    ostream.init(metadataFile, (MODE_WRONLY | MODE_CREATE | MODE_TRUNCATE), FileUtils.PERMS_FILE,
-                 ostream.DEFER_OPEN);
-    converter.charset = "UTF-8";
-    let data = converter.convertToInputStream(JSON.stringify(gMetadata));
-
-    NetUtil.asyncCopy(data, ostream, function afterCacheCopy(aResult) {
-      do_check_true(Components.isSuccessCode(aResult));
-      run_next_test();
-    });
-  });
+  OS.File.writeAtomic(OS.Path.join(OS.Constants.Path.profileDir, CACHE_FILENAME),
+                      new TextEncoder().encode(JSON.stringify(cacheTemplate)),
+                      {compression: "lz4"})
+    .then(run_next_test);
 });
 
 /**
@@ -151,7 +117,7 @@ add_test(function test_cache_write() {
   do_print("test cache writing");
 
   let cache = gProfD.clone();
-  cache.append("search.json");
+  cache.append(CACHE_FILENAME);
   do_check_false(cache.exists());
 
   do_print("Next step is forcing flush");
@@ -169,14 +135,14 @@ add_test(function test_cache_write() {
       Services.obs.removeObserver(cacheWriteObserver, "browser-search-service");
       do_print("Cache write complete");
       do_check_true(cache.exists());
-      // Check that the search.json cache matches the template
+      // Check that the search.json.mozlz4 cache matches the template
 
-      let cacheWritten = readJSONFile(cache);
-
-      do_print("Check search.json");
+      promiseCacheData().then(cacheWritten => {
+        do_print("Check search.json.mozlz4");
         isSubObjectOf(cacheTemplate, cacheWritten);
 
         run_next_test();
+      });
     }
   };
   Services.obs.addObserver(cacheWriteObserver, "browser-search-service", false);
@@ -3,11 +3,11 @@
 
 /*
  * test_nocache: Start search engine
- * - without search.json
+ * - without search.json.mozlz4
 *
 * Ensure that :
 * - nothing explodes;
- * - search.json is created.
+ * - search.json.mozlz4 is created.
 */
 
 function run_test()
@@ -34,7 +34,7 @@ add_task(function* test_nocache() {
 
   // Check that search.json has been created.
   let cacheFile = gProfD.clone();
-  cacheFile.append("search.json");
+  cacheFile.append(CACHE_FILENAME);
   do_check_true(cacheFile.exists());
 
   // Add engine and wait for cache update
@@ -46,10 +46,7 @@ add_task(function* test_nocache() {
   yield promiseAfterCache();
 
   do_print("Searching test engine in cache");
-  let path = OS.Path.join(OS.Constants.Path.profileDir, "search.json");
-  let data = yield OS.File.read(path);
-  let text = new TextDecoder().decode(data);
-  let cache = JSON.parse(text);
+  let cache = yield promiseCacheData();
   let found = false;
   for (let engine of cache.engines) {
     if (engine._shortName == "test-search-engine") {
@@ -9,9 +9,9 @@
 *
 * Ensure that :
 * - nothing explodes;
- * - if we change the order, search.json is updated;
- * - this search.json can be parsed;
- * - the order stored in search.json is consistent.
+ * - if we change the order, search.json.mozlz4 is updated;
+ * - this search.json.mozlz4 can be parsed;
+ * - the order stored in search.json.mozlz4 is consistent.
 *
 * Notes:
 * - we install the search engines of test "test_downloadAndAddEngines.js"