Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1276826 - Part 2. Send gethash request and handle gethash response for v4. r=francois

MozReview-Commit-ID: 6fIbN6a6gHI
--HG--
extra : rebase_source : d64a2b4f85b24aac0a847a46748b3b3fdf0db749
extra : intermediate-source : c486204319c1afc158b871bad7edd2f7a565022a
extra : source : adf76dccec9d51df0d3a659ffa3dd201d8115bc4

Parent: 156d495c02
Commit: 758d80f10b
@@ -5140,7 +5140,7 @@ pref("browser.safebrowsing.provider.google.reportURL", "https://safebrowsing.goo
pref("browser.safebrowsing.provider.google4.pver", "4");
pref("browser.safebrowsing.provider.google4.lists", "goog-phish-proto,googpub-phish-proto,goog-malware-proto,goog-unwanted-proto");
pref("browser.safebrowsing.provider.google4.updateURL", "https://safebrowsing.googleapis.com/v4/threatListUpdates:fetch?$ct=application/x-protobuf&key=%GOOGLE_API_KEY%");
pref("browser.safebrowsing.provider.google4.gethashURL", "https://safebrowsing.googleapis.com/v4/fullHashes:find?$req=%REQUEST_BASE64%&$ct=application/x-protobuf&key=%GOOGLE_API_KEY%");
pref("browser.safebrowsing.provider.google4.gethashURL", "https://safebrowsing.googleapis.com/v4/fullHashes:find?$ct=application/x-protobuf&key=%GOOGLE_API_KEY%");
pref("browser.safebrowsing.provider.google4.reportURL", "https://safebrowsing.google.com/safebrowsing/diagnostic?client=%NAME%&hl=%LOCALE%&site=");

pref("browser.safebrowsing.reportPhishMistakeURL", "https://%LOCALE%.phish-error.mozilla.com/?hl=%LOCALE%&url=");
@@ -4161,6 +4161,14 @@
    "bug_numbers": [1150921],
    "description": "Server HTTP status code from remote SafeBrowsing gethash lookups. (0=1xx, 1=200, 2=2xx, 3=204, 4=3xx, 5=400, 6=4xx, 7=403, 8=404, 9=408, 10=413, 11=5xx, 12=502|504|511, 13=503, 14=505, 15=Other)"
  },
  "URLCLASSIFIER_COMPLETION_ERROR": {
    "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
    "expires_in_version": "59",
    "kind": "enumerated",
    "n_values": 16,
    "bug_numbers": [1276826],
    "description": "SafeBrowsing v4 hash completion error (0 = success, 1 = parsing failure, 2 = unknown threat type)"
  },
  "URLCLASSIFIER_COMPLETE_TIMEOUT": {
    "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
    "expires_in_version": "56",
@@ -56,10 +56,13 @@ interface nsIUrlClassifierHashCompleter : nsISupports
   * The 32-bit hash encountered by the url-classifier.
   * @param gethashUrl
   *        The gethash url to use.
   * @param tableName
   *        The table where we matched the partial hash.
   * @param callback
   *        An nsIUrlClassifierCompleterCallback instance.
   */
  void complete(in ACString partialHash,
                in ACString gethashUrl,
                in ACString tableName,
                in nsIUrlClassifierHashCompleterCallback callback);
};
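With the new tableName argument, callers of nsIUrlClassifierHashCompleter also say which table produced the prefix match. A minimal sketch of the updated call from script, modeled on the xpcshell test added later in this commit; the localhost URL and the test-phish-proto table are test-only values, not production configuration:

let completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"]
                  .getService(Ci.nsIUrlClassifierHashCompleter);

// The last argument implements nsIUrlClassifierHashCompleterCallback.
completer.complete("0123",                                           // 4-byte partial hash
                   "http://localhost:5555/safebrowsing/gethash-v4?", // gethash URL (test value)
                   "test-phish-proto",                               // table that matched
                   {
                     completion: function (hash, table, chunkId) {
                       // Called once for every full hash the server returns.
                     },
                     completionFinished: function (status) {
                       // Called when the gethash request completes.
                     },
                   });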
@@ -9,6 +9,29 @@
interface nsIURI;

/**
 * Interface for parseFindFullHashResponseV4 callback
 *
 * @param aCompleteHash A 32-byte complete hash string.
 * @param aTableNames The table names that this complete hash is associated with.
 *                    Since the server responded with a threat type, multiple
 *                    list names can be returned. The caller is responsible
 *                    for filtering out the unrequested table names.
 *                    See |convertThreatTypeToListNames| for the format.
 * @param aMinWaitDuration See "FindFullHashesResponse" in safebrowsing.proto.
 * @param aNegCacheDuration See "FindFullHashesResponse" in safebrowsing.proto.
 * @param aPerHashCacheDuration See "FindFullHashesResponse" in safebrowsing.proto.
 *
 */
[scriptable, function, uuid(fbb9684a-a0aa-11e6-88b0-08606e456b8a)]
interface nsIUrlClassifierParseFindFullHashCallback : nsISupports {
  void onCompleteHashFound(in ACString aCompleteHash,
                           in ACString aTableNames,
                           in unsigned long aMinWaitDuration,
                           in unsigned long aNegCacheDuration,
                           in unsigned long aPerHashCacheDuration);
};

[scriptable, uuid(e4f0e59c-b922-48b0-a7b6-1735c1f96fed)]
interface nsIUrlClassifierUtils : nsISupports
{
@@ -100,4 +123,14 @@ interface nsIUrlClassifierUtils : nsISupports
                                  [array, size_is(aPrefixCount)] in string aPrefixes,
                                  in uint32_t aListCount,
                                  in uint32_t aPrefixCount);

  /**
   * Parse V4 FindFullHash response.
   *
   * @param aResponse Byte stream from the server.
   * @param aCallback The callback function on each complete hash parsed.
   *                  Can be called multiple times in one parsing.
   */
  void parseFindFullHashResponseV4(in ACString aResponse,
                                   in nsIUrlClassifierParseFindFullHashCallback aCallback);
};
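A hedged sketch of how parseFindFullHashResponseV4 can be driven from script, mirroring the handleResponseV4 consumer added to nsUrlClassifierHashCompleter.js further down in this commit; responseBytes stands in for the raw protobuf payload read from the gethash channel:

let urlUtil = Cc["@mozilla.org/url-classifier/utils;1"]
                .getService(Ci.nsIUrlClassifierUtils);

// The callback may fire several times, once per matched full hash.
urlUtil.parseFindFullHashResponseV4(responseBytes,
  (aCompleteHash, aTableNames, aMinWaitDuration,
   aNegCacheDuration, aPerHashCacheDuration) => {
    // aTableNames can list several tables for one threat type; the
    // caller filters it down to the tables it actually requested.
  });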
@@ -968,7 +968,10 @@ nsUrlClassifierLookupCallback::LookupComplete(nsTArray<LookupResult>* results)
      partialHash.Assign(reinterpret_cast<char*>(&result.hash.prefix),
                         PREFIX_SIZE);

      nsresult rv = completer->Complete(partialHash, gethashUrl, this);
      nsresult rv = completer->Complete(partialHash,
                                        gethashUrl,
                                        result.mTableName,
                                        this);
      if (NS_SUCCEEDED(rv)) {
        mPendingCompletions++;
      }
@@ -17,6 +17,13 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");

XPCOMUtils.defineLazyServiceGetter(this, 'gDbService',
                                   '@mozilla.org/url-classifier/dbservice;1',
                                   'nsIUrlClassifierDBService');

XPCOMUtils.defineLazyServiceGetter(this, 'gUrlUtil',
                                   '@mozilla.org/url-classifier/utils;1',
                                   'nsIUrlClassifierUtils');

// Log only if browser.safebrowsing.debug is true
function log(...stuff) {
@@ -174,7 +181,7 @@ HashCompleter.prototype = {
  // This is mainly how the HashCompleter interacts with other components.
  // Even though it only takes one partial hash and callback, subsequent
  // calls are made into the same HTTP request by using a thread dispatch.
  complete: function HC_complete(aPartialHash, aGethashUrl, aCallback) {
  complete: function HC_complete(aPartialHash, aGethashUrl, aTableName, aCallback) {
    if (!aGethashUrl) {
      throw Cr.NS_ERROR_NOT_INITIALIZED;
    }
@@ -183,13 +190,13 @@ HashCompleter.prototype = {
      this._currentRequest = new HashCompleterRequest(this, aGethashUrl);
    }
    if (this._currentRequest.gethashUrl == aGethashUrl) {
      this._currentRequest.add(aPartialHash, aCallback);
      this._currentRequest.add(aPartialHash, aCallback, aTableName);
    } else {
      if (!this._pendingRequests[aGethashUrl]) {
        this._pendingRequests[aGethashUrl] =
          new HashCompleterRequest(this, aGethashUrl);
      }
      this._pendingRequests[aGethashUrl].add(aPartialHash, aCallback);
      this._pendingRequests[aGethashUrl].add(aPartialHash, aCallback, aTableName);
    }

    if (!this._backoffs[aGethashUrl]) {
@@ -279,6 +286,10 @@ function HashCompleterRequest(aCompleter, aGethashUrl) {
  // Whether we have been informed of a shutdown by the quit-application event.
  this._shuttingDown = false;
  this.gethashUrl = aGethashUrl;

  // Multiple partial hashes can be associated with the same tables
  // so we use a map here.
  this.tableNames = new Map();
}
HashCompleterRequest.prototype = {
  QueryInterface: XPCOMUtils.generateQI([Ci.nsIRequestObserver,
@@ -288,12 +299,45 @@ HashCompleterRequest.prototype = {

  // This is called by the HashCompleter to add a hash and callback to the
  // HashCompleterRequest. It must be called before calling |begin|.
  add: function HCR_add(aPartialHash, aCallback) {
  add: function HCR_add(aPartialHash, aCallback, aTableName) {
    this._requests.push({
      partialHash: aPartialHash,
      callback: aCallback,
      responses: []
    });

    if (aTableName) {
      let isTableNameV4 = aTableName.endsWith('-proto');
      if (0 === this.tableNames.size) {
        // Decide if this request is v4 by the first added partial hash.
        this.isV4 = isTableNameV4;
      } else if (this.isV4 !== isTableNameV4) {
        log('ERROR: Cannot mix "proto" tables with other types within ' +
            'the same gethash URL.');
      }
      this.tableNames.set(aTableName);
    }
  },

  fillTableStatesBase64: function HCR_fillTableStatesBase64(aCallback) {
    gDbService.getTables(aTableData => {
      aTableData.split("\n").forEach(line => {
        let p = line.indexOf(";");
        if (-1 === p) {
          return;
        }
        // [tableName];[stateBase64]:[checksumBase64]
        let tableName = line.substring(0, p);
        if (this.tableNames.has(tableName)) {
          let metadata = line.substring(p + 1).split(":");
          let stateBase64 = metadata[0];
          this.tableNames.set(tableName, stateBase64);
        }
      });

      aCallback();
    });

  },

  // This initiates the HTTP request. It can fail due to backoff timings and
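As the comment in fillTableStatesBase64 notes, each getTables record has the shape [tableName];[stateBase64]:[checksumBase64]. A tiny illustration of the split it performs, using a made-up record (the state is btoa('sta\0te') from the test added in this commit; the checksum value is purely illustrative):

// Hypothetical metadata record, for illustration only.
let line = "test-phish-proto;c3RhAHRl:Y2hlY2tzdW0=";
let p = line.indexOf(";");
let tableName = line.substring(0, p);            // "test-phish-proto"
let [stateBase64, checksumBase64] = line.substring(p + 1).split(":");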
@@ -308,16 +352,22 @@ HashCompleterRequest.prototype = {

    Services.obs.addObserver(this, "quit-application", false);

    try {
      this.openChannel();
      // Notify the RequestBackoff if opening the channel succeeded. At this
      // point, finishRequest must be called.
      this._completer.noteRequest(this.gethashUrl);
    }
    catch (err) {
      this.notifyFailure(err);
      throw err;
    }
    // V4 requires table states to build the request so we need
    // an async call to retrieve the table states from disk.
    // Note that |HCR_begin| is fine to be sync because
    // it doesn't appear in a sync call chain.
    this.fillTableStatesBase64(() => {
      try {
        this.openChannel();
        // Notify the RequestBackoff if opening the channel succeeded. At this
        // point, finishRequest must be called.
        this._completer.noteRequest(this.gethashUrl);
      }
      catch (err) {
        this.notifyFailure(err);
        throw err;
      }
    });
  },

  notify: function HCR_notify() {
@@ -336,8 +386,16 @@ HashCompleterRequest.prototype = {
    let loadFlags = Ci.nsIChannel.INHIBIT_CACHING |
                    Ci.nsIChannel.LOAD_BYPASS_CACHE;

    let actualGethashUrl = this.gethashUrl;
    if (this.isV4) {
      // As per spec, we add the request payload to the gethash url.
      actualGethashUrl += "&$req=" + this.buildRequestV4();
    }

    log("actualGethashUrl: " + actualGethashUrl);

    let channel = NetUtil.newChannel({
      uri: this.gethashUrl,
      uri: actualGethashUrl,
      loadUsingSystemPrincipal: true
    });
    channel.loadFlags = loadFlags;
@@ -348,8 +406,12 @@ HashCompleterRequest.prototype = {

    this._channel = channel;

    let body = this.buildRequest();
    this.addRequestBody(body);
    if (this.isV4) {
      httpChannel.setRequestHeader("X-HTTP-Method-Override", "POST", false);
    } else {
      let body = this.buildRequest();
      this.addRequestBody(body);
    }

    // Set a timer that cancels the channel after timeout_ms in case we
    // don't get a gethash response.
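For v4, openChannel appends the serialized FindFullHashes request to the gethash URL rather than attaching a POST body, and flags the method override instead. With the google4 pref shown at the top of this commit, the final URL has roughly this shape (the $req value is a placeholder, not a real serialized request):

// Sketch only; requestBase64 stands in for the buildRequestV4() output.
let gethashUrl = "https://safebrowsing.googleapis.com/v4/fullHashes:find" +
                 "?$ct=application/x-protobuf&key=%GOOGLE_API_KEY%";
let requestBase64 = "...";
let actualGethashUrl = gethashUrl + "&$req=" + requestBase64;
// The channel then carries "X-HTTP-Method-Override: POST" with no request body.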
@@ -361,6 +423,34 @@ HashCompleterRequest.prototype = {
    channel.asyncOpen2(this);
  },

  buildRequestV4: function HCR_buildRequestV4() {
    // Convert the "name to state" mapping to two equal-length arrays.
    let tableNameArray = [];
    let stateArray = [];
    this.tableNames.forEach((state, name) => {
      // We skip the table which is not associated with a state.
      if (state) {
        tableNameArray.push(name);
        stateArray.push(state);
      }
    });

    // Build the "distinct" prefix array.
    let prefixSet = new Set();
    this._requests.forEach(r => prefixSet.add(btoa(r.partialHash)));
    let prefixArray = Array.from(prefixSet);

    log("Build v4 gethash request with " + JSON.stringify(tableNameArray) + ', '
        + JSON.stringify(stateArray) + ', '
        + JSON.stringify(prefixArray));

    return gUrlUtil.makeFindFullHashRequestV4(tableNameArray,
                                              stateArray,
                                              prefixArray,
                                              tableNameArray.length,
                                              prefixArray.length);
  },

  // Returns a string for the request body based on the contents of
  // this._requests.
  buildRequest: function HCR_buildRequest() {
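buildRequestV4 ultimately hands makeFindFullHashRequestV4 two parallel arrays (table names and their base64 states) plus the distinct base64-encoded prefixes. The xpcshell test added later in this commit exercises it with test-only values like this:

let request = gUrlUtil.makeFindFullHashRequestV4([TEST_TABLE_DATA_V4.tableName],
                                                 [btoa(NEW_CLIENT_STATE)],
                                                 [btoa("0123"), btoa("1234567"), btoa("1111")],
                                                 1,   // list count
                                                 3);  // prefix count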
@@ -414,7 +504,10 @@ HashCompleterRequest.prototype = {
      return;
    }

    log('Response: ' + this._response);
    if (this.isV4) {
      return this.handleResponseV4();
    }

    let start = 0;

    let length = this._response.length;
@@ -423,6 +516,38 @@ HashCompleterRequest.prototype = {
    }
  },

  handleResponseV4: function HCR_handleResponseV4() {
    let callback = (aCompleteHash,
                    aTableNames,
                    aMinWaitDuration,
                    aNegCacheDuration,
                    aPerHashCacheDuration) => {
      log("V4 response callback: " + JSON.stringify(aCompleteHash) + ", " +
          aTableNames + ", " +
          aMinWaitDuration + ", " +
          aNegCacheDuration + ", " +
          aPerHashCacheDuration);

      // Filter out table names which we didn't request.
      let filteredTables = aTableNames.split(",").filter(name => {
        return this.tableNames.get(name);
      });
      if (0 === filteredTables.length) {
        log("ERROR: Got a complete hash from an unknown table.");
        return;
      }
      if (filteredTables.length > 1) {
        log("WARNING: Got a complete hash with an ambiguous threat type.");
      }

      this.handleItem(aCompleteHash, filteredTables[0], 0);

      // TODO: Bug 1311935 - Implement v4 cache.
    };

    gUrlUtil.parseFindFullHashResponseV4(this._response, callback);
  },

  // This parses a table entry in the response body and calls |handleItem|
  // for each complete hash in the table entry.
  handleTable: function HCR_handleTable(aStart) {
@@ -465,7 +590,7 @@ HashCompleterRequest.prototype = {
  handleItem: function HCR_handleItem(aData, aTableName, aChunkId) {
    for (let i = 0; i < this._requests.length; i++) {
      let request = this._requests[i];
      if (aData.substring(0,4) == request.partialHash) {
      if (aData.startsWith(request.partialHash)) {
        request.responses.push({
          completeHash: aData,
          tableName: aTableName,
@@ -424,6 +424,53 @@ nsUrlClassifierUtils::MakeFindFullHashRequestV4(const char** aListNames,
  return NS_OK;
}

static uint32_t
DurationToMs(const Duration& aDuration)
{
  return aDuration.seconds() * 1000 + aDuration.nanos() / 1000;
}

NS_IMETHODIMP
nsUrlClassifierUtils::ParseFindFullHashResponseV4(const nsACString& aResponse,
                                                  nsIUrlClassifierParseFindFullHashCallback *aCallback)
{
  enum CompletionErrorType {
    SUCCESS = 0,
    PARSING_FAILURE = 1,
    UNKNOWN_THREAT_TYPE = 2,
  };

  FindFullHashesResponse r;
  if (!r.ParseFromArray(aResponse.BeginReading(), aResponse.Length())) {
    NS_WARNING("Invalid response");
    Telemetry::Accumulate(Telemetry::URLCLASSIFIER_COMPLETION_ERROR,
                          PARSING_FAILURE);
    return NS_ERROR_FAILURE;
  }

  bool hasUnknownThreatType = false;
  auto minWaitDuration = DurationToMs(r.minimum_wait_duration());
  auto negCacheDuration = DurationToMs(r.negative_cache_duration());
  for (auto& m : r.matches()) {
    nsCString tableNames;
    nsresult rv = ConvertThreatTypeToListNames(m.threat_type(), tableNames);
    if (NS_FAILED(rv)) {
      hasUnknownThreatType = true;
      continue; // Ignore unconvertible threat types.
    }
    auto& hash = m.threat().hash();
    aCallback->OnCompleteHashFound(nsCString(hash.c_str(), hash.length()),
                                   tableNames,
                                   minWaitDuration,
                                   negCacheDuration,
                                   DurationToMs(m.cache_duration()));
  }

  Telemetry::Accumulate(Telemetry::URLCLASSIFIER_COMPLETION_ERROR,
                        hasUnknownThreatType ? UNKNOWN_THREAT_TYPE : SUCCESS);

  return NS_OK;
}

//////////////////////////////////////////////////////////
// nsIObserver
@@ -104,6 +104,141 @@ TEST(FindFullHash, Request)
  }
}

/////////////////////////////////////////////////////////////
// Following is to test parsing the gethash response.

namespace {

// safebrowsing::Duration manipulation.
struct MyDuration {
  uint32_t mSecs;
  uint32_t mNanos;
};
void PopulateDuration(Duration& aDest, const MyDuration& aSrc)
{
  aDest.set_seconds(aSrc.mSecs);
  aDest.set_nanos(aSrc.mNanos);
}

// The expected match data.
static MyDuration EXPECTED_MIN_WAIT_DURATION = { 12, 10 };
static MyDuration EXPECTED_NEG_CACHE_DURATION = { 120, 9 };
static const struct {
  nsCString mCompleteHash;
  ThreatType mThreatType;
  MyDuration mPerHashCacheDuration;
} EXPECTED_MATCH[] = {
  { nsCString("01234567890123456789012345678901"), SOCIAL_ENGINEERING_PUBLIC, { 8, 500 } },
  { nsCString("12345678901234567890123456789012"), SOCIAL_ENGINEERING_PUBLIC, { 7, 100} },
  { nsCString("23456789012345678901234567890123"), SOCIAL_ENGINEERING_PUBLIC, { 1, 20 } },
};

class MyParseCallback final :
  public nsIUrlClassifierParseFindFullHashCallback {
public:
  NS_DECL_ISUPPORTS

  explicit MyParseCallback(uint32_t& aCallbackCount)
    : mCallbackCount(aCallbackCount)
  {
  }

  NS_IMETHOD
  OnCompleteHashFound(const nsACString& aCompleteHash,
                      const nsACString& aTableNames,
                      uint32_t aMinWaitDuration,
                      uint32_t aNegCacheDuration,
                      uint32_t aPerHashCacheDuration) override
  {
    Verify(aCompleteHash,
           aTableNames,
           aMinWaitDuration,
           aNegCacheDuration,
           aPerHashCacheDuration);

    return NS_OK;
  }

private:
  void
  Verify(const nsACString& aCompleteHash,
         const nsACString& aTableNames,
         uint32_t aMinWaitDuration,
         uint32_t aNegCacheDuration,
         uint32_t aPerHashCacheDuration)
  {
    auto expected = EXPECTED_MATCH[mCallbackCount];

    ASSERT_TRUE(aCompleteHash.Equals(expected.mCompleteHash));

    // Verify aTableNames
    nsCOMPtr<nsIUrlClassifierUtils> urlUtil =
      do_GetService("@mozilla.org/url-classifier/utils;1");
    nsCString tableNames;
    nsresult rv = urlUtil->ConvertThreatTypeToListNames(expected.mThreatType, tableNames);
    ASSERT_TRUE(NS_SUCCEEDED(rv));
    ASSERT_TRUE(aTableNames.Equals(tableNames));

    VerifyDuration(aMinWaitDuration, EXPECTED_MIN_WAIT_DURATION);
    VerifyDuration(aNegCacheDuration, EXPECTED_NEG_CACHE_DURATION);
    VerifyDuration(aPerHashCacheDuration, expected.mPerHashCacheDuration);

    mCallbackCount++;
  }

  void
  VerifyDuration(uint32_t aToVerify, const MyDuration& aExpected)
  {
    ASSERT_TRUE(aToVerify == (aExpected.mSecs * 1000 + aExpected.mNanos / 1000));
  }

  ~MyParseCallback() {}

  uint32_t& mCallbackCount;
};

NS_IMPL_ISUPPORTS(MyParseCallback, nsIUrlClassifierParseFindFullHashCallback)

} // end of unnamed namespace.

TEST(FindFullHash, ParseRequest)
{
  // Build response.
  FindFullHashesResponse r;

  // Init response-wise durations.
  auto minWaitDuration = r.mutable_minimum_wait_duration();
  PopulateDuration(*minWaitDuration, EXPECTED_MIN_WAIT_DURATION);
  auto negCacheDuration = r.mutable_negative_cache_duration();
  PopulateDuration(*negCacheDuration, EXPECTED_NEG_CACHE_DURATION);

  // Init matches.
  for (uint32_t i = 0; i < ArrayLength(EXPECTED_MATCH); i++) {
    auto expected = EXPECTED_MATCH[i];
    auto match = r.mutable_matches()->Add();
    match->set_threat_type(expected.mThreatType);
    match->mutable_threat()->set_hash(expected.mCompleteHash.BeginReading(),
                                      expected.mCompleteHash.Length());
    auto perHashCacheDuration = match->mutable_cache_duration();
    PopulateDuration(*perHashCacheDuration, expected.mPerHashCacheDuration);
  }
  std::string s;
  r.SerializeToString(&s);

  uint32_t callbackCount = 0;
  nsCOMPtr<nsIUrlClassifierParseFindFullHashCallback> callback
    = new MyParseCallback(callbackCount);

  nsCOMPtr<nsIUrlClassifierUtils> urlUtil =
    do_GetService("@mozilla.org/url-classifier/utils;1");
  nsresult rv = urlUtil->ParseFindFullHashResponseV4(nsCString(s.c_str(), s.size()),
                                                     callback);
  NS_ENSURE_SUCCESS_VOID(rv);

  ASSERT_EQ(callbackCount, ArrayLength(EXPECTED_MATCH));
}


/////////////////////////////////////////////////////////////
namespace {
@@ -426,4 +426,54 @@ LFSRgenerator.prototype = {
  },
};

function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
  let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
                    .getService(Ci.nsIUrlClassifierDBService);

  dbService.getTables(metaData => {
    do_print("metadata: " + metaData);
    let didCallback = false;
    metaData.split("\n").some(line => {
      // Parse [tableName];[stateBase64]
      let p = line.indexOf(";");
      if (-1 === p) {
        return false; // continue.
      }
      let tableName = line.substring(0, p);
      let metadata = line.substring(p + 1).split(":");
      let stateBase64 = metadata[0];
      let checksumBase64 = metadata[1];

      if (tableName !== 'test-phish-proto') {
        return false; // continue.
      }

      if (stateBase64 === btoa(expectedState) &&
          checksumBase64 === btoa(expectedChecksum)) {
        do_print('State has been saved to disk!');

        // We slightly defer the callback to see if the in-memory
        // |getTables| caching works correctly.
        dbService.getTables(cachedMetadata => {
          equal(cachedMetadata, metaData);
          callback();
        });

        // Even though the callback hasn't actually fired yet, we claim
        // it has so that we stop scheduling new timers.
        didCallback = true;
      }

      return true; // Break out of |some| no matter whether the state matched.
    });

    if (!didCallback) {
      do_timeout(1000, waitUntilMetaDataSaved.bind(null, expectedState,
                                                   expectedChecksum,
                                                   callback));
    }
  });
}

cleanUp();
@@ -296,6 +296,7 @@ function runNextCompletion() {
  finishedCompletions = 0;
  for (let completion of completionSets[currentCompletionSet]) {
    completer.complete(completion.hash.substring(0,4), gethashUrl,
                       "test-phish-shavar", // Could be an arbitrary v2 table name.
                       (new callback(completion)));
  }
}
@@ -0,0 +1,165 @@
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");

// These tables have a different update URL (for v4).
const TEST_TABLE_DATA_V4 = {
  tableName: "test-phish-proto",
  providerName: "google4",
  updateUrl: "http://localhost:5555/safebrowsing/update?",
  gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4?",
};

const PREF_NEXTUPDATETIME_V4 = "browser.safebrowsing.provider.google4.nextupdatetime";

let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"]
                     .getService(Ci.nsIUrlListManager);

let gCompleter = Cc["@mozilla.org/url-classifier/hashcompleter;1"]
                   .getService(Ci.nsIUrlClassifierHashCompleter);

XPCOMUtils.defineLazyServiceGetter(this, 'gUrlUtil',
                                   '@mozilla.org/url-classifier/utils;1',
                                   'nsIUrlClassifierUtils');

// Handles requests for TEST_TABLE_DATA_V4.
let gHttpServV4 = null;
let gExpectedGetHashQueryV4 = "";

const NEW_CLIENT_STATE = 'sta\0te';
const CHECKSUM = '\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78';

prefBranch.setBoolPref("browser.safebrowsing.debug", true);

// The "\xFF\xFF" is to generate a base64 string with "/".
prefBranch.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");

// Register tables.
gListManager.registerTable(TEST_TABLE_DATA_V4.tableName,
                           TEST_TABLE_DATA_V4.providerName,
                           TEST_TABLE_DATA_V4.updateUrl,
                           TEST_TABLE_DATA_V4.gethashUrl);

// This is unfortunately needed since the v4 gethash request
// requires the threat type (table name) as well as the
// state it's associated with. We have to run the update once
// to have the state written.
add_test(function test_update_v4() {
  gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
  gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);

  // Force table update.
  prefBranch.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
  gListManager.maybeToggleUpdateChecking();
});

add_test(function test_getHashRequestV4() {
  let request = gUrlUtil.makeFindFullHashRequestV4([TEST_TABLE_DATA_V4.tableName],
                                                   [btoa(NEW_CLIENT_STATE)],
                                                   [btoa("0123"), btoa("1234567"), btoa("1111")],
                                                   1,
                                                   3);
  gExpectedGetHashQueryV4 = '&$req=' + request;

  let completeFinishedCnt = 0;

  gCompleter.complete("0123", TEST_TABLE_DATA_V4.gethashUrl, TEST_TABLE_DATA_V4.tableName, {
    completion: function (hash, table, chunkId) {
      equal(hash, "01234567890123456789012345678901");
      equal(table, TEST_TABLE_DATA_V4.tableName);
      equal(chunkId, 0);
      do_print("completion: " + hash + ", " + table + ", " + chunkId);
    },

    completionFinished: function (status) {
      equal(status, Cr.NS_OK);
      completeFinishedCnt++;
      if (3 === completeFinishedCnt) {
        run_next_test();
      }
    },
  });

  gCompleter.complete("1234567", TEST_TABLE_DATA_V4.gethashUrl, TEST_TABLE_DATA_V4.tableName, {
    completion: function (hash, table, chunkId) {
      equal(hash, "12345678901234567890123456789012");
      equal(table, TEST_TABLE_DATA_V4.tableName);
      equal(chunkId, 0);
      do_print("completion: " + hash + ", " + table + ", " + chunkId);
    },

    completionFinished: function (status) {
      equal(status, Cr.NS_OK);
      completeFinishedCnt++;
      if (3 === completeFinishedCnt) {
        run_next_test();
      }
    },
  });

  gCompleter.complete("1111", TEST_TABLE_DATA_V4.gethashUrl, TEST_TABLE_DATA_V4.tableName, {
    completion: function (hash, table, chunkId) {
      ok(false, "1111 is not the prefix of " + hash);
    },

    completionFinished: function (status) {
      equal(status, Cr.NS_OK);
      completeFinishedCnt++;
      if (3 === completeFinishedCnt) {
        run_next_test();
      }
    },
  });
});

function run_test() {
  gHttpServV4 = new HttpServer();
  gHttpServV4.registerDirectory("/", do_get_cwd());

  // Update handler. Will respond with a valid state to be verified in the
  // gethash handler.
  gHttpServV4.registerPathHandler("/safebrowsing/update", function(request, response) {
    response.setHeader("Content-Type",
                       "application/vnd.google.safebrowsing-update", false);
    response.setStatusLine(request.httpVersion, 200, "OK");

    // The protobuf binary representation of the response:
    //
    // [
    //   {
    //     'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
    //     'response_type': 2, // FULL_UPDATE
    //     'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
    //     'checksum': { "sha256": CHECKSUM }, // CHECKSUM
    //     'additions': { 'compression_type': RAW,
    //                    'prefix_size': 4,
    //                    'raw_hashes': "00000001000000020000000300000004"}
    //   }
    // ]
    //
    let content = "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";

    response.bodyOutputStream.write(content, content.length);

    waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
      run_next_test();
    });

  });

  // V4 gethash handler.
  gHttpServV4.registerPathHandler("/safebrowsing/gethash-v4", function(request, response) {
    equal(request.queryString, gExpectedGetHashQueryV4);

    // { nsCString("01234567890123456789012345678901"), SOCIAL_ENGINEERING_PUBLIC, { 8, 500 } },
    // { nsCString("12345678901234567890123456789012"), SOCIAL_ENGINEERING_PUBLIC, { 7, 100} },
    // { nsCString("23456789012345678901234567890123"), SOCIAL_ENGINEERING_PUBLIC, { 1, 20 } },
    let content = "\x0A\x2D\x08\x02\x1A\x22\x0A\x20\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x2A\x05\x08\x08\x10\xF4\x03\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x2A\x04\x08\x07\x10\x64\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x2A\x04\x08\x01\x10\x14\x12\x04\x08\x0C\x10\x0A\x1A\x04\x08\x78\x10\x09";

    response.setStatusLine(request.httpVersion, 200, "OK");
    response.bodyOutputStream.write(content, content.length);
  });

  gHttpServV4.start(5555);

  run_next_test();
}
@@ -334,53 +334,3 @@ function readFileToString(aFilename) {
  let buf = NetUtil.readInputStreamToString(stream, stream.available());
  return buf;
}

function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
  let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"]
                    .getService(Ci.nsIUrlClassifierDBService);

  dbService.getTables(metaData => {
    do_print("metadata: " + metaData);
    let didCallback = false;
    metaData.split("\n").some(line => {
      // Parse [tableName];[stateBase64]
      let p = line.indexOf(";");
      if (-1 === p) {
        return false; // continue.
      }
      let tableName = line.substring(0, p);
      let metadata = line.substring(p + 1).split(":");
      let stateBase64 = metadata[0];
      let checksumBase64 = metadata[1];

      if (tableName !== 'test-phish-proto') {
        return false; // continue.
      }

      if (stateBase64 === btoa(expectedState) &&
          checksumBase64 === btoa(expectedChecksum)) {
        do_print('State has been saved to disk!');

        // We slightly defer the callback to see if the in-memory
        // |getTables| caching works correctly.
        dbService.getTables(cachedMetadata => {
          equal(cachedMetadata, metaData);
          callback();
        });

        // Even though we haven't done callback at this moment
        // but we still claim "we have" in order to stop repeating
        // a new timer.
        didCallback = true;
      }

      return true; // break no matter whether the state is matching.
    });

    if (!didCallback) {
      do_timeout(1000, waitUntilMetaDataSaved.bind(null, expectedState,
                                                   expectedChecksum,
                                                   callback));
    }
  });
}
@@ -20,7 +20,7 @@ QueryInterface: function(iid)
    return this;
  },

  complete: function(partialHash, gethashUrl, cb)
  complete: function(partialHash, gethashUrl, tableName, cb)
  {
    this.queries.push(partialHash);
    var fragments = this.fragments;
@@ -11,6 +11,7 @@ support-files =
[test_backoff.js]
[test_dbservice.js]
[test_hashcompleter.js]
[test_hashcompleter_v4.js]
# Bug 752243: Profile cleanup frequently fails
#skip-if = os == "mac" || os == "linux"
[test_partial.js]