Mirror of https://github.com/mozilla/pjs.git
fix url-classifier checks against bare-domain entries. b=402435, r=tony
Parent: e3f9003fe2
Commit: e20efd0ec2
@@ -702,7 +702,10 @@ nsUrlClassifierDBServiceWorker::GetLookupFragments(const nsACString& spec,
     paths.AppendCString(path);
   }
 
-  numComponents = 0;
+  // Check an empty path (for whole-domain blacklist entries)
+  paths.AppendCString(EmptyCString());
+
+  numComponents = 1;
   path.BeginReading(begin);
   path.EndReading(end);
   iter = begin;
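For context, GetLookupFragments expands a lookup URL into the host/path combinations that are checked against the table, and the change above appends the empty path so that a whole-domain (bare-domain) entry such as "bar.baz.com/" can match any URL on that host. The JavaScript below is only a rough, self-contained sketch of that expansion under the general Safe Browsing lookup rules; the function names and the exact component limits are illustrative assumptions, not the actual nsUrlClassifierDBServiceWorker code.

// Rough sketch of lookup-fragment expansion (illustrative only).
// hostSuffixes: the exact hostname plus up to four shorter suffixes,
// never reduced to the bare top-level domain.
function hostSuffixes(host) {
  const parts = host.split(".");
  const out = [host];
  const start = Math.max(parts.length - 5, 1);
  for (let i = start; i <= parts.length - 2; i++) {
    out.push(parts.slice(i).join("."));
  }
  return out;
}

// pathPrefixes: the exact path (with and without query), the empty path
// (the whole-domain case this fix adds), and a few leading-component prefixes.
function pathPrefixes(path) {
  const out = [path];
  const noQuery = path.split("?")[0];
  if (noQuery !== path) out.push(noQuery);
  out.push("");                       // bare-domain form, e.g. "bar.baz.com/"
  const components = noQuery.split("/").filter(c => c.length > 0);
  let prefix = "";
  for (let i = 0; i < components.length - 1 && i < 3; i++) {
    prefix += components[i] + "/";
    out.push(prefix);
  }
  return out;
}

// Every host suffix is combined with every path prefix.
function lookupFragments(host, path) {
  const fragments = [];
  for (const h of hostSuffixes(host)) {
    for (const p of pathPrefixes(path)) {
      fragments.push(h + "/" + p);
    }
  }
  return fragments;
}

// Example: a lookup for http://foo.bar.baz.com/foo/bar now also tries the
// bare-domain fragments "foo.bar.baz.com/", "bar.baz.com/" and "baz.com/".
console.log(lookupFragments("foo.bar.baz.com", "foo/bar"));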
@@ -44,6 +44,11 @@ for (var i = 0; i < chunk1Urls.length; i++) {
   phishUnexpected[chunk1Urls[i]] = true;
 }
 
+// Check that the entries hit based on sub-parts
+phishExpected["baz.com/foo/bar"] = true;
+phishExpected["foo.bar.baz.com/foo"] = true;
+phishExpected["bar.baz.com/"] = true;
+
 var numExpecting;
 
 function testFailure(arg) {
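The added test expectations cover exactly this sub-part matching: a listed URL is now expected to hit entries for shorter host suffixes and path prefixes, including the bare-domain entry "bar.baz.com/". As a rough illustration only, reusing the hypothetical lookupFragments sketch above and a plain Set in place of the real table (this is not the test harness API):

// Hypothetical check mirroring the new expectations (assumes lookupFragments
// from the sketch above; the table here is just a Set, not the real database).
const table = new Set([
  "baz.com/foo/bar",
  "foo.bar.baz.com/foo",
  "bar.baz.com/",       // whole-domain (bare-domain) entry
]);

function isListed(host, path) {
  // A URL is a hit if any of its lookup fragments appears in the table.
  return lookupFragments(host, path).some(f => table.has(f));
}

console.log(isListed("foo.bar.baz.com", "foo/bar"));    // true  (hits "baz.com/foo/bar")
console.log(isListed("www.bar.baz.com", "index.html")); // true  (hits the bare-domain "bar.baz.com/")
console.log(isListed("example.com", "foo/bar"));        // false (no matching fragment)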