Bug 1878375 - Synchronize vendored Rust libraries with mozilla-central. r=darktrojan
mozilla-central: 0f6960cdd1dea56a9d701960330de90ab97ca8e5
comm-central: 1ddbf5d8031166aac486ec0e3b30c84bf7ec4e49

Differential Revision: https://phabricator.services.mozilla.com/D220986

--HG--
extra : rebase_source : d6d1d3935d328acc56b61d920177938126b687e1
extra : amend_source : 776538c8024d7c65a6eab316836d7487c287e3a2
This commit is contained in:
Parent: 84f76a1e2b
Commit: 728c59b214
@@ -56,9 +56,9 @@ git = "https://github.com/mozilla-spidermonkey/jsparagus"
 rev = "61f399c53a641ebd3077c1f39f054f6d396a633c"
 replace-with = "vendored-sources"

-[source."git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9"]
+[source."git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"]
 git = "https://github.com/mozilla/application-services"
-rev = "c3774b262f27fabdd8ae7d064db5745029b347b9"
+rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 replace-with = "vendored-sources"

 [source."git+https://github.com/mozilla/audioipc?rev=e6f44a2bd1e57d11dfc737632a9e849077632330"]
@@ -1535,7 +1535,7 @@ dependencies = [
 [[package]]
 name = "error-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "error-support-macros",
  "lazy_static",
@@ -1547,7 +1547,7 @@ dependencies = [
 [[package]]
 name = "error-support-macros"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2752,7 +2752,7 @@ dependencies = [
 [[package]]
 name = "interrupt-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "lazy_static",
  "parking_lot",
@@ -3706,7 +3706,7 @@ dependencies = [
 [[package]]
 name = "nss_build_common"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"

 [[package]]
 name = "nsstring"
@@ -3892,7 +3892,7 @@ checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
 [[package]]
 name = "payload-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "serde",
  "serde_derive",
@@ -4340,7 +4340,7 @@ checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
 [[package]]
 name = "relevancy"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "anyhow",
  "base64",
@@ -4363,7 +4363,7 @@ dependencies = [
 [[package]]
 name = "remote_settings"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "log",
  "parking_lot",
@@ -4850,7 +4850,7 @@ dependencies = [
 [[package]]
 name = "sql-support"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "ffi-support",
  "interrupt-support",
@@ -5014,7 +5014,7 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 [[package]]
 name = "suggest"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "anyhow",
  "chrono",
@@ -5066,7 +5066,7 @@ dependencies = [
 [[package]]
 name = "sync-guid"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "base64",
  "rand",
@@ -5077,7 +5077,7 @@ dependencies = [
 [[package]]
 name = "sync15"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "anyhow",
  "error-support",
@@ -5126,7 +5126,7 @@ dependencies = [
 [[package]]
 name = "tabs"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "anyhow",
  "error-support",
@@ -5347,7 +5347,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
 [[package]]
 name = "types"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "rusqlite",
  "serde",
@@ -5687,7 +5687,7 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 [[package]]
 name = "viaduct"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "ffi-support",
  "log",
@@ -5741,7 +5741,7 @@ version = "0.3.100"
 [[package]]
 name = "webext-storage"
 version = "0.1.0"
-source = "git+https://github.com/mozilla/application-services?rev=c3774b262f27fabdd8ae7d064db5745029b347b9#c3774b262f27fabdd8ae7d064db5745029b347b9"
+source = "git+https://github.com/mozilla/application-services?rev=dbeaef2eb28e9da7cc2f96e26296513cc4e07c07#dbeaef2eb28e9da7cc2f96e26296513cc4e07c07"
 dependencies = [
  "anyhow",
  "error-support",
@@ -197,12 +197,12 @@ icu_segmenter_data = { path = "../../intl/icu_segmenter_data" }
 libudev-sys = { path = "../../dom/webauthn/libudev-sys" }
 midir = { git = "https://github.com/mozilla/midir.git", rev = "85156e360a37d851734118104619f86bd18e94c6" }
 malloc_size_of_derive = { path = "../../xpcom/rust/malloc_size_of_derive" }
-interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-relevancy = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-sql-support = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-suggest = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-sync15 = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-tabs = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-viaduct = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
-webext-storage = { git = "https://github.com/mozilla/application-services", rev = "c3774b262f27fabdd8ae7d064db5745029b347b9" }
+interrupt-support = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+relevancy = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+sql-support = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+suggest = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+sync15 = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+tabs = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+viaduct = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
+webext-storage = { git = "https://github.com/mozilla/application-services", rev = "dbeaef2eb28e9da7cc2f96e26296513cc4e07c07" }
 gpu-descriptor = { git = "https://github.com/zakarumych/gpu-descriptor", rev = "7b71a4e47c81903ad75e2c53deb5ab1310f6ff4d" }
@ -1 +1 @@
{"mc_workspace_toml": "948ffb790b7c291e164071ecffb81aaf89d0956d12d36ff97634362396a45fac3e5a3bb98e7c2864873c1d18e2f854c292fcc736a5ab4d6589ea2947c44a3eca", "mc_gkrust_toml": "522d8c92ca3a3c089a1479dc20cc9015c16a1f550579346c328a46fd74ffc3e957656bb445cd3cade971a8fbf892dfe47ea73d7f7153ba59de5a2ac68357739b", "mc_hack_toml": "0be5955346b278ea0611209027bbee30ee54d0d8ed7ffb48b5705db70de2bdd285e16e22c297b5f65acb18427f7937567ed8fd6a7ff26b8579000faf00095f59", "mc_cargo_lock": "1be4813e9988e6365aaaf63a86502f4cadcd78b812b2a6b381f359719ca228ff6c0555745ebdad25276cf53a4ea40177795b86724c167951008612aa66a50dae"}
{"mc_workspace_toml": "4d9c8de77b285abd07c16048e687178a82a83b0fba370c05dca6e34f56bedcba9b22194c42bfa61ad1370b889e1b933a45056b40a4345b4b07b5ced158bdf073", "mc_gkrust_toml": "522d8c92ca3a3c089a1479dc20cc9015c16a1f550579346c328a46fd74ffc3e957656bb445cd3cade971a8fbf892dfe47ea73d7f7153ba59de5a2ac68357739b", "mc_hack_toml": "0be5955346b278ea0611209027bbee30ee54d0d8ed7ffb48b5705db70de2bdd285e16e22c297b5f65acb18427f7937567ed8fd6a7ff26b8579000faf00095f59", "mc_cargo_lock": "8bf47c5fafa83e58b804cb5f9f22fb1aa3b7ea8b95ecf11bf516480a2cb289452498f74fb49b064a1f48828a632c0b4edd8e0747beca702744dc9a62e615da8f"}
@@ -16,8 +16,8 @@ harness = false
 [dependencies]
 mozilla-central-workspace-hack = { version = "0.1", features = ['gkrust'], optional = true }
 gkrust-shared = { version = "0.1.0", path = "../../../toolkit/library/rust/shared" }
-sys_tray = { version = "0.1.0", path = "../sys_tray" }
 ews_xpcom = { version = "0.1.0", path = "../ews_xpcom" }
+sys_tray = { version = "0.1.0", path = "../sys_tray" }
 aa-stroke = { git = "https://github.com/FirefoxGraphics/aa-stroke", rev = "d94278ed9c7020f50232689a26d1277eb0eb74d2" }
 app_services_logger = { path = "../../../services/common/app_services_logger" }
 audio_thread_priority = { version = "0.32" }
@ -1 +1 @@
{"files":{"Cargo.toml":"b6f48d7892872e05decf3ce4e4198f4481c753fbb74384d9cbc18b4fe1affd12","README.md":"5e28baf874b643d756228bdab345e287bf107d3182dfe6a18aafadcc4b9a3fc9","benches/benchmark_all.rs":"3582f21af9758766ff32ed95f90b69984b32091b1e31e0c0bef307c22fd82f18","build.rs":"78780c5cccfe22c3ff4198624b9e188559c437c3e6fa1c8bb66548eee6aa66bf","metrics.yaml":"0540ab2271aeab7f07335c7ceec12acde942995f9dcb3c29070489aa61899d56","src/benchmarks/README.md":"ccee8dbddba8762d0453fa855bd6984137b224b8c019f3dd8e86a3c303f51d71","src/benchmarks/client.rs":"3b92c350ad396b0e0b5438c7b7b94b08b322702f419ca9b815e6732bd174f8a1","src/benchmarks/ingest.rs":"ca368573591519e6b777d659d5615718bedb6eee1734e25242627f8116b10881","src/benchmarks/mod.rs":"2d7c20d47d6c7e17bc738255a31119bd0c4a4e495419a00c7b10b251ace9ef6b","src/benchmarks/query.rs":"0cac34ce895dd810513b30113b2608fecaece560c74a3286af55e3554f7a7b5a","src/bin/debug_ingestion_sizes.rs":"ce6e810be7b3fc19e826d75b622b82cfab5a1a99397a6d0833c2c4eebff2d364","src/config.rs":"d40c6e83d8b5faa32c66110803ca9e78611d43507e9d3f1e191a93a7773c37b3","src/db.rs":"1fbcdcc2e99692024fcb8a85aafa9dacb35d83703ce509bea1d0c694d379ceb2","src/error.rs":"183a92511565439915275e0705e6740ff513c2549f2ef78fd3055d8aaaf81021","src/fakespot.rs":"03d3aac07b3a3a9ceb8d2c452d4a122bfebf04579829e62e83487877055312d4","src/keyword.rs":"988d0ab021c0df19cfd3c519df7d37f606bf984cd14d0efca4e5a7aff88344dd","src/lib.rs":"455e7ca3cf1c95e04ac567e147a906a07df3e7d548546db399370e3fac3f94c9","src/metrics.rs":"b3b9816b8eda366f808b56e242ac6aa43a5b141ad01d43420fdfcbfca13e1cfc","src/pocket.rs":"1316668840ec9b4ea886223921dc9d3b5a1731d1a5206c0b1089f2a6c45c1b7b","src/provider.rs":"922ff68a4a49da637982d7b480651acf507197f62e0c0a04fce7237c243dc5e2","src/query.rs":"d9a26c024b8ac19ba0dbd6068a3bab881fa60928ecbac080bae80b8cbd2d5320","src/rs.rs":"81c11647b9ee4e52925b98e21adfd7c5c053846918500927502c41850f8f8a09","src/schema.rs":"2f4b0fbf0fd931c1cafa3be8dbddb9791c402125807b687d0bcf4d23b52743fc","src/store.rs":"a848bfbe254e83a6bb9a958a25984357ddb02d679af010cf170cb2498b3c1cd7","src/suggest.udl":"5eedf30402ed121e7f5b052782a55f64d7ca7a690901cd87f657a48f5206228b","src/suggestion.rs":"e24d951b564905f5bcd9b230a28fec78cbd4c29f8ef46bec014b06219902e3f3","src/testing/client.rs":"f059e844f336fd45fe4a335c67779558a98796d86f9ec0f43f96e6a0a6309f69","src/testing/data.rs":"d4fc5227996a8b115d93243fdbd83bc57d73a8c2d4c0b20dffa15bbec27925cb","src/testing/mod.rs":"b6ad90bb951fe0233493a7a1625f9979b6b8a946c5e027342ec25caeb5d1fed9","src/yelp.rs":"bc036ff71b438d53ce8811acd8d650d83ef03faeea476f5b659b403c1e64ff2b","uniffi.toml":"f26317442ddb5b3281245bef6e60ffcb78bb95d29fe4a351a56dbb88d4ec8aab"},"package":null}
{"files":{"Cargo.toml":"b6f48d7892872e05decf3ce4e4198f4481c753fbb74384d9cbc18b4fe1affd12","README.md":"5e28baf874b643d756228bdab345e287bf107d3182dfe6a18aafadcc4b9a3fc9","benches/benchmark_all.rs":"3582f21af9758766ff32ed95f90b69984b32091b1e31e0c0bef307c22fd82f18","build.rs":"78780c5cccfe22c3ff4198624b9e188559c437c3e6fa1c8bb66548eee6aa66bf","metrics.yaml":"0540ab2271aeab7f07335c7ceec12acde942995f9dcb3c29070489aa61899d56","src/benchmarks/README.md":"ccee8dbddba8762d0453fa855bd6984137b224b8c019f3dd8e86a3c303f51d71","src/benchmarks/client.rs":"3b92c350ad396b0e0b5438c7b7b94b08b322702f419ca9b815e6732bd174f8a1","src/benchmarks/ingest.rs":"ca368573591519e6b777d659d5615718bedb6eee1734e25242627f8116b10881","src/benchmarks/mod.rs":"2d7c20d47d6c7e17bc738255a31119bd0c4a4e495419a00c7b10b251ace9ef6b","src/benchmarks/query.rs":"ce78057e0ed43a419cc92d2bceb0bbef8aad9b113ef0341cf5f1d8d1578848e0","src/bin/debug_ingestion_sizes.rs":"ce6e810be7b3fc19e826d75b622b82cfab5a1a99397a6d0833c2c4eebff2d364","src/config.rs":"d40c6e83d8b5faa32c66110803ca9e78611d43507e9d3f1e191a93a7773c37b3","src/db.rs":"96156fcb8f8dc9dd98334d77fc53623046529dcf6b57f6282f120ad33cb8bcac","src/error.rs":"183a92511565439915275e0705e6740ff513c2549f2ef78fd3055d8aaaf81021","src/fakespot.rs":"03d3aac07b3a3a9ceb8d2c452d4a122bfebf04579829e62e83487877055312d4","src/keyword.rs":"988d0ab021c0df19cfd3c519df7d37f606bf984cd14d0efca4e5a7aff88344dd","src/lib.rs":"ab7fa1005cc371b15fde5211d7dabae5034f3b236580047655897be30280b2c9","src/metrics.rs":"b3b9816b8eda366f808b56e242ac6aa43a5b141ad01d43420fdfcbfca13e1cfc","src/pocket.rs":"1316668840ec9b4ea886223921dc9d3b5a1731d1a5206c0b1089f2a6c45c1b7b","src/provider.rs":"3ad644f7b3942e4228bdc9ec03e12af5faa55e3f9ab668617bef98e4c61ee1f7","src/query.rs":"6b94eb913908c36798129311bf519fa76478c6d35c2154efd639c4ba26b95a59","src/rs.rs":"16cf7fae7e935ef94beab532537564975addcd35b1a9b4b4a9fa1d34d51fbcfb","src/schema.rs":"5f2449c6ef798addf6b1d9869936edfe0629f6d543f3cb957becdf0595b29a41","src/store.rs":"57d00989eca564ce1497fa813b2f1bdf8acb8cbba1603fe6000c3abef37e5959","src/suggest.udl":"ba5e99a5d8c09ade945d537e0f90276cc6964ebc9ff454e5dbe5c9b3ba58f8eb","src/suggestion.rs":"3a91ecd648183f34b3834f5eaa14e487f29d8b227e7d4755288c3eade6d990e3","src/testing/client.rs":"f8c9bd32d0f4cf364daebe114d580c7e36a83b69c07884d14170969620d9a437","src/testing/data.rs":"d4fc5227996a8b115d93243fdbd83bc57d73a8c2d4c0b20dffa15bbec27925cb","src/testing/mod.rs":"800e5317bc2e13ee2b416bb0eb5c25ad9108a1afe8c73c9d2f6787572aa95c3a","src/yelp.rs":"bc036ff71b438d53ce8811acd8d650d83ef03faeea476f5b659b403c1e64ff2b","uniffi.toml":"f26317442ddb5b3281245bef6e60ffcb78bb95d29fe4a351a56dbb88d4ec8aab"},"package":null}
@@ -40,7 +40,7 @@ impl BenchmarkWithInput for QueryBenchmark {
         InputType(SuggestionQuery {
             providers: vec![self.provider],
             keyword: self.query.to_string(),
-            limit: None,
+            ..SuggestionQuery::default()
         })
     }

@@ -13,7 +13,7 @@ use rusqlite::{
     types::{FromSql, ToSql},
     Connection, OpenFlags, OptionalExtension,
 };
-use sql_support::{open_database::open_database_with_flags, ConnExt};
+use sql_support::{open_database::open_database_with_flags, repeat_sql_vars, ConnExt};

 use crate::{
     config::{SuggestGlobalConfig, SuggestProviderConfig},
@@ -24,8 +24,9 @@ use crate::{
     provider::SuggestionProvider,
     rs::{
         DownloadedAmoSuggestion, DownloadedAmpSuggestion, DownloadedAmpWikipediaSuggestion,
-        DownloadedFakespotSuggestion, DownloadedMdnSuggestion, DownloadedPocketSuggestion,
-        DownloadedWeatherData, DownloadedWikipediaSuggestion, Record, SuggestRecordId,
+        DownloadedExposureSuggestion, DownloadedFakespotSuggestion, DownloadedMdnSuggestion,
+        DownloadedPocketSuggestion, DownloadedWeatherData, DownloadedWikipediaSuggestion, Record,
+        SuggestRecordId,
     },
     schema::{clear_database, SuggestConnectionInitializer},
     suggestion::{cook_raw_suggestion_url, AmpSuggestionType, Suggestion},
@ -805,6 +806,83 @@ impl<'a> SuggestDao<'a> {
|
|||
)
|
||||
}
|
||||
|
||||
/// Fetches exposure suggestions
|
||||
pub fn fetch_exposure_suggestions(&self, query: &SuggestionQuery) -> Result<Vec<Suggestion>> {
|
||||
// A single exposure suggestion can be spread across multiple remote
|
||||
// settings records, for example if it has very many keywords. On ingest
|
||||
// we will insert one row in `exposure_custom_details` and one row in
|
||||
// `suggestions` per record, but that's only an implementation detail.
|
||||
// Logically, and for consumers, there's only ever at most one exposure
|
||||
// suggestion with a given exposure suggestion type.
|
||||
//
|
||||
// Why do insertions this way? It's how other suggestions work, and it
|
||||
// lets us perform relational operations on suggestions, records, and
|
||||
// keywords. For example, when a record is deleted we can look up its ID
|
||||
// in `suggestions`, join the keywords table on the suggestion ID, and
|
||||
// delete the keywords that were added by that record.
|
||||
|
||||
let Some(suggestion_types) = query
|
||||
.provider_constraints
|
||||
.as_ref()
|
||||
.and_then(|c| c.exposure_suggestion_types.as_ref())
|
||||
else {
|
||||
return Ok(vec![]);
|
||||
};
|
||||
|
||||
let keyword = query.keyword.to_lowercase();
|
||||
let params = rusqlite::params_from_iter(
|
||||
std::iter::once(&SuggestionProvider::Exposure as &dyn ToSql)
|
||||
.chain(std::iter::once(&keyword as &dyn ToSql))
|
||||
.chain(suggestion_types.iter().map(|t| t as &dyn ToSql)),
|
||||
);
|
||||
self.conn.query_rows_and_then_cached(
|
||||
&format!(
|
||||
r#"
|
||||
SELECT DISTINCT
|
||||
d.type
|
||||
FROM
|
||||
suggestions s
|
||||
JOIN
|
||||
exposure_custom_details d
|
||||
ON d.suggestion_id = s.id
|
||||
JOIN
|
||||
keywords k
|
||||
ON k.suggestion_id = s.id
|
||||
WHERE
|
||||
s.provider = ?
|
||||
AND k.keyword = ?
|
||||
AND d.type IN ({})
|
||||
ORDER BY
|
||||
d.type
|
||||
"#,
|
||||
repeat_sql_vars(suggestion_types.len())
|
||||
),
|
||||
params,
|
||||
|row| -> Result<Suggestion> {
|
||||
Ok(Suggestion::Exposure {
|
||||
suggestion_type: row.get("type")?,
|
||||
score: 1.0,
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_exposure_suggestion_ingested(&self, record_id: &SuggestRecordId) -> Result<bool> {
|
||||
Ok(self.conn.exists(
|
||||
r#"
|
||||
SELECT
|
||||
id
|
||||
FROM
|
||||
suggestions
|
||||
WHERE
|
||||
record_id = :record_id
|
||||
"#,
|
||||
named_params! {
|
||||
":record_id": record_id.as_str(),
|
||||
},
|
||||
)?)
|
||||
}
|
||||
|
||||
/// Inserts all suggestions from a downloaded AMO attachment into
|
||||
/// the database.
|
||||
pub fn insert_amo_suggestions(
|
||||
|
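Note: the `IN ({})` clause in `fetch_exposure_suggestions` above is filled in with `repeat_sql_vars` from the `sql-support` crate, whose implementation is not part of this diff. As a rough, assumption-labeled sketch only, a helper of that shape could look like this:

```rust
// Illustrative sketch only -- not the sql-support implementation. Assumed
// behavior: produce n comma-separated "?" placeholders for a SQL IN clause.
fn repeat_sql_vars(n: usize) -> String {
    std::iter::repeat("?").take(n).collect::<Vec<_>>().join(",")
}

fn main() {
    assert_eq!(repeat_sql_vars(3), "?,?,?");
    // e.g. format!("... AND d.type IN ({})", repeat_sql_vars(types.len()))
    // where `types` is a hypothetical slice of exposure suggestion types.
}
```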
@ -1058,6 +1136,39 @@ impl<'a> SuggestDao<'a> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Inserts exposure suggestion records data into the database.
|
||||
pub fn insert_exposure_suggestions(
|
||||
&mut self,
|
||||
record_id: &SuggestRecordId,
|
||||
suggestion_type: &str,
|
||||
suggestions: &[DownloadedExposureSuggestion],
|
||||
) -> Result<()> {
|
||||
// `suggestion.keywords()` can yield duplicates for exposure
|
||||
// suggestions, so ignore failures on insert in the uniqueness
|
||||
// constraint on `(suggestion_id, keyword)`.
|
||||
let mut keyword_insert = KeywordInsertStatement::new_with_or_ignore(self.conn)?;
|
||||
let mut suggestion_insert = SuggestionInsertStatement::new(self.conn)?;
|
||||
let mut exposure_insert = ExposureInsertStatement::new(self.conn)?;
|
||||
for suggestion in suggestions {
|
||||
self.scope.err_if_interrupted()?;
|
||||
let suggestion_id = suggestion_insert.execute(
|
||||
record_id,
|
||||
"", // title, not used by exposure suggestions
|
||||
"", // url, not used by exposure suggestions
|
||||
DEFAULT_SUGGESTION_SCORE,
|
||||
SuggestionProvider::Exposure,
|
||||
)?;
|
||||
exposure_insert.execute(suggestion_id, suggestion_type)?;
|
||||
|
||||
// Exposure suggestions don't use `rank` but `(suggestion_id, rank)`
|
||||
// must be unique since there's an index on that tuple.
|
||||
for (rank, keyword) in suggestion.keywords().enumerate() {
|
||||
keyword_insert.execute(suggestion_id, &keyword, None, rank)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Inserts or replaces an icon for a suggestion into the database.
|
||||
pub fn put_icon(&mut self, icon_id: &str, data: &[u8], mimetype: &str) -> Result<()> {
|
||||
self.conn.execute(
|
||||
|
@ -1498,6 +1609,28 @@ impl<'conn> FakespotInsertStatement<'conn> {
|
|||
}
|
||||
}
|
||||
|
||||
struct ExposureInsertStatement<'conn>(rusqlite::Statement<'conn>);
|
||||
|
||||
impl<'conn> ExposureInsertStatement<'conn> {
|
||||
fn new(conn: &'conn Connection) -> Result<Self> {
|
||||
Ok(Self(conn.prepare(
|
||||
"INSERT INTO exposure_custom_details(
|
||||
suggestion_id,
|
||||
type
|
||||
)
|
||||
VALUES(?, ?)
|
||||
",
|
||||
)?))
|
||||
}
|
||||
|
||||
fn execute(&mut self, suggestion_id: i64, suggestion_type: &str) -> Result<()> {
|
||||
self.0
|
||||
.execute((suggestion_id, suggestion_type))
|
||||
.with_context("exposure insert")?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct KeywordInsertStatement<'conn>(rusqlite::Statement<'conn>);
|
||||
|
||||
impl<'conn> KeywordInsertStatement<'conn> {
|
||||
|
@ -1514,6 +1647,19 @@ impl<'conn> KeywordInsertStatement<'conn> {
|
|||
)?))
|
||||
}
|
||||
|
||||
fn new_with_or_ignore(conn: &'conn Connection) -> Result<Self> {
|
||||
Ok(Self(conn.prepare(
|
||||
"INSERT OR IGNORE INTO keywords(
|
||||
suggestion_id,
|
||||
keyword,
|
||||
full_keyword_id,
|
||||
rank
|
||||
)
|
||||
VALUES(?, ?, ?, ?)
|
||||
",
|
||||
)?))
|
||||
}
|
||||
|
||||
fn execute(
|
||||
&mut self,
|
||||
suggestion_id: i64,
|
||||
|
|
|
@@ -26,7 +26,7 @@ mod yelp;
 pub use config::{SuggestGlobalConfig, SuggestProviderConfig};
 pub use error::SuggestApiError;
 pub use metrics::{LabeledTimingSample, SuggestIngestionMetrics};
-pub use provider::SuggestionProvider;
+pub use provider::{SuggestionProvider, SuggestionProviderConstraints};
 pub use query::{QueryWithMetricsResult, SuggestionQuery};
 pub use store::{InterruptKind, SuggestIngestionConstraints, SuggestStore, SuggestStoreBuilder};
 pub use suggestion::{raw_suggestion_url_matches, Suggestion};
@ -12,6 +12,17 @@ use rusqlite::{
|
|||
|
||||
use crate::rs::SuggestRecordType;
|
||||
|
||||
/// Record types from these providers will be ingested when consumers do not
|
||||
/// specify providers in `SuggestIngestionConstraints`.
|
||||
pub(crate) const DEFAULT_INGEST_PROVIDERS: [SuggestionProvider; 6] = [
|
||||
SuggestionProvider::Amp,
|
||||
SuggestionProvider::Wikipedia,
|
||||
SuggestionProvider::Amo,
|
||||
SuggestionProvider::Yelp,
|
||||
SuggestionProvider::Mdn,
|
||||
SuggestionProvider::AmpMobile,
|
||||
];
|
||||
|
||||
/// A provider is a source of search suggestions.
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
|
||||
#[repr(u8)]
|
||||
|
@ -25,6 +36,7 @@ pub enum SuggestionProvider {
|
|||
Weather = 7,
|
||||
AmpMobile = 8,
|
||||
Fakespot = 9,
|
||||
Exposure = 10,
|
||||
}
|
||||
|
||||
impl fmt::Display for SuggestionProvider {
|
||||
|
@ -39,6 +51,7 @@ impl fmt::Display for SuggestionProvider {
|
|||
Self::Weather => write!(f, "weather"),
|
||||
Self::AmpMobile => write!(f, "ampmobile"),
|
||||
Self::Fakespot => write!(f, "fakespot"),
|
||||
Self::Exposure => write!(f, "exposure"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -54,7 +67,7 @@ impl FromSql for SuggestionProvider {
 }

 impl SuggestionProvider {
-    pub fn all() -> [Self; 9] {
+    pub fn all() -> [Self; 10] {
         [
             Self::Amp,
             Self::Wikipedia,
@ -65,36 +78,39 @@ impl SuggestionProvider {
|
|||
Self::Weather,
|
||||
Self::AmpMobile,
|
||||
Self::Fakespot,
|
||||
Self::Exposure,
|
||||
]
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn from_u8(v: u8) -> Option<Self> {
|
||||
match v {
|
||||
1 => Some(SuggestionProvider::Amp),
|
||||
2 => Some(SuggestionProvider::Wikipedia),
|
||||
3 => Some(SuggestionProvider::Amo),
|
||||
4 => Some(SuggestionProvider::Pocket),
|
||||
5 => Some(SuggestionProvider::Yelp),
|
||||
6 => Some(SuggestionProvider::Mdn),
|
||||
7 => Some(SuggestionProvider::Weather),
|
||||
8 => Some(SuggestionProvider::AmpMobile),
|
||||
9 => Some(SuggestionProvider::Fakespot),
|
||||
1 => Some(Self::Amp),
|
||||
2 => Some(Self::Wikipedia),
|
||||
3 => Some(Self::Amo),
|
||||
4 => Some(Self::Pocket),
|
||||
5 => Some(Self::Yelp),
|
||||
6 => Some(Self::Mdn),
|
||||
7 => Some(Self::Weather),
|
||||
8 => Some(Self::AmpMobile),
|
||||
9 => Some(Self::Fakespot),
|
||||
10 => Some(Self::Exposure),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn record_type(&self) -> SuggestRecordType {
|
||||
match self {
|
||||
SuggestionProvider::Amp => SuggestRecordType::AmpWikipedia,
|
||||
SuggestionProvider::Wikipedia => SuggestRecordType::AmpWikipedia,
|
||||
SuggestionProvider::Amo => SuggestRecordType::Amo,
|
||||
SuggestionProvider::Pocket => SuggestRecordType::Pocket,
|
||||
SuggestionProvider::Yelp => SuggestRecordType::Yelp,
|
||||
SuggestionProvider::Mdn => SuggestRecordType::Mdn,
|
||||
SuggestionProvider::Weather => SuggestRecordType::Weather,
|
||||
SuggestionProvider::AmpMobile => SuggestRecordType::AmpMobile,
|
||||
SuggestionProvider::Fakespot => SuggestRecordType::Fakespot,
|
||||
Self::Amp => SuggestRecordType::AmpWikipedia,
|
||||
Self::Wikipedia => SuggestRecordType::AmpWikipedia,
|
||||
Self::Amo => SuggestRecordType::Amo,
|
||||
Self::Pocket => SuggestRecordType::Pocket,
|
||||
Self::Yelp => SuggestRecordType::Yelp,
|
||||
Self::Mdn => SuggestRecordType::Mdn,
|
||||
Self::Weather => SuggestRecordType::Weather,
|
||||
Self::AmpMobile => SuggestRecordType::AmpMobile,
|
||||
Self::Fakespot => SuggestRecordType::Fakespot,
|
||||
Self::Exposure => SuggestRecordType::Exposure,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -104,3 +120,8 @@ impl ToSql for SuggestionProvider {
|
|||
Ok(ToSqlOutput::from(*self as u8))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub struct SuggestionProviderConstraints {
|
||||
pub exposure_suggestion_types: Option<Vec<String>>,
|
||||
}
|
||||
|
|
|
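For context on how the new `SuggestionProviderConstraints` export is meant to be used: a consumer builds a `SuggestionQuery` that names the exposure types it cares about. A minimal sketch, assuming the public re-exports shown in lib.rs above; the `SuggestionQuery::exposure` helper added later in this patch wraps the same construction:

```rust
use suggest::{SuggestionProvider, SuggestionProviderConstraints, SuggestionQuery};

fn main() {
    // Query only exposure suggestions whose type is "aaa".
    let query = SuggestionQuery {
        keyword: "chocolate".to_string(),
        providers: vec![SuggestionProvider::Exposure],
        provider_constraints: Some(SuggestionProviderConstraints {
            exposure_suggestion_types: Some(vec!["aaa".to_string()]),
        }),
        ..SuggestionQuery::default()
    };
    assert_eq!(query.providers, vec![SuggestionProvider::Exposure]);
}
```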
@ -2,13 +2,14 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
use crate::{LabeledTimingSample, Suggestion, SuggestionProvider};
|
||||
use crate::{LabeledTimingSample, Suggestion, SuggestionProvider, SuggestionProviderConstraints};
|
||||
|
||||
/// A query for suggestions to show in the address bar.
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct SuggestionQuery {
|
||||
pub keyword: String,
|
||||
pub providers: Vec<SuggestionProvider>,
|
||||
pub provider_constraints: Option<SuggestionProviderConstraints>,
|
||||
pub limit: Option<i32>,
|
||||
}
|
||||
|
||||
|
@ -24,7 +25,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.to_string(),
|
||||
providers: Vec::from(SuggestionProvider::all()),
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -32,7 +33,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.to_string(),
|
||||
providers,
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -50,7 +51,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Amp],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -58,7 +59,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Wikipedia],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -66,7 +67,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::AmpMobile],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -74,7 +75,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Amo],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,7 +83,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Pocket],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -90,7 +91,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Yelp],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -98,7 +99,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Mdn],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,7 +107,7 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Fakespot],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -114,7 +115,20 @@ impl SuggestionQuery {
|
|||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Weather],
|
||||
limit: None,
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn exposure(keyword: &str, suggestion_types: &[&str]) -> Self {
|
||||
Self {
|
||||
keyword: keyword.into(),
|
||||
providers: vec![SuggestionProvider::Exposure],
|
||||
provider_constraints: Some(SuggestionProviderConstraints {
|
||||
exposure_suggestion_types: Some(
|
||||
suggestion_types.iter().map(|s| s.to_string()).collect(),
|
||||
),
|
||||
}),
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,22 +38,6 @@ use serde::{Deserialize, Deserializer};
|
|||
|
||||
use crate::{db::SuggestDao, error::Error, provider::SuggestionProvider, Result};
|
||||
|
||||
/// A list of default record types to download if nothing is specified.
|
||||
/// This defaults to all record types available as-of Fx128.
|
||||
/// Consumers should specify provider types in `SuggestIngestionConstraints` if they want a
|
||||
/// different set.
|
||||
pub(crate) const DEFAULT_RECORDS_TYPES: [SuggestRecordType; 9] = [
|
||||
SuggestRecordType::Icon,
|
||||
SuggestRecordType::AmpWikipedia,
|
||||
SuggestRecordType::Amo,
|
||||
SuggestRecordType::Pocket,
|
||||
SuggestRecordType::Yelp,
|
||||
SuggestRecordType::Mdn,
|
||||
SuggestRecordType::Weather,
|
||||
SuggestRecordType::GlobalConfig,
|
||||
SuggestRecordType::AmpMobile,
|
||||
];
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum Collection {
|
||||
Quicksuggest,
|
||||
|
@ -194,8 +178,8 @@ impl Record {
|
|||
|
||||
/// A record in the Suggest Remote Settings collection.
|
||||
///
|
||||
/// Except for the type, Suggest records don't carry additional fields. All
|
||||
/// suggestions are stored in each record's attachment.
|
||||
/// Most Suggest records don't carry inline fields except for `type`.
|
||||
/// Suggestions themselves are typically stored in each record's attachment.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub(crate) enum SuggestRecord {
|
||||
|
@ -219,6 +203,8 @@ pub(crate) enum SuggestRecord {
|
|||
AmpMobile,
|
||||
#[serde(rename = "fakespot-suggestions")]
|
||||
Fakespot,
|
||||
#[serde(rename = "exposure-suggestions")]
|
||||
Exposure(DownloadedExposureRecord),
|
||||
}
|
||||
|
||||
/// Enum for the different record types that can be consumed.
|
||||
|
@ -236,6 +222,7 @@ pub enum SuggestRecordType {
|
|||
GlobalConfig,
|
||||
AmpMobile,
|
||||
Fakespot,
|
||||
Exposure,
|
||||
}
|
||||
|
||||
impl From<&SuggestRecord> for SuggestRecordType {
|
||||
|
@ -251,6 +238,7 @@ impl From<&SuggestRecord> for SuggestRecordType {
|
|||
SuggestRecord::GlobalConfig(_) => Self::GlobalConfig,
|
||||
SuggestRecord::AmpMobile => Self::AmpMobile,
|
||||
SuggestRecord::Fakespot => Self::Fakespot,
|
||||
SuggestRecord::Exposure(_) => Self::Exposure,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -278,6 +266,7 @@ impl SuggestRecordType {
|
|||
Self::GlobalConfig,
|
||||
Self::AmpMobile,
|
||||
Self::Fakespot,
|
||||
Self::Exposure,
|
||||
]
|
||||
}
|
||||
|
||||
|
@ -293,6 +282,7 @@ impl SuggestRecordType {
|
|||
Self::GlobalConfig => "configuration",
|
||||
Self::AmpMobile => "amp-mobile-suggestions",
|
||||
Self::Fakespot => "fakespot-suggestions",
|
||||
Self::Exposure => "exposure-suggestions",
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -558,6 +548,86 @@ pub(crate) struct DownloadedFakespotSuggestion {
|
|||
pub url: String,
|
||||
}
|
||||
|
||||
/// An exposure suggestion record's inline data
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub(crate) struct DownloadedExposureRecord {
|
||||
pub suggestion_type: String,
|
||||
}
|
||||
|
||||
/// An exposure suggestion to ingest from an attachment
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub(crate) struct DownloadedExposureSuggestion {
|
||||
keywords: Vec<FullOrPrefixKeywords<String>>,
|
||||
}
|
||||
|
||||
impl DownloadedExposureSuggestion {
|
||||
/// Iterate over all keywords for this suggestion. Iteration may contain
|
||||
/// duplicate keywords depending on the structure of the data, so do not
|
||||
/// assume keywords are unique. Duplicates are not filtered out because
|
||||
/// doing so would require O(number of keywords) space, and the number of
|
||||
/// keywords can be very large. If you are inserting into the store, rely on
|
||||
/// uniqueness constraints and use `INSERT OR IGNORE`.
|
||||
pub fn keywords(&self) -> impl Iterator<Item = String> + '_ {
|
||||
self.keywords.iter().flat_map(|e| e.keywords())
|
||||
}
|
||||
}
|
||||
|
||||
/// A single full keyword or a `(prefix, suffixes)` tuple representing multiple
|
||||
/// prefix keywords. Prefix keywords are enumerated by appending to `prefix`
|
||||
/// each possible prefix of each suffix, including the full suffix. The prefix
|
||||
/// is also enumerated by itself. Examples:
|
||||
///
|
||||
/// `FullOrPrefixKeywords::Full("some full keyword")`
|
||||
/// => "some full keyword"
|
||||
///
|
||||
/// `FullOrPrefixKeywords::Prefix(("sug", vec!["gest", "arplum"]))`
|
||||
/// => "sug"
|
||||
/// "sugg"
|
||||
/// "sugge"
|
||||
/// "sugges"
|
||||
/// "suggest"
|
||||
/// "suga"
|
||||
/// "sugar"
|
||||
/// "sugarp"
|
||||
/// "sugarpl"
|
||||
/// "sugarplu"
|
||||
/// "sugarplum"
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum FullOrPrefixKeywords<T> {
|
||||
Full(T),
|
||||
Prefix((T, Vec<T>)),
|
||||
}
|
||||
|
||||
impl<T> From<T> for FullOrPrefixKeywords<T> {
|
||||
fn from(full_keyword: T) -> Self {
|
||||
Self::Full(full_keyword)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> From<(T, Vec<T>)> for FullOrPrefixKeywords<T> {
|
||||
fn from(prefix_suffixes: (T, Vec<T>)) -> Self {
|
||||
Self::Prefix(prefix_suffixes)
|
||||
}
|
||||
}
|
||||
|
||||
impl FullOrPrefixKeywords<String> {
|
||||
pub fn keywords(&self) -> Box<dyn Iterator<Item = String> + '_> {
|
||||
match self {
|
||||
FullOrPrefixKeywords::Full(kw) => Box::new(std::iter::once(kw.to_owned())),
|
||||
FullOrPrefixKeywords::Prefix((prefix, suffixes)) => Box::new(
|
||||
std::iter::once(prefix.to_owned()).chain(suffixes.iter().flat_map(|suffix| {
|
||||
let mut kw = prefix.clone();
|
||||
suffix.chars().map(move |c| {
|
||||
kw.push(c);
|
||||
kw.clone()
|
||||
})
|
||||
})),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Weather data to ingest from a weather record
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub(crate) struct DownloadedWeatherData {
|
||||
|
@ -706,4 +776,124 @@ mod test {
|
|||
],
|
||||
);
|
||||
}
|
||||
|
||||
fn full_or_prefix_keywords_to_owned(
|
||||
kws: Vec<FullOrPrefixKeywords<&str>>,
|
||||
) -> Vec<FullOrPrefixKeywords<String>> {
|
||||
kws.iter()
|
||||
.map(|val| match val {
|
||||
FullOrPrefixKeywords::Full(s) => FullOrPrefixKeywords::Full(s.to_string()),
|
||||
FullOrPrefixKeywords::Prefix((prefix, suffixes)) => FullOrPrefixKeywords::Prefix((
|
||||
prefix.to_string(),
|
||||
suffixes.iter().map(|s| s.to_string()).collect(),
|
||||
)),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_exposure_keywords() {
|
||||
let suggestion = DownloadedExposureSuggestion {
|
||||
keywords: full_or_prefix_keywords_to_owned(vec![
|
||||
"no suffixes".into(),
|
||||
("empty suffixes", vec![]).into(),
|
||||
("empty string suffix", vec![""]).into(),
|
||||
("choco", vec!["", "bo", "late"]).into(),
|
||||
"duplicate 1".into(),
|
||||
"duplicate 1".into(),
|
||||
("dup", vec!["licate 1", "licate 2"]).into(),
|
||||
("dup", vec!["lo", "licate 2", "licate 3"]).into(),
|
||||
("duplic", vec!["ate 3", "ar", "ate 4"]).into(),
|
||||
("du", vec!["plicate 4", "plicate 5", "nk"]).into(),
|
||||
]),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
Vec::from_iter(suggestion.keywords()),
|
||||
vec![
|
||||
"no suffixes",
|
||||
"empty suffixes",
|
||||
"empty string suffix",
|
||||
"choco",
|
||||
"chocob",
|
||||
"chocobo",
|
||||
"chocol",
|
||||
"chocola",
|
||||
"chocolat",
|
||||
"chocolate",
|
||||
"duplicate 1",
|
||||
"duplicate 1",
|
||||
"dup",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 1",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 2",
|
||||
"dup",
|
||||
"dupl",
|
||||
"duplo",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 2",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 3",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 3",
|
||||
"duplica",
|
||||
"duplicar",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 4",
|
||||
"du",
|
||||
"dup",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 4",
|
||||
"dup",
|
||||
"dupl",
|
||||
"dupli",
|
||||
"duplic",
|
||||
"duplica",
|
||||
"duplicat",
|
||||
"duplicate",
|
||||
"duplicate ",
|
||||
"duplicate 5",
|
||||
"dun",
|
||||
"dunk",
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -19,7 +19,7 @@ use sql_support::{
 /// [`SuggestConnectionInitializer::upgrade_from`].
 /// a. If suggestions should be re-ingested after the migration, call `clear_database()` inside
 /// the migration.
-pub const VERSION: u32 = 25;
+pub const VERSION: u32 = 26;

 /// The current Suggest database schema.
 pub const SQL: &str = "
|
|||
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE exposure_custom_details(
|
||||
suggestion_id INTEGER PRIMARY KEY,
|
||||
type TEXT NOT NULL,
|
||||
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX exposure_custom_details_type ON exposure_custom_details(type);
|
||||
|
||||
CREATE TABLE dismissed_suggestions (
|
||||
url TEXT PRIMARY KEY
|
||||
) WITHOUT ROWID;
|
||||
|
@ -423,6 +430,20 @@ CREATE TABLE ingested_records(
|
|||
)?;
|
||||
Ok(())
|
||||
}
|
||||
25 => {
|
||||
// Create the exposure suggestions table and index.
|
||||
tx.execute_batch(
|
||||
"
|
||||
CREATE TABLE exposure_custom_details(
|
||||
suggestion_id INTEGER PRIMARY KEY,
|
||||
type TEXT NOT NULL,
|
||||
FOREIGN KEY(suggestion_id) REFERENCES suggestions(id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX exposure_custom_details_type ON exposure_custom_details(type);
|
||||
",
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(open_database::Error::IncompatibleVersion(version)),
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -21,10 +21,10 @@ use crate::{
     db::{ConnectionType, IngestedRecord, Sqlite3Extension, SuggestDao, SuggestDb},
     error::Error,
     metrics::{DownloadTimer, SuggestIngestionMetrics, SuggestQueryMetrics},
-    provider::SuggestionProvider,
+    provider::{SuggestionProvider, SuggestionProviderConstraints, DEFAULT_INGEST_PROVIDERS},
     rs::{
         Client, Collection, Record, RemoteSettingsClient, SuggestAttachment, SuggestRecord,
-        SuggestRecordId, SuggestRecordType, DEFAULT_RECORDS_TYPES,
+        SuggestRecordId, SuggestRecordType,
     },
     suggestion::AmpSuggestionType,
     QueryWithMetricsResult, Result, SuggestApiResult, Suggestion, SuggestionQuery,
@@ -255,6 +255,7 @@ impl SuggestStore {
 #[derive(Clone, Default, Debug)]
 pub struct SuggestIngestionConstraints {
     pub providers: Option<Vec<SuggestionProvider>>,
+    pub provider_constraints: Option<SuggestionProviderConstraints>,
     /// Only run ingestion if the table `suggestions` is empty
     pub empty_only: bool,
 }
@@ -272,6 +273,7 @@ impl SuggestIngestionConstraints {
                 SuggestionProvider::Weather,
                 SuggestionProvider::AmpMobile,
                 SuggestionProvider::Fakespot,
+                SuggestionProvider::Exposure,
             ]),
             ..Self::default()
         }
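Because exposure records are only ingested when their `suggestion_type` matches the constraints passed to `ingest` (see `process_record` further down), consumers must pass the same types at ingest time as at query time. A minimal sketch mirroring the `exposure_basic` test added at the end of this patch; the exact `SuggestStore::ingest` signature and return type are assumed here, since they are not shown in this diff:

```rust
use suggest::{
    SuggestIngestionConstraints, SuggestStore, SuggestionProvider, SuggestionProviderConstraints,
};

fn ingest_exposure_suggestions(store: &SuggestStore) {
    // Restrict ingestion to exposure records of type "aaa"; the return value
    // is ignored because its type is not part of this diff.
    let _ = store.ingest(SuggestIngestionConstraints {
        providers: Some(vec![SuggestionProvider::Exposure]),
        provider_constraints: Some(SuggestionProviderConstraints {
            exposure_suggestion_types: Some(vec!["aaa".to_string()]),
        }),
        ..Default::default()
    });
}
```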
@@ -336,6 +338,7 @@ impl<S> SuggestStoreInner<S> {
                 SuggestionProvider::Mdn => dao.fetch_mdn_suggestions(&query),
                 SuggestionProvider::Weather => dao.fetch_weather_suggestions(&query),
                 SuggestionProvider::Fakespot => dao.fetch_fakespot_suggestions(&query),
+                SuggestionProvider::Exposure => dao.fetch_exposure_suggestions(&query),
             })
         })?;
         suggestions.extend(new_suggestions);
@ -423,24 +426,28 @@ where
|
|||
return Ok(metrics);
|
||||
}
|
||||
|
||||
// Figure out which record types we're ingesting
|
||||
let ingest_record_types = if let Some(rt) = &constraints.providers {
|
||||
rt.iter()
|
||||
.map(|x| x.record_type())
|
||||
// Always ingest these types
|
||||
.chain([SuggestRecordType::Icon, SuggestRecordType::GlobalConfig])
|
||||
.collect::<BTreeSet<_>>()
|
||||
} else {
|
||||
DEFAULT_RECORDS_TYPES.into_iter().collect()
|
||||
};
|
||||
|
||||
// Group record types by collection
|
||||
let mut record_types_by_collection = HashMap::<Collection, Vec<SuggestRecordType>>::new();
|
||||
for record_type in ingest_record_types {
|
||||
// Figure out which record types we're ingesting and group them by
|
||||
// collection. A record type may be used by multiple providers, but we
|
||||
// want to ingest each one at most once.
|
||||
let mut record_types_by_collection = HashMap::<Collection, BTreeSet<_>>::new();
|
||||
for p in constraints
|
||||
.providers
|
||||
.as_ref()
|
||||
.unwrap_or(&DEFAULT_INGEST_PROVIDERS.to_vec())
|
||||
.iter()
|
||||
{
|
||||
record_types_by_collection
|
||||
.entry(record_type.collection())
|
||||
.entry(p.record_type().collection())
|
||||
.or_default()
|
||||
.push(record_type);
|
||||
.insert(p.record_type());
|
||||
}
|
||||
|
||||
// Always ingest these record types.
|
||||
for rt in [SuggestRecordType::Icon, SuggestRecordType::GlobalConfig] {
|
||||
record_types_by_collection
|
||||
.entry(rt.collection())
|
||||
.or_default()
|
||||
.insert(rt);
|
||||
}
|
||||
|
||||
// Create a single write scope for all DB operations
|
||||
|
@ -458,7 +465,7 @@ where
|
|||
// For each record type in that collection, calculate the changes and pass them to
|
||||
// [Self::ingest_records]
|
||||
for record_type in record_types {
|
||||
breadcrumb!("Ingesting {record_type}");
|
||||
breadcrumb!("Ingesting record_type: {record_type}");
|
||||
metrics.measure_ingest(record_type.to_string(), |download_timer| {
|
||||
let changes = RecordChanges::new(
|
||||
records.iter().filter(|r| r.record_type() == record_type),
|
||||
|
@ -467,8 +474,9 @@ where
|
|||
&& i.collection == collection.name()
|
||||
}),
|
||||
);
|
||||
write_scope
|
||||
.write(|dao| self.ingest_records(dao, collection, changes, download_timer))
|
||||
write_scope.write(|dao| {
|
||||
self.process_changes(dao, collection, changes, &constraints, download_timer)
|
||||
})
|
||||
})?;
|
||||
write_scope.err_if_interrupted()?;
|
||||
}
|
||||
|
@ -478,28 +486,35 @@ where
|
|||
Ok(metrics)
|
||||
}
|
||||
|
||||
fn ingest_records(
|
||||
fn process_changes(
|
||||
&self,
|
||||
dao: &mut SuggestDao,
|
||||
collection: Collection,
|
||||
changes: RecordChanges<'_>,
|
||||
constraints: &SuggestIngestionConstraints,
|
||||
download_timer: &mut DownloadTimer,
|
||||
) -> Result<()> {
|
||||
for record in &changes.new {
|
||||
log::trace!("Ingesting: {}", record.id.as_str());
|
||||
self.ingest_record(dao, record, download_timer)?;
|
||||
log::trace!("Ingesting record ID: {}", record.id.as_str());
|
||||
self.process_record(dao, record, constraints, download_timer)?;
|
||||
}
|
||||
for record in &changes.updated {
|
||||
// Drop any data that we previously ingested from this record.
|
||||
// Suggestions in particular don't have a stable identifier, and
|
||||
// determining which suggestions in the record actually changed is
|
||||
// more complicated than dropping and re-ingesting all of them.
|
||||
log::trace!("Reingesting: {}", record.id.as_str());
|
||||
log::trace!("Reingesting updated record ID: {}", record.id.as_str());
|
||||
dao.delete_record_data(&record.id)?;
|
||||
self.ingest_record(dao, record, download_timer)?;
|
||||
self.process_record(dao, record, constraints, download_timer)?;
|
||||
}
|
||||
for record in &changes.unchanged {
|
||||
if self.should_reprocess_record(dao, record)? {
|
||||
log::trace!("Reingesting unchanged record ID: {}", record.id.as_str());
|
||||
self.process_record(dao, record, constraints, download_timer)?;
|
||||
}
|
||||
}
|
||||
for record in &changes.deleted {
|
||||
log::trace!("Deleting: {:?}", record.id);
|
||||
log::trace!("Deleting record ID: {:?}", record.id);
|
||||
dao.delete_record_data(&record.id)?;
|
||||
}
|
||||
dao.update_ingested_records(
|
||||
|
@ -511,15 +526,16 @@ where
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn ingest_record(
|
||||
fn process_record(
|
||||
&self,
|
||||
dao: &mut SuggestDao,
|
||||
record: &Record,
|
||||
constraints: &SuggestIngestionConstraints,
|
||||
download_timer: &mut DownloadTimer,
|
||||
) -> Result<()> {
|
||||
match &record.payload {
|
||||
SuggestRecord::AmpWikipedia => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -529,7 +545,7 @@ where
|
|||
)?;
|
||||
}
|
||||
SuggestRecord::AmpMobile => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -551,7 +567,7 @@ where
|
|||
dao.put_icon(icon_id, &data, &attachment.mimetype)?;
|
||||
}
|
||||
SuggestRecord::Amo => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -561,7 +577,7 @@ where
|
|||
)?;
|
||||
}
|
||||
SuggestRecord::Pocket => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -571,7 +587,7 @@ where
|
|||
)?;
|
||||
}
|
||||
SuggestRecord::Yelp => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -582,7 +598,7 @@ where
|
|||
)?;
|
||||
}
|
||||
SuggestRecord::Mdn => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -596,7 +612,7 @@ where
|
|||
dao.put_global_config(&SuggestGlobalConfig::from(config))?
|
||||
}
|
||||
SuggestRecord::Fakespot => {
|
||||
self.ingest_attachment(
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|
@ -605,11 +621,35 @@ where
|
|||
},
|
||||
)?;
|
||||
}
|
||||
SuggestRecord::Exposure(r) => {
|
||||
// Ingest this record's attachment if its suggestion type
|
||||
// matches a type in the constraints.
|
||||
if let Some(suggestion_types) = constraints
|
||||
.provider_constraints
|
||||
.as_ref()
|
||||
.and_then(|c| c.exposure_suggestion_types.as_ref())
|
||||
{
|
||||
if suggestion_types.iter().any(|t| *t == r.suggestion_type) {
|
||||
self.download_attachment(
|
||||
dao,
|
||||
record,
|
||||
download_timer,
|
||||
|dao, record_id, suggestions| {
|
||||
dao.insert_exposure_suggestions(
|
||||
record_id,
|
||||
&r.suggestion_type,
|
||||
suggestions,
|
||||
)
|
||||
},
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn ingest_attachment<T>(
|
||||
fn download_attachment<T>(
|
||||
&self,
|
||||
dao: &mut SuggestDao,
|
||||
record: &Record,
|
||||
|
@ -633,6 +673,20 @@ where
|
|||
Err(_) => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn should_reprocess_record(&self, dao: &mut SuggestDao, record: &Record) -> Result<bool> {
|
||||
match &record.payload {
|
||||
SuggestRecord::Exposure(_) => {
|
||||
// Even though the record was previously ingested, its
|
||||
// suggestion wouldn't have been if it never matched the
|
||||
// provider constraints of any ingest. Return true if the
|
||||
// suggestion is not ingested. If the provider constraints of
|
||||
// the current ingest do match the suggestion, we'll ingest it.
|
||||
Ok(!dao.is_exposure_suggestion_ingested(&record.id)?)
|
||||
}
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Tracks changes in suggest records since the last ingestion
|
||||
|
@ -640,6 +694,7 @@ struct RecordChanges<'a> {
|
|||
new: Vec<&'a Record>,
|
||||
updated: Vec<&'a Record>,
|
||||
deleted: Vec<&'a IngestedRecord>,
|
||||
unchanged: Vec<&'a Record>,
|
||||
}
|
||||
|
||||
impl<'a> RecordChanges<'a> {
|
||||
|
@ -653,12 +708,15 @@ impl<'a> RecordChanges<'a> {
|
|||
// Remove existing records from ingested_map.
|
||||
let mut new = vec![];
|
||||
let mut updated = vec![];
|
||||
let mut unchanged = vec![];
|
||||
for r in current {
|
||||
match ingested_map.entry(r.id.as_str()) {
|
||||
Entry::Vacant(_) => new.push(r),
|
||||
Entry::Occupied(e) => {
|
||||
if e.remove().last_modified != r.last_modified {
|
||||
updated.push(r);
|
||||
} else {
|
||||
unchanged.push(r);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -669,6 +727,7 @@ impl<'a> RecordChanges<'a> {
|
|||
new,
|
||||
deleted,
|
||||
updated,
|
||||
unchanged,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -707,7 +766,13 @@ where
|
|||
);
|
||||
writer
|
||||
.write(|dao| {
|
||||
self.ingest_records(dao, ingest_record_type.collection(), changes, &mut timer)
|
||||
self.process_changes(
|
||||
dao,
|
||||
ingest_record_type.collection(),
|
||||
changes,
|
||||
&SuggestIngestionConstraints::default(),
|
||||
&mut timer,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
@ -1962,9 +2027,11 @@ mod tests {
|
|||
"weather",
|
||||
"weather-1",
|
||||
json!({
|
||||
"min_keyword_length": 3,
|
||||
"keywords": ["ab", "xyz", "weather"],
|
||||
"score": "0.24"
|
||||
"weather": {
|
||||
"min_keyword_length": 3,
|
||||
"keywords": ["ab", "xyz", "weather"],
|
||||
"score": "0.24"
|
||||
},
|
||||
}),
|
||||
));
|
||||
store.ingest(SuggestIngestionConstraints::all_providers());
|
||||
|
@ -2066,7 +2133,9 @@ mod tests {
|
|||
"configuration",
|
||||
"configuration-1",
|
||||
json!({
|
||||
"show_less_frequently_cap": 3,
|
||||
"configuration": {
|
||||
"show_less_frequently_cap": 3,
|
||||
},
|
||||
}),
|
||||
));
|
||||
store.ingest(SuggestIngestionConstraints::all_providers());
|
||||
|
@ -2119,9 +2188,11 @@ mod tests {
|
|||
"weather",
|
||||
"weather-1",
|
||||
json!({
|
||||
"min_keyword_length": 3,
|
||||
"keywords": ["weather"],
|
||||
"score": "0.24"
|
||||
"weather": {
|
||||
"min_keyword_length": 3,
|
||||
"keywords": ["weather"],
|
||||
"score": "0.24"
|
||||
},
|
||||
}),
|
||||
));
|
||||
store.ingest(SuggestIngestionConstraints::all_providers());
|
||||
|
@ -2410,4 +2481,587 @@ mod tests {
|
|||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    #[test]
    fn exposure_basic() -> anyhow::Result<()> {
        before_each();

        let store = TestStore::new(
            MockRemoteSettingsClient::default()
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-0",
                    Some(json!({
                        "suggestion_type": "aaa",
                    })),
                    Some(json!({
                        "keywords": [
                            "aaa keyword",
                            "both keyword",
                            ["common prefix", [" aaa"]],
                            ["choco", ["bo", "late"]],
                            ["dup", ["licate 1", "licate 2"]],
                        ],
                    })),
                )
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-1",
                    Some(json!({
                        "suggestion_type": "bbb",
                    })),
                    Some(json!({
                        "keywords": [
                            "bbb keyword",
                            "both keyword",
                            ["common prefix", [" bbb"]],
                        ],
                    })),
                ),
        );
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string(), "bbb".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        let no_matches = vec!["aaa", "both", "common prefi", "choc", "chocolate extra"];
        for query in &no_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "bbb"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "zzz"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz"])),
                vec![],
            );
        }

        let aaa_only_matches = vec![
            "aaa keyword",
            "common prefix a",
            "common prefix aa",
            "common prefix aaa",
            "choco",
            "chocob",
            "chocobo",
            "chocol",
            "chocolate",
            "dup",
            "dupl",
            "duplicate",
            "duplicate ",
            "duplicate 1",
            "duplicate 2",
        ];
        for query in &aaa_only_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb", "aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "zzz"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz", "aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz"])),
                vec![],
            );
        }

        let bbb_only_matches = vec![
            "bbb keyword",
            "common prefix b",
            "common prefix bb",
            "common prefix bbb",
        ];
        for query in &bbb_only_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb", "aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb", "zzz"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz", "bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz"])),
                vec![],
            );
        }

        let both_matches = vec!["both keyword", "common prefix", "common prefix "];
        for query in &both_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "bbb"])),
                vec![
                    Suggestion::Exposure {
                        suggestion_type: "aaa".into(),
                        score: 1.0,
                    },
                    Suggestion::Exposure {
                        suggestion_type: "bbb".into(),
                        score: 1.0,
                    },
                ],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb", "aaa"])),
                vec![
                    Suggestion::Exposure {
                        suggestion_type: "aaa".into(),
                        score: 1.0,
                    },
                    Suggestion::Exposure {
                        suggestion_type: "bbb".into(),
                        score: 1.0,
                    },
                ],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "zzz"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz", "aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["bbb", "zzz"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz", "bbb"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "bbb".into(),
                    score: 1.0,
                }],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa", "zzz", "bbb"])),
                vec![
                    Suggestion::Exposure {
                        suggestion_type: "aaa".into(),
                        score: 1.0,
                    },
                    Suggestion::Exposure {
                        suggestion_type: "bbb".into(),
                        score: 1.0,
                    },
                ],
            );
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["zzz"])),
                vec![],
            );
        }

        Ok(())
    }

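The `["prefix", [suffixes]]` keyword entries used above appear to expand into prefix-plus-partial-suffix matches (for example "choco", "chocob", ... "chocolate" all match, while "choc" and "chocolate extra" do not). A sketch of that matching rule as the fixtures exercise it; this is an inference from the test data, not the crate's code:

```rust
/// Sketch of how a query seems to match a `[prefix, [suffixes]]` entry in the
/// fixtures above: the query must start with the prefix, and the remainder
/// must itself be a prefix of one of the suffixes. Plain string entries
/// (e.g. "aaa keyword") appear to require an exact match instead.
fn query_matches_entry(query: &str, prefix: &str, suffixes: &[&str]) -> bool {
    match query.strip_prefix(prefix) {
        Some(rest) => suffixes.iter().any(|s| s.starts_with(rest)),
        None => false,
    }
}
```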
    #[test]
    fn exposure_spread_across_multiple_records() -> anyhow::Result<()> {
        before_each();

        let mut store = TestStore::new(
            MockRemoteSettingsClient::default()
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-0",
                    Some(json!({
                        "suggestion_type": "aaa",
                    })),
                    Some(json!({
                        "keywords": [
                            "record 0 keyword",
                            ["sug", ["gest"]],
                        ],
                    })),
                )
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-1",
                    Some(json!({
                        "suggestion_type": "aaa",
                    })),
                    Some(json!({
                        "keywords": [
                            "record 1 keyword",
                            ["sug", ["arplum"]],
                        ],
                    })),
                ),
        );
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        let matches = vec![
            "record 0 keyword",
            "sug",
            "sugg",
            "sugge",
            "sugges",
            "suggest",
            "record 1 keyword",
            "suga",
            "sugar",
            "sugarp",
            "sugarpl",
            "sugarplu",
            "sugarplum",
        ];
        for query in &matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
        }

        // Delete the first record.
        store
            .client_mut()
            .delete_record(Collection::Quicksuggest.name(), "exposure-0");
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        // Keywords from the second record should still return the suggestion.
        let record_1_matches = vec![
            "record 1 keyword",
            "sug",
            "suga",
            "sugar",
            "sugarp",
            "sugarpl",
            "sugarplu",
            "sugarplum",
        ];
        for query in &record_1_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["aaa"])),
                vec![Suggestion::Exposure {
                    suggestion_type: "aaa".into(),
                    score: 1.0,
                }],
            );
        }

        // Keywords from the first record should not return the suggestion.
        let record_0_matches = vec!["record 0 keyword", "sugg", "sugge", "sugges", "suggest"];
        for query in &record_0_matches {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, &["exposure-test"])),
                vec![]
            );
        }

        Ok(())
    }

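The point of the test above is that several records can contribute keywords to the same `suggestion_type`, and deleting one record only drops the keywords it contributed. A minimal sketch of that bookkeeping (simplified types, not the crate's implementation):

```rust
use std::collections::{HashMap, HashSet};

/// Sketch only: collect the matchable keywords per suggestion type as the
/// union of the keywords from every live record of that type.
fn keywords_by_type(records: &[(&str, &[&str])]) -> HashMap<String, HashSet<String>> {
    let mut map: HashMap<String, HashSet<String>> = HashMap::new();
    for (suggestion_type, keywords) in records {
        map.entry(suggestion_type.to_string())
            .or_default()
            .extend(keywords.iter().map(|k| k.to_string()));
    }
    map
}
```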
    #[test]
    fn exposure_ingest() -> anyhow::Result<()> {
        before_each();

        // Create suggestions with types "aaa" and "bbb".
        let store = TestStore::new(
            MockRemoteSettingsClient::default()
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-0",
                    Some(json!({
                        "suggestion_type": "aaa",
                    })),
                    Some(json!({
                        "keywords": ["aaa keyword", "both keyword"],
                    })),
                )
                .with_full_record(
                    "exposure-suggestions",
                    "exposure-1",
                    Some(json!({
                        "suggestion_type": "bbb",
                    })),
                    Some(json!({
                        "keywords": ["bbb keyword", "both keyword"],
                    })),
                ),
        );

        // Ingest but don't pass in any provider constraints. The records will
        // be ingested but their attachments won't be, so fetches shouldn't
        // return any suggestions.
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: None,
            ..SuggestIngestionConstraints::all_providers()
        });

        let ingest_1_queries = [
            ("aaa keyword", vec!["aaa"]),
            ("aaa keyword", vec!["bbb"]),
            ("aaa keyword", vec!["aaa", "bbb"]),
            ("bbb keyword", vec!["aaa"]),
            ("bbb keyword", vec!["bbb"]),
            ("bbb keyword", vec!["aaa", "bbb"]),
            ("both keyword", vec!["aaa"]),
            ("both keyword", vec!["bbb"]),
            ("both keyword", vec!["aaa", "bbb"]),
        ];
        for (query, types) in &ingest_1_queries {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, types)),
                vec![],
            );
        }

        // Ingest only the "bbb" suggestion. The "bbb" attachment should be
        // ingested, so "bbb" fetches should return the "bbb" suggestion.
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["bbb".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        let ingest_2_queries = [
            ("aaa keyword", vec!["aaa"], vec![]),
            ("aaa keyword", vec!["bbb"], vec![]),
            ("aaa keyword", vec!["aaa", "bbb"], vec![]),
            ("bbb keyword", vec!["aaa"], vec![]),
            ("bbb keyword", vec!["bbb"], vec!["bbb"]),
            ("bbb keyword", vec!["aaa", "bbb"], vec!["bbb"]),
            ("both keyword", vec!["aaa"], vec![]),
            ("both keyword", vec!["bbb"], vec!["bbb"]),
            ("both keyword", vec!["aaa", "bbb"], vec!["bbb"]),
        ];
        for (query, types, expected_types) in &ingest_2_queries {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, types)),
                expected_types
                    .iter()
                    .map(|t| Suggestion::Exposure {
                        suggestion_type: t.to_string(),
                        score: 1.0,
                    })
                    .collect::<Vec<Suggestion>>(),
            );
        }

        // Now ingest the "aaa" suggestion.
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        let ingest_3_queries = [
            ("aaa keyword", vec!["aaa"], vec!["aaa"]),
            ("aaa keyword", vec!["bbb"], vec![]),
            ("aaa keyword", vec!["aaa", "bbb"], vec!["aaa"]),
            ("bbb keyword", vec!["aaa"], vec![]),
            ("bbb keyword", vec!["bbb"], vec!["bbb"]),
            ("bbb keyword", vec!["aaa", "bbb"], vec!["bbb"]),
            ("both keyword", vec!["aaa"], vec!["aaa"]),
            ("both keyword", vec!["bbb"], vec!["bbb"]),
            ("both keyword", vec!["aaa", "bbb"], vec!["aaa", "bbb"]),
        ];
        for (query, types, expected_types) in &ingest_3_queries {
            assert_eq!(
                store.fetch_suggestions(SuggestionQuery::exposure(query, types)),
                expected_types
                    .iter()
                    .map(|t| Suggestion::Exposure {
                        suggestion_type: t.to_string(),
                        score: 1.0,
                    })
                    .collect::<Vec<Suggestion>>(),
            );
        }

        Ok(())
    }

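The behaviour this test pins down: exposure records themselves are always downloaded, but a record's attachment is only processed when the ingest's constraints name its `suggestion_type`. A hedged sketch of that gating (illustrative, not the crate's code):

```rust
/// Sketch only: whether an exposure record's attachment should be processed
/// for the current ingest, given the constraints that were passed in.
fn should_ingest_exposure_attachment(
    record_suggestion_type: &str,
    exposure_suggestion_types: Option<&[String]>,
) -> bool {
    exposure_suggestion_types
        .map(|types| types.iter().any(|t| t.as_str() == record_suggestion_type))
        .unwrap_or(false)
}
```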
    #[test]
    fn exposure_ingest_new_record() -> anyhow::Result<()> {
        before_each();

        // Create an exposure suggestion and ingest it.
        let mut store = TestStore::new(MockRemoteSettingsClient::default().with_full_record(
            "exposure-suggestions",
            "exposure-0",
            Some(json!({
                "suggestion_type": "aaa",
            })),
            Some(json!({
                "keywords": ["old keyword"],
            })),
        ));
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        // Add a new record of the same exposure type.
        store.client_mut().add_full_record(
            "exposure-suggestions",
            "exposure-1",
            Some(json!({
                "suggestion_type": "aaa",
            })),
            Some(json!({
                "keywords": ["new keyword"],
            })),
        );

        // Ingest, but don't ingest the exposure type. The store will download
        // the new record but shouldn't ingest its attachment.
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: None,
            ..SuggestIngestionConstraints::all_providers()
        });
        assert_eq!(
            store.fetch_suggestions(SuggestionQuery::exposure("new keyword", &["aaa"])),
            vec![],
        );

        // Ingest again with the exposure type. The new record will be
        // unchanged, but the store should now ingest its attachment.
        store.ingest(SuggestIngestionConstraints {
            providers: Some(vec![SuggestionProvider::Exposure]),
            provider_constraints: Some(SuggestionProviderConstraints {
                exposure_suggestion_types: Some(vec!["aaa".to_string()]),
            }),
            ..SuggestIngestionConstraints::all_providers()
        });

        // The keyword in the new attachment should match the suggestion,
        // confirming that the new record's attachment was ingested.
        assert_eq!(
            store.fetch_suggestions(SuggestionQuery::exposure("new keyword", &["aaa"])),
            vec![Suggestion::Exposure {
                suggestion_type: "aaa".to_string(),
                score: 1.0,
            }]
        );

        Ok(())
    }
}

@@ -35,6 +35,7 @@ enum SuggestionProvider {
    "Weather",
    "AmpMobile",
    "Fakespot",
    "Exposure",
};

[Enum]

@@ -107,16 +108,22 @@ interface Suggestion {
        string? icon_mimetype,
        f64 score
    );
    Exposure(
        string suggestion_type,
        f64 score
    );
};

dictionary SuggestionQuery {
    string keyword;
    sequence<SuggestionProvider> providers;
    SuggestionProviderConstraints? provider_constraints = null;
    i32? limit = null;
};

dictionary SuggestIngestionConstraints {
    sequence<SuggestionProvider>? providers = null;
    SuggestionProviderConstraints? provider_constraints = null;
    // Only ingest if the table `suggestions` is empty.
    //
    // This is intended to handle periodic updates. Consumers can schedule an ingest with

@@ -127,6 +134,15 @@ dictionary SuggestIngestionConstraints {
    boolean empty_only = false;
};

// Some providers manage multiple suggestion subtypes. Queries, ingests, and
// other operations on those providers must be constrained to a desired subtype.
dictionary SuggestionProviderConstraints {
    // `Exposure` provider - For each desired exposure suggestion type, this
    // should contain the value of the `suggestion_type` field of its remote
    // settings record(s).
    sequence<string>? exposure_suggestion_types = null;
};
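The dictionary above is what a consumer fills in to scope an exposure ingest or query to particular suggestion types. A Rust-side sketch of how that might look; field names follow the UDL and the tests in this patch, and the `Default` impl of `SuggestIngestionConstraints` is assumed (it is used elsewhere in this patch):

```rust
use suggest::{SuggestIngestionConstraints, SuggestionProvider, SuggestionProviderConstraints};

/// Sketch only: build ingestion constraints that select the `Exposure`
/// provider and a caller-supplied set of exposure suggestion types.
fn exposure_ingest_constraints(types: &[&str]) -> SuggestIngestionConstraints {
    SuggestIngestionConstraints {
        providers: Some(vec![SuggestionProvider::Exposure]),
        provider_constraints: Some(SuggestionProviderConstraints {
            exposure_suggestion_types: Some(types.iter().map(|t| t.to_string()).collect()),
        }),
        ..SuggestIngestionConstraints::default()
    }
}
```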

dictionary SuggestIngestionMetrics {
    // Samples for the `suggest.ingestion_time` metric
    sequence<LabeledTimingSample> ingestion_times;
@@ -92,6 +92,10 @@ pub enum Suggestion {
        icon_mimetype: Option<String>,
        score: f64,
    },
    Exposure {
        suggestion_type: String,
        score: f64,
    },
}

impl PartialOrd for Suggestion {

@@ -102,22 +106,9 @@ impl PartialOrd for Suggestion {

impl Ord for Suggestion {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        let a_score = match self {
            Suggestion::Amp { score, .. }
            | Suggestion::Pocket { score, .. }
            | Suggestion::Amo { score, .. } => score,
            Suggestion::Fakespot { score, .. } => score,
            _ => &DEFAULT_SUGGESTION_SCORE,
        };
        let b_score = match other {
            Suggestion::Amp { score, .. }
            | Suggestion::Pocket { score, .. }
            | Suggestion::Amo { score, .. } => score,
            Suggestion::Fakespot { score, .. } => score,
            _ => &DEFAULT_SUGGESTION_SCORE,
        };
        b_score
            .partial_cmp(a_score)
        other
            .score()
            .partial_cmp(&self.score())
            .unwrap_or(std::cmp::Ordering::Equal)
    }
}

@@ -160,9 +151,9 @@ impl Suggestion {
            | Self::Wikipedia { title, .. }
            | Self::Amo { title, .. }
            | Self::Yelp { title, .. }
            | Self::Mdn { title, .. } => title,
            Self::Weather { .. } => "weather",
            Self::Fakespot { title, .. } => title,
            | Self::Mdn { title, .. }
            | Self::Fakespot { title, .. } => title,
            _ => "untitled",
        }
    }

@@ -175,6 +166,20 @@ impl Suggestion {
            _ => None,
        }
    }

    pub fn score(&self) -> f64 {
        match self {
            Self::Amp { score, .. }
            | Self::Pocket { score, .. }
            | Self::Amo { score, .. }
            | Self::Yelp { score, .. }
            | Self::Mdn { score, .. }
            | Self::Weather { score, .. }
            | Self::Fakespot { score, .. }
            | Self::Exposure { score, .. } => *score,
            Self::Wikipedia { .. } => DEFAULT_SUGGESTION_SCORE,
        }
    }
}

#[cfg(test)]
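With scoring centralized in `score()`, the `Ord` impl above orders suggestions by descending score, and Wikipedia falls back to `DEFAULT_SUGGESTION_SCORE`. A small usage sketch, assuming the public `suggest::Suggestion` type:

```rust
use suggest::Suggestion;

/// Sorting relies on the `Ord` impl above: `cmp` compares `other.score()`
/// against `self.score()`, so higher-scored suggestions sort first.
fn sort_best_first(mut suggestions: Vec<Suggestion>) -> Vec<Suggestion> {
    suggestions.sort();
    suggestions
}
```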
@@ -12,6 +12,7 @@ use crate::{
    db::SuggestDao,
    error::Error,
    rs::{Client, Collection, Record, SuggestRecordId, SuggestRecordType},
    testing::JsonExt,
    Result,
};

@@ -71,6 +72,17 @@ impl MockRemoteSettingsClient {
        self
    }

    pub fn with_full_record(
        mut self,
        record_type: &str,
        record_id: &str,
        inline_data: Option<JsonValue>,
        items: Option<JsonValue>,
    ) -> Self {
        self.add_full_record(record_type, record_id, inline_data, items);
        self
    }

    // Non-Consuming Builder API, this is best for updating an existing client

    /// Add a record to the mock data

@@ -83,25 +95,7 @@ impl MockRemoteSettingsClient {
        record_id: &str,
        items: JsonValue,
    ) -> &mut Self {
        let location = format!("{record_type}-{record_id}.json");
        self.records.push(Record {
            id: SuggestRecordId::new(record_id.to_string()),
            collection: record_type_for_str(record_type).collection(),
            last_modified: self.last_modified_timestamp,
            attachment: Some(Attachment {
                filename: location.clone(),
                mimetype: "application/json".into(),
                hash: "".into(),
                size: 0,
                location: location.clone(),
            }),
            payload: serde_json::from_value(json!({"type": record_type})).unwrap(),
        });
        self.attachments.insert(
            location,
            serde_json::to_vec(&items).expect("error serializing attachment data"),
        );
        self
        self.add_full_record(record_type, record_id, None, Some(items))
    }

    /// Add a record for an icon to the mock data

@@ -133,14 +127,7 @@ impl MockRemoteSettingsClient {
        record_type: &str,
        record_id: &str,
    ) -> &mut Self {
        self.records.push(Record {
            id: SuggestRecordId::new(record_id.to_string()),
            collection: record_type_for_str(record_type).collection(),
            last_modified: self.last_modified_timestamp,
            attachment: None,
            payload: serde_json::from_value(json!({"type": record_type})).unwrap(),
        });
        self
        self.add_full_record(record_type, record_id, None, None)
    }

    /// Add a record to the mock data, with data stored inline rather than in an attachment

@@ -153,17 +140,44 @@ impl MockRemoteSettingsClient {
        record_id: &str,
        inline_data: JsonValue,
    ) -> &mut Self {
        self.add_full_record(record_type, record_id, Some(inline_data), None)
    }

    /// Add a record with optional extra fields stored inline and attachment
    /// items
    pub fn add_full_record(
        &mut self,
        record_type: &str,
        record_id: &str,
        inline_data: Option<JsonValue>,
        items: Option<JsonValue>,
    ) -> &mut Self {
        let location = format!("{record_type}-{record_id}.json");
        self.records.push(Record {
            id: SuggestRecordId::new(record_id.to_string()),
            collection: record_type_for_str(record_type).collection(),
            last_modified: self.last_modified_timestamp,
            payload: serde_json::from_value(json!({
                "type": record_type,
                record_type: inline_data,
            }))
            payload: serde_json::from_value(
                json!({
                    "type": record_type,
                })
                .merge(inline_data.unwrap_or(json!({}))),
            )
            .unwrap(),
            attachment: None,
            attachment: items.as_ref().map(|_| Attachment {
                filename: location.clone(),
                mimetype: "application/json".into(),
                hash: "".into(),
                size: 0,
                location: location.clone(),
            }),
        });
        if let Some(i) = items {
            self.attachments.insert(
                location,
                serde_json::to_vec(&i).expect("error serializing attachment data"),
            );
        }
        self
    }

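The refactor above funnels the older helpers through `add_full_record`, leaving two styles: a consuming builder for constructing a client and a non-consuming one for updating a client between ingests. A usage sketch (record ids and keywords are made up, and the `suggest::testing` path for the mock client is an assumption):

```rust
use serde_json::json;
use suggest::testing::MockRemoteSettingsClient;

/// Sketch only: `with_full_record` consumes the client while building it,
/// `add_full_record` mutates one you already have.
fn build_client() -> MockRemoteSettingsClient {
    let mut client = MockRemoteSettingsClient::default().with_full_record(
        "exposure-suggestions",
        "exposure-0",
        Some(json!({ "suggestion_type": "aaa" })),
        Some(json!({ "keywords": ["aaa keyword"] })),
    );
    client.add_full_record(
        "exposure-suggestions",
        "exposure-1",
        Some(json!({ "suggestion_type": "aaa" })),
        Some(json!({ "keywords": ["new keyword"] })),
    );
    client
}
```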
@@ -43,6 +43,7 @@ impl Suggestion {
            Self::Weather { score, .. } => score,
            Self::Wikipedia { .. } => panic!("with_score not valid for wikipedia suggestions"),
            Self::Fakespot { score, .. } => score,
            Self::Exposure { score, .. } => score,
        };
        *current_score = score;
        self