Bug 1757407 - Add l10nregistry-rs to mozilla-central; r=platform-i18n-reviewers,gregtatum,jfkthame,flod

This is a copy of git revision 9da98c6e5325a5f2997191727eaa82c0ebc2004c from
https://github.com/mozilla/l10nregistry-rs.

Differential Revision: https://phabricator.services.mozilla.com/D139850
Dan Minor 2022-03-25 20:39:43 +00:00
Parent d710062933
Commit 80a80ab927
30 changed files: 4373 additions and 0 deletions

intl/l10n/rust/l10nregistry-rs/.gitignore (vendored, normal file)

@@ -0,0 +1,2 @@
/target
Cargo.lock


@@ -0,0 +1,78 @@
[package]
name = "l10nregistry"
version = "0.3.0"
authors = ["Zibi Braniecki <gandalf@mozilla.com>"]
license = "Apache-2.0/MIT"
edition = "2018"
[dependencies]
async-trait = "0.1"
fluent-bundle = "0.15.2"
fluent-fallback = "0.6.0"
fluent-testing = { version = "0.0.2", optional = true, features = ["sync", "async"] }
futures = "0.3"
pin-project-lite = "0.2"
unic-langid = "0.9"
tokio = { version = "1.0", optional = true, features = ["rt-multi-thread", "macros"] }
replace_with = "0.1"
rustc-hash = "1"
[dev-dependencies]
unic-langid = { version = "0.9", features = ["macros"] }
serial_test = "0.5"
criterion = "0.3"
[features]
default = []
tokio-io = ["tokio"]
test-fluent = []
[[bench]]
name = "preferences"
harness = false
required-features = ["tokio", "test-fluent"]
[[bench]]
name = "localization"
harness = false
required-features = ["tokio", "test-fluent"]
[[bench]]
name = "source"
harness = false
required-features = ["tokio", "test-fluent"]
[[bench]]
name = "solver"
harness = false
required-features = ["tokio", "test-fluent"]
[[bench]]
name = "registry"
harness = false
required-features = ["tokio", "test-fluent"]
[[test]]
name = "source"
path = "tests/source.rs"
required-features = ["tokio", "test-fluent"]
[[test]]
name = "registry"
path = "tests/registry.rs"
required-features = ["tokio", "test-fluent"]
[[test]]
name = "localization"
path = "tests/localization.rs"
required-features = ["tokio", "test-fluent"]
[[test]]
name = "scenarios_sync"
path = "tests/scenarios_sync.rs"
required-features = ["test-fluent"]
[[test]]
name = "scenarios_async"
path = "tests/scenarios_async.rs"
required-features = ["tokio", "test-fluent"]


@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2017 Mozilla
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,19 @@
Copyright 2017 Mozilla
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,70 @@
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use fluent_bundle::FluentArgs;
use fluent_fallback::{types::L10nKey, Localization};
use fluent_testing::get_scenarios;
use l10nregistry::testing::TestFileFetcher;
fn preferences_bench(c: &mut Criterion) {
let fetcher = TestFileFetcher::new();
let mut group = c.benchmark_group("localization/scenarios");
for scenario in get_scenarios() {
let res_ids = scenario.res_ids.clone();
let l10n_keys: Vec<(String, Option<FluentArgs>)> = scenario
.queries
.iter()
.map(|q| {
(
q.input.id.clone(),
q.input.args.as_ref().map(|args| {
let mut result = FluentArgs::new();
for arg in args.as_slice() {
result.set(arg.id.clone(), arg.value.clone());
}
result
}),
)
})
.collect();
group.bench_function(format!("{}/format_value_sync", scenario.name), |b| {
b.iter(|| {
let (env, reg) = fetcher.get_registry_and_environment(&scenario);
let mut errors = vec![];
let loc = Localization::with_env(res_ids.clone(), true, env.clone(), reg.clone());
let bundles = loc.bundles();
for key in l10n_keys.iter() {
bundles.format_value_sync(&key.0, key.1.as_ref(), &mut errors);
}
})
});
let keys: Vec<L10nKey> = l10n_keys
.into_iter()
.map(|key| L10nKey {
id: key.0.into(),
args: key.1,
})
.collect();
group.bench_function(format!("{}/format_messages_sync", scenario.name), |b| {
b.iter(|| {
let (env, reg) = fetcher.get_registry_and_environment(&scenario);
let mut errors = vec![];
let loc = Localization::with_env(res_ids.clone(), true, env.clone(), reg.clone());
let bundles = loc.bundles();
bundles.format_messages_sync(&keys, &mut errors);
})
});
}
group.finish();
}
criterion_group!(benches, preferences_bench);
criterion_main!(benches);


@@ -0,0 +1,65 @@
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use fluent_testing::get_scenarios;
use l10nregistry::testing::TestFileFetcher;
use unic_langid::LanguageIdentifier;
fn preferences_bench(c: &mut Criterion) {
let fetcher = TestFileFetcher::new();
let mut group = c.benchmark_group("registry/scenarios");
for scenario in get_scenarios() {
let res_ids = scenario.res_ids.clone();
let locales: Vec<LanguageIdentifier> = scenario
.locales
.iter()
.map(|l| l.parse().unwrap())
.collect();
group.bench_function(format!("{}/sync/first_bundle", scenario.name), |b| {
b.iter(|| {
let reg = fetcher.get_registry(&scenario);
let mut bundles =
reg.generate_bundles_sync(locales.clone().into_iter(), res_ids.clone());
for _ in 0..locales.len() {
if bundles.next().is_some() {
break;
}
}
})
});
#[cfg(feature = "tokio")]
{
use futures::stream::StreamExt;
let rt = tokio::runtime::Runtime::new().unwrap();
group.bench_function(&format!("{}/async/first_bundle", scenario.name), |b| {
b.iter(|| {
rt.block_on(async {
let reg = fetcher.get_registry(&scenario);
let mut bundles =
reg.generate_bundles(locales.clone().into_iter(), res_ids.clone());
for _ in 0..locales.len() {
if bundles.next().await.is_some() {
break;
}
}
});
})
});
}
}
group.finish();
}
criterion_group!(benches, preferences_bench);
criterion_main!(benches);


@@ -0,0 +1,133 @@
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use futures::stream::StreamExt;
use l10nregistry::source::ResourceId;
use l10nregistry::testing::{FileSource, RegistrySetup, TestFileFetcher};
use unic_langid::LanguageIdentifier;
fn get_paths() -> Vec<ResourceId> {
let paths: Vec<&'static str> = vec![
"branding/brand.ftl",
"browser/sanitize.ftl",
"browser/preferences/blocklists.ftl",
"browser/preferences/colors.ftl",
"browser/preferences/selectBookmark.ftl",
"browser/preferences/connection.ftl",
"browser/preferences/addEngine.ftl",
"browser/preferences/siteDataSettings.ftl",
"browser/preferences/fonts.ftl",
"browser/preferences/languages.ftl",
"browser/preferences/preferences.ftl",
"security/certificates/certManager.ftl",
"security/certificates/deviceManager.ftl",
"toolkit/global/textActions.ftl",
"toolkit/printing/printUI.ftl",
"toolkit/updates/history.ftl",
"toolkit/featuregates/features.ftl",
];
paths.into_iter().map(ResourceId::from).collect()
}
fn registry_bench(c: &mut Criterion) {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let mut group = c.benchmark_group("non-metasource");
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
group.bench_function(&format!("serial",), |b| {
b.iter(|| {
let lang_ids = vec![en_us.clone()];
let mut i = reg.generate_bundles_sync(lang_ids.into_iter(), get_paths());
while let Some(_) = i.next() {}
})
});
let rt = tokio::runtime::Runtime::new().unwrap();
group.bench_function(&format!("parallel",), |b| {
b.iter(|| {
let lang_ids = vec![en_us.clone()];
let mut i = reg.generate_bundles(lang_ids.into_iter(), get_paths());
rt.block_on(async { while let Some(_) = i.next().await {} });
})
});
group.finish();
}
fn registry_metasource_bench(c: &mut Criterion) {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let mut group = c.benchmark_group("metasource");
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new(
"toolkit",
Some("app"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("app"),
vec![en_us.clone()],
"browser/{locale}/",
),
FileSource::new(
"toolkit",
Some("langpack"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("langpack"),
vec![en_us.clone()],
"browser/{locale}/",
),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
group.bench_function(&format!("serial",), |b| {
b.iter(|| {
let lang_ids = vec![en_us.clone()];
let mut i = reg.generate_bundles_sync(lang_ids.into_iter(), get_paths());
while let Some(_) = i.next() {}
})
});
let rt = tokio::runtime::Runtime::new().unwrap();
group.bench_function(&format!("parallel",), |b| {
b.iter(|| {
let lang_ids = vec![en_us.clone()];
let mut i = reg.generate_bundles(lang_ids.into_iter(), get_paths());
rt.block_on(async { while let Some(_) = i.next().await {} });
})
});
group.finish();
}
criterion_group!(
name = benches;
config = Criterion::default().sample_size(10);
targets = registry_bench, registry_metasource_bench
);
criterion_main!(benches);


@@ -0,0 +1,120 @@
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use futures::stream::Collect;
use futures::stream::FuturesOrdered;
use futures::StreamExt;
use l10nregistry::solver::testing::get_scenarios;
use l10nregistry::solver::{AsyncTester, ParallelProblemSolver, SerialProblemSolver, SyncTester};
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
pub struct MockTester {
values: Vec<Vec<bool>>,
}
impl SyncTester for MockTester {
fn test_sync(&self, res_idx: usize, source_idx: usize) -> bool {
self.values[res_idx][source_idx]
}
}
pub struct SingleTestResult(bool);
impl Future for SingleTestResult {
type Output = bool;
fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
self.0.into()
}
}
pub type ResourceSetStream = Collect<FuturesOrdered<SingleTestResult>, Vec<bool>>;
pub struct TestResult(ResourceSetStream);
impl std::marker::Unpin for TestResult {}
impl Future for TestResult {
type Output = Vec<bool>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let pinned = Pin::new(&mut self.0);
pinned.poll(cx)
}
}
impl AsyncTester for MockTester {
type Result = TestResult;
fn test_async(&self, query: Vec<(usize, usize)>) -> Self::Result {
let futures = query
.into_iter()
.map(|(res_idx, source_idx)| SingleTestResult(self.test_sync(res_idx, source_idx)))
.collect::<Vec<_>>();
TestResult(futures.into_iter().collect::<FuturesOrdered<_>>().collect())
}
}
struct TestStream<'t> {
solver: ParallelProblemSolver<MockTester>,
tester: &'t MockTester,
}
impl<'t> TestStream<'t> {
pub fn new(solver: ParallelProblemSolver<MockTester>, tester: &'t MockTester) -> Self {
Self { solver, tester }
}
}
impl<'t> futures::stream::Stream for TestStream<'t> {
type Item = Vec<usize>;
fn poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Option<Self::Item>> {
let tester = self.tester;
let solver = &mut self.solver;
let pinned = std::pin::Pin::new(solver);
pinned
.try_poll_next(cx, tester, false)
.map(|v| v.ok().flatten())
}
}
fn solver_bench(c: &mut Criterion) {
let scenarios = get_scenarios();
let mut group = c.benchmark_group("solver");
for scenario in scenarios {
let tester = MockTester {
values: scenario.values.clone(),
};
group.bench_function(&format!("serial/{}", &scenario.name), |b| {
b.iter(|| {
let mut gen = SerialProblemSolver::new(scenario.width, scenario.depth);
while let Ok(Some(_)) = gen.try_next(&tester, false) {}
})
});
{
let rt = tokio::runtime::Runtime::new().unwrap();
group.bench_function(&format!("parallel/{}", &scenario.name), |b| {
b.iter(|| {
let gen = ParallelProblemSolver::new(scenario.width, scenario.depth);
let mut t = TestStream::new(gen, &tester);
rt.block_on(async { while let Some(_) = t.next().await {} });
})
});
}
}
group.finish();
}
criterion_group!(benches, solver_bench);
criterion_main!(benches);


@@ -0,0 +1,60 @@
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use fluent_testing::get_scenarios;
use l10nregistry::testing::TestFileFetcher;
use unic_langid::LanguageIdentifier;
fn get_locales<S>(input: &[S]) -> Vec<LanguageIdentifier>
where
S: AsRef<str>,
{
input.iter().map(|s| s.as_ref().parse().unwrap()).collect()
}
fn source_bench(c: &mut Criterion) {
let fetcher = TestFileFetcher::new();
let mut group = c.benchmark_group("source/scenarios");
for scenario in get_scenarios() {
let res_ids = scenario.res_ids.clone();
let locales: Vec<LanguageIdentifier> = get_locales(&scenario.locales);
let sources: Vec<_> = scenario
.file_sources
.iter()
.map(|s| {
fetcher.get_test_file_source(&s.name, None, get_locales(&s.locales), &s.path_scheme)
})
.collect();
group.bench_function(format!("{}/has_file", scenario.name), |b| {
b.iter(|| {
for source in &sources {
for res_id in &res_ids {
source.has_file(&locales[0], &res_id);
}
}
})
});
group.bench_function(format!("{}/sync/fetch_file_sync", scenario.name), |b| {
b.iter(|| {
for source in &sources {
for res_id in &res_ids {
source.fetch_file_sync(&locales[0], &res_id, false);
}
}
})
});
}
group.finish();
}
criterion_group!(benches, source_bench);
criterion_main!(benches);


@@ -0,0 +1,5 @@
use crate::errors::L10nRegistryError;
pub trait ErrorReporter {
fn report_errors(&self, errors: Vec<L10nRegistryError>);
}


@@ -0,0 +1,74 @@
use fluent_bundle::FluentError;
use fluent_fallback::types::ResourceId;
use std::error::Error;
use unic_langid::LanguageIdentifier;
#[derive(Debug, Clone, PartialEq)]
pub enum L10nRegistryError {
FluentError {
resource_id: ResourceId,
loc: Option<(usize, usize)>,
error: FluentError,
},
MissingResource {
locale: LanguageIdentifier,
resource_id: ResourceId,
},
}
impl std::fmt::Display for L10nRegistryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::MissingResource {
locale,
resource_id,
} => {
write!(
f,
"Missing resource in locale {}: {}",
locale, resource_id.value
)
}
Self::FluentError {
resource_id,
loc,
error,
} => {
if let Some(loc) = loc {
write!(
f,
"Fluent Error in {}[line: {}, col: {}]: {}",
resource_id.value, loc.0, loc.1, error
)
} else {
write!(f, "Fluent Error in {}: {}", resource_id.value, error)
}
}
}
}
}
impl Error for L10nRegistryError {}
#[derive(Debug, Clone, PartialEq)]
pub enum L10nRegistrySetupError {
RegistryLocked,
DuplicatedSource { name: String },
MissingSource { name: String },
}
impl std::fmt::Display for L10nRegistrySetupError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::RegistryLocked => write!(f, "Can't modify a registry when locked."),
Self::DuplicatedSource { name } => {
write!(f, "Source with a name {} is already registered.", &name)
}
Self::MissingSource { name } => {
write!(f, "Cannot find a source with a name {}.", &name)
}
}
}
}
impl Error for L10nRegistrySetupError {}


@@ -0,0 +1,5 @@
use fluent_bundle::FluentBundle as FluentBundleBase;
pub use fluent_bundle::{FluentError, FluentResource};
use std::rc::Rc;
pub type FluentBundle = FluentBundleBase<Rc<FluentResource>>;


@@ -0,0 +1,8 @@
pub mod env;
pub mod errors;
pub mod fluent;
pub mod registry;
pub mod solver;
pub mod source;
#[cfg(feature = "test-fluent")]
pub mod testing;


@@ -0,0 +1,240 @@
use std::{
pin::Pin,
task::{Context, Poll},
};
use super::{BundleAdapter, L10nRegistry, L10nRegistryLocked};
use crate::solver::{AsyncTester, ParallelProblemSolver};
use crate::{
env::ErrorReporter,
errors::L10nRegistryError,
fluent::{FluentBundle, FluentError},
source::{ResourceOption, ResourceStatus},
};
use fluent_fallback::{generator::BundleStream, types::ResourceId};
use futures::{
stream::{Collect, FuturesOrdered},
Stream, StreamExt,
};
use std::future::Future;
use unic_langid::LanguageIdentifier;
impl<'a, B> L10nRegistryLocked<'a, B> {}
impl<P, B> L10nRegistry<P, B>
where
P: Clone,
B: Clone,
{
pub fn generate_bundles_for_lang(
&self,
langid: LanguageIdentifier,
resource_ids: Vec<ResourceId>,
) -> GenerateBundles<P, B> {
let lang_ids = vec![langid];
GenerateBundles::new(self.clone(), lang_ids.into_iter(), resource_ids)
}
pub fn generate_bundles(
&self,
locales: std::vec::IntoIter<LanguageIdentifier>,
resource_ids: Vec<ResourceId>,
) -> GenerateBundles<P, B> {
GenerateBundles::new(self.clone(), locales, resource_ids)
}
}
enum State<P, B> {
Empty,
Locale(LanguageIdentifier),
Solver {
locale: LanguageIdentifier,
solver: ParallelProblemSolver<GenerateBundles<P, B>>,
},
}
impl<P, B> Default for State<P, B> {
fn default() -> Self {
Self::Empty
}
}
impl<P, B> State<P, B> {
fn get_locale(&self) -> &LanguageIdentifier {
match self {
Self::Locale(locale) => locale,
Self::Solver { locale, .. } => locale,
Self::Empty => unreachable!(),
}
}
fn take_solver(&mut self) -> ParallelProblemSolver<GenerateBundles<P, B>> {
replace_with::replace_with_or_default_and_return(self, |self_| match self_ {
Self::Solver { locale, solver } => (solver, Self::Locale(locale)),
_ => unreachable!(),
})
}
fn put_back_solver(&mut self, solver: ParallelProblemSolver<GenerateBundles<P, B>>) {
replace_with::replace_with_or_default(self, |self_| match self_ {
Self::Locale(locale) => Self::Solver { locale, solver },
_ => unreachable!(),
})
}
}
pub struct GenerateBundles<P, B> {
reg: L10nRegistry<P, B>,
locales: std::vec::IntoIter<LanguageIdentifier>,
current_metasource: usize,
resource_ids: Vec<ResourceId>,
state: State<P, B>,
}
impl<P, B> GenerateBundles<P, B> {
fn new(
reg: L10nRegistry<P, B>,
locales: std::vec::IntoIter<LanguageIdentifier>,
resource_ids: Vec<ResourceId>,
) -> Self {
Self {
reg,
locales,
current_metasource: 0,
resource_ids,
state: State::Empty,
}
}
}
pub type ResourceSetStream = Collect<FuturesOrdered<ResourceStatus>, Vec<ResourceOption>>;
pub struct TestResult(ResourceSetStream);
impl std::marker::Unpin for TestResult {}
impl Future for TestResult {
type Output = Vec<bool>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let pinned = Pin::new(&mut self.0);
pinned
.poll(cx)
.map(|set| set.iter().map(|c| !c.is_required_and_missing()).collect())
}
}
impl<'l, P, B> AsyncTester for GenerateBundles<P, B> {
type Result = TestResult;
fn test_async(&self, query: Vec<(usize, usize)>) -> Self::Result {
let locale = self.state.get_locale();
let lock = self.reg.lock();
let stream = query
.iter()
.map(|(res_idx, source_idx)| {
let resource_id = &self.resource_ids[*res_idx];
lock.source_idx(self.current_metasource, *source_idx)
.fetch_file(locale, resource_id)
})
.collect::<FuturesOrdered<_>>();
TestResult(stream.collect::<_>())
}
}
#[async_trait::async_trait(?Send)]
impl<P, B> BundleStream for GenerateBundles<P, B> {
async fn prefetch_async(&mut self) {
todo!();
}
}
macro_rules! try_next_metasource {
( $self:ident ) => {{
if $self.current_metasource > 0 {
$self.current_metasource -= 1;
let solver = ParallelProblemSolver::new(
$self.resource_ids.len(),
$self.reg.lock().metasource_len($self.current_metasource),
);
$self.state = State::Solver {
locale: $self.state.get_locale().clone(),
solver,
};
continue;
}
}};
}
impl<P, B> Stream for GenerateBundles<P, B>
where
P: ErrorReporter,
B: BundleAdapter,
{
type Item = Result<FluentBundle, (FluentBundle, Vec<FluentError>)>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
loop {
if let State::Solver { .. } = self.state {
let mut solver = self.state.take_solver();
let pinned_solver = Pin::new(&mut solver);
match pinned_solver.try_poll_next(cx, &self, false) {
std::task::Poll::Ready(order) => match order {
Ok(Some(order)) => {
let locale = self.state.get_locale();
let bundle = self.reg.lock().bundle_from_order(
self.current_metasource,
locale.clone(),
&order,
&self.resource_ids,
&self.reg.shared.provider,
);
self.state.put_back_solver(solver);
if bundle.is_some() {
return bundle.into();
} else {
continue;
}
}
Ok(None) => {
try_next_metasource!(self);
self.state = State::Empty;
continue;
}
Err(idx) => {
try_next_metasource!(self);
// Only signal an error if we run out of metasources
// to try.
self.reg.shared.provider.report_errors(vec![
L10nRegistryError::MissingResource {
locale: self.state.get_locale().clone(),
resource_id: self.resource_ids[idx].clone(),
},
]);
self.state = State::Empty;
continue;
}
},
std::task::Poll::Pending => {
self.state.put_back_solver(solver);
return std::task::Poll::Pending;
}
}
} else if let Some(locale) = self.locales.next() {
if self.reg.lock().number_of_metasources() == 0 {
return None.into();
}
let number_of_metasources = self.reg.lock().number_of_metasources() - 1;
self.current_metasource = number_of_metasources;
let solver = ParallelProblemSolver::new(
self.resource_ids.len(),
self.reg.lock().metasource_len(self.current_metasource),
);
self.state = State::Solver { locale, solver };
} else {
return None.into();
}
}
}
}


@@ -0,0 +1,267 @@
mod asynchronous;
mod synchronous;
use std::{
cell::{Ref, RefCell},
collections::HashSet,
rc::Rc,
};
use crate::errors::L10nRegistrySetupError;
use crate::source::{FileSource, ResourceId};
use crate::env::ErrorReporter;
use crate::fluent::FluentBundle;
use fluent_bundle::FluentResource;
use fluent_fallback::generator::BundleGenerator;
use unic_langid::LanguageIdentifier;
pub use asynchronous::GenerateBundles;
pub use synchronous::GenerateBundlesSync;
pub type FluentResourceSet = Vec<Rc<FluentResource>>;
#[derive(Default)]
struct Shared<P, B> {
sources: RefCell<Vec<Vec<FileSource>>>,
provider: P,
bundle_adapter: Option<B>,
}
pub struct L10nRegistryLocked<'a, B> {
lock: Ref<'a, Vec<Vec<FileSource>>>,
bundle_adapter: Option<&'a B>,
}
impl<'a, B> L10nRegistryLocked<'a, B> {
pub fn iter(&self, metasource: usize) -> impl Iterator<Item = &FileSource> {
self.lock
.get(metasource)
.expect("Index out-of-range")
.iter()
}
pub fn number_of_metasources(&self) -> usize {
self.lock.len()
}
pub fn metasource_len(&self, metasource: usize) -> usize {
self.lock.get(metasource).expect("Index out-of-range").len()
}
pub fn source_idx(&self, metasource: usize, index: usize) -> &FileSource {
let source_idx = self.metasource_len(metasource) - 1 - index;
self.lock[metasource]
.get(source_idx)
.expect("Index out-of-range")
}
pub fn get_source(&self, metasource: usize, name: &str) -> Option<&FileSource> {
self.lock
.get(metasource)
.expect("Index out-of-range")
.iter()
.find(|&source| source.name == name)
}
pub fn generate_sources_for_file<'l>(
&'l self,
metasource: usize,
langid: &'l LanguageIdentifier,
resource_id: &'l ResourceId,
) -> impl Iterator<Item = &FileSource> {
self.iter(metasource)
.filter(move |source| source.has_file(langid, resource_id) != Some(false))
}
}
pub trait BundleAdapter {
fn adapt_bundle(&self, bundle: &mut FluentBundle);
}
#[derive(Clone)]
pub struct L10nRegistry<P, B> {
shared: Rc<Shared<P, B>>,
}
impl<P, B> L10nRegistry<P, B> {
pub fn with_provider(provider: P) -> Self {
Self {
shared: Rc::new(Shared {
sources: Default::default(),
provider,
bundle_adapter: None,
}),
}
}
pub fn set_adapt_bundle(&mut self, bundle_adapter: B) -> Result<(), L10nRegistrySetupError>
where
B: BundleAdapter,
{
let shared = Rc::get_mut(&mut self.shared).ok_or(L10nRegistrySetupError::RegistryLocked)?;
shared.bundle_adapter = Some(bundle_adapter);
Ok(())
}
pub fn lock(&self) -> L10nRegistryLocked<'_, B> {
L10nRegistryLocked {
lock: self.shared.sources.borrow(),
bundle_adapter: self.shared.bundle_adapter.as_ref(),
}
}
pub fn register_sources(
&self,
new_sources: Vec<FileSource>,
) -> Result<(), L10nRegistrySetupError> {
let mut sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
for new_source in new_sources {
if let Some(metasource) = sources
.iter_mut()
.find(|source| source[0].metasource == new_source.metasource)
{
metasource.push(new_source);
} else {
sources.push(vec![new_source]);
}
}
Ok(())
}
pub fn update_sources(
&self,
upd_sources: Vec<FileSource>,
) -> Result<(), L10nRegistrySetupError> {
let mut sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
for upd_source in upd_sources {
if let Some(metasource) = sources
.iter_mut()
.find(|source| source[0].metasource == upd_source.metasource)
{
if let Some(idx) = metasource.iter().position(|source| *source == upd_source) {
*metasource.get_mut(idx).unwrap() = upd_source;
} else {
return Err(L10nRegistrySetupError::MissingSource {
name: upd_source.name,
});
}
}
}
Ok(())
}
pub fn remove_sources<S>(&self, del_sources: Vec<S>) -> Result<(), L10nRegistrySetupError>
where
S: ToString,
{
let mut sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
let del_sources: Vec<String> = del_sources.into_iter().map(|s| s.to_string()).collect();
for metasource in sources.iter_mut() {
metasource.retain(|source| !del_sources.contains(&source.name));
}
sources.retain(|metasource| !metasource.is_empty());
Ok(())
}
pub fn clear_sources(&self) -> Result<(), L10nRegistrySetupError> {
let mut sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
sources.clear();
Ok(())
}
pub fn get_source_names(&self) -> Result<Vec<String>, L10nRegistrySetupError> {
let sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
Ok(sources.iter().flatten().map(|s| s.name.clone()).collect())
}
pub fn has_source(&self, name: &str) -> Result<bool, L10nRegistrySetupError> {
let sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
Ok(sources.iter().flatten().any(|source| source.name == name))
}
pub fn get_source(&self, name: &str) -> Result<Option<FileSource>, L10nRegistrySetupError> {
let sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
Ok(sources
.iter()
.flatten()
.find(|source| source.name == name)
.cloned())
}
pub fn get_available_locales(&self) -> Result<Vec<LanguageIdentifier>, L10nRegistrySetupError> {
let sources = self
.shared
.sources
.try_borrow_mut()
.map_err(|_| L10nRegistrySetupError::RegistryLocked)?;
let mut result = HashSet::new();
for source in sources.iter().flatten() {
for locale in source.locales() {
result.insert(locale);
}
}
Ok(result.into_iter().map(|l| l.to_owned()).collect())
}
}
impl<P, B> BundleGenerator for L10nRegistry<P, B>
where
P: ErrorReporter + Clone,
B: BundleAdapter + Clone,
{
type Resource = Rc<FluentResource>;
type Iter = GenerateBundlesSync<P, B>;
type Stream = GenerateBundles<P, B>;
type LocalesIter = std::vec::IntoIter<LanguageIdentifier>;
fn bundles_iter(
&self,
locales: Self::LocalesIter,
resource_ids: Vec<ResourceId>,
) -> Self::Iter {
let resource_ids = resource_ids.into_iter().collect();
self.generate_bundles_sync(locales, resource_ids)
}
fn bundles_stream(
&self,
locales: Self::LocalesIter,
resource_ids: Vec<ResourceId>,
) -> Self::Stream {
let resource_ids = resource_ids.into_iter().collect();
self.generate_bundles(locales, resource_ids)
}
}


@@ -0,0 +1,272 @@
use super::{BundleAdapter, L10nRegistry, L10nRegistryLocked};
use crate::env::ErrorReporter;
use crate::errors::L10nRegistryError;
use crate::fluent::{FluentBundle, FluentError};
use crate::solver::{SerialProblemSolver, SyncTester};
use crate::source::ResourceOption;
use fluent_fallback::{generator::BundleIterator, types::ResourceId};
use unic_langid::LanguageIdentifier;
impl<'a, B> L10nRegistryLocked<'a, B> {
pub(crate) fn bundle_from_order<P>(
&self,
metasource: usize,
locale: LanguageIdentifier,
source_order: &[usize],
resource_ids: &[ResourceId],
error_reporter: &P,
) -> Option<Result<FluentBundle, (FluentBundle, Vec<FluentError>)>>
where
P: ErrorReporter,
B: BundleAdapter,
{
let mut bundle = FluentBundle::new(vec![locale.clone()]);
if let Some(bundle_adapter) = self.bundle_adapter {
bundle_adapter.adapt_bundle(&mut bundle);
}
let mut errors = vec![];
for (&source_idx, resource_id) in source_order.iter().zip(resource_ids.iter()) {
let source = self.source_idx(metasource, source_idx);
if let ResourceOption::Some(res) =
source.fetch_file_sync(&locale, resource_id, /* overload */ true)
{
if source.options.allow_override {
bundle.add_resource_overriding(res);
} else if let Err(err) = bundle.add_resource(res) {
errors.extend(err.into_iter().map(|error| L10nRegistryError::FluentError {
resource_id: resource_id.clone(),
loc: None,
error,
}));
}
} else if resource_id.is_required() {
return None;
}
}
if !errors.is_empty() {
error_reporter.report_errors(errors);
}
Some(Ok(bundle))
}
}
impl<P, B> L10nRegistry<P, B>
where
P: Clone,
B: Clone,
{
pub fn generate_bundles_for_lang_sync(
&self,
langid: LanguageIdentifier,
resource_ids: Vec<ResourceId>,
) -> GenerateBundlesSync<P, B> {
let lang_ids = vec![langid];
GenerateBundlesSync::new(self.clone(), lang_ids.into_iter(), resource_ids)
}
pub fn generate_bundles_sync(
&self,
locales: std::vec::IntoIter<LanguageIdentifier>,
resource_ids: Vec<ResourceId>,
) -> GenerateBundlesSync<P, B> {
GenerateBundlesSync::new(self.clone(), locales, resource_ids)
}
}
enum State {
Empty,
Locale(LanguageIdentifier),
Solver {
locale: LanguageIdentifier,
solver: SerialProblemSolver,
},
}
impl Default for State {
fn default() -> Self {
Self::Empty
}
}
impl State {
fn get_locale(&self) -> &LanguageIdentifier {
match self {
Self::Locale(locale) => locale,
Self::Solver { locale, .. } => locale,
Self::Empty => unreachable!(),
}
}
fn take_solver(&mut self) -> SerialProblemSolver {
replace_with::replace_with_or_default_and_return(self, |self_| match self_ {
Self::Solver { locale, solver } => (solver, Self::Locale(locale)),
_ => unreachable!(),
})
}
fn put_back_solver(&mut self, solver: SerialProblemSolver) {
replace_with::replace_with_or_default(self, |self_| match self_ {
Self::Locale(locale) => Self::Solver { locale, solver },
_ => unreachable!(),
})
}
}
pub struct GenerateBundlesSync<P, B> {
reg: L10nRegistry<P, B>,
locales: std::vec::IntoIter<LanguageIdentifier>,
current_metasource: usize,
resource_ids: Vec<ResourceId>,
state: State,
}
impl<P, B> GenerateBundlesSync<P, B> {
fn new(
reg: L10nRegistry<P, B>,
locales: std::vec::IntoIter<LanguageIdentifier>,
resource_ids: Vec<ResourceId>,
) -> Self {
Self {
reg,
locales,
current_metasource: 0,
resource_ids,
state: State::Empty,
}
}
}
impl<P, B> SyncTester for GenerateBundlesSync<P, B> {
fn test_sync(&self, res_idx: usize, source_idx: usize) -> bool {
let locale = self.state.get_locale();
let resource_id = &self.resource_ids[res_idx];
!self
.reg
.lock()
.source_idx(self.current_metasource, source_idx)
.fetch_file_sync(locale, resource_id, /* overload */ true)
.is_required_and_missing()
}
}
impl<P, B> BundleIterator for GenerateBundlesSync<P, B>
where
P: ErrorReporter,
{
fn prefetch_sync(&mut self) {
if let State::Solver { .. } = self.state {
let mut solver = self.state.take_solver();
if let Err(idx) = solver.try_next(self, true) {
self.reg
.shared
.provider
.report_errors(vec![L10nRegistryError::MissingResource {
locale: self.state.get_locale().clone(),
resource_id: self.resource_ids[idx].clone(),
}]);
}
self.state.put_back_solver(solver);
return;
}
if let Some(locale) = self.locales.next() {
let mut solver = SerialProblemSolver::new(
self.resource_ids.len(),
self.reg.lock().metasource_len(self.current_metasource),
);
self.state = State::Locale(locale.clone());
if let Err(idx) = solver.try_next(self, true) {
self.reg
.shared
.provider
.report_errors(vec![L10nRegistryError::MissingResource {
locale,
resource_id: self.resource_ids[idx].clone(),
}]);
}
self.state.put_back_solver(solver);
}
}
}
macro_rules! try_next_metasource {
( $self:ident ) => {{
if $self.current_metasource > 0 {
$self.current_metasource -= 1;
let solver = SerialProblemSolver::new(
$self.resource_ids.len(),
$self.reg.lock().metasource_len($self.current_metasource),
);
$self.state = State::Solver {
locale: $self.state.get_locale().clone(),
solver,
};
continue;
}
}};
}
impl<P, B> Iterator for GenerateBundlesSync<P, B>
where
P: ErrorReporter,
B: BundleAdapter,
{
type Item = Result<FluentBundle, (FluentBundle, Vec<FluentError>)>;
fn next(&mut self) -> Option<Self::Item> {
loop {
if let State::Solver { .. } = self.state {
let mut solver = self.state.take_solver();
match solver.try_next(self, false) {
Ok(Some(order)) => {
let locale = self.state.get_locale();
let bundle = self.reg.lock().bundle_from_order(
self.current_metasource,
locale.clone(),
&order,
&self.resource_ids,
&self.reg.shared.provider,
);
self.state.put_back_solver(solver);
if bundle.is_some() {
return bundle;
} else {
continue;
}
}
Ok(None) => {
try_next_metasource!(self);
}
Err(idx) => {
try_next_metasource!(self);
// Only signal an error if we run out of metasources
// to try.
self.reg.shared.provider.report_errors(vec![
L10nRegistryError::MissingResource {
locale: self.state.get_locale().clone(),
resource_id: self.resource_ids[idx].clone(),
},
]);
}
}
self.state = State::Empty;
}
let locale = self.locales.next()?;
if self.reg.lock().number_of_metasources() == 0 {
return None;
}
self.current_metasource = self.reg.lock().number_of_metasources() - 1;
let solver = SerialProblemSolver::new(
self.resource_ids.len(),
self.reg.lock().metasource_len(self.current_metasource),
);
self.state = State::Solver { locale, solver };
}
}
}


@@ -0,0 +1,240 @@
Source Order Problem Solver
======================
This module contains an algorithm used to power the `FluentBundle` generator in `L10nRegistry`.
The main concept behind it is a problem solver which takes a list of resources and a list of sources and computes, one at a time, all valid combinations of source orders that allow a `FluentBundle` to be created with the requested resources.
The algorithm is notoriously hard to read, write, and modify, so this documentation is deliberately extensive and provides an example with diagrams to aid the reader.
# Example
For the purpose of a graphical illustration of the example, we will evaluate a scenario with two sources and three resources.
The sources and resource identifiers will be named in a concise way (*1* or *A*) to simplify the diagrams, while more tangible names derived from real-world examples of the Firefox use case are listed in their initial definitions.
### Sources
A source can be a packaged directory, a language pack, or any other directory, zip file, or remote source which contains localization resource files.
In the example, we have two sources:
* Source 1 named ***0*** (e.g. `browser`)
* Source 2 named ***1*** (e.g. `toolkit`)
### Resources
A resource is a single Fluent Translation List file. `FluentBundle` is a combination of such resources used together to resolve translations. This algorithm operates on lists of resource identifiers which represent relative paths within the source.
In the example we have three resources:
* Resource 1 named ***A*** (e.g. `branding/brand.ftl`)
* Resource 2 named ***B*** (e.g. `errors/common.ftl`)
* Resource 3 named ***C*** (e.g. `menu/list.ftl`)
## Task
The task in this example is to generate all possible iterations of the three resources from the given two sources. Since I/O is expensive, and in most production scenarios all necessary translations are available in the first set, the iterator is used to lazily fall back to the alternative sets only when translations are missing.
If all resources are available in both sources, the iterator should produce the following results:
1. `[A0, B0, C0]`
2. `[A0, B0, C1]`
3. `[A0, B1, C0]`
4. `[A0, B1, C1]`
5. `[A1, B0, C0]`
6. `[A1, B0, C1]`
7. `[A1, B1, C0]`
8. `[A1, B1, C1]`
Since each resource is identified by its column, we can record the resource list `[A, B, C]` separately and simplify the notation to just:
1. `[0, 0, 0]`
2. `[0, 0, 1]`
3. `[0, 1, 0]`
4. `[0, 1, 1]`
5. `[1, 0, 0]`
6. `[1, 0, 1]`
7. `[1, 1, 0]`
8. `[1, 1, 1]`
This notation will be used from now on.
## State
For the detailed diagrams of the algorithm, we'll use another way to look at the iterator: by evaluating its state. At every point of the algorithm, there is a *partial solution* which may lead to a *complete solution*. It is encoded as:
```rust
struct Solution {
candidate: Vec<usize>,
idx: usize,
}
```
whose starting point can be visualized as:
```text
┌┲━┱┬───┬───┐
│┃0┃│ │ │
└╂─╂┴───┴───┘
┃ ┃
┗━┛
```
###### Diagrams generated with use of http://marklodato.github.io/js-boxdrawing/
where the horizontal block is a candidate, each vertical block is the set of sources possible for a resource, and the arrow represents the index of the resource the iterator is currently evaluating.
With those tools introduced, we can now guide the reader through how the algorithm works.
But before we do that, it is important to justify writing a custom algorithm in place of existing generic solutions, and explain the two testing strategies which heavily impact the algorithm.
# Existing libraries
Intuitively, the starting point for exploring the problem would be to look at it as some variation of the [Cartesian Product](https://en.wikipedia.org/wiki/Cartesian_product) iterator.
#### Python
In Python, the `itertools` package provides a function, [`itertools.product`](https://docs.python.org/3/library/itertools.html#itertools.product), which can be used to generate such an iterator:
```python
import itertools
for set in itertools.product(range(2), repeat=3):
    print(set)
```
#### Rust
In Rust, the [`itertools`](https://crates.io/crates/itertools) crate provides [`multi_cartesian_product`](https://docs.rs/itertools/0.9.0/itertools/trait.Itertools.html#method.multi_cartesian_product), which can be used like this:
```rust
use itertools::Itertools;
let multi_prod = (0..3).map(|i| 0..2)
    .multi_cartesian_product();
for set in multi_prod {
    println!("{:?}", set);
}
```
([playground](https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=6ef231f6b011b234babb0aa3e68b78ab))
#### Reasons for a custom algorithm
Unfortunately, the computational complexity of generating all possible sets grows exponentially, in both CPU and memory use.
On a high-end laptop, computing the sets for all possible variations of the example above generates *8* sets and takes only *700 nanoseconds*, but computing the same for four sources and 16 resources (a scenario theoretically possible in Firefox with one language pack and Preferences UI for example) generates over *4 billion* sets and takes over *2 minutes*.
Since part of the fixed cost is I/O, applying a [Memoization](https://en.wikipedia.org/wiki/Memoization) technique allows us to minimize the cost of constructing, storing, and retrieving sets.
The second important observation is that in most scenarios a given resource exists in only some of the sources, and the ability to bail out of a branch of candidates that cannot lead to a solution yields significantly fewer permutations in the result.
## Optimizations
The algorithm used here is highly efficient. For the conservative scenario listed above, where all 16 resources are present in each of the 4 sources, the total time on the reference hardware is cut from *2 minutes* to *24 seconds* while generating the same *4 billion* sets, a **5x** performance improvement.
### Streaming Iterator
Unlike a regular iterator, a streaming iterator allows a borrowed reference to be returned, which in this case, where the solver yields a read-only "view" of a solution, allows us to avoid cloning it.
### Cache
Memory is much less of a problem for the algorithm than CPU usage, so the solver uses a matrix of source/resource `Option` cells to memoize visited cells. This allows each source/resource combination to be tested only once, after which all future tests can be skipped.
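As a rough sketch (not the exact code; the real solver keeps this matrix in `ProblemSolver::cache` in `src/solver/mod.rs` and queries its tester trait instead of a plain callback), the memoization amounts to consulting a `Vec<Vec<Option<bool>>>` before performing a test:
```rust
/// A minimal sketch of the memoization, assuming a plain test callback;
/// the real solver stores this matrix in `ProblemSolver::cache` and
/// queries its `SyncTester`/`AsyncTester` instead.
fn test_cell(
    cache: &mut Vec<Vec<Option<bool>>>,
    test: impl Fn(usize, usize) -> bool,
    res_idx: usize,
    source_idx: usize,
) -> bool {
    // Reuse the answer if this (resource, source) pair was already tested.
    if let Some(result) = cache[res_idx][source_idx] {
        return result;
    }
    // Otherwise run the (potentially I/O-heavy) test once and remember it.
    let result = test(res_idx, source_idx);
    cache[res_idx][source_idx] = Some(result);
    result
}
```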
### Backtracking
This optimization exploits the fact that most resources are available in only some of the sources.
Instead of generating all possible sets and then ignoring ones which are incomplete, it allows the algorithm to [backtrack](https://en.wikipedia.org/wiki/Backtracking) from partial candidates that cannot lead to a complete solution.
That technique is very powerful in the `L10nRegistry` use case and in many scenarios leads to 10-100x speed ups even in cases where all sets have to be generated.
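As an illustration, a simplified, free-standing version of what `ProblemSolver::try_backtrack` in `src/solver/mod.rs` does (ignoring the cache-aware pruning of later cells) looks roughly like this:
```rust
/// Simplified backtracking over a candidate `solution`, where each cell holds
/// a source index in `0..depth`. Returns `false` once the space is exhausted.
fn try_backtrack(solution: &mut [usize], idx: &mut usize, depth: usize) -> bool {
    // Walk left past cells that are already on their last source.
    while solution[*idx] == depth - 1 {
        if *idx == 0 {
            return false;
        }
        *idx -= 1;
    }
    // Advance the found cell and reset every cell to its right.
    solution[*idx] += 1;
    for cell in solution.iter_mut().skip(*idx + 1) {
        *cell = 0;
    }
    true
}
```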
# Serial vs Parallel Testing
At the core of the solver is a *tester* component which is responsible for eagerly evaluating candidates to allow for early bailouts from partial solutions which cannot lead to a complete solution.
This can be performed in one of two ways:
### Serial
The algorithm is synchronous and each extension of the candidate is evaluated serially, one by one, allowing for *backtracking* as soon as a given extension of a partial solution is confirmed not to lead to a complete solution.
Bringing back the initial state of the solver:
```text
┌┲━┱┬───┬───┐
│┃0┃│ │ │
└╂─╂┴───┴───┘
┃ ┃
┗━┛
```
The tester will evaluate whether the first resource **A** is available in the first source **0**. The test is performed synchronously, and the result tells the algorithm whether the candidate may still lead to a complete solution, or whether this branch should be abandoned and the next candidate tried.
#### Success case
If the test returns a success, the extensions of the candidate is generated:
```text
┌┲━┱┬┲━┱┬───┐
│┃0┃│┃0┃│ │
└╂─╂┴╂─╂┴───┘
┃ ┃ ┃ ┃
┗━┛ ┗━┛
```
When a candidate is complete, in other words, when the last cell of a candidate has been tested and did not lead to a backtrack, we know that the candidate is a solution to the problem, and we can yield it from the iterator.
#### Failure case
If the test returns a failure, the next step is to evaluate an alternative source for the same resource. Let's assume that *Source 0* has *Resource A* but does not have *Resource B*. In that case, the algorithm will increment the second cell's source index:
```text
┏━┓
┃0┃
┌┲━┱┬╂─╂┬───┐
│┃0┃│┃1┃│ │
└╂─╂┴┺━┹┴───┘
┃ ┃
┗━┛
```
and that may lead to the partial solution `[0, 1, ]` being stored for the next iteration.
If the test fails and no more sources can be generated, the algorithm will *backtrack* from the current cell, looking for the cell with the **highest** index before the one being evaluated that is not yet on its last source. If such a cell is found, the results of all cells **to the right** of it are **erased** and the next branch can be evaluated.
If no such cell can be found, that means that the iterator is complete.
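Putting the serial pieces together, driving the solver from the outside looks much like the `MockTester` setup in `benches/solver.rs`; the availability matrix below is made up for illustration:
```rust
use l10nregistry::solver::{SerialProblemSolver, SyncTester};

/// A toy tester: `values[res_idx][source_idx]` says whether a resource is
/// available in a given source. The matrix here is hypothetical.
struct MockTester {
    values: Vec<Vec<bool>>,
}

impl SyncTester for MockTester {
    fn test_sync(&self, res_idx: usize, source_idx: usize) -> bool {
        self.values[res_idx][source_idx]
    }
}

fn main() {
    // Resource B (index 1) is missing from source 0, as in the failure case above.
    let tester = MockTester {
        values: vec![vec![true, true], vec![false, true], vec![true, true]],
    };
    // width = number of resources, depth = number of sources.
    let mut solver = SerialProblemSolver::new(3, 2);
    // `false` means "don't prefetch"; each `Ok(Some(order))` is one valid source order.
    while let Ok(Some(order)) = solver.try_next(&tester, false) {
        println!("{:?}", order);
    }
}
```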
### Parallel
If the testing can be performed in parallel, as with asynchronous I/O, the above *serial* solution is sub-optimal because it misses out on the benefit of testing multiple cells at once.
In such a scenario, the algorithm will construct a candidate that *can* be valid (bailing out only on candidates already memoized as unavailable), and then test all of the untested cells in that candidate at once.
```text
┌┲━┱┬┲━┱┬┲━┱┐
│┃0┃│┃0┃│┃0┃│
└╂─╂┴╂─╂┴╂─╂┘
┃ ┃ ┃ ┃ ┃ ┃
┗━┛ ┗━┛ ┗━┛
```
When the parallel execution returns, the algorithm memoizes all new cell results and tests if the candidate is now a valid complete solution.
#### Success case
If the result is a set of successes, the candidate is yielded as a solution, and the algorithm then proceeds with the same operation as in the failure case.
#### Failure case
If the result contains failures, the iterator will backtrack to find the closest cell at or below the current index which can be advanced to the next source.
In the example state above, the current cell can be advanced to *source 1*, and then only the set `[None, None, 1]` needs to be evaluated by the tester (since we already know that *A0* and *B0* are valid).
If that is successful, the `[0, 0, 1]` set is a complete solution and is yielded.
Then, if the iterator is resumed, the next state to be tested is:
```text
┏━┓
┃0┃
┌┲━┱┬╂─╂┬┲━┱┐
│┃0┃│┃1┃│┃0┃│
└╂─╂┴┺━┹┴╂─╂┘
┃ ┃ ┃ ┃
┗━┛ ┗━┛
```
Since cell *2* was already at the highest index, cell *1* is the highest cell below *2* that was not yet at its last source. That cell is advanced, and all cells after it are *pruned* (in this case, cell *2* is the only one). Then memoization kicks in, and since *A0* and *C0* are already cached as valid, the tester receives just `[None, 1, None]` to test and the algorithm continues.
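The parallel solver is not itself an iterator; as with `GenerateBundles` in `src/registry/asynchronous.rs` and `TestStream` in `benches/solver.rs`, it is typically wrapped in a `futures::Stream` that forwards `try_poll_next`. A minimal generic sketch of such a wrapper (the type and field names here are illustrative, not part of the crate):
```rust
use std::pin::Pin;
use std::task::{Context, Poll};

use futures::stream::Stream;
use l10nregistry::solver::{AsyncTester, ParallelProblemSolver};

/// Wraps the parallel solver in a `Stream`, modeled on `TestStream` in
/// `benches/solver.rs`; each item is one complete source order.
struct SolutionStream<'t, T: AsyncTester> {
    solver: ParallelProblemSolver<T>,
    tester: &'t T,
}

impl<'t, T> Stream for SolutionStream<'t, T>
where
    T: AsyncTester,
    T::Result: Unpin,
{
    type Item = Vec<usize>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let tester = self.tester;
        let solver = &mut self.solver;
        // An `Err` (a resource missing from every source) is flattened into `None`
        // here; the registry reports it through its `ErrorReporter` instead.
        Pin::new(solver)
            .try_poll_next(cx, tester, false)
            .map(|result| result.ok().flatten())
    }
}
```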
# Summary
The algorithm explained above is tailored to the problem domain of `L10nRegistry` and is designed to be further extended in the future.
It is important to keep this guide up to date as changes are made to the algorithm.
Good luck.


@@ -0,0 +1,122 @@
mod parallel;
mod serial;
pub mod testing;
pub use parallel::{AsyncTester, ParallelProblemSolver};
pub use serial::{SerialProblemSolver, SyncTester};
pub struct ProblemSolver {
width: usize,
depth: usize,
cache: Vec<Vec<Option<bool>>>,
solution: Vec<usize>,
idx: usize,
dirty: bool,
}
impl ProblemSolver {
pub fn new(width: usize, depth: usize) -> Self {
Self {
width,
depth,
cache: vec![vec![None; depth]; width],
solution: vec![0; width],
idx: 0,
dirty: false,
}
}
}
impl ProblemSolver {
pub fn bail(&mut self) -> bool {
if self.try_advance_source() {
true
} else {
self.try_backtrack()
}
}
pub fn has_missing_cell(&self) -> Option<usize> {
for res_idx in 0..self.width {
if self.cache[res_idx].iter().all(|c| *c == Some(false)) {
return Some(res_idx);
}
}
None
}
fn is_cell_missing(&self, res_idx: usize, source_idx: usize) -> bool {
if let Some(false) = self.cache[res_idx][source_idx] {
return true;
}
false
}
fn is_current_cell_missing(&self) -> bool {
let res_idx = self.idx;
let source_idx = self.solution[res_idx];
let cell = &self.cache[res_idx][source_idx];
if let Some(false) = cell {
return true;
}
false
}
pub fn try_advance_resource(&mut self) -> bool {
if self.idx >= self.width - 1 {
false
} else {
self.idx += 1;
while self.is_current_cell_missing() {
if !self.try_advance_source() {
return false;
}
}
true
}
}
pub fn try_advance_source(&mut self) -> bool {
while self.solution[self.idx] < self.depth - 1 {
self.solution[self.idx] += 1;
if !self.is_current_cell_missing() {
return true;
}
}
false
}
pub fn try_backtrack(&mut self) -> bool {
while self.solution[self.idx] == self.depth - 1 {
if self.idx == 0 {
return false;
}
self.idx -= 1;
}
self.solution[self.idx] += 1;
self.prune()
}
pub fn prune(&mut self) -> bool {
for i in self.idx + 1..self.width {
let mut source_idx = 0;
while self.is_cell_missing(i, source_idx) {
if source_idx >= self.depth - 1 {
return false;
}
source_idx += 1;
}
self.solution[i] = source_idx;
}
true
}
pub fn is_complete(&self) -> bool {
self.idx == self.width - 1
}
}

@ -0,0 +1,187 @@
use super::ProblemSolver;
use std::ops::{Deref, DerefMut};
use futures::ready;
use std::future::Future;
use std::pin::Pin;
pub trait AsyncTester {
type Result: Future<Output = Vec<bool>>;
fn test_async(&self, query: Vec<(usize, usize)>) -> Self::Result;
}
pub struct ParallelProblemSolver<T>
where
T: AsyncTester,
{
solver: ProblemSolver,
current_test: Option<(T::Result, Vec<usize>)>,
}
impl<T: AsyncTester> Deref for ParallelProblemSolver<T> {
type Target = ProblemSolver;
fn deref(&self) -> &Self::Target {
&self.solver
}
}
impl<T: AsyncTester> DerefMut for ParallelProblemSolver<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.solver
}
}
impl<T: AsyncTester> ParallelProblemSolver<T> {
pub fn new(width: usize, depth: usize) -> Self {
Self {
solver: ProblemSolver::new(width, depth),
current_test: None,
}
}
}
type TestQuery = (Vec<(usize, usize)>, Vec<usize>);
impl<T: AsyncTester> ParallelProblemSolver<T> {
pub fn try_generate_complete_candidate(&mut self) -> bool {
while !self.is_complete() {
while self.is_current_cell_missing() {
if !self.try_advance_source() {
return false;
}
}
if !self.try_advance_resource() {
return false;
}
}
true
}
fn try_generate_test_query(&mut self) -> Result<TestQuery, usize> {
let mut test_cells = vec![];
let query = self
.solution
.iter()
.enumerate()
.filter_map(|(res_idx, source_idx)| {
let cell = self.cache[res_idx][*source_idx];
match cell {
None => {
test_cells.push(res_idx);
Some(Ok((res_idx, *source_idx)))
}
Some(false) => Some(Err(res_idx)),
Some(true) => None,
}
})
.collect::<Result<_, _>>()?;
Ok((query, test_cells))
}
fn apply_test_result(
&mut self,
resources: Vec<bool>,
testing_cells: Vec<usize>,
) -> Result<(), usize> {
let mut first_missing = None;
for (result, res_idx) in resources.into_iter().zip(testing_cells) {
let source_idx = self.solution[res_idx];
self.cache[res_idx][source_idx] = Some(result);
if !result && first_missing.is_none() {
first_missing = Some(res_idx);
}
}
if let Some(idx) = first_missing {
Err(idx)
} else {
Ok(())
}
}
pub fn try_poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
tester: &T,
prefetch: bool,
) -> std::task::Poll<Result<Option<Vec<usize>>, usize>>
where
<T as AsyncTester>::Result: Unpin,
{
if self.width == 0 || self.depth == 0 {
return Ok(None).into();
}
'outer: loop {
if let Some((test, testing_cells)) = &mut self.current_test {
let pinned = Pin::new(test);
let set = ready!(pinned.poll(cx));
let testing_cells = testing_cells.clone();
if let Err(res_idx) = self.apply_test_result(set, testing_cells) {
self.idx = res_idx;
self.prune();
if !self.bail() {
if let Some(res_idx) = self.has_missing_cell() {
return Err(res_idx).into();
} else {
return Ok(None).into();
}
}
self.current_test = None;
continue 'outer;
} else {
self.current_test = None;
if !prefetch {
self.dirty = true;
}
return Ok(Some(self.solution.clone())).into();
}
} else {
if self.dirty {
if !self.bail() {
if let Some(res_idx) = self.has_missing_cell() {
return Err(res_idx).into();
} else {
return Ok(None).into();
}
}
self.dirty = false;
}
while self.try_generate_complete_candidate() {
match self.try_generate_test_query() {
Ok((query, testing_cells)) => {
self.current_test = Some((tester.test_async(query), testing_cells));
continue 'outer;
}
Err(res_idx) => {
self.idx = res_idx;
self.prune();
if !self.bail() {
if let Some(res_idx) = self.has_missing_cell() {
return Err(res_idx).into();
} else {
return Ok(None).into();
}
}
}
}
}
return Ok(None).into();
}
}
}
}
#[cfg(test)]
mod tests {
#[test]
fn problem_solver() {
// let keys = vec!["key1.ftl", "key2.ftl"];
// let sources = vec!["source1", "source2"];
// let args = ("foo",);
// let ps = ProblemSolver::new(keys.len(), sources.len(), &foo);
}
}

@ -0,0 +1,88 @@
use super::ProblemSolver;
use std::ops::{Deref, DerefMut};
pub trait SyncTester {
fn test_sync(&self, res_idx: usize, source_idx: usize) -> bool;
}
pub struct SerialProblemSolver(ProblemSolver);
impl Deref for SerialProblemSolver {
type Target = ProblemSolver;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for SerialProblemSolver {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl SerialProblemSolver {
pub fn new(width: usize, depth: usize) -> Self {
Self(ProblemSolver::new(width, depth))
}
}
impl SerialProblemSolver {
fn test_current_cell<T>(&mut self, tester: &T) -> bool
where
T: SyncTester,
{
let res_idx = self.idx;
let source_idx = self.solution[res_idx];
let cell = &mut self.cache[res_idx][source_idx];
*cell.get_or_insert_with(|| tester.test_sync(res_idx, source_idx))
}
pub fn try_next<T>(&mut self, tester: &T, prefetch: bool) -> Result<Option<&[usize]>, usize>
where
T: SyncTester,
{
if self.width == 0 || self.depth == 0 {
return Ok(None);
}
if self.dirty {
if !self.bail() {
return Ok(None);
}
self.dirty = false;
}
loop {
if !self.test_current_cell(tester) {
if !self.bail() {
if let Some(res_idx) = self.has_missing_cell() {
return Err(res_idx);
} else {
return Ok(None);
}
}
continue;
}
if self.is_complete() {
if !prefetch {
self.dirty = true;
}
return Ok(Some(&self.solution));
}
if !self.try_advance_resource() {
return Ok(None);
}
}
}
}
#[cfg(test)]
mod tests {
#[test]
fn problem_solver() {
// let keys = vec!["key1.ftl", "key2.ftl"];
// let sources = vec!["source1", "source2"];
// let args = ("foo",);
// let ps = ProblemSolver::new(keys.len(), sources.len(), &foo);
}
}

@ -0,0 +1,38 @@
mod scenarios;
pub use scenarios::get_scenarios;
/// Define a testing scenario.
pub struct Scenario {
/// Name of the scenario.
pub name: String,
/// Number of resources.
pub width: usize,
/// Number of sources.
pub depth: usize,
/// Vector of resources, each holding a vector of sources, with `true`
/// indicating that the resource is present in that source.
pub values: Vec<Vec<bool>>,
/// Vector of expected solutions, each holding one source index per resource,
/// indicating from which source that resource is taken.
/// TODO(issue#17): This field is currently unused!
pub solutions: Vec<Vec<usize>>,
}
impl Scenario {
pub fn new<S: ToString>(
name: S,
width: usize,
depth: usize,
values: Vec<Vec<bool>>,
solutions: Vec<Vec<usize>>,
) -> Self {
Self {
name: name.to_string(),
width,
depth,
values,
solutions,
}
}
}

@ -0,0 +1,151 @@
use super::*;
pub fn get_scenarios() -> Vec<Scenario> {
vec![
Scenario::new("no-sources", 1, 0, vec![], vec![]),
Scenario::new("no-resources", 1, 0, vec![vec![true]], vec![]),
Scenario::new("no-keys", 0, 1, vec![], vec![]),
Scenario::new(
"one-res-two-sources",
1,
2,
vec![vec![true, true]],
vec![vec![0], vec![1]],
),
Scenario::new(
"two-res-two-sources",
2,
2,
vec![vec![false, true], vec![true, false]],
vec![vec![1, 0]],
),
Scenario::new(
"small",
3,
2,
vec![vec![true, true], vec![true, true], vec![true, true]],
vec![
vec![0, 0, 0],
vec![0, 0, 1],
vec![0, 1, 0],
vec![0, 1, 1],
vec![1, 0, 0],
vec![1, 0, 1],
vec![1, 1, 0],
vec![1, 1, 1],
],
),
Scenario::new(
"incomplete",
3,
2,
vec![vec![true, false], vec![false, true], vec![true, true]],
vec![vec![0, 1, 0], vec![0, 1, 1]],
),
Scenario::new(
"preferences",
19,
2,
vec![
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![true, false],
vec![false, true],
vec![false, true],
vec![false, true],
],
vec![vec![
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
]],
),
Scenario::new(
"langpack",
3,
4,
vec![
vec![true, true, true, true],
vec![true, true, true, true],
vec![true, true, true, true],
],
vec![
vec![0, 0, 0],
vec![0, 0, 1],
vec![0, 0, 2],
vec![0, 0, 3],
vec![0, 1, 0],
vec![0, 1, 1],
vec![0, 1, 2],
vec![0, 1, 3],
vec![0, 2, 0],
vec![0, 2, 1],
vec![0, 2, 2],
vec![0, 2, 3],
vec![0, 3, 0],
vec![0, 3, 1],
vec![0, 3, 2],
vec![0, 3, 3],
vec![1, 0, 0],
vec![1, 0, 1],
vec![1, 0, 2],
vec![1, 0, 3],
vec![1, 1, 0],
vec![1, 1, 1],
vec![1, 1, 2],
vec![1, 1, 3],
vec![1, 2, 0],
vec![1, 2, 1],
vec![1, 2, 2],
vec![1, 2, 3],
vec![1, 3, 0],
vec![1, 3, 1],
vec![1, 3, 2],
vec![1, 3, 3],
vec![2, 0, 0],
vec![2, 0, 1],
vec![2, 0, 2],
vec![2, 0, 3],
vec![2, 1, 0],
vec![2, 1, 1],
vec![2, 1, 2],
vec![2, 1, 3],
vec![2, 2, 0],
vec![2, 2, 1],
vec![2, 2, 2],
vec![2, 2, 3],
vec![2, 3, 0],
vec![2, 3, 1],
vec![2, 3, 2],
vec![2, 3, 3],
vec![3, 0, 0],
vec![3, 0, 1],
vec![3, 0, 2],
vec![3, 0, 3],
vec![3, 1, 0],
vec![3, 1, 1],
vec![3, 1, 2],
vec![3, 1, 3],
vec![3, 2, 0],
vec![3, 2, 1],
vec![3, 2, 2],
vec![3, 2, 3],
vec![3, 3, 0],
vec![3, 3, 1],
vec![3, 3, 2],
vec![3, 3, 3],
],
),
]
}

@ -0,0 +1,30 @@
use async_trait::async_trait;
use fluent_fallback::types::ResourceId;
use std::io;
/// The users of [`FileSource`] implement this trait to provide loading of
/// resources, returning the contents of a resource as a
/// `String`. [`FileSource`] handles the conversion from string representation
/// into `FluentResource`.
///
/// [`FileSource`]: source/struct.FileSource.html
#[async_trait(?Send)]
pub trait FileFetcher {
/// Return the `String` representation for `resource_id`. This version is
/// blocking.
///
/// See [`fetch`](#tymethod.fetch).
fn fetch_sync(&self, resource_id: &ResourceId) -> io::Result<String>;
/// Return the `String` representation for `path`.
///
/// On success, returns `Poll::Ready(Ok(..))`.
///
/// If no resource is available to be fetched, the method returns
/// `Poll::Pending` and arranges for the current task (via
/// `cx.waker().wake_by_ref()`) to receive a notification when the resource
/// is available.
///
/// See [`fetch_sync`](#tymethod.fetch_sync)
async fn fetch(&self, path: &ResourceId) -> io::Result<String>;
}
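// --- Illustrative sketch, not part of the original file ---
// A minimal in-memory `FileFetcher`: the hypothetical `InMemoryFetcher` maps
// resource paths directly to FTL source strings. A real implementation would
// read from disk or from a packaged archive instead.
use std::collections::HashMap;

struct InMemoryFetcher {
    files: HashMap<String, String>,
}

#[async_trait(?Send)]
impl FileFetcher for InMemoryFetcher {
    fn fetch_sync(&self, resource_id: &ResourceId) -> io::Result<String> {
        self.files
            .get(&resource_id.value)
            .cloned()
            .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "resource not found"))
    }

    async fn fetch(&self, resource_id: &ResourceId) -> io::Result<String> {
        // A real fetcher would await actual I/O here; the sketch defers to the
        // synchronous lookup.
        self.fetch_sync(resource_id)
    }
}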

@ -0,0 +1,558 @@
mod fetcher;
pub use fetcher::FileFetcher;
pub use fluent_fallback::types::{ResourceId, ToResourceId};
use crate::env::ErrorReporter;
use crate::errors::L10nRegistryError;
use crate::fluent::FluentResource;
use std::{
borrow::Borrow,
cell::RefCell,
fmt,
hash::{Hash, Hasher},
pin::Pin,
rc::Rc,
task::Poll,
};
use futures::{future::Shared, Future, FutureExt};
use rustc_hash::FxHashMap;
use unic_langid::LanguageIdentifier;
pub type RcResource = Rc<FluentResource>;
/// An option type whose missing variant records whether the resource was optional or required.
///
/// This behaves similarly to the standard-library [`Option`] type
/// except that there are two [`None`]-like variants:
/// [`ResourceOption::MissingOptional`] and [`ResourceOption::MissingRequired`].
#[derive(Clone, Debug)]
pub enum ResourceOption {
/// An available resource.
Some(RcResource),
/// A missing optional resource.
MissingOptional,
/// A missing required resource.
MissingRequired,
}
impl ResourceOption {
/// Creates a resource option that is either [`ResourceOption::MissingRequired`]
/// or [`ResourceOption::MissingOptional`] based on whether the given [`ResourceId`]
/// is required or optional.
pub fn missing_resource(resource_id: &ResourceId) -> Self {
if resource_id.is_required() {
Self::MissingRequired
} else {
Self::MissingOptional
}
}
/// Returns [`true`] if this option contains a resource, otherwise [`false`].
pub fn is_some(&self) -> bool {
matches!(self, Self::Some(_))
}
/// Returns [`true`] if this option is missing a resource of either kind, otherwise [`false`].
pub fn is_none(&self) -> bool {
matches!(self, Self::MissingOptional | Self::MissingRequired)
}
/// Returns [`true`] if this option is missing a required resource, otherwise [`false`].
pub fn is_required_and_missing(&self) -> bool {
matches!(self, Self::MissingRequired)
}
}
impl From<ResourceOption> for Option<RcResource> {
fn from(other: ResourceOption) -> Self {
match other {
ResourceOption::Some(id) => Some(id),
_ => None,
}
}
}
pub type ResourceFuture = Shared<Pin<Box<dyn Future<Output = ResourceOption>>>>;
#[derive(Debug, Clone)]
pub enum ResourceStatus {
/// The resource is missing. Don't bother trying to fetch.
MissingRequired,
MissingOptional,
/// The resource is loading and future will deliver the result.
Loading(ResourceFuture),
/// The resource is loaded and parsed.
Loaded(RcResource),
}
impl From<ResourceOption> for ResourceStatus {
fn from(input: ResourceOption) -> Self {
match input {
ResourceOption::Some(res) => Self::Loaded(res),
ResourceOption::MissingOptional => Self::MissingOptional,
ResourceOption::MissingRequired => Self::MissingRequired,
}
}
}
impl Future for ResourceStatus {
type Output = ResourceOption;
fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
use ResourceStatus::*;
let this = &mut *self;
match this {
MissingRequired => ResourceOption::MissingRequired.into(),
MissingOptional => ResourceOption::MissingOptional.into(),
Loaded(res) => ResourceOption::Some(res.clone()).into(),
Loading(res) => Pin::new(res).poll(cx),
}
}
}
/// `FileSource` provides a generic fetching and caching of fluent resources.
/// The user of `FileSource` provides a [`FileFetcher`](trait.FileFetcher.html)
/// implementation and `FileSource` takes care of the rest.
#[derive(Clone)]
pub struct FileSource {
/// Name of the FileSource, e.g. "browser"
pub name: String,
/// Pre-formatted path for the FileSource, e.g. "/browser/data/locale/{locale}/"
pub pre_path: String,
/// Metasource name for the FileSource, e.g. "app", "langpack"
/// Only sources from the same metasource are passed into the solver.
pub metasource: String,
/// The locales for which data is present in the FileSource, e.g. ["en-US", "pl"]
locales: Vec<LanguageIdentifier>,
shared: Rc<Inner>,
index: Option<Vec<String>>,
pub options: FileSourceOptions,
}
struct Inner {
fetcher: Box<dyn FileFetcher>,
error_reporter: Option<RefCell<Box<dyn ErrorReporter>>>,
entries: RefCell<FxHashMap<String, ResourceStatus>>,
}
impl fmt::Display for FileSource {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name)
}
}
impl PartialEq<FileSource> for FileSource {
fn eq(&self, other: &Self) -> bool {
self.name == other.name && self.metasource == other.metasource
}
}
impl Eq for FileSource {}
impl Hash for FileSource {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name.hash(state)
}
}
#[derive(PartialEq, Clone, Debug)]
pub struct FileSourceOptions {
pub allow_override: bool,
}
impl Default for FileSourceOptions {
fn default() -> Self {
Self {
allow_override: false,
}
}
}
impl FileSource {
/// Create a `FileSource` using the provided [`FileFetcher`](../trait.FileFetcher.html).
pub fn new(
name: String,
metasource: Option<String>,
locales: Vec<LanguageIdentifier>,
pre_path: String,
options: FileSourceOptions,
fetcher: impl FileFetcher + 'static,
) -> Self {
FileSource {
name,
metasource: metasource.unwrap_or_default(),
pre_path,
locales,
index: None,
shared: Rc::new(Inner {
entries: RefCell::new(FxHashMap::default()),
fetcher: Box::new(fetcher),
error_reporter: None,
}),
options,
}
}
pub fn new_with_index(
name: String,
metasource: Option<String>,
locales: Vec<LanguageIdentifier>,
pre_path: String,
options: FileSourceOptions,
fetcher: impl FileFetcher + 'static,
index: Vec<String>,
) -> Self {
FileSource {
name,
metasource: metasource.unwrap_or_default(),
pre_path,
locales,
index: Some(index),
shared: Rc::new(Inner {
entries: RefCell::new(FxHashMap::default()),
fetcher: Box::new(fetcher),
error_reporter: None,
}),
options,
}
}
pub fn set_reporter(&mut self, reporter: impl ErrorReporter + 'static) {
let mut shared = Rc::get_mut(&mut self.shared).unwrap();
shared.error_reporter = Some(RefCell::new(Box::new(reporter)));
}
}
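/// Convert the byte offset `idx` within `source` into a 1-based (line, column)
/// pair; used below to report the location of Fluent parse errors.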
fn calculate_pos_in_source(source: &str, idx: usize) -> (usize, usize) {
let mut ptr = 0;
let mut result = (1, 1);
for line in source.lines() {
let bytes = line.as_bytes().len();
if ptr + bytes < idx {
ptr += bytes + 1;
result.0 += 1;
} else {
result.1 = idx - ptr + 1;
break;
}
}
result
}
impl FileSource {
fn get_path(&self, locale: &LanguageIdentifier, resource_id: &ResourceId) -> String {
format!(
"{}{}",
self.pre_path.replace("{locale}", &locale.to_string()),
resource_id.value,
)
}
fn fetch_sync(&self, resource_id: &ResourceId) -> ResourceOption {
self.shared
.fetcher
.fetch_sync(resource_id)
.ok()
.map(|source| match FluentResource::try_new(source) {
Ok(res) => ResourceOption::Some(Rc::new(res)),
Err((res, errors)) => {
if let Some(reporter) = &self.shared.error_reporter {
reporter.borrow().report_errors(
errors
.into_iter()
.map(|e| L10nRegistryError::FluentError {
resource_id: resource_id.clone(),
loc: Some(calculate_pos_in_source(res.source(), e.pos.start)),
error: e.into(),
})
.collect(),
);
}
ResourceOption::Some(Rc::new(res))
}
})
.unwrap_or_else(|| ResourceOption::missing_resource(resource_id))
}
/// Attempt to synchronously fetch the resource for the combination of
/// `locale` and `resource_id`. Returns [`ResourceOption::Some`] if the
/// resource is available, otherwise one of the missing variants.
pub fn fetch_file_sync(
&self,
locale: &LanguageIdentifier,
resource_id: &ResourceId,
overload: bool,
) -> ResourceOption {
use ResourceStatus::*;
if self.has_file(locale, resource_id) == Some(false) {
return ResourceOption::missing_resource(resource_id);
}
let full_path_id = self
.get_path(locale, resource_id)
.to_resource_id(resource_id.resource_type);
let res = self.shared.lookup_resource(full_path_id.clone(), || {
self.fetch_sync(&full_path_id).into()
});
match res {
MissingRequired => ResourceOption::MissingRequired,
MissingOptional => ResourceOption::MissingOptional,
Loaded(res) => ResourceOption::Some(res),
Loading(..) if overload => {
// A sync load has been requested for the same resource that has
// a pending async load in progress. How do we handle this?
//
// Ideally, we would sync load and resolve all the pending
// futures with the result. With the current Futures and
// combinators, it's unclear how to proceed. One potential
// solution is to store a oneshot::Sender and
// Shared<oneshot::Receiver>. When the async loading future
// resolves it would check that the state is still `Loading`,
// and if so, send the result. The sync load would do the same
// send on the oneshot::Sender.
//
// For now, we warn and return the resource, paying the cost of
// duplication of the resource.
self.fetch_sync(&full_path_id)
}
Loading(..) => {
panic!("[l10nregistry] Attempting to synchronously load file {} while it's being loaded asynchronously.", &full_path_id.value);
}
}
}
/// Attempt to fetch resource for the combination of `locale` and `path`.
/// Returns [`ResourceStatus`](enum.ResourceStatus.html) which is
/// a `Future` that can be polled.
pub fn fetch_file(
&self,
locale: &LanguageIdentifier,
resource_id: &ResourceId,
) -> ResourceStatus {
use ResourceStatus::*;
if self.has_file(locale, resource_id) == Some(false) {
return ResourceOption::missing_resource(resource_id).into();
}
let full_path_id = self
.get_path(locale, resource_id)
.to_resource_id(resource_id.resource_type);
self.shared.lookup_resource(full_path_id.clone(), || {
let shared = self.shared.clone();
Loading(read_resource(full_path_id, shared).boxed_local().shared())
})
}
/// Determine whether the `FileSource` has a resource for the combination of
/// `locale` and `path`. Returns `Some(true)` if the file is known to be
/// present, `Some(false)` if it is known to be missing, and `None` if the
/// resource has not been fetched yet or an outstanding async fetch is still
/// pending.
pub fn has_file<L: Borrow<LanguageIdentifier>>(
&self,
locale: L,
path: &ResourceId,
) -> Option<bool> {
let locale = locale.borrow();
if !self.locales.contains(locale) {
Some(false)
} else {
let full_path = self.get_path(locale, path);
if let Some(index) = &self.index {
return Some(index.iter().any(|p| p == &full_path));
}
self.shared.has_file(&full_path)
}
}
pub fn locales(&self) -> &[LanguageIdentifier] {
&self.locales
}
pub fn get_index(&self) -> Option<&Vec<String>> {
self.index.as_ref()
}
}
impl std::fmt::Debug for FileSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
if let Some(index) = &self.index {
f.debug_struct("FileSource")
.field("name", &self.name)
.field("metasource", &self.metasource)
.field("locales", &self.locales)
.field("pre_path", &self.pre_path)
.field("index", index)
.finish()
} else {
f.debug_struct("FileSource")
.field("name", &self.name)
.field("metasource", &self.metasource)
.field("locales", &self.locales)
.field("pre_path", &self.pre_path)
.finish()
}
}
}
impl Inner {
fn lookup_resource<F>(&self, resource_id: ResourceId, f: F) -> ResourceStatus
where
F: FnOnce() -> ResourceStatus,
{
let mut lock = self.entries.borrow_mut();
lock.entry(resource_id.value).or_insert_with(|| f()).clone()
}
fn update_resource(&self, resource_id: ResourceId, resource: ResourceOption) -> ResourceOption {
let mut lock = self.entries.borrow_mut();
let entry = lock.get_mut(&resource_id.value);
match entry {
Some(entry) => *entry = resource.clone().into(),
_ => panic!("Expected an entry for the resource being updated"),
}
resource
}
pub fn has_file(&self, full_path: &str) -> Option<bool> {
match self.entries.borrow().get(full_path) {
Some(ResourceStatus::MissingRequired) => Some(false),
Some(ResourceStatus::MissingOptional) => Some(false),
Some(ResourceStatus::Loaded(_)) => Some(true),
Some(ResourceStatus::Loading(_)) | None => None,
}
}
}
async fn read_resource(resource_id: ResourceId, shared: Rc<Inner>) -> ResourceOption {
let resource = shared
.fetcher
.fetch(&resource_id)
.await
.ok()
.map(|source| match FluentResource::try_new(source) {
Ok(res) => ResourceOption::Some(Rc::new(res)),
Err((res, errors)) => {
if let Some(reporter) = &shared.error_reporter.borrow() {
reporter.borrow().report_errors(
errors
.into_iter()
.map(|e| L10nRegistryError::FluentError {
resource_id: resource_id.clone(),
loc: Some(calculate_pos_in_source(res.source(), e.pos.start)),
error: e.into(),
})
.collect(),
);
}
ResourceOption::Some(Rc::new(res))
}
})
.unwrap_or_else(|| ResourceOption::missing_resource(&resource_id));
// insert the resource into the cache
shared.update_resource(resource_id, resource)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn calculate_source_pos() {
let source = r#"
key = Value
key2 = Value 2
"#
.trim();
let result = calculate_pos_in_source(source, 0);
assert_eq!(result, (1, 1));
let result = calculate_pos_in_source(source, 1);
assert_eq!(result, (1, 2));
let result = calculate_pos_in_source(source, 12);
assert_eq!(result, (2, 1));
let result = calculate_pos_in_source(source, 13);
assert_eq!(result, (3, 1));
}
}
#[cfg(test)]
#[cfg(all(feature = "tokio", feature = "test-fluent"))]
mod tests_tokio {
use super::*;
use crate::testing::TestFileFetcher;
static FTL_RESOURCE_PRESENT: &str = "toolkit/global/textActions.ftl";
static FTL_RESOURCE_MISSING: &str = "missing.ftl";
#[tokio::test]
async fn file_source_fetch() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()).await;
assert!(file.is_some());
}
#[tokio::test]
async fn file_source_fetch_missing() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_MISSING.into()).await;
assert!(file.is_none());
}
#[tokio::test]
async fn file_source_already_loaded() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()).await;
assert!(file.is_some());
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()).await;
assert!(file.is_some());
}
#[tokio::test]
async fn file_source_concurrent() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file1 = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into());
let file2 = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into());
assert!(file1.await.is_some());
assert!(file2.await.is_some());
}
#[test]
fn file_source_sync_after_async_fail() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let _ = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into());
let file2 = fs1.fetch_file_sync(&en_us, &FTL_RESOURCE_PRESENT.into(), true);
assert!(file2.is_some());
}
}

@ -0,0 +1,322 @@
use crate::env::ErrorReporter;
use crate::errors::L10nRegistryError;
use crate::fluent::FluentBundle;
use crate::registry::BundleAdapter;
use crate::registry::L10nRegistry;
use crate::source::FileFetcher;
use async_trait::async_trait;
use fluent_fallback::{env::LocalesProvider, types::ResourceId};
use fluent_testing::MockFileSystem;
use std::cell::RefCell;
use std::rc::Rc;
use unic_langid::LanguageIdentifier;
pub struct RegistrySetup {
pub name: String,
pub file_sources: Vec<FileSource>,
pub locales: Vec<LanguageIdentifier>,
}
pub struct FileSource {
pub name: String,
pub metasource: String,
pub locales: Vec<LanguageIdentifier>,
pub path_scheme: String,
}
#[derive(Clone)]
pub struct MockBundleAdapter;
impl BundleAdapter for MockBundleAdapter {
fn adapt_bundle(&self, _bundle: &mut FluentBundle) {}
}
impl FileSource {
pub fn new<S>(
name: S,
metasource: Option<S>,
locales: Vec<LanguageIdentifier>,
path_scheme: S,
) -> Self
where
S: ToString,
{
let metasource = match metasource {
Some(s) => s.to_string(),
None => String::default(),
};
Self {
name: name.to_string(),
metasource,
locales,
path_scheme: path_scheme.to_string(),
}
}
}
impl RegistrySetup {
pub fn new(
name: &str,
file_sources: Vec<FileSource>,
locales: Vec<LanguageIdentifier>,
) -> Self {
Self {
name: name.to_string(),
file_sources,
locales,
}
}
}
impl From<fluent_testing::scenarios::structs::Scenario> for RegistrySetup {
fn from(s: fluent_testing::scenarios::structs::Scenario) -> Self {
Self {
name: s.name,
file_sources: s
.file_sources
.into_iter()
.map(|source| {
FileSource::new(
source.name,
None,
source
.locales
.into_iter()
.map(|l| l.parse().unwrap())
.collect(),
source.path_scheme,
)
})
.collect(),
locales: s
.locales
.into_iter()
.map(|loc| loc.parse().unwrap())
.collect(),
}
}
}
impl From<&fluent_testing::scenarios::structs::Scenario> for RegistrySetup {
fn from(s: &fluent_testing::scenarios::structs::Scenario) -> Self {
Self {
name: s.name.clone(),
file_sources: s
.file_sources
.iter()
.map(|source| {
FileSource::new(
source.name.clone(),
None,
source.locales.iter().map(|l| l.parse().unwrap()).collect(),
source.path_scheme.clone(),
)
})
.collect(),
locales: s.locales.iter().map(|loc| loc.parse().unwrap()).collect(),
}
}
}
#[derive(Default)]
struct InnerFileFetcher {
fs: MockFileSystem,
}
#[derive(Clone)]
pub struct TestFileFetcher {
inner: Rc<InnerFileFetcher>,
}
impl TestFileFetcher {
pub fn new() -> Self {
Self {
inner: Rc::new(InnerFileFetcher::default()),
}
}
pub fn get_test_file_source(
&self,
name: &str,
metasource: Option<String>,
locales: Vec<LanguageIdentifier>,
path: &str,
) -> crate::source::FileSource {
crate::source::FileSource::new(
name.to_string(),
metasource,
locales,
path.to_string(),
Default::default(),
self.clone(),
)
}
pub fn get_test_file_source_with_index(
&self,
name: &str,
metasource: Option<String>,
locales: Vec<LanguageIdentifier>,
path: &str,
index: Vec<&str>,
) -> crate::source::FileSource {
crate::source::FileSource::new_with_index(
name.to_string(),
metasource,
locales,
path.to_string(),
Default::default(),
self.clone(),
index.into_iter().map(|s| s.to_string()).collect(),
)
}
pub fn get_registry<S>(&self, setup: S) -> L10nRegistry<TestEnvironment, MockBundleAdapter>
where
S: Into<RegistrySetup>,
{
self.get_registry_and_environment(setup).1
}
pub fn get_registry_and_environment<S>(
&self,
setup: S,
) -> (
TestEnvironment,
L10nRegistry<TestEnvironment, MockBundleAdapter>,
)
where
S: Into<RegistrySetup>,
{
let setup: RegistrySetup = setup.into();
let provider = TestEnvironment::new(setup.locales);
let reg = L10nRegistry::with_provider(provider.clone());
let sources = setup
.file_sources
.into_iter()
.map(|source| {
let mut s = self.get_test_file_source(
&source.name,
Some(source.metasource),
source.locales,
&source.path_scheme,
);
s.set_reporter(provider.clone());
s
})
.collect();
reg.register_sources(sources).unwrap();
(provider, reg)
}
pub fn get_registry_and_environment_with_adapter<S, B>(
&self,
setup: S,
bundle_adapter: B,
) -> (TestEnvironment, L10nRegistry<TestEnvironment, B>)
where
S: Into<RegistrySetup>,
B: BundleAdapter,
{
let setup: RegistrySetup = setup.into();
let provider = TestEnvironment::new(setup.locales);
let mut reg = L10nRegistry::with_provider(provider.clone());
let sources = setup
.file_sources
.into_iter()
.map(|source| {
let mut s = self.get_test_file_source(
&source.name,
None,
source.locales,
&source.path_scheme,
);
s.set_reporter(provider.clone());
s
})
.collect();
reg.register_sources(sources).unwrap();
reg.set_adapt_bundle(bundle_adapter)
.expect("Failed to set bundle adapter.");
(provider, reg)
}
}
#[async_trait(?Send)]
impl FileFetcher for TestFileFetcher {
fn fetch_sync(&self, resource_id: &ResourceId) -> std::io::Result<String> {
self.inner.fs.get_test_file_sync(&resource_id.value)
}
async fn fetch(&self, resource_id: &ResourceId) -> std::io::Result<String> {
self.inner.fs.get_test_file_async(&resource_id.value).await
}
}
pub enum ErrorStrategy {
Panic,
Report,
Nothing,
}
pub struct InnerTestEnvironment {
locales: Vec<LanguageIdentifier>,
errors: Vec<L10nRegistryError>,
error_strategy: ErrorStrategy,
}
#[derive(Clone)]
pub struct TestEnvironment {
inner: Rc<RefCell<InnerTestEnvironment>>,
}
impl TestEnvironment {
pub fn new(locales: Vec<LanguageIdentifier>) -> Self {
Self {
inner: Rc::new(RefCell::new(InnerTestEnvironment {
locales,
errors: vec![],
error_strategy: ErrorStrategy::Report,
})),
}
}
pub fn set_locales(&self, locales: Vec<LanguageIdentifier>) {
self.inner.borrow_mut().locales = locales;
}
pub fn errors(&self) -> Vec<L10nRegistryError> {
self.inner.borrow().errors.clone()
}
pub fn clear_errors(&self) {
self.inner.borrow_mut().errors.clear()
}
}
impl LocalesProvider for TestEnvironment {
type Iter = std::vec::IntoIter<LanguageIdentifier>;
fn locales(&self) -> Self::Iter {
self.inner.borrow().locales.clone().into_iter()
}
}
impl ErrorReporter for TestEnvironment {
fn report_errors(&self, errors: Vec<L10nRegistryError>) {
match self.inner.borrow().error_strategy {
ErrorStrategy::Panic => {
panic!("Errors: {:#?}", errors);
}
ErrorStrategy::Report => {
#[cfg(test)] // Don't let printing affect benchmarks
eprintln!("Errors: {:#?}", errors);
}
ErrorStrategy::Nothing => {}
}
self.inner.borrow_mut().errors.extend(errors);
}
}

@ -0,0 +1,201 @@
use std::borrow::Cow;
use fluent_fallback::{
env::LocalesProvider,
types::{L10nKey, ResourceId},
Localization,
};
use l10nregistry::testing::{
FileSource, MockBundleAdapter, RegistrySetup, TestEnvironment, TestFileFetcher,
};
use serial_test::serial;
use unic_langid::{langid, LanguageIdentifier};
type L10nRegistry = l10nregistry::registry::L10nRegistry<TestEnvironment, MockBundleAdapter>;
static LOCALES: &[LanguageIdentifier] = &[langid!("pl"), langid!("en-US")];
static mut FILE_FETCHER: Option<TestFileFetcher> = None;
static mut L10N_REGISTRY: Option<L10nRegistry> = None;
const FTL_RESOURCE: &str = "toolkit/updates/history.ftl";
const L10N_ID_PL_EN: (&str, Option<&str>) = ("history-title", Some("Historia aktualizacji"));
const L10N_ID_MISSING: (&str, Option<&str>) = ("missing-id", None);
const L10N_ID_ONLY_EN: (&str, Option<&str>) = (
"history-intro",
Some("The following updates have been installed"),
);
fn get_file_fetcher() -> &'static TestFileFetcher {
let fetcher: &mut Option<TestFileFetcher> = unsafe { &mut FILE_FETCHER };
fetcher.get_or_insert_with(|| TestFileFetcher::new())
}
fn get_l10n_registry() -> &'static L10nRegistry {
let reg: &mut Option<L10nRegistry> = unsafe { &mut L10N_REGISTRY };
reg.get_or_insert_with(|| {
let fetcher = get_file_fetcher();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new(
"toolkit",
None,
get_app_locales().to_vec(),
"toolkit/{locale}/",
),
FileSource::new(
"browser",
None,
get_app_locales().to_vec(),
"browser/{locale}/",
),
],
get_app_locales().to_vec(),
);
fetcher.get_registry_and_environment(setup).1
})
}
fn get_app_locales() -> &'static [LanguageIdentifier] {
LOCALES
}
struct LocalesService;
impl LocalesProvider for LocalesService {
type Iter = std::vec::IntoIter<LanguageIdentifier>;
fn locales(&self) -> Self::Iter {
get_app_locales().to_vec().into_iter()
}
}
fn sync_localization(
reg: &'static L10nRegistry,
res_ids: Vec<ResourceId>,
) -> Localization<L10nRegistry, LocalesService> {
Localization::with_env(res_ids, true, LocalesService, reg.clone())
}
fn async_localization(
reg: &'static L10nRegistry,
res_ids: Vec<ResourceId>,
) -> Localization<L10nRegistry, LocalesService> {
Localization::with_env(res_ids, false, LocalesService, reg.clone())
}
fn setup_sync_test() -> Localization<L10nRegistry, LocalesService> {
sync_localization(get_l10n_registry(), vec![FTL_RESOURCE.into()])
}
fn setup_async_test() -> Localization<L10nRegistry, LocalesService> {
async_localization(get_l10n_registry(), vec![FTL_RESOURCE.into()])
}
#[test]
#[serial]
fn localization_format_value_sync() {
let loc = setup_sync_test();
let bundles = loc.bundles();
let mut errors = vec![];
for query in &[L10N_ID_PL_EN, L10N_ID_MISSING, L10N_ID_ONLY_EN] {
let value = bundles
.format_value_sync(query.0, None, &mut errors)
.unwrap();
assert_eq!(value, query.1.map(|s| Cow::Borrowed(s)));
}
assert_eq!(errors.len(), 4);
}
#[test]
#[serial]
fn localization_format_values_sync() {
let loc = setup_sync_test();
let bundles = loc.bundles();
let mut errors = vec![];
let ids = &[L10N_ID_PL_EN, L10N_ID_MISSING, L10N_ID_ONLY_EN];
let keys = ids
.iter()
.map(|query| L10nKey {
id: query.0.into(),
args: None,
})
.collect::<Vec<_>>();
let values = bundles.format_values_sync(&keys, &mut errors).unwrap();
assert_eq!(values.len(), ids.len());
for (value, query) in values.iter().zip(ids) {
if let Some(expected) = query.1 {
assert_eq!(*value, Some(Cow::Borrowed(expected)));
}
}
assert_eq!(errors.len(), 4);
}
#[tokio::test]
#[serial]
async fn localization_format_value_async() {
let loc = setup_async_test();
let bundles = loc.bundles();
let mut errors = vec![];
for query in &[L10N_ID_PL_EN, L10N_ID_MISSING, L10N_ID_ONLY_EN] {
let value = bundles.format_value(query.0, None, &mut errors).await;
if let Some(expected) = query.1 {
assert_eq!(value, Some(Cow::Borrowed(expected)));
}
}
}
#[tokio::test]
#[serial]
async fn localization_format_values_async() {
let loc = setup_async_test();
let bundles = loc.bundles();
let mut errors = vec![];
let ids = &[L10N_ID_PL_EN, L10N_ID_MISSING, L10N_ID_ONLY_EN];
let keys = ids
.iter()
.map(|query| L10nKey {
id: query.0.into(),
args: None,
})
.collect::<Vec<_>>();
let values = bundles.format_values(&keys, &mut errors).await;
assert_eq!(values.len(), ids.len());
for (value, query) in values.iter().zip(ids) {
if let Some(expected) = query.1 {
assert_eq!(*value, Some(Cow::Borrowed(expected)));
}
}
}
#[tokio::test]
#[serial]
async fn localization_upgrade() {
let mut loc = setup_sync_test();
let bundles = loc.bundles();
let mut errors = vec![];
let value = bundles
.format_value_sync(L10N_ID_PL_EN.0, None, &mut errors)
.unwrap();
assert_eq!(value, L10N_ID_PL_EN.1.map(|s| Cow::Borrowed(s)));
loc.set_async();
let bundles = loc.bundles();
let value = bundles
.format_value(L10N_ID_PL_EN.0, None, &mut errors)
.await;
assert_eq!(value, L10N_ID_PL_EN.1.map(|s| Cow::Borrowed(s)));
}

@ -0,0 +1,296 @@
use l10nregistry::testing::{FileSource, RegistrySetup, TestFileFetcher};
use unic_langid::LanguageIdentifier;
static FTL_RESOURCE_TOOLKIT: &str = "toolkit/global/textActions.ftl";
static FTL_RESOURCE_BROWSER: &str = "branding/brand.ftl";
#[test]
fn test_generate_sources_for_file() {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
{
let lock = reg.lock();
let toolkit = lock.get_source(0, "toolkit").unwrap();
let browser = lock.get_source(0, "browser").unwrap();
let toolkit_resource_id = FTL_RESOURCE_TOOLKIT.into();
let mut i = lock.generate_sources_for_file(0, &en_us, &toolkit_resource_id);
assert_eq!(i.next(), Some(toolkit));
assert_eq!(i.next(), Some(browser));
assert_eq!(i.next(), None);
assert!(browser
.fetch_file_sync(&en_us, &FTL_RESOURCE_TOOLKIT.into(), false)
.is_none());
let mut i = lock.generate_sources_for_file(0, &en_us, &toolkit_resource_id);
assert_eq!(i.next(), Some(toolkit));
assert_eq!(i.next(), None);
assert!(toolkit
.fetch_file_sync(&en_us, &FTL_RESOURCE_TOOLKIT.into(), false)
.is_some());
let mut i = lock.generate_sources_for_file(0, &en_us, &toolkit_resource_id);
assert_eq!(i.next(), Some(toolkit));
assert_eq!(i.next(), None);
}
}
#[test]
fn test_generate_bundles_for_lang_sync() {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_for_lang_sync(en_us.clone(), paths);
assert!(i.next().is_some());
assert!(i.next().is_none());
}
#[test]
fn test_generate_bundles_sync() {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let lang_ids = vec![en_us];
let mut i = reg.generate_bundles_sync(lang_ids.into_iter(), paths);
assert!(i.next().is_some());
assert!(i.next().is_none());
}
#[tokio::test]
async fn test_generate_bundles_for_lang() {
use futures::stream::StreamExt;
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_for_lang(en_us, paths);
assert!(i.next().await.is_some());
assert!(i.next().await.is_none());
}
#[tokio::test]
async fn test_generate_bundles() {
use futures::stream::StreamExt;
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let langs = vec![en_us];
let mut i = reg.generate_bundles(langs.into_iter(), paths);
assert!(i.next().await.is_some());
assert!(i.next().await.is_none());
}
#[test]
fn test_manage_sources() {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/"),
FileSource::new("browser", None, vec![en_us.clone()], "browser/{locale}/"),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let lang_ids = vec![en_us];
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_sync(lang_ids.clone().into_iter(), paths);
assert!(i.next().is_some());
assert!(i.next().is_none());
reg.clone()
.remove_sources(vec!["toolkit"])
.expect("Failed to remove a source.");
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_sync(lang_ids.clone().into_iter(), paths);
assert!(i.next().is_none());
let paths = vec![FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_sync(lang_ids.clone().into_iter(), paths);
assert!(i.next().is_some());
assert!(i.next().is_none());
reg.register_sources(vec![fetcher.get_test_file_source(
"toolkit",
None,
lang_ids.clone(),
"browser/{locale}/",
)])
.expect("Failed to register a source.");
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_sync(lang_ids.clone().into_iter(), paths);
assert!(i.next().is_none());
reg.update_sources(vec![fetcher.get_test_file_source(
"toolkit",
None,
lang_ids.clone(),
"toolkit/{locale}/",
)])
.expect("Failed to update a source.");
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let mut i = reg.generate_bundles_sync(lang_ids.clone().into_iter(), paths);
assert!(i.next().is_some());
assert!(i.next().is_none());
}
#[test]
fn test_generate_bundles_with_metasources_sync() {
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new(
"toolkit",
Some("app"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("app"),
vec![en_us.clone()],
"browser/{locale}/",
),
FileSource::new(
"toolkit",
Some("langpack"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("langpack"),
vec![en_us.clone()],
"browser/{locale}/",
),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let lang_ids = vec![en_us];
let mut i = reg.generate_bundles_sync(lang_ids.into_iter(), paths);
assert!(i.next().is_some());
assert!(i.next().is_some());
assert!(i.next().is_none());
}
#[tokio::test]
async fn test_generate_bundles_with_metasources() {
use futures::stream::StreamExt;
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let setup = RegistrySetup::new(
"test",
vec![
FileSource::new(
"toolkit",
Some("app"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("app"),
vec![en_us.clone()],
"browser/{locale}/",
),
FileSource::new(
"toolkit",
Some("langpack"),
vec![en_us.clone()],
"toolkit/{locale}/",
),
FileSource::new(
"browser",
Some("langpack"),
vec![en_us.clone()],
"browser/{locale}/",
),
],
vec![en_us.clone()],
);
let fetcher = TestFileFetcher::new();
let (_, reg) = fetcher.get_registry_and_environment(setup);
let paths = vec![FTL_RESOURCE_TOOLKIT.into(), FTL_RESOURCE_BROWSER.into()];
let langs = vec![en_us];
let mut i = reg.generate_bundles(langs.into_iter(), paths);
assert!(i.next().await.is_some());
assert!(i.next().await.is_some());
assert!(i.next().await.is_none());
}

@ -0,0 +1,109 @@
use fluent_bundle::FluentArgs;
use fluent_fallback::Localization;
use fluent_testing::get_scenarios;
use l10nregistry::fluent::FluentBundle;
use l10nregistry::registry::BundleAdapter;
use l10nregistry::testing::{RegistrySetup, TestFileFetcher};
#[derive(Clone)]
struct ScenarioBundleAdapter {}
impl BundleAdapter for ScenarioBundleAdapter {
fn adapt_bundle(&self, bundle: &mut FluentBundle) {
bundle.set_use_isolating(false);
bundle
.add_function("PLATFORM", |_positional, _named| "linux".into())
.expect("Failed to add a function to the bundle.");
}
}
#[tokio::test]
async fn scenarios_async() {
use fluent_testing::scenarios::structs::Scenario;
let fetcher = TestFileFetcher::new();
let scenarios = get_scenarios();
let adapter = ScenarioBundleAdapter {};
let cannot_produce_bundle = |scenario: &Scenario| {
scenario
.queries
.iter()
.any(|query| query.exceptional_context.blocks_bundle_generation())
};
for scenario in scenarios {
println!("scenario: {}", scenario.name);
let setup: RegistrySetup = (&scenario).into();
let (env, reg) = fetcher.get_registry_and_environment_with_adapter(setup, adapter.clone());
let loc = Localization::with_env(scenario.res_ids.clone(), false, env.clone(), reg);
let bundles = loc.bundles();
let no_bundles = cannot_produce_bundle(&scenario);
let mut errors = vec![];
for query in scenario.queries.iter() {
let errors_start_len = errors.len();
let args = query.input.args.as_ref().map(|args| {
let mut result = FluentArgs::new();
for arg in args.as_slice() {
result.set(arg.id.clone(), arg.value.clone());
}
result
});
if let Some(output) = &query.output {
if let Some(value) = &output.value {
let v = bundles
.format_value(&query.input.id, args.as_ref(), &mut errors)
.await;
if no_bundles || query.exceptional_context.causes_failed_value_lookup() {
assert!(v.is_none());
if no_bundles {
continue;
}
} else {
assert_eq!(v.unwrap(), value.as_str())
}
}
}
if query.exceptional_context.causes_reported_format_error() {
assert!(
errors.len() > errors_start_len,
"expected reported errors for query {:#?}",
query
);
} else {
assert_eq!(
errors.len(),
errors_start_len,
"expected no reported errors for query {:#?}",
query
);
}
}
if scenario
.queries
.iter()
.any(|query| query.exceptional_context.missing_required_resource())
{
assert!(
!env.errors().is_empty(),
"expected errors for scenario {{ {} }}, but found none",
scenario.name
);
} else {
assert!(
env.errors().is_empty(),
"expected no errors for scenario {{ {} }}, but found {:#?}",
scenario.name,
env.errors()
);
}
}
}

@ -0,0 +1,107 @@
use fluent_bundle::FluentArgs;
use fluent_fallback::Localization;
use fluent_testing::get_scenarios;
use l10nregistry::fluent::FluentBundle;
use l10nregistry::registry::BundleAdapter;
use l10nregistry::testing::{RegistrySetup, TestFileFetcher};
#[derive(Clone)]
struct ScenarioBundleAdapter {}
impl BundleAdapter for ScenarioBundleAdapter {
fn adapt_bundle(&self, bundle: &mut FluentBundle) {
bundle.set_use_isolating(false);
bundle
.add_function("PLATFORM", |_positional, _named| "linux".into())
.expect("Failed to add a function to the bundle.");
}
}
#[test]
fn scenarios_sync() {
use fluent_testing::scenarios::structs::Scenario;
let fetcher = TestFileFetcher::new();
let scenarios = get_scenarios();
let adapter = ScenarioBundleAdapter {};
let cannot_produce_bundle = |scenario: &Scenario| {
scenario
.queries
.iter()
.any(|query| query.exceptional_context.blocks_bundle_generation())
};
for scenario in scenarios {
println!("scenario: {}", scenario.name);
let setup: RegistrySetup = (&scenario).into();
let (env, reg) = fetcher.get_registry_and_environment_with_adapter(setup, adapter.clone());
let loc = Localization::with_env(scenario.res_ids.clone(), true, env.clone(), reg);
let bundles = loc.bundles();
let no_bundles = cannot_produce_bundle(&scenario);
let mut errors = vec![];
for query in scenario.queries.iter() {
let errors_start_len = errors.len();
let args = query.input.args.as_ref().map(|args| {
let mut result = FluentArgs::new();
for arg in args.as_slice() {
result.set(arg.id.clone(), arg.value.clone());
}
result
});
if let Some(output) = &query.output {
if let Some(value) = &output.value {
let v = bundles.format_value_sync(&query.input.id, args.as_ref(), &mut errors);
if no_bundles || query.exceptional_context.causes_failed_value_lookup() {
assert!(v.is_err() || v.unwrap().is_none());
if no_bundles {
continue;
}
} else {
assert_eq!(v.unwrap().unwrap(), value.as_str())
}
}
}
if query.exceptional_context.causes_reported_format_error() {
assert!(
errors.len() > errors_start_len,
"expected reported errors for query {:#?}",
query
);
} else {
assert_eq!(
errors.len(),
errors_start_len,
"expected no reported errors for query {:#?}",
query
);
}
}
if scenario
.queries
.iter()
.any(|query| query.exceptional_context.missing_required_resource())
{
assert!(
!env.errors().is_empty(),
"expected errors for scenario {{ {} }}, but found none",
scenario.name
);
} else {
assert!(
env.errors().is_empty(),
"expected no errors for scenario {{ {} }}, but found {:#?}",
scenario.name,
env.errors()
);
}
}
}

@ -0,0 +1,305 @@
use fluent_fallback::types::{ResourceType, ToResourceId};
use futures::future::join_all;
use l10nregistry::testing::TestFileFetcher;
use unic_langid::LanguageIdentifier;
static FTL_RESOURCE_PRESENT: &str = "toolkit/global/textActions.ftl";
static FTL_RESOURCE_MISSING: &str = "missing.ftl";
#[test]
fn test_fetch_sync() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file = fs1.fetch_file_sync(&en_us, &FTL_RESOURCE_PRESENT.into(), false);
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1.fetch_file_sync(
&en_us,
&FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Required),
false,
);
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1.fetch_file_sync(
&en_us,
&FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Optional),
false,
);
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1.fetch_file_sync(&en_us, &FTL_RESOURCE_MISSING.into(), false);
assert!(!file.is_some());
assert!(file.is_none());
assert!(file.is_required_and_missing());
let file = fs1.fetch_file_sync(
&en_us,
&FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Required),
false,
);
assert!(!file.is_some());
assert!(file.is_none());
assert!(file.is_required_and_missing());
let file = fs1.fetch_file_sync(
&en_us,
&FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Optional),
false,
);
assert!(!file.is_some());
assert!(file.is_none());
assert!(!file.is_required_and_missing());
}
#[tokio::test]
async fn test_fetch_async() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()).await;
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1
.fetch_file(
&en_us,
&FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Required),
)
.await;
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1
.fetch_file(
&en_us,
&FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Optional),
)
.await;
assert!(file.is_some());
assert!(!file.is_none());
assert!(!file.is_required_and_missing());
let file = fs1.fetch_file(&en_us, &FTL_RESOURCE_MISSING.into()).await;
assert!(!file.is_some());
assert!(file.is_none());
assert!(file.is_required_and_missing());
let file = fs1
.fetch_file(
&en_us,
&FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Required),
)
.await;
assert!(!file.is_some());
assert!(file.is_none());
assert!(file.is_required_and_missing());
let file = fs1
.fetch_file(
&en_us,
&FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Optional),
)
.await;
assert!(!file.is_some());
assert!(file.is_none());
assert!(!file.is_required_and_missing());
}
#[tokio::test]
async fn test_fetch_sync_2_async() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert!(fs1
.fetch_file_sync(&en_us, &FTL_RESOURCE_PRESENT.into(), false)
.is_some());
assert!(fs1
.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into())
.await
.is_some());
assert!(fs1
.fetch_file_sync(&en_us, &FTL_RESOURCE_PRESENT.into(), false)
.is_some());
}
#[tokio::test]
async fn test_fetch_async_2_sync() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert!(fs1
.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into())
.await
.is_some());
assert!(fs1
.fetch_file_sync(&en_us, &FTL_RESOURCE_PRESENT.into(), false)
.is_some());
}
#[test]
fn test_fetch_has_value_required_sync() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let path = FTL_RESOURCE_PRESENT.into();
let path_missing = FTL_RESOURCE_MISSING.into();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert_eq!(fs1.has_file(&en_us, &path), None);
assert!(fs1.fetch_file_sync(&en_us, &path, false).is_some());
assert_eq!(fs1.has_file(&en_us, &path), Some(true));
assert_eq!(fs1.has_file(&en_us, &path_missing), None);
assert!(fs1.fetch_file_sync(&en_us, &path_missing, false).is_none());
assert_eq!(fs1.has_file(&en_us, &path_missing), Some(false));
}
#[test]
fn test_fetch_has_value_optional_sync() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let path = FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Optional);
let path_missing = FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Optional);
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert_eq!(fs1.has_file(&en_us, &path), None);
assert!(fs1.fetch_file_sync(&en_us, &path, false).is_some());
assert_eq!(fs1.has_file(&en_us, &path), Some(true));
assert_eq!(fs1.has_file(&en_us, &path_missing), None);
assert!(fs1.fetch_file_sync(&en_us, &path_missing, false).is_none());
assert_eq!(fs1.has_file(&en_us, &path_missing), Some(false));
}
#[tokio::test]
async fn test_fetch_has_value_required_async() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let path = FTL_RESOURCE_PRESENT.into();
let path_missing = FTL_RESOURCE_MISSING.into();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert_eq!(fs1.has_file(&en_us, &path), None);
assert!(fs1.fetch_file(&en_us, &path).await.is_some());
println!("Completed");
assert_eq!(fs1.has_file(&en_us, &path), Some(true));
assert_eq!(fs1.has_file(&en_us, &path_missing), None);
assert!(fs1.fetch_file(&en_us, &path_missing).await.is_none());
assert!(fs1
.fetch_file(&en_us, &path_missing)
.await
.is_required_and_missing());
assert_eq!(fs1.has_file(&en_us, &path_missing), Some(false));
assert!(fs1.fetch_file_sync(&en_us, &path_missing, false).is_none());
assert!(fs1
.fetch_file_sync(&en_us, &path_missing, false)
.is_required_and_missing());
}
#[tokio::test]
async fn test_fetch_has_value_optional_async() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let path = FTL_RESOURCE_PRESENT.to_resource_id(ResourceType::Optional);
let path_missing = FTL_RESOURCE_MISSING.to_resource_id(ResourceType::Optional);
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
assert_eq!(fs1.has_file(&en_us, &path), None);
assert!(fs1.fetch_file(&en_us, &path).await.is_some());
println!("Completed");
assert_eq!(fs1.has_file(&en_us, &path), Some(true));
assert_eq!(fs1.has_file(&en_us, &path_missing), None);
assert!(fs1.fetch_file(&en_us, &path_missing).await.is_none());
assert!(!fs1
.fetch_file(&en_us, &path_missing)
.await
.is_required_and_missing());
assert_eq!(fs1.has_file(&en_us, &path_missing), Some(false));
assert!(fs1.fetch_file_sync(&en_us, &path_missing, false).is_none());
assert!(!fs1
.fetch_file_sync(&en_us, &path_missing, false)
.is_required_and_missing());
}
#[tokio::test]
async fn test_fetch_async_consecutive() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let fs1 =
fetcher.get_test_file_source("toolkit", None, vec![en_us.clone()], "toolkit/{locale}/");
let results = join_all(vec![
fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()),
fs1.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into()),
])
.await;
assert!(results[0].is_some());
assert!(results[1].is_some());
assert!(fs1
.fetch_file(&en_us, &FTL_RESOURCE_PRESENT.into())
.await
.is_some());
}
#[test]
fn test_indexed() {
let fetcher = TestFileFetcher::new();
let en_us: LanguageIdentifier = "en-US".parse().unwrap();
let path = FTL_RESOURCE_PRESENT;
let path_missing = FTL_RESOURCE_MISSING;
let fs1 = fetcher.get_test_file_source_with_index(
"toolkit",
None,
vec![en_us.clone()],
"toolkit/{locale}/",
vec!["toolkit/en-US/toolkit/global/textActions.ftl"],
);
assert_eq!(fs1.has_file(&en_us, &path.into()), Some(true));
assert!(fs1.fetch_file_sync(&en_us, &path.into(), false).is_some());
assert_eq!(fs1.has_file(&en_us, &path.into()), Some(true));
assert_eq!(fs1.has_file(&en_us, &path_missing.into()), Some(false));
assert!(fs1
.fetch_file_sync(&en_us, &path_missing.into(), false)
.is_none());
assert_eq!(fs1.has_file(&en_us, &path_missing.into()), Some(false));
}