Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1774806 - Update unicode-normalization to 0.1.20. r=emilio,supply-chain-reviewers
Differential Revision: https://phabricator.services.mozilla.com/D150285
This commit is contained in:
Parent 3bc124d290
Commit 5d8ac446c1
Cargo.lock
@@ -5654,9 +5654,9 @@ checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
 
 [[package]]
 name = "unicode-normalization"
-version = "0.1.19"
+version = "0.1.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
+checksum = "81dee68f85cab8cf68dec42158baf3a79a1cdc065a8b103025965d6ccb7f6cbd"
 dependencies = [
  "tinyvec",
 ]
supply-chain/audits.toml
@@ -89,3 +89,9 @@ who = "Mike Hommey <mh+mozilla@glandium.org>"
 criteria = "safe-to-deploy"
 delta = "1.0.0 -> 1.0.1"
+
+[[audits.unicode-normalization]]
+who = "Mike Hommey <mh+mozilla@glandium.org>"
+criteria = "safe-to-deploy"
+delta = "0.1.19 -> 0.1.20"
+notes = "I am the author of most of these changes upstream, and prepared the release myself, at which point I looked at the other changes since 0.1.19."
 
third_party/rust/unicode-normalization/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"34370ae727c107ec51fd6809e01ff76220a1bcc2b849b8d277bf9c7bf1875abd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"80e4415e2f0941aac11b7e5c1db946d00139db2f1a67774fcd0c0bfde52217fe","benches/bench.rs":"827e5343b059a732904be29717c2797203bfd0a633edf08042afea65372a3e2c","scripts/unicode.py":"c00cb48507e4564a2dcf17a95a5fb1206830f748a8444d296f95b5d2dd09b72c","src/__test_api.rs":"78e21bfa0b98894f545c8ed3e31cec20d7a48951a7f3ed69a6130c4b3d463aee","src/decompose.rs":"c0eb774843a545356e63bbcd7fb926f80d3c97ef4601ca3701fc34154f2e9905","src/lib.rs":"3eaa16b8b4d2d8e15d38b56760fb432ec7665e22360fd4c587c9b724486ba90e","src/lookups.rs":"ca7022bf19a82108df1f5bd78c7fc30806f931d932a65538be818caaa5f7049d","src/no_std_prelude.rs":"602e81e67b8952b6571826f431e3b6787be3073bc10f38a0d3374278f81a6a1f","src/normalize.rs":"de2670b4437d335d42884af844a750f70e541467ecd34077dfe032103cb9b041","src/perfect_hash.rs":"400c84e2f467f61bd55d55d08672da6a9ad7a57c938ce5d0c701a6994b1b273b","src/quick_check.rs":"9756312d75fc31b67fca954e44a4812945a7e436b03ba18b9a2441f6de570f6f","src/recompose.rs":"a6228ad7561a5c7a1ef1d510159bdde1eea8a161007c80e470432e9b844d5536","src/replace.rs":"b24c904f3e00851a78820e30ddfa4ff10c795f8925fd0ee7f5870f31fdfa770b","src/stream_safe.rs":"383d71f0da401af8e735877e43855c7e16cb06deb2263539cdec2a407dbe257d","src/tables.rs":"d24cf5a2a6d5059543b39eec6806c93fa8c314b52b251ddd354affcf91ef7f0b","src/test.rs":"0def2cb0a013fba29938262b3cd3533fbb10eacaf6bcd82eef1f91759fe0a2eb"},"package":"d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"}
+{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"0c5982585c7dfd09c79b804a14be1116bfaa7281dc5d3ecbb6915e569ed6aff7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"7bef9d9fba0d6241f26cc14785e9616f8a5c35ad75a2991980fe2ca0961c1d6a","benches/bench.rs":"827e5343b059a732904be29717c2797203bfd0a633edf08042afea65372a3e2c","scripts/unicode.py":"36c126935b828df07a3405d4200753a229f83596d413026f3cf98876f5bd80db","src/__test_api.rs":"78e21bfa0b98894f545c8ed3e31cec20d7a48951a7f3ed69a6130c4b3d463aee","src/decompose.rs":"c0eb774843a545356e63bbcd7fb926f80d3c97ef4601ca3701fc34154f2e9905","src/lib.rs":"1983769ea083caa36b0736c87cf2a98e91c2b900f1d5dec64e327360fa862386","src/lookups.rs":"962f9909b32e02b8a2a05836135d9cd39bb1ce01f7c659de99cbd8a3a3c78574","src/no_std_prelude.rs":"602e81e67b8952b6571826f431e3b6787be3073bc10f38a0d3374278f81a6a1f","src/normalize.rs":"de2670b4437d335d42884af844a750f70e541467ecd34077dfe032103cb9b041","src/perfect_hash.rs":"400c84e2f467f61bd55d55d08672da6a9ad7a57c938ce5d0c701a6994b1b273b","src/quick_check.rs":"9756312d75fc31b67fca954e44a4812945a7e436b03ba18b9a2441f6de570f6f","src/recompose.rs":"a6228ad7561a5c7a1ef1d510159bdde1eea8a161007c80e470432e9b844d5536","src/replace.rs":"b24c904f3e00851a78820e30ddfa4ff10c795f8925fd0ee7f5870f31fdfa770b","src/stream_safe.rs":"383d71f0da401af8e735877e43855c7e16cb06deb2263539cdec2a407dbe257d","src/tables.rs":"76d7d739c472931ab598ba59909258cf5b45cbcea680aae003093237361f8e4c","src/test.rs":"3af8ad8c6bd2cc1ca44660bd265ad813c88d3074b448df4d9ff376b25fb77d26"},"package":"81dee68f85cab8cf68dec42158baf3a79a1cdc065a8b103025965d6ccb7f6cbd"}
third_party/rust/unicode-normalization/Cargo.toml
@@ -3,17 +3,16 @@
 # When uploading crates to the registry Cargo will automatically
 # "normalize" Cargo.toml files for maximal compatibility
 # with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
 #
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
 
 [package]
 edition = "2018"
 name = "unicode-normalization"
-version = "0.1.19"
+version = "0.1.20"
 authors = ["kwantam <kwantam@gmail.com>", "Manish Goregaokar <manishsmail@gmail.com>"]
 exclude = ["target/*", "Cargo.lock", "scripts/tmp", "*.txt", "tests/*"]
 description = "This crate provides functions for normalization of\nUnicode strings, including Canonical and Compatible\nDecomposition and Recomposition, as described in\nUnicode Standard Annex #15.\n"
third_party/rust/unicode-normalization/README.md
@@ -31,7 +31,7 @@ to your `Cargo.toml`:
 
 ```toml
 [dependencies]
-unicode-normalization = "0.1.19"
+unicode-normalization = "0.1.20"
 ```
 
 ## `no_std` + `alloc` support
third_party/rust/unicode-normalization/scripts/unicode.py
@@ -391,9 +391,19 @@ def gen_composition_table(canon_comp, out):
 def gen_decomposition_tables(canon_decomp, compat_decomp, cjk_compat_variants_decomp, out):
     tables = [(canon_decomp, 'canonical'), (compat_decomp, 'compatibility'), (cjk_compat_variants_decomp, 'cjk_compat_variants')]
     for table, name in tables:
-        gen_mph_data(name + '_decomposed', table, "(u32, &'static [char])",
-            lambda k: "(0x{:x}, &[{}])".format(k,
-                ", ".join("'\\u{%s}'" % hexify(c) for c in table[k])))
+        offsets = {}
+        offset = 0
+        out.write("pub(crate) const %s_DECOMPOSED_CHARS: &[char] = &[\n" % name.upper())
+        for k, v in table.items():
+            offsets[k] = offset
+            offset += len(v)
+            for c in v:
+                out.write("    '\\u{%s}',\n" % hexify(c))
+        # The largest offset must fit in a u16.
+        assert offset < 65536
+        out.write("];\n")
+        gen_mph_data(name + '_decomposed', table, "(u32, (u16, u16))",
+            lambda k: "(0x{:x}, ({}, {}))".format(k, offsets[k], len(table[k])))
 
 def gen_qc_match(prop_table, out):
     out.write("    match c {\n")
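The regenerated tables now pack every decomposition into one flat `char` array per table and key the perfect-hash map with `(offset, len)` pairs instead of a `&'static [char]` per entry. A minimal Rust sketch of that layout, with invented data and a linear scan standing in for the generated perfect-hash lookup:

```rust
// Invented data for illustration; the real tables are emitted by unicode.py
// and looked up through a perfect hash rather than this linear scan.
const DECOMPOSED_CHARS: &[char] = &['A', '\u{300}', 'A', '\u{301}'];

// code point -> (offset into DECOMPOSED_CHARS, length of the decomposition)
const ENTRIES: &[(u32, (u16, u16))] = &[
    (0xC0, (0, 2)), // U+00C0 À -> A + U+0300 combining grave
    (0xC1, (2, 2)), // U+00C1 Á -> A + U+0301 combining acute
];

fn lookup(c: char) -> Option<&'static [char]> {
    ENTRIES
        .iter()
        .find(|&&(cp, _)| cp == c as u32)
        .map(|&(_, (start, len))| &DECOMPOSED_CHARS[start as usize..][..len as usize])
}

fn main() {
    assert_eq!(lookup('\u{C0}'), Some(&['A', '\u{300}'][..]));
    assert_eq!(lookup('x'), None);
}
```

On 64-bit targets each table entry shrinks from a 16-byte `&[char]` fat pointer to a 4-byte `(u16, u16)` pair, which is why the generator asserts that the largest offset fits in a `u16`.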
third_party/rust/unicode-normalization/src/lib.rs
@@ -34,7 +34,7 @@
 //!
 //! ```toml
 //! [dependencies]
-//! unicode-normalization = "0.1.19"
+//! unicode-normalization = "0.1.20"
 //! ```
 
 #![deny(missing_docs, unsafe_code)]
@@ -62,7 +62,10 @@ pub use crate::recompose::Recompositions;
 pub use crate::replace::Replacements;
 pub use crate::stream_safe::StreamSafe;
 pub use crate::tables::UNICODE_VERSION;
-use core::str::Chars;
+use core::{
+    str::Chars,
+    option,
+};
 
 mod no_std_prelude;
@@ -166,6 +169,39 @@ impl<'a> UnicodeNormalization<Chars<'a>> for &'a str {
     }
 }
 
+impl UnicodeNormalization<option::IntoIter<char>> for char {
+    #[inline]
+    fn nfd(self) -> Decompositions<option::IntoIter<char>> {
+        decompose::new_canonical(Some(self).into_iter())
+    }
+
+    #[inline]
+    fn nfkd(self) -> Decompositions<option::IntoIter<char>> {
+        decompose::new_compatible(Some(self).into_iter())
+    }
+
+    #[inline]
+    fn nfc(self) -> Recompositions<option::IntoIter<char>> {
+        recompose::new_canonical(Some(self).into_iter())
+    }
+
+    #[inline]
+    fn nfkc(self) -> Recompositions<option::IntoIter<char>> {
+        recompose::new_compatible(Some(self).into_iter())
+    }
+
+    #[inline]
+    fn cjk_compat_variants(self) -> Replacements<option::IntoIter<char>> {
+        replace::new_cjk_compat_variants(Some(self).into_iter())
+    }
+
+    #[inline]
+    fn stream_safe(self) -> StreamSafe<option::IntoIter<char>> {
+        StreamSafe::new(Some(self).into_iter())
+    }
+}
+
 impl<I: Iterator<Item = char>> UnicodeNormalization<I> for I {
     #[inline]
     fn nfd(self) -> Decompositions<I> {
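With this impl a single `char` can be normalized directly, without first building a `&str`. A short usage sketch (assuming unicode-normalization 0.1.20 as a dependency), mirroring the new test further below:

```rust
use unicode_normalization::UnicodeNormalization;

fn main() {
    // U+2126 OHM SIGN canonically decomposes to U+03A9 GREEK CAPITAL LETTER OMEGA.
    let nfd: String = '\u{2126}'.nfd().collect();
    assert_eq!(nfd, "\u{3a9}");
}
```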
third_party/rust/unicode-normalization/src/normalize.rs
@@ -51,6 +51,7 @@ pub(crate) fn canonical_fully_decomposed(c: char) -> Option<&'static [char]> {
         pair_lookup_fv_opt,
         None,
     )
+    .map(|(start, len)| &CANONICAL_DECOMPOSED_CHARS[start as usize..][..len as usize])
 }
 
 pub(crate) fn compatibility_fully_decomposed(c: char) -> Option<&'static [char]> {
@@ -62,6 +63,7 @@ pub(crate) fn compatibility_fully_decomposed(c: char) -> Option<&'static [char]> {
         pair_lookup_fv_opt,
         None,
     )
+    .map(|(start, len)| &COMPATIBILITY_DECOMPOSED_CHARS[start as usize..][..len as usize])
 }
 
 pub(crate) fn cjk_compat_variants_fully_decomposed(c: char) -> Option<&'static [char]> {
@@ -73,6 +75,7 @@ pub(crate) fn cjk_compat_variants_fully_decomposed(c: char) -> Option<&'static [char]> {
         pair_lookup_fv_opt,
         None,
     )
+    .map(|(start, len)| &CJK_COMPAT_VARIANTS_DECOMPOSED_CHARS[start as usize..][..len as usize])
 }
 
 /// Return whether the given character is a combining mark (`General_Category=Mark`)
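Each lookup now returns the `(start, len)` payload and maps it onto the flat table via two-step slicing. A tiny standalone sketch of that idiom (names here are illustrative):

```rust
fn main() {
    let table: &[char] = &['a', 'b', 'c', 'd', 'e'];
    let (start, len): (u16, u16) = (1, 3);
    // `&table[start..][..len]` takes the tail beginning at `start`, then the
    // first `len` elements of that tail: equivalent to `&table[start..start + len]`.
    let sub = &table[start as usize..][..len as usize];
    assert_eq!(sub, &['b', 'c', 'd'][..]);
}
```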
0 third_party/rust/unicode-normalization/src/no_std_prelude.rs (vendored) Normal file → Executable file
File diff suppressed because it is too large
third_party/rust/unicode-normalization/src/test.rs
@@ -105,6 +105,11 @@ fn test_nfkc() {
     );
 }
 
+#[test]
+fn test_normalize_char() {
+    assert_eq!('\u{2126}'.nfd().to_string(), "\u{3a9}")
+}
+
 #[test]
 fn test_is_combining_mark_ascii() {
     for cp in 0..0x7f {