Bug 1631722 - Vendor sfv crate, r=valentin

***
Vendor

Differential Revision: https://phabricator.services.mozilla.com/D83502
This commit is contained in:
undef1nd 2020-08-04 17:22:45 +00:00
Родитель 14202a2c79
Коммит c79925c0b3
57 изменённых файлов: 20669 добавлений и 7225 удалений

7
Cargo.lock сгенерированный
Просмотреть файл

@@ -2304,9 +2304,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "1.1.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4d6d89e0948bf10c08b9ecc8ac5b83f07f857ebe2c0cbe38de15b4e4f510356"
checksum = "c398b2b113b55809ceb9ee3e753fcbac793f1956663f3c36549c1346015c2afe"
dependencies = [
"autocfg 1.0.0",
]
[[package]]
name = "inflate"

1
third_party/rust/data-encoding/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@@ -0,0 +1 @@
{"files":{"Cargo.toml":"b5ad20e8d3fdfb4240c75817cc9855c09e155c931db70c83d733d11a8cfab48a","LICENSE":"d9ae65b8784809f801d67a1805ba55e5c911978aae6173c85f4ce9bccfaa7373","README.md":"677d8c82010c722e013ae024718145b00c8f876e16b6fe018433b5f8631f5a9e","src/lib.rs":"432828e9a8510a64ad03edddb08d39cd411fc5618df0d47de580936cfe831c29"},"package":"72aa14c04dfae8dd7d8a2b1cb7ca2152618cd01336dbfe704b8dcbf8d41dbd69"}

30
third_party/rust/data-encoding/Cargo.toml поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,30 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "data-encoding"
version = "2.2.1"
authors = ["Julien Cretin <git@ia0.eu>"]
include = ["Cargo.toml", "LICENSE", "README.md", "src/lib.rs"]
description = "Efficient and customizable data-encoding functions like base64, base32, and hex"
documentation = "https://docs.rs/data-encoding"
readme = "README.md"
keywords = ["no_std", "base64", "base32", "hex"]
categories = ["encoding", "no-std"]
license = "MIT"
repository = "https://github.com/ia0/data-encoding"
[features]
alloc = []
default = ["std"]
std = ["alloc"]

22
third_party/rust/data-encoding/LICENSE поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015-2017 Julien Cretin
Copyright (c) 2017 Google Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

114
third_party/rust/data-encoding/README.md поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,114 @@
[![Build Status][travis_badge]][travis]
[![Build Status][appveyor_badge]][appveyor]
[![Coverage Status][coveralls_badge]][coveralls]
[![fuzzit][fuzzit_badge]][fuzzit]
## Common use-cases
This library provides the following common encodings:
- `HEXLOWER`: lowercase hexadecimal
- `HEXLOWER_PERMISSIVE`: lowercase hexadecimal with case-insensitive decoding
- `HEXUPPER`: uppercase hexadecimal
- `HEXUPPER_PERMISSIVE`: uppercase hexadecimal with case-insensitive decoding
- `BASE32`: RFC4648 base32
- `BASE32_NOPAD`: RFC4648 base32 without padding
- `BASE32_DNSSEC`: RFC5155 base32
- `BASE32_DNSCURVE`: DNSCurve base32
- `BASE32HEX`: RFC4648 base32hex
- `BASE32HEX_NOPAD`: RFC4648 base32hex without padding
- `BASE64`: RFC4648 base64
- `BASE64_NOPAD`: RFC4648 base64 without padding
- `BASE64_MIME`: RFC2045-like base64
- `BASE64URL`: RFC4648 base64url
- `BASE64URL_NOPAD`: RFC4648 base64url without padding
Typical usage looks like:
```rust
// allocating functions
BASE64.encode(&input_to_encode)
HEXLOWER.decode(&input_to_decode)
// in-place functions
BASE32.encode_mut(&input_to_encode, &mut encoded_output)
BASE64URL.decode_mut(&input_to_decode, &mut decoded_output)
```
See the [documentation] or the [changelog] for more details.
## Custom use-cases
This library also provides the possibility to define custom little-endian ASCII
base-conversion encodings for bases of size 2, 4, 8, 16, 32, and 64 (for which
all above use-cases are particular instances). It supports:
- padded and unpadded encodings
- canonical encodings (e.g. trailing bits are checked)
- in-place encoding and decoding functions
- partial decoding functions (e.g. for error recovery)
- character translation (e.g. for case-insensitivity)
- most and least significant bit-order
- ignoring characters when decoding (e.g. for skipping newlines)
- wrapping the output when encoding
The typical definition of a custom encoding looks like:
```rust
lazy_static! {
static ref HEX: Encoding = {
let mut spec = Specification::new();
spec.symbols.push_str("0123456789abcdef");
spec.translate.from.push_str("ABCDEF");
spec.translate.to.push_str("abcdef");
spec.encoding().unwrap()
};
static ref BASE64: Encoding = {
let mut spec = Specification::new();
spec.symbols.push_str(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/");
spec.padding = Some('=');
spec.encoding().unwrap()
};
}
```
You may also use the [macro] library to define a compile-time custom encoding:
```rust
const HEX: Encoding = new_encoding!{
symbols: "0123456789abcdef",
translate_from: "ABCDEF",
translate_to: "abcdef",
};
const BASE64: Encoding = new_encoding!{
symbols: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
padding: '=',
};
```
See the [documentation] or the [changelog] for more details.
## Performance
The performance of the encoding and decoding functions (for both common and
custom encodings) are similar to existing implementations in C, Rust, and other
high-performance languages (see how to run the benchmarks on [github]).
## Swiss-knife binary
This crate is a library. If you are looking for the [binary] using this library,
see the installation instructions on [github].
[appveyor]: https://ci.appveyor.com/project/ia0/data-encoding
[appveyor_badge]: https://ci.appveyor.com/api/projects/status/wm4ga69xnlriukhl/branch/master?svg=true
[binary]: https://crates.io/crates/data-encoding-bin
[changelog]: https://github.com/ia0/data-encoding/blob/master/lib/CHANGELOG.md
[coveralls]: https://coveralls.io/github/ia0/data-encoding?branch=master
[coveralls_badge]: https://coveralls.io/repos/github/ia0/data-encoding/badge.svg?branch=master
[documentation]: https://docs.rs/data-encoding
[fuzzit]: https://app.fuzzit.dev/orgs/ia0-gh/dashboard
[fuzzit_badge]: https://app.fuzzit.dev/badge?org_id=ia0-gh
[github]: https://github.com/ia0/data-encoding
[macro]: https://crates.io/crates/data-encoding-macro
[travis]: https://travis-ci.org/ia0/data-encoding
[travis_badge]: https://travis-ci.org/ia0/data-encoding.svg?branch=master

2367
third_party/rust/data-encoding/src/lib.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@@ -1 +1 @@
{"files":{"Cargo.toml":"eeb5f5f88978e950820356ca434718da8ed9b40bf2745b03a400ac5a8b9fa57d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"ecc269ef87fd38a1d98e30bfac9ba964a9dbd9315c3770fed98d4d7cb5882055","README.rst":"cdcaa79d0e2a2d4980604827b6b043a464a9949fa2bfd033f65a5d6ea4a844de","benches/bench.rs":"e34466bc3c56d3f0bb2ccf37a0588dbec51bb1048729f3b5f2ef41e36900460f","benches/faststring.rs":"c490c03dc5f3d686fcb17c92d4dd79428ca9eff78f9a2c1cab60f83c48140827","src/equivalent.rs":"4d07c0ae8c8ff405fdbb45e6c891158d3fdcfedd47001e4cec090c79b5c56564","src/lib.rs":"31cde3e6244107cea11be6584dd803eaa441fb9fb762f410c85975e5a286a071","src/macros.rs":"472c9ec707495e6de60b4e67c1b25f2201bb962fa6672fae32addde2eb4df376","src/map.rs":"7530ca07f39ba3d7101c129c729af58457fa0e7f41e9ae6ed662c070d2a058a3","src/mutable_keys.rs":"2bf26fb36ad0ccd3c40b0f2cc4e5b8429e6627207f50fca07110a5011880a9dc","src/rayon/map.rs":"9fc361acad0c65bdee35b826915fc595708728382fa8995254a2adf6277d5979","src/rayon/mod.rs":"d0657d28aaf5f5df8b6904ed4da6987565d98f92f686f30fb0b0af2eac94fdac","src/rayon/set.rs":"cea5e1724bc8449b0bbacbb18c5ae60a2d41cfc5cfd496e89e1c17f77c6e7a49","src/serde.rs":"96850a3adc93ad890573de596cfe1edd70a71e0ad1a0d55963c40f12f49e63d0","src/set.rs":"311b415b5e310f32036075d03b3e9bf7783cb86e9b0747be19dc02e292953326","src/util.rs":"331f80b48387878caa01ab9cfd43927ea0c15129c6beb755d6b40abc6ada900f","tests/equivalent_trait.rs":"f48ef255e4bc6bc85ed11fd9bee4cc53759efb182e448d315f8d12af1f80b05d","tests/quick.rs":"025e9e4355c4ce76daf0366d5bde2e32bf90fe2d27831a7b7617a6d0e5974529","tests/serde.rs":"48f2a2184c819ffaa5c234ccea9c3bea1c58edf8ad9ada1476eedc179438d07d","tests/tests.rs":"c916ae9c5d08c042b7c3a0447ef3db5a1b9d37b3122fddace4235296a623725b"},"package":"a4d6d89e0948bf10c08b9ecc8ac5b83f07f857ebe2c0cbe38de15b4e4f510356"}
{"files":{"Cargo.toml":"dedc5ce728d1f98682e6fc55539bb6a15ada4ded07f81fe8210e8544464b3fba","LICENSE-APACHE":"7cfd738c53d61c79f07e348f622bf7707c9084237054d37fbe07788a75f5881c","LICENSE-MIT":"f7ab4cf986b56676dc613a20d3f5954c8dbbfb37a5edc8b55d299a7d9176a0f5","README.rst":"4a103371195eac4f630aab1759720a6e053bc29f091b7af3778626e305f0849a","benches/bench.rs":"7222f0b0120f1631fa26f8ffb315cf76845b525e8322e19735d72a2db16ba626","benches/faststring.rs":"0ee3c06a15c0b5738f1095cd301767f6370eaf5274d8bcdf15288eb878f2f62e","build.rs":"5f3ac5f25ea9588e3e4707da4c74e8567fcc62c91884ae9f6ccf881c66dd93ac","src/equivalent.rs":"d869950d65b923bdf4ffa26a3c0eead1adaef8f9e0326e8686371b96ac22d80e","src/lib.rs":"a1a1b04aac1f1c41371715603153254eb1c45ce20efb47acf9de62f645f5d4d7","src/macros.rs":"a87e45347bfcc6e69db88ee7f183d1df090548e3113aeb07dac3f3c56791811b","src/map.rs":"3e28d4d8d5cef7543abf18d114196550efa7de8518bb7513b548b9d34697e7e1","src/mutable_keys.rs":"f04d509baa275e7a97f6d1652c793e53d8a9c4d5a209e4a8e468ac65a161a975","src/rayon/map.rs":"b081a70d4fe20b7d9fd9ec5a40ed67ff89109aea3c8355b4b50960a08eeccc88","src/rayon/mod.rs":"1237365c4521aac5789970bdbd4a9e25a678d97dd0cc5ac4aea10beffccc9bfc","src/rayon/set.rs":"06fdaa47971279971c023097545ccba2af198cb05688ef8fbc7a574d023f6743","src/serde.rs":"3b8154dc705b8371db6ec8a1b44b619b5364f0ca0da968113a25fae494ba8d19","src/set.rs":"e6c9be75ccd4b8d869cb43646a9fa2b2fb3e34bb7be8c5f06236f71d160d8af4","src/util.rs":"7b09237478dad26482c3291110eb1e3ac8568d3a39698ab582c007fbd4a442f9","tests/equivalent_trait.rs":"3ae2b7538b7d61fb1e0d03416988002f3324718045fc38724b73b78bb0ce2be9","tests/macros_full_path.rs":"2c484b9ef500d0f6927c57b66665cb53f451467c094f335d11ed0a39c7da63cd","tests/quick.rs":"d4dc05e81fb69001d58ec670f4ffd4aca5d0dc8bf067b610a2264806e1d67668","tests/serde.rs":"6dd6cb13d5c9558167c1a4b9900b2bb16ce212e0f00df106283bbf6d5c1896e4","tests/tests.rs":"fef6e099a997ef6f36b75c08f7ba780161e119f00665356cd4bca6011aa403c2"},"package":"c398b2b113b55809ceb9ee3e753fcbac793f
1956663f3c36549c1346015c2afe"}

15
third_party/rust/indexmap/Cargo.toml поставляемый
Просмотреть файл

@@ -12,12 +12,13 @@
[package]
name = "indexmap"
version = "1.1.0"
version = "1.4.0"
authors = ["bluss", "Josh Stone <cuviper@gmail.com>"]
build = "build.rs"
description = "A hash table with consistent order and fast iteration.\n\nThe indexmap is a hash table where the iteration order of the key-value\npairs is independent of the hash values of the keys. It has the usual\nhash table functionality, it preserves insertion order except after\nremovals, and it allows lookup of its elements by either hash table key\nor numerical index. A corresponding hash set type is also provided.\n\nThis crate was initially published under the name ordermap, but it was renamed to\nindexmap.\n"
documentation = "https://docs.rs/indexmap/"
keywords = ["hashmap"]
categories = ["data-structures"]
keywords = ["hashmap", "no_std"]
categories = ["data-structures", "no-std"]
license = "Apache-2.0/MIT"
repository = "https://github.com/bluss/indexmap"
[package.metadata.docs.rs]
@ -25,6 +26,7 @@ features = ["serde-1", "rayon"]
[package.metadata.release]
no-dev-version = true
tag-name = "{{version}}"
[profile.bench]
debug = true
@ -47,14 +49,17 @@ version = "0.8"
version = "1.3"
[dev-dependencies.quickcheck]
version = "0.8"
version = "0.9"
default-features = false
[dev-dependencies.rand]
version = "0.6"
version = "0.7"
features = ["small_rng"]
[dev-dependencies.serde_test]
version = "1.0.99"
[build-dependencies.autocfg]
version = "1"
[features]
serde-1 = ["serde"]

402
third_party/rust/indexmap/LICENSE-APACHE поставляемый
Просмотреть файл

@@ -1,201 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

50
third_party/rust/indexmap/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +1,25 @@
Copyright (c) 2016--2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Copyright (c) 2016--2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

635
third_party/rust/indexmap/README.rst поставляемый
Просмотреть файл

@ -1,304 +1,331 @@
indexmap
========
|build_status|_ |crates|_ |docs|_ |rustc|_
.. |crates| image:: https://img.shields.io/crates/v/indexmap.svg
.. _crates: https://crates.io/crates/indexmap
.. |build_status| image:: https://travis-ci.org/bluss/indexmap.svg
.. _build_status: https://travis-ci.org/bluss/indexmap
.. |docs| image:: https://docs.rs/indexmap/badge.svg
.. _docs: https://docs.rs/indexmap
.. |rustc| image:: https://img.shields.io/badge/rust-1.18%2B-orange.svg
.. _rustc: https://img.shields.io/badge/rust-1.18%2B-orange.svg
A safe, pure-Rust hash table which preserves insertion order.
This crate implements compact map and set data-structures,
where the iteration order of the keys is independent from their hash or
value. It preserves insertion order (except after removals), and it
allows lookup of entries by either hash table key or numerical index.
Note: this crate was originally released under the name ``ordermap``,
but it was renamed to ``indexmap`` to better reflect its features.
Background
==========
This was inspired by Python 3.6's new dict implementation (which remembers
the insertion order and is fast to iterate, and is compact in memory).
Some of those features were translated to Rust, and some were not. The result
was indexmap, a hash table that has following properties:
- Order is **independent of hash function** and hash values of keys.
- Fast to iterate.
- Indexed in compact space.
- Preserves insertion order **as long** as you don't call ``.remove()``.
- Uses robin hood hashing just like Rust's libstd ``HashMap`` used to do
(before std switched to hashbrown).
- It's the usual backwards shift deletion, but only on the index vector, so
it's cheaper because it's moving less memory around.
Does not implement (Yet)
------------------------
- ``.reserve()`` exists but does not have a complete implementation
Performance
-----------
``IndexMap`` derives a couple of performance facts directly from how it is constructed,
which is roughly:
Two vectors, the first, sparse, with hashes and key-value indices, and the
second, dense, the key-value pairs.
- Iteration is very fast since it is on the dense key-values.
- Removal is fast since it moves memory areas only in the first vector,
and uses a single swap in the second vector.
- Lookup is fast-ish because the hashes and indices are densely stored.
Lookup also is slow-ish since hashes and key-value pairs are stored in
separate places. (Visible when cpu caches size is limiting.)
- In practice, ``IndexMap`` has been tested out as the hashmap in rustc in PR45282_ and
the performance was roughly on par across the whole workload.
- If you want the properties of ``IndexMap``, or its strongest performance points
fits your workload, it might be the best hash table implementation.
.. _PR45282: https://github.com/rust-lang/rust/pull/45282
Interesting Features
--------------------
- Insertion order is preserved (``.swap_remove()`` perturbs the order, like the method name says).
- Implements ``.pop() -> Option<(K, V)>`` in O(1) time.
- ``IndexMap::new()`` is empty and uses no allocation until you insert something.
- Lookup key-value pairs by index and vice versa.
- No ``unsafe``.
- Supports ``IndexMut``.
Where to go from here?
----------------------
- Ideas and PRs for how to implement insertion-order preserving remove (for example tombstones)
are welcome. The plan is to split the crate into two hash table implementations
a) the current compact index space version and b) the full insertion order version.
Ideas that we already did
-------------------------
- It can be an *indexable* ordered map in the current fashion
(This was implemented in 0.2.0, for potential use as a graph datastructure).
- Idea for more cache efficient lookup (This was implemented in 0.1.2).
Current ``indices: Vec<Pos>``. ``Pos`` is interpreted as ``(u32, u32)`` more
or less when ``.raw_capacity()`` fits in 32 bits. ``Pos`` then stores both the lower
half of the hash and the entry index.
This means that the hash values in ``Bucket`` don't need to be accessed
while scanning for an entry.
Recent Changes
==============
- 1.1.0
- Added optional feature `"rayon"` that adds parallel iterator support
to `IndexMap` and `IndexSet` using Rayon. This includes all the regular
iterators in parallel versions, and parallel sort.
- Implemented ``Clone`` for ``map::{Iter, Keys, Values}`` and
``set::{Difference, Intersection, Iter, SymmetricDifference, Union}``
- Implemented ``Debug`` for ``map::{Entry, IntoIter, Iter, Keys, Values}`` and
``set::{Difference, Intersection, IntoIter, Iter, SymmetricDifference, Union}``
- Serde trait ``IntoDeserializer`` is implemented for ``IndexMap`` and ``IndexSet``.
- Minimum Rust version requirement increased to Rust 1.30 for development builds.
- 1.0.2
- The new methods ``IndexMap::insert_full`` and ``IndexSet::insert_full`` are
both like ``insert`` with the index included in the return value.
- The new method ``Entry::and_modify`` can be used to modify occupied
entries, matching the new methods of ``std`` maps in Rust 1.26.
- The new method ``Entry::or_default`` inserts a default value in unoccupied
entries, matching the new methods of ``std`` maps in Rust 1.28.
- 1.0.1
- Document Rust version policy for the crate (see rustdoc)
- 1.0.0
- This is the 1.0 release for ``indexmap``! (the crate and datastructure
formerly known as “ordermap”)
- ``OccupiedEntry::insert`` changed its signature, to use ``&mut self`` for
the method receiver, matching the equivalent method for a standard
``HashMap``. Thanks to @dtolnay for finding this bug.
- The deprecated old names from ordermap were removed: ``OrderMap``,
``OrderSet``, ``ordermap!{}``, ``orderset!{}``. Use the new ``IndexMap``
etc names instead.
- 0.4.1
- Renamed crate to ``indexmap``; the ``ordermap`` crate is now deprecated
and the types ``OrderMap/Set`` now have a deprecation notice.
- 0.4.0
- This is the last release series for this ``ordermap`` under that name,
because the crate is **going to be renamed** to ``indexmap`` (with types
``IndexMap``, ``IndexSet``) and no change in functionality!
- The map and its associated structs moved into the ``map`` submodule of the
crate, so that the map and set are symmetric
+ The iterators, ``Entry`` and other structs are now under ``ordermap::map::``
- Internally refactored ``OrderMap<K, V, S>`` so that all the main algorithms
(insertion, lookup, removal etc) that don't use the ``S`` parameter (the
hasher) are compiled without depending on ``S``, which reduces generics bloat.
- ``Entry<K, V>`` no longer has a type parameter ``S``, which is just like
the standard ``HashMap``'s entry.
- Minimum Rust version requirement increased to Rust 1.18
- 0.3.5
- Documentation improvements
- 0.3.4
- The ``.retain()`` methods for ``OrderMap`` and ``OrderSet`` now
traverse the elements in order, and the retained elements **keep their order**
- Added new methods ``.sort_by()``, ``.sort_keys()`` to ``OrderMap`` and
``.sort_by()``, ``.sort()`` to ``OrderSet``. These methods allow you to
sort the maps in place efficiently.
- 0.3.3
- Document insertion behaviour better by @lucab
- Updated dependencies (no feature changes) by @ignatenkobrain
- 0.3.2
- Add ``OrderSet`` by @cuviper!
- ``OrderMap::drain`` is now (too) a double ended iterator.
- 0.3.1
- In all ordermap iterators, forward the ``collect`` method to the underlying
iterator as well.
- Add crates.io categories.
- 0.3.0
- The methods ``get_pair``, ``get_pair_index`` were both replaced by
``get_full`` (and the same for the mutable case).
- Method ``swap_remove_pair`` replaced by ``swap_remove_full``.
- Add trait ``MutableKeys`` for opt-in mutable key access. Mutable key access
is only possible through the methods of this extension trait.
- Add new trait ``Equivalent`` for key equivalence. This extends the
``Borrow`` trait mechanism for ``OrderMap::get`` in a backwards compatible
way, just some minor type inference related issues may become apparent.
See `#10`__ for more information.
- Implement ``Extend<(&K, &V)>`` by @xfix.
__ https://github.com/bluss/ordermap/pull/10
- 0.2.13
- Fix deserialization to support custom hashers by @Techcable.
- Add methods ``.index()`` on the entry types by @garro95.
- 0.2.12
- Add methods ``.with_hasher()``, ``.hasher()``.
- 0.2.11
- Support ``ExactSizeIterator`` for the iterators. By @Binero.
- Use ``Box<[Pos]>`` internally, saving a word in the ``OrderMap`` struct.
- Serde support, with crate feature ``"serde-1"``. By @xfix.
- 0.2.10
- Add iterator ``.drain(..)`` by @stevej.
- 0.2.9
- Add method ``.is_empty()`` by @overvenus.
- Implement ``PartialEq, Eq`` by @overvenus.
- Add method ``.sorted_by()``.
- 0.2.8
- Add iterators ``.values()`` and ``.values_mut()``.
- Fix compatibility with 32-bit platforms.
- 0.2.7
- Add ``.retain()``.
- 0.2.6
- Add ``OccupiedEntry::remove_entry`` and other minor entry methods,
so that it now has all the features of ``HashMap``'s entries.
- 0.2.5
- Improved ``.pop()`` slightly.
- 0.2.4
- Improved performance of ``.insert()`` (`#3`__) by @pczarn.
__ https://github.com/bluss/ordermap/pull/3
- 0.2.3
- Generalize ``Entry`` for now, so that it works on hashmaps with non-default
hasher. However, there's a lingering compat issue since libstd ``HashMap``
does not parameterize its entries by the hasher (``S`` typarm).
- Special case some iterator methods like ``.nth()``.
- 0.2.2
- Disable the verbose ``Debug`` impl by default.
- 0.2.1
- Fix doc links and clarify docs.
- 0.2.0
- Add more ``HashMap`` methods & compat with its API.
- Experimental support for ``.entry()`` (the simplest parts of the API).
- Add ``.reserve()`` (placeholder impl).
- Add ``.remove()`` as synonym for ``.swap_remove()``.
- Changed ``.insert()`` to swap value if the entry already exists, and
return ``Option``.
- Experimental support as an *indexed* hash map! Added methods
``.get_index()``, ``.get_index_mut()``, ``.swap_remove_index()``,
``.get_pair_index()``, ``.get_pair_index_mut()``.
- 0.1.2
- Implement the 32/32 split idea for ``Pos`` which improves cache utilization
and lookup performance.
- 0.1.1
- Initial release.
indexmap
========
|build_status|_ |crates|_ |docs|_ |rustc|_
.. |crates| image:: https://img.shields.io/crates/v/indexmap.svg
.. _crates: https://crates.io/crates/indexmap
.. |build_status| image:: https://travis-ci.org/bluss/indexmap.svg
.. _build_status: https://travis-ci.org/bluss/indexmap
.. |docs| image:: https://docs.rs/indexmap/badge.svg
.. _docs: https://docs.rs/indexmap
.. |rustc| image:: https://img.shields.io/badge/rust-1.18%2B-orange.svg
.. _rustc: https://img.shields.io/badge/rust-1.18%2B-orange.svg
A safe, pure-Rust hash table which preserves (in a limited sense) insertion
order.
This crate implements compact map and set data-structures,
where the iteration order of the keys is independent from their hash or
value. It preserves insertion order (except after removals), and it
allows lookup of entries by either hash table key or numerical index.
Note: this crate was originally released under the name ``ordermap``,
but it was renamed to ``indexmap`` to better reflect its features.
Background
==========
This was inspired by Python 3.6's new dict implementation (which remembers
the insertion order and is fast to iterate, and is compact in memory).
Some of those features were translated to Rust, and some were not. The result
was indexmap, a hash table that has following properties:
- Order is **independent of hash function** and hash values of keys.
- Fast to iterate.
- Indexed in compact space.
- Preserves insertion order **as long** as you don't call ``.remove()``.
- Uses robin hood hashing just like Rust's libstd ``HashMap`` used to do
(before std switched to hashbrown).
- It's the usual backwards shift deletion, but only on the index vector, so
it's cheaper because it's moving less memory around.
Does not implement (Yet)
------------------------
- ``.reserve()`` exists but does not have a complete implementation
Performance
-----------
``IndexMap`` derives a couple of performance facts directly from how it is constructed,
which is roughly:
Two vectors, the first, sparse, with hashes and key-value indices, and the
second, dense, the key-value pairs.
- Iteration is very fast since it is on the dense key-values.
- Removal is fast since it moves memory areas only in the first vector,
and uses a single swap in the second vector.
- Lookup is fast-ish because the hashes and indices are densely stored.
Lookup also is slow-ish since hashes and key-value pairs are stored in
separate places. (Visible when cpu caches size is limiting.)
- In practice, ``IndexMap`` has been tested out as the hashmap in rustc in PR45282_ and
the performance was roughly on par across the whole workload.
- If you want the properties of ``IndexMap``, or its strongest performance points
fits your workload, it might be the best hash table implementation.
.. _PR45282: https://github.com/rust-lang/rust/pull/45282
- Idea for more cache efficient lookup (This was implemented in 0.1.2).
Current ``indices: Vec<Pos>``. ``Pos`` is interpreted as ``(u32, u32)`` more
or less when ``.raw_capacity()`` fits in 32 bits. ``Pos`` then stores both the lower
half of the hash and the entry index.
This means that the hash values in ``Bucket`` don't need to be accessed
while scanning for an entry.
Recent Changes
==============
- 1.4.0
- Add new method ``get_index_of`` by @Thermatrix in PR 115_ and 120_
- Fix build script rebuild-if-changed configuration to use "build.rs";
fixes issue 123_. Fix by @cuviper.
- Dev-dependencies (rand and quickcheck) have been updated. The crate's tests
now run using Rust 1.32 or later (MSRV for building the crate has not changed).
by @kjeremy and @bluss
.. _123: https://github.com/bluss/indexmap/issues/123
.. _115: https://github.com/bluss/indexmap/pull/115
.. _120: https://github.com/bluss/indexmap/pull/120
- 1.3.2
- Maintenance update to regenerate the published `Cargo.toml`.
- 1.3.1
- Maintenance update for formatting and ``autocfg`` 1.0.
- 1.3.0
- The deprecation messages in the previous version have been removed.
(The methods have not otherwise changed.) Docs for removal methods have been
improved.
- From Rust 1.36, this crate supports being built **without std**, requiring
``alloc`` instead. This is enabled automatically when it is detected that
``std`` is not available. There is no crate feature to enable/disable to
trigger this. The new build-dep ``autocfg`` enables this.
- 1.2.0
- Plain ``.remove()`` now has a deprecation message, it informs the user
about picking one of the removal functions ``swap_remove`` and ``shift_remove``
which have different performance and order semantics.
Plain ``.remove()`` will not be removed, the warning message and method
will remain until further.
- Add new method ``shift_remove`` for order preserving removal on the map,
and ``shift_take`` for the corresponding operation on the set.
- Add methods ``swap_remove``, ``swap_remove_entry`` to ``Entry``.
- Fix indexset/indexmap to support full paths, like ``indexmap::indexmap!()``
- Internal improvements: fix warnings, deprecations and style lints
- 1.1.0
- Added optional feature `"rayon"` that adds parallel iterator support
to `IndexMap` and `IndexSet` using Rayon. This includes all the regular
iterators in parallel versions, and parallel sort.
- Implemented ``Clone`` for ``map::{Iter, Keys, Values}`` and
``set::{Difference, Intersection, Iter, SymmetricDifference, Union}``
- Implemented ``Debug`` for ``map::{Entry, IntoIter, Iter, Keys, Values}`` and
``set::{Difference, Intersection, IntoIter, Iter, SymmetricDifference, Union}``
- Serde trait ``IntoDeserializer`` is implemented for ``IndexMap`` and ``IndexSet``.
- Minimum Rust version requirement increased to Rust 1.30 for development builds.
- 1.0.2
- The new methods ``IndexMap::insert_full`` and ``IndexSet::insert_full`` are
both like ``insert`` with the index included in the return value.
- The new method ``Entry::and_modify`` can be used to modify occupied
entries, matching the new methods of ``std`` maps in Rust 1.26.
- The new method ``Entry::or_default`` inserts a default value in unoccupied
entries, matching the new methods of ``std`` maps in Rust 1.28.
- 1.0.1
- Document Rust version policy for the crate (see rustdoc)
- 1.0.0
- This is the 1.0 release for ``indexmap``! (the crate and datastructure
formerly known as “ordermap”)
- ``OccupiedEntry::insert`` changed its signature, to use ``&mut self`` for
the method receiver, matching the equivalent method for a standard
``HashMap``. Thanks to @dtolnay for finding this bug.
- The deprecated old names from ordermap were removed: ``OrderMap``,
``OrderSet``, ``ordermap!{}``, ``orderset!{}``. Use the new ``IndexMap``
etc names instead.
- 0.4.1
- Renamed crate to ``indexmap``; the ``ordermap`` crate is now deprecated
and the types ``OrderMap/Set`` now have a deprecation notice.
- 0.4.0
- This is the last release series for this ``ordermap`` under that name,
because the crate is **going to be renamed** to ``indexmap`` (with types
``IndexMap``, ``IndexSet``) and no change in functionality!
- The map and its associated structs moved into the ``map`` submodule of the
crate, so that the map and set are symmetric
+ The iterators, ``Entry`` and other structs are now under ``ordermap::map::``
- Internally refactored ``OrderMap<K, V, S>`` so that all the main algorithms
(insertion, lookup, removal etc) that don't use the ``S`` parameter (the
hasher) are compiled without depending on ``S``, which reduces generics bloat.
- ``Entry<K, V>`` no longer has a type parameter ``S``, which is just like
the standard ``HashMap``'s entry.
- Minimum Rust version requirement increased to Rust 1.18
- 0.3.5
- Documentation improvements
- 0.3.4
- The ``.retain()`` methods for ``OrderMap`` and ``OrderSet`` now
traverse the elements in order, and the retained elements **keep their order**
- Added new methods ``.sort_by()``, ``.sort_keys()`` to ``OrderMap`` and
``.sort_by()``, ``.sort()`` to ``OrderSet``. These methods allow you to
sort the maps in place efficiently.
- 0.3.3
- Document insertion behaviour better by @lucab
- Updated dependencies (no feature changes) by @ignatenkobrain
- 0.3.2
- Add ``OrderSet`` by @cuviper!
- ``OrderMap::drain`` is now (too) a double ended iterator.
- 0.3.1
- In all ordermap iterators, forward the ``collect`` method to the underlying
iterator as well.
- Add crates.io categories.
- 0.3.0
- The methods ``get_pair``, ``get_pair_index`` were both replaced by
``get_full`` (and the same for the mutable case).
- Method ``swap_remove_pair`` replaced by ``swap_remove_full``.
- Add trait ``MutableKeys`` for opt-in mutable key access. Mutable key access
is only possible through the methods of this extension trait.
- Add new trait ``Equivalent`` for key equivalence. This extends the
``Borrow`` trait mechanism for ``OrderMap::get`` in a backwards compatible
way, just some minor type inference related issues may become apparent.
See `#10`__ for more information.
- Implement ``Extend<(&K, &V)>`` by @xfix.
__ https://github.com/bluss/ordermap/pull/10
- 0.2.13
- Fix deserialization to support custom hashers by @Techcable.
- Add methods ``.index()`` on the entry types by @garro95.
- 0.2.12
- Add methods ``.with_hasher()``, ``.hasher()``.
- 0.2.11
- Support ``ExactSizeIterator`` for the iterators. By @Binero.
- Use ``Box<[Pos]>`` internally, saving a word in the ``OrderMap`` struct.
- Serde support, with crate feature ``"serde-1"``. By @xfix.
- 0.2.10
- Add iterator ``.drain(..)`` by @stevej.
- 0.2.9
- Add method ``.is_empty()`` by @overvenus.
- Implement ``PartialEq, Eq`` by @overvenus.
- Add method ``.sorted_by()``.
- 0.2.8
- Add iterators ``.values()`` and ``.values_mut()``.
- Fix compatibility with 32-bit platforms.
- 0.2.7
- Add ``.retain()``.
- 0.2.6
- Add ``OccupiedEntry::remove_entry`` and other minor entry methods,
so that it now has all the features of ``HashMap``'s entries.
- 0.2.5
- Improved ``.pop()`` slightly.
- 0.2.4
- Improved performance of ``.insert()`` (`#3`__) by @pczarn.
__ https://github.com/bluss/ordermap/pull/3
- 0.2.3
- Generalize ``Entry`` for now, so that it works on hashmaps with non-default
hasher. However, there's a lingering compat issue since libstd ``HashMap``
does not parameterize its entries by the hasher (``S`` typarm).
- Special case some iterator methods like ``.nth()``.
- 0.2.2
- Disable the verbose ``Debug`` impl by default.
- 0.2.1
- Fix doc links and clarify docs.
- 0.2.0
- Add more ``HashMap`` methods & compat with its API.
- Experimental support for ``.entry()`` (the simplest parts of the API).
- Add ``.reserve()`` (placeholder impl).
- Add ``.remove()`` as synonym for ``.swap_remove()``.
- Changed ``.insert()`` to swap value if the entry already exists, and
return ``Option``.
- Experimental support as an *indexed* hash map! Added methods
``.get_index()``, ``.get_index_mut()``, ``.swap_remove_index()``,
``.get_pair_index()``, ``.get_pair_index_mut()``.
- 0.1.2
- Implement the 32/32 split idea for ``Pos`` which improves cache utilization
and lookup performance.
- 0.1.1
- Initial release.

1509
third_party/rust/indexmap/benches/bench.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,184 +1,182 @@
#![feature(test)]
extern crate test;
extern crate rand;
extern crate lazy_static;
use test::Bencher;
extern crate indexmap;
use indexmap::IndexMap;
use std::collections::HashMap;
use std::iter::FromIterator;
use rand::thread_rng;
use rand::seq::SliceRandom;
use std::hash::{Hash, Hasher};
use std::borrow::Borrow;
use std::ops::Deref;
use std::mem;
// NOTE(review): this is the *old* side of a diff of a vendored file
// (third_party/rust/indexmap/benches/bench.rs). Vendored sources are
// integrity-checked via .cargo-checksum.json, so only comments are added.

/// Wrapper whose `Hash` impl feeds the raw bytes to the hasher in a single
/// `write` call ("one-shot" hashing), used below to compare against the
/// default string `Hash` impls.
#[derive(PartialEq, Eq, Copy, Clone)]
pub struct OneShot<T: ?Sized>(pub T);

impl Hash for OneShot<str>
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Single call, no length prefix — bypasses str's default hashing.
        h.write(self.0.as_bytes())
    }
}

impl<'a, S> From<&'a S> for &'a OneShot<str>
    where S: AsRef<str>
{
    fn from(s: &'a S) -> Self {
        let s: &str = s.as_ref();
        // NOTE(review): transmuting &str -> &OneShot<str> assumes the two
        // references have identical layout, but OneShot is not
        // #[repr(transparent)] in this copy, so the layout is unspecified;
        // the updated copy of this file adds #[repr(transparent)] and uses
        // a pointer cast instead.
        unsafe {
            mem::transmute(s)
        }
    }
}

impl Hash for OneShot<String>
{
    fn hash<H: Hasher>(&self, h: &mut H) {
        h.write(self.0.as_bytes())
    }
}

// Lets a map keyed by OneShot<String> be probed with a &OneShot<str>.
impl Borrow<OneShot<str>> for OneShot<String>
{
    fn borrow(&self) -> &OneShot<str> {
        <&OneShot<str>>::from(&self.0)
    }
}

impl<T> Deref for OneShot<T>
{
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

/// Collect `iter` into a Vec and shuffle it with the thread-local RNG so
/// the benchmarks insert keys in randomized order.
fn shuffled_keys<I>(iter: I) -> Vec<I::Item>
    where I: IntoIterator
{
    let mut v = Vec::from_iter(iter);
    let mut rng = thread_rng();
    v.shuffle(&mut rng);
    v
}

// Baseline: insert 10_000 stringified integers into std HashMap.
#[bench]
fn insert_hashmap_string_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = HashMap::with_capacity(c);
        for x in 0..c {
            map.insert(x.to_string(), ());
        }
        map
    });
}

// Same insert benchmark, with the one-shot-hashed key wrapper.
#[bench]
fn insert_hashmap_string_oneshot_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = HashMap::with_capacity(c);
        for x in 0..c {
            map.insert(OneShot(x.to_string()), ());
        }
        map
    });
}

// Same insert benchmark against IndexMap.
#[bench]
fn insert_orderedmap_string_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = IndexMap::with_capacity(c);
        for x in 0..c {
            map.insert(x.to_string(), ());
        }
        map
    });
}

// Lookup of 5_000 present string keys in a 10_000-entry HashMap whose keys
// were inserted in shuffled order.
#[bench]
fn lookup_hashmap_10_000_exist_string(b: &mut Bencher) {
    let c = 10_000;
    let mut map = HashMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(key.to_string(), 1);
    }
    // Probe only the upper half of the key range (all present).
    let lookups = (5000..c).map(|x| x.to_string()).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// As above, with one-shot-hashed keys.
#[bench]
fn lookup_hashmap_10_000_exist_string_oneshot(b: &mut Bencher) {
    let c = 10_000;
    let mut map = HashMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(OneShot(key.to_string()), 1);
    }
    let lookups = (5000..c).map(|x| OneShot(x.to_string())).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// IndexMap counterpart of the lookup benchmark.
#[bench]
fn lookup_ordermap_10_000_exist_string(b: &mut Bencher) {
    let c = 10_000;
    let mut map = IndexMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(key.to_string(), 1);
    }
    let lookups = (5000..c).map(|x| x.to_string()).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// IndexMap counterpart, with one-shot-hashed keys.
#[bench]
fn lookup_ordermap_10_000_exist_string_oneshot(b: &mut Bencher) {
    let c = 10_000;
    let mut map = IndexMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(OneShot(key.to_string()), 1);
    }
    let lookups = (5000..c).map(|x| OneShot(x.to_string())).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}
#![feature(test)]
extern crate lazy_static;
extern crate rand;
extern crate test;
use test::Bencher;
extern crate indexmap;
use indexmap::IndexMap;
use std::collections::HashMap;
use std::iter::FromIterator;
use rand::seq::SliceRandom;
use rand::thread_rng;
use std::hash::{Hash, Hasher};
use std::borrow::Borrow;
use std::ops::Deref;
// NOTE(review): this is the *new* side of a diff of a vendored file
// (third_party/rust/indexmap/benches/bench.rs, indexmap 1.4.0). Vendored
// sources are checksummed, so only comments are added.

/// Wrapper whose `Hash` impl feeds the raw bytes to the hasher in a single
/// `write` call ("one-shot" hashing), used below to compare against the
/// default string `Hash` impls.
#[derive(PartialEq, Eq, Copy, Clone)]
// repr(transparent) guarantees OneShot<T> has the same layout as T, which
// makes the reference cast in From<&'a S> below sound.
#[repr(transparent)]
pub struct OneShot<T: ?Sized>(pub T);

impl Hash for OneShot<str> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Single call, no length prefix — bypasses str's default hashing.
        h.write(self.0.as_bytes())
    }
}

impl<'a, S> From<&'a S> for &'a OneShot<str>
where
    S: AsRef<str>,
{
    fn from(s: &'a S) -> Self {
        let s: &str = s.as_ref();
        // SAFETY: OneShot is #[repr(transparent)] over its single field, so
        // &str and &OneShot<str> have identical layout.
        unsafe { &*(s as *const str as *const OneShot<str>) }
    }
}

impl Hash for OneShot<String> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        h.write(self.0.as_bytes())
    }
}

// Lets a map keyed by OneShot<String> be probed with a &OneShot<str>.
impl Borrow<OneShot<str>> for OneShot<String> {
    fn borrow(&self) -> &OneShot<str> {
        <&OneShot<str>>::from(&self.0)
    }
}

impl<T> Deref for OneShot<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

/// Collect `iter` into a Vec and shuffle it with the thread-local RNG so
/// the benchmarks insert keys in randomized order.
fn shuffled_keys<I>(iter: I) -> Vec<I::Item>
where
    I: IntoIterator,
{
    let mut v = Vec::from_iter(iter);
    let mut rng = thread_rng();
    v.shuffle(&mut rng);
    v
}

// Baseline: insert 10_000 stringified integers into std HashMap.
#[bench]
fn insert_hashmap_string_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = HashMap::with_capacity(c);
        for x in 0..c {
            map.insert(x.to_string(), ());
        }
        map
    });
}

// Same insert benchmark, with the one-shot-hashed key wrapper.
#[bench]
fn insert_hashmap_string_oneshot_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = HashMap::with_capacity(c);
        for x in 0..c {
            map.insert(OneShot(x.to_string()), ());
        }
        map
    });
}

// Same insert benchmark against IndexMap.
#[bench]
fn insert_orderedmap_string_10_000(b: &mut Bencher) {
    let c = 10_000;
    b.iter(|| {
        let mut map = IndexMap::with_capacity(c);
        for x in 0..c {
            map.insert(x.to_string(), ());
        }
        map
    });
}

// Lookup of 5_000 present string keys in a 10_000-entry HashMap whose keys
// were inserted in shuffled order.
#[bench]
fn lookup_hashmap_10_000_exist_string(b: &mut Bencher) {
    let c = 10_000;
    let mut map = HashMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(key.to_string(), 1);
    }
    // Probe only the upper half of the key range (all present).
    let lookups = (5000..c).map(|x| x.to_string()).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// As above, with one-shot-hashed keys.
#[bench]
fn lookup_hashmap_10_000_exist_string_oneshot(b: &mut Bencher) {
    let c = 10_000;
    let mut map = HashMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(OneShot(key.to_string()), 1);
    }
    let lookups = (5000..c)
        .map(|x| OneShot(x.to_string()))
        .collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// IndexMap counterpart of the lookup benchmark.
#[bench]
fn lookup_ordermap_10_000_exist_string(b: &mut Bencher) {
    let c = 10_000;
    let mut map = IndexMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(key.to_string(), 1);
    }
    let lookups = (5000..c).map(|x| x.to_string()).collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

// IndexMap counterpart, with one-shot-hashed keys.
#[bench]
fn lookup_ordermap_10_000_exist_string_oneshot(b: &mut Bencher) {
    let c = 10_000;
    let mut map = IndexMap::with_capacity(c);
    let keys = shuffled_keys(0..c);
    for &key in &keys {
        map.insert(OneShot(key.to_string()), 1);
    }
    let lookups = (5000..c)
        .map(|x| OneShot(x.to_string()))
        .collect::<Vec<_>>();
    b.iter(|| {
        let mut found = 0;
        for key in &lookups {
            found += map.get(key).is_some() as i32;
        }
        found
    });
}

7
third_party/rust/indexmap/build.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,7 @@
extern crate autocfg;
// Build script (new in indexmap 1.4.0): probes the sysroot so the crate
// can detect at build time whether `std` is available (no-std support).
fn main() {
    let ac = autocfg::new();
    // Emits cfg flags telling the crate whether `std` exists in the sysroot.
    ac.emit_sysroot_crate("std");
    // Re-run only when build.rs itself changes.
    autocfg::rerun_path("build.rs");
}

54
third_party/rust/indexmap/src/equivalent.rs поставляемый
Просмотреть файл

@ -1,27 +1,27 @@
use std::borrow::Borrow;
/// Key equivalence trait.
///
/// This trait allows hash table lookup to be customized.
/// It has one blanket implementation that uses the regular `Borrow` solution,
/// just like `HashMap` and `BTreeMap` do, so that you can pass `&str` to lookup
/// into a map with `String` keys and so on.
///
/// # Contract
///
/// The implementor **must** hash like `K`, if it is hashable.
pub trait Equivalent<K: ?Sized> {
    /// Compare self to `key` and return `true` if they are equal.
    fn equivalent(&self, key: &K) -> bool;
}

// Blanket impl: any `Q: Eq` that `K` can be borrowed as is equivalent to
// `K` by comparing the borrowed forms — this mirrors the std
// `Borrow`-based lookup behaviour described in the trait docs above.
impl<Q: ?Sized, K: ?Sized> Equivalent<K> for Q
    where Q: Eq,
          K: Borrow<Q>,
{
    #[inline]
    fn equivalent(&self, key: &K) -> bool {
        *self == *key.borrow()
    }
}
use std::borrow::Borrow;
/// Key equivalence trait.
///
/// This trait allows hash table lookup to be customized.
/// It has one blanket implementation that uses the regular `Borrow` solution,
/// just like `HashMap` and `BTreeMap` do, so that you can pass `&str` to lookup
/// into a map with `String` keys and so on.
///
/// # Contract
///
/// The implementor **must** hash like `K`, if it is hashable.
pub trait Equivalent<K: ?Sized> {
    /// Compare self to `key` and return `true` if they are equal.
    fn equivalent(&self, key: &K) -> bool;
}

// Blanket impl: any `Q: Eq` that `K` can be borrowed as is equivalent to
// `K` by comparing the borrowed forms — this mirrors the std
// `Borrow`-based lookup behaviour described in the trait docs above.
impl<Q: ?Sized, K: ?Sized> Equivalent<K> for Q
where
    Q: Eq,
    K: Borrow<Q>,
{
    #[inline]
    fn equivalent(&self, key: &K) -> bool {
        *self == *key.borrow()
    }
}

257
third_party/rust/indexmap/src/lib.rs поставляемый
Просмотреть файл

@ -1,93 +1,164 @@
#![deny(unsafe_code)]
#![doc(html_root_url = "https://docs.rs/indexmap/1/")]
//! [`IndexMap`] is a hash table where the iteration order of the key-value
//! pairs is independent of the hash values of the keys.
//!
//! [`IndexSet`] is a corresponding hash set using the same implementation and
//! with similar properties.
//!
//! [`IndexMap`]: map/struct.IndexMap.html
//! [`IndexSet`]: set/struct.IndexSet.html
//!
//!
//! ## Rust Version
//!
//! This version of indexmap requires Rust 1.18 or later, or 1.30+ for
//! development builds.
//!
//! The indexmap 1.x release series will use a carefully considered version
//! upgrade policy, where in a later 1.x version, we will raise the minimum
//! required Rust version.
#[macro_use]
mod macros;
#[cfg(feature = "serde-1")]
mod serde;
mod util;
mod equivalent;
mod mutable_keys;
pub mod set;
pub mod map;
// Placed after `map` and `set` so new `rayon` methods on the types
// are documented after the "normal" methods.
#[cfg(feature = "rayon")]
mod rayon;
pub use equivalent::Equivalent;
pub use map::IndexMap;
pub use set::IndexSet;
// shared private items
/// Hash value newtype. Not larger than usize, since anything larger
/// isn't used for selecting position anyway.
#[derive(Copy, Debug)]
struct HashValue(usize);
impl HashValue {
#[inline(always)]
fn get(self) -> usize { self.0 }
}
impl Clone for HashValue {
#[inline]
fn clone(&self) -> Self { *self }
}
impl PartialEq for HashValue {
#[inline]
fn eq(&self, rhs: &Self) -> bool {
self.0 == rhs.0
}
}
#[derive(Copy, Clone, Debug)]
struct Bucket<K, V> {
hash: HashValue,
key: K,
value: V,
}
impl<K, V> Bucket<K, V> {
// field accessors -- used for `f` instead of closures in `.map(f)`
fn key_ref(&self) -> &K { &self.key }
fn value_ref(&self) -> &V { &self.value }
fn value_mut(&mut self) -> &mut V { &mut self.value }
fn key(self) -> K { self.key }
fn key_value(self) -> (K, V) { (self.key, self.value) }
fn refs(&self) -> (&K, &V) { (&self.key, &self.value) }
fn ref_mut(&mut self) -> (&K, &mut V) { (&self.key, &mut self.value) }
fn muts(&mut self) -> (&mut K, &mut V) { (&mut self.key, &mut self.value) }
}
trait Entries {
type Entry;
fn into_entries(self) -> Vec<Self::Entry>;
fn as_entries(&self) -> &[Self::Entry];
fn as_entries_mut(&mut self) -> &mut [Self::Entry];
fn with_entries<F>(&mut self, f: F)
where F: FnOnce(&mut [Self::Entry]);
}
#![deny(unsafe_code)]
#![doc(html_root_url = "https://docs.rs/indexmap/1/")]
#![cfg_attr(not(has_std), no_std)]
//! [`IndexMap`] is a hash table where the iteration order of the key-value
//! pairs is independent of the hash values of the keys.
//!
//! [`IndexSet`] is a corresponding hash set using the same implementation and
//! with similar properties.
//!
//! [`IndexMap`]: map/struct.IndexMap.html
//! [`IndexSet`]: set/struct.IndexSet.html
//!
//!
//! ### Feature Highlights
//!
//! [`IndexMap`] and [`IndexSet`] are drop-in compatible with the std `HashMap`
//! and `HashSet`, but they also have some features of note:
//!
//! - The ordering semantics (see their documentation for details)
//! - Sorting methods and the [`.pop()`][IndexMap::pop] methods.
//! - The [`Equivalent`] trait, which offers more flexible equality definitions
//! between borrowed and owned versions of keys.
//! - The [`MutableKeys`][map::MutableKeys] trait, which gives opt-in mutable
//! access to hash map keys.
//!
//! ### Rust Version
//!
//! This version of indexmap requires Rust 1.18 or later, or 1.32+ for
//! development builds, and Rust 1.36+ for using with `alloc` (without `std`),
//! see below.
//!
//! The indexmap 1.x release series will use a carefully considered version
//! upgrade policy, where in a later 1.x version, we will raise the minimum
//! required Rust version.
//!
//! ## No Standard Library Targets
//!
//! From Rust 1.36, this crate supports being built without `std`, requiring
//! `alloc` instead. This is enabled automatically when it is detected that
//! `std` is not available. There is no crate feature to enable/disable to
//! trigger this. It can be tested by building for a std-less target.
//!
//! - Creating maps and sets using [`new`][IndexMap::new] and
//! [`with_capacity`][IndexMap::with_capacity] is unavailable without `std`.
//! Use methods [`IndexMap::default`][def],
//! [`with_hasher`][IndexMap::with_hasher],
//! [`with_capacity_and_hasher`][IndexMap::with_capacity_and_hasher] instead.
//! A no-std compatible hasher will be needed as well, for example
//! from the crate `twox-hash`.
//! - Macros [`indexmap!`] and [`indexset!`] are unavailable without `std`.
//!
//! [def]: map/struct.IndexMap.html#impl-Default
#[cfg(not(has_std))]
#[macro_use(vec)]
extern crate alloc;
#[cfg(not(has_std))]
pub(crate) mod std {
pub use core::*;
pub mod alloc {
pub use alloc::*;
}
pub mod collections {
pub use alloc::collections::*;
}
pub use alloc::vec;
}
#[cfg(not(has_std))]
use std::vec::Vec;
#[macro_use]
mod macros;
mod equivalent;
mod mutable_keys;
#[cfg(feature = "serde-1")]
mod serde;
mod util;
pub mod map;
pub mod set;
// Placed after `map` and `set` so new `rayon` methods on the types
// are documented after the "normal" methods.
#[cfg(feature = "rayon")]
mod rayon;
pub use equivalent::Equivalent;
pub use map::IndexMap;
pub use set::IndexSet;
// shared private items
/// Hash value newtype. Not larger than usize, since anything larger
/// isn't used for selecting position anyway.
#[derive(Copy, Debug)]
struct HashValue(usize);
impl HashValue {
#[inline(always)]
fn get(self) -> usize {
self.0
}
}
impl Clone for HashValue {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl PartialEq for HashValue {
#[inline]
fn eq(&self, rhs: &Self) -> bool {
self.0 == rhs.0
}
}
#[derive(Copy, Clone, Debug)]
struct Bucket<K, V> {
hash: HashValue,
key: K,
value: V,
}
impl<K, V> Bucket<K, V> {
// field accessors -- used for `f` instead of closures in `.map(f)`
fn key_ref(&self) -> &K {
&self.key
}
fn value_ref(&self) -> &V {
&self.value
}
fn value_mut(&mut self) -> &mut V {
&mut self.value
}
fn key(self) -> K {
self.key
}
fn key_value(self) -> (K, V) {
(self.key, self.value)
}
fn refs(&self) -> (&K, &V) {
(&self.key, &self.value)
}
fn ref_mut(&mut self) -> (&K, &mut V) {
(&self.key, &mut self.value)
}
fn muts(&mut self) -> (&mut K, &mut V) {
(&mut self.key, &mut self.value)
}
}
trait Entries {
type Entry;
fn into_entries(self) -> Vec<Self::Entry>;
fn as_entries(&self) -> &[Self::Entry];
fn as_entries_mut(&mut self) -> &mut [Self::Entry];
fn with_entries<F>(&mut self, f: F)
where
F: FnOnce(&mut [Self::Entry]);
}

245
third_party/rust/indexmap/src/macros.rs поставляемый
Просмотреть файл

@ -1,122 +1,123 @@
#[macro_export]
/// Create an `IndexMap` from a list of key-value pairs
///
/// ## Example
///
/// ```
/// #[macro_use] extern crate indexmap;
/// # fn main() {
///
/// let map = indexmap!{
/// "a" => 1,
/// "b" => 2,
/// };
/// assert_eq!(map["a"], 1);
/// assert_eq!(map["b"], 2);
/// assert_eq!(map.get("c"), None);
///
/// // "a" is the first key
/// assert_eq!(map.keys().next(), Some(&"a"));
/// # }
/// ```
macro_rules! indexmap {
(@single $($x:tt)*) => (());
(@count $($rest:expr),*) => (<[()]>::len(&[$(indexmap!(@single $rest)),*]));
($($key:expr => $value:expr,)+) => { indexmap!($($key => $value),+) };
($($key:expr => $value:expr),*) => {
{
let _cap = indexmap!(@count $($key),*);
let mut _map = $crate::IndexMap::with_capacity(_cap);
$(
_map.insert($key, $value);
)*
_map
}
};
}
#[macro_export]
/// Create an `IndexSet` from a list of values
///
/// ## Example
///
/// ```
/// #[macro_use] extern crate indexmap;
/// # fn main() {
///
/// let set = indexset!{
/// "a",
/// "b",
/// };
/// assert!(set.contains("a"));
/// assert!(set.contains("b"));
/// assert!(!set.contains("c"));
///
/// // "a" is the first value
/// assert_eq!(set.iter().next(), Some(&"a"));
/// # }
/// ```
macro_rules! indexset {
(@single $($x:tt)*) => (());
(@count $($rest:expr),*) => (<[()]>::len(&[$(indexset!(@single $rest)),*]));
($($value:expr,)+) => { indexset!($($value),+) };
($($value:expr),*) => {
{
let _cap = indexset!(@count $($value),*);
let mut _set = $crate::IndexSet::with_capacity(_cap);
$(
_set.insert($value);
)*
_set
}
};
}
// generate all the Iterator methods by just forwarding to the underlying
// self.iter and mapping its element.
macro_rules! iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
// same mapping function for both options and iterators
($map_elt:expr) => {
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map($map_elt)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.len()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n).map($map_elt)
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn collect<C>(self) -> C
where C: FromIterator<Self::Item>
{
// NB: forwarding this directly to standard iterators will
// allow it to leverage unstable traits like `TrustedLen`.
self.iter.map($map_elt).collect()
}
}
}
macro_rules! double_ended_iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
// same mapping function for both options and iterators
($map_elt:expr) => {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back().map($map_elt)
}
}
}
#[cfg(has_std)]
#[macro_export(local_inner_macros)]
/// Create an `IndexMap` from a list of key-value pairs
///
/// ## Example
///
/// ```
/// #[macro_use] extern crate indexmap;
/// # fn main() {
///
/// let map = indexmap!{
/// "a" => 1,
/// "b" => 2,
/// };
/// assert_eq!(map["a"], 1);
/// assert_eq!(map["b"], 2);
/// assert_eq!(map.get("c"), None);
///
/// // "a" is the first key
/// assert_eq!(map.keys().next(), Some(&"a"));
/// # }
/// ```
macro_rules! indexmap {
(@single $($x:tt)*) => (());
(@count $($rest:expr),*) => (<[()]>::len(&[$(indexmap!(@single $rest)),*]));
($($key:expr => $value:expr,)+) => { indexmap!($($key => $value),+) };
($($key:expr => $value:expr),*) => {
{
let _cap = indexmap!(@count $($key),*);
let mut _map = $crate::IndexMap::with_capacity(_cap);
$(
_map.insert($key, $value);
)*
_map
}
};
}
#[cfg(has_std)]
#[macro_export(local_inner_macros)]
/// Create an `IndexSet` from a list of values
///
/// ## Example
///
/// ```
/// #[macro_use] extern crate indexmap;
/// # fn main() {
///
/// let set = indexset!{
/// "a",
/// "b",
/// };
/// assert!(set.contains("a"));
/// assert!(set.contains("b"));
/// assert!(!set.contains("c"));
///
/// // "a" is the first value
/// assert_eq!(set.iter().next(), Some(&"a"));
/// # }
/// ```
macro_rules! indexset {
(@single $($x:tt)*) => (());
(@count $($rest:expr),*) => (<[()]>::len(&[$(indexset!(@single $rest)),*]));
($($value:expr,)+) => { indexset!($($value),+) };
($($value:expr),*) => {
{
let _cap = indexset!(@count $($value),*);
let mut _set = $crate::IndexSet::with_capacity(_cap);
$(
_set.insert($value);
)*
_set
}
};
}
// generate all the Iterator methods by just forwarding to the underlying
// self.iter and mapping its element.
macro_rules! iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
// same mapping function for both options and iterators
($map_elt:expr) => {
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map($map_elt)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.len()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n).map($map_elt)
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn collect<C>(self) -> C
where C: FromIterator<Self::Item>
{
// NB: forwarding this directly to standard iterators will
// allow it to leverage unstable traits like `TrustedLen`.
self.iter.map($map_elt).collect()
}
}
}
macro_rules! double_ended_iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
// same mapping function for both options and iterators
($map_elt:expr) => {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back().map($map_elt)
}
}
}

4834
third_party/rust/indexmap/src/map.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

147
third_party/rust/indexmap/src/mutable_keys.rs поставляемый
Просмотреть файл

@ -1,71 +1,76 @@
use std::hash::Hash;
use std::hash::BuildHasher;
use super::{IndexMap, Equivalent};
pub struct PrivateMarker { }
/// Opt-in mutable access to keys.
///
/// These methods expose `&mut K`, mutable references to the key as it is stored
/// in the map.
/// You are allowed to modify the keys in the hashmap **if the modification
/// does not change the key's hash and equality**.
///
/// If keys are modified erroneously, you can no longer look them up.
/// This is sound (memory safe) but a logical error hazard (just like
/// implementing PartialEq, Eq, or Hash incorrectly would be).
///
/// `use` this trait to enable its methods for `IndexMap`.
pub trait MutableKeys {
type Key;
type Value;
/// Return item index, mutable reference to key and value
fn get_full_mut2<Q: ?Sized>(&mut self, key: &Q)
-> Option<(usize, &mut Self::Key, &mut Self::Value)>
where Q: Hash + Equivalent<Self::Key>;
/// Scan through each key-value pair in the map and keep those where the
/// closure `keep` returns `true`.
///
/// The elements are visited in order, and remaining elements keep their
/// order.
///
/// Computes in **O(n)** time (average).
fn retain2<F>(&mut self, keep: F)
where F: FnMut(&mut Self::Key, &mut Self::Value) -> bool;
/// This method is not useful in itself – it is there to “seal” the trait
/// for external implementation, so that we can add methods without
/// causing breaking changes.
fn __private_marker(&self) -> PrivateMarker;
}
/// Opt-in mutable access to keys.
///
/// See [`MutableKeys`](trait.MutableKeys.html) for more information.
impl<K, V, S> MutableKeys for IndexMap<K, V, S>
where K: Eq + Hash,
S: BuildHasher,
{
type Key = K;
type Value = V;
fn get_full_mut2<Q: ?Sized>(&mut self, key: &Q)
-> Option<(usize, &mut K, &mut V)>
where Q: Hash + Equivalent<K>,
{
self.get_full_mut2_impl(key)
}
fn retain2<F>(&mut self, keep: F)
where F: FnMut(&mut K, &mut V) -> bool,
{
self.retain_mut(keep)
}
fn __private_marker(&self) -> PrivateMarker {
PrivateMarker { }
}
}
use std::hash::BuildHasher;
use std::hash::Hash;
use super::{Equivalent, IndexMap};
pub struct PrivateMarker {}
/// Opt-in mutable access to keys.
///
/// These methods expose `&mut K`, mutable references to the key as it is stored
/// in the map.
/// You are allowed to modify the keys in the hashmap **if the modification
/// does not change the key's hash and equality**.
///
/// If keys are modified erroneously, you can no longer look them up.
/// This is sound (memory safe) but a logical error hazard (just like
/// implementing PartialEq, Eq, or Hash incorrectly would be).
///
/// `use` this trait to enable its methods for `IndexMap`.
pub trait MutableKeys {
type Key;
type Value;
/// Return item index, mutable reference to key and value
fn get_full_mut2<Q: ?Sized>(
&mut self,
key: &Q,
) -> Option<(usize, &mut Self::Key, &mut Self::Value)>
where
Q: Hash + Equivalent<Self::Key>;
/// Scan through each key-value pair in the map and keep those where the
/// closure `keep` returns `true`.
///
/// The elements are visited in order, and remaining elements keep their
/// order.
///
/// Computes in **O(n)** time (average).
fn retain2<F>(&mut self, keep: F)
where
F: FnMut(&mut Self::Key, &mut Self::Value) -> bool;
/// This method is not useful in itself – it is there to “seal” the trait
/// for external implementation, so that we can add methods without
/// causing breaking changes.
fn __private_marker(&self) -> PrivateMarker;
}
/// Opt-in mutable access to keys.
///
/// See [`MutableKeys`](trait.MutableKeys.html) for more information.
impl<K, V, S> MutableKeys for IndexMap<K, V, S>
where
K: Eq + Hash,
S: BuildHasher,
{
type Key = K;
type Value = V;
fn get_full_mut2<Q: ?Sized>(&mut self, key: &Q) -> Option<(usize, &mut K, &mut V)>
where
Q: Hash + Equivalent<K>,
{
self.get_full_mut2_impl(key)
}
fn retain2<F>(&mut self, keep: F)
where
F: FnMut(&mut K, &mut V) -> bool,
{
self.retain_mut(keep)
}
fn __private_marker(&self) -> PrivateMarker {
PrivateMarker {}
}
}

957
third_party/rust/indexmap/src/rayon/map.rs поставляемый
Просмотреть файл

@ -1,469 +1,488 @@
//! Parallel iterator types for `IndexMap` with [rayon](https://docs.rs/rayon/1.0/rayon).
//!
//! You will rarely need to interact with this module directly unless you need to name one of the
//! iterator types.
//!
//! Requires crate feature `"rayon"`
use super::collect;
use super::rayon::prelude::*;
use super::rayon::iter::plumbing::{Consumer, UnindexedConsumer, ProducerCallback};
use std::cmp::Ordering;
use std::fmt;
use std::hash::Hash;
use std::hash::BuildHasher;
use Bucket;
use Entries;
use IndexMap;
/// Requires crate feature `"rayon"`.
impl<K, V, S> IntoParallelIterator for IndexMap<K, V, S>
where K: Hash + Eq + Send,
V: Send,
S: BuildHasher,
{
type Item = (K, V);
type Iter = IntoParIter<K, V>;
fn into_par_iter(self) -> Self::Iter {
IntoParIter {
entries: self.into_entries(),
}
}
}
/// A parallel owning iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`into_par_iter`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelIterator` trait). See its documentation for more.
///
/// [`into_par_iter`]: ../struct.IndexMap.html#method.into_par_iter
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct IntoParIter<K, V> {
entries: Vec<Bucket<K, V>>,
}
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoParIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::refs);
f.debug_list().entries(iter).finish()
}
}
impl<K: Send, V: Send> ParallelIterator for IntoParIter<K, V> {
type Item = (K, V);
parallel_iterator_methods!(Bucket::key_value);
}
impl<K: Send, V: Send> IndexedParallelIterator for IntoParIter<K, V> {
indexed_parallel_iterator_methods!(Bucket::key_value);
}
/// Requires crate feature `"rayon"`.
impl<'a, K, V, S> IntoParallelIterator for &'a IndexMap<K, V, S>
where K: Hash + Eq + Sync,
V: Sync,
S: BuildHasher,
{
type Item = (&'a K, &'a V);
type Iter = ParIter<'a, K, V>;
fn into_par_iter(self) -> Self::Iter {
ParIter {
entries: self.as_entries(),
}
}
}
/// A parallel iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`par_iter`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelRefIterator` trait). See its documentation for more.
///
/// [`par_iter`]: ../struct.IndexMap.html#method.par_iter
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParIter<'a, K: 'a, V: 'a> {
entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParIter<'a, K, V> {
fn clone(&self) -> ParIter<'a, K, V> {
ParIter { ..*self }
}
}
impl<'a, K: fmt::Debug, V: fmt::Debug> fmt::Debug for ParIter<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::refs);
f.debug_list().entries(iter).finish()
}
}
impl<'a, K: Sync, V: Sync> ParallelIterator for ParIter<'a, K, V> {
type Item = (&'a K, &'a V);
parallel_iterator_methods!(Bucket::refs);
}
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParIter<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::refs);
}
/// Requires crate feature `"rayon"`.
impl<'a, K, V, S> IntoParallelIterator for &'a mut IndexMap<K, V, S>
where K: Hash + Eq + Sync + Send,
V: Send,
S: BuildHasher,
{
type Item = (&'a K, &'a mut V);
type Iter = ParIterMut<'a, K, V>;
fn into_par_iter(self) -> Self::Iter {
ParIterMut {
entries: self.as_entries_mut(),
}
}
}
/// A parallel mutable iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`par_iter_mut`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelRefMutIterator` trait). See its documentation for more.
///
/// [`par_iter_mut`]: ../struct.IndexMap.html#method.par_iter_mut
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParIterMut<'a, K: 'a, V: 'a> {
entries: &'a mut [Bucket<K, V>],
}
impl<'a, K: Sync + Send, V: Send> ParallelIterator for ParIterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
parallel_iterator_methods!(Bucket::ref_mut);
}
impl<'a, K: Sync + Send, V: Send> IndexedParallelIterator for ParIterMut<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::ref_mut);
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> IndexMap<K, V, S>
where K: Hash + Eq + Sync,
V: Sync,
S: BuildHasher,
{
/// Return a parallel iterator over the keys of the map.
///
/// While parallel iterators can process items in any order, their relative order
/// in the map is still preserved for operations like `reduce` and `collect`.
pub fn par_keys(&self) -> ParKeys<K, V> {
ParKeys {
entries: self.as_entries(),
}
}
/// Return a parallel iterator over the values of the map.
///
/// While parallel iterators can process items in any order, their relative order
/// in the map is still preserved for operations like `reduce` and `collect`.
pub fn par_values(&self) -> ParValues<K, V> {
ParValues {
entries: self.as_entries(),
}
}
/// Returns `true` if `self` contains all of the same key-value pairs as `other`,
/// regardless of each map's indexed order, determined in parallel.
pub fn par_eq<V2, S2>(&self, other: &IndexMap<K, V2, S2>) -> bool
where V: PartialEq<V2>,
V2: Sync,
S2: BuildHasher + Sync
{
self.len() == other.len() &&
self.par_iter().all(move |(key, value)| {
other.get(key).map_or(false, |v| *value == *v)
})
}
}
/// A parallel iterator over the keys of a `IndexMap`.
///
/// This `struct` is created by the [`par_keys`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_keys`]: ../struct.IndexMap.html#method.par_keys
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParKeys<'a, K: 'a, V: 'a> {
entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParKeys<'a, K, V> {
fn clone(&self) -> ParKeys<'a, K, V> {
ParKeys { ..*self }
}
}
impl<'a, K: fmt::Debug, V> fmt::Debug for ParKeys<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::key_ref);
f.debug_list().entries(iter).finish()
}
}
impl<'a, K: Sync, V: Sync> ParallelIterator for ParKeys<'a, K, V> {
type Item = &'a K;
parallel_iterator_methods!(Bucket::key_ref);
}
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParKeys<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::key_ref);
}
/// A parallel iterator over the values of a `IndexMap`.
///
/// This `struct` is created by the [`par_values`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_values`]: ../struct.IndexMap.html#method.par_values
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParValues<'a, K: 'a, V: 'a> {
entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParValues<'a, K, V> {
fn clone(&self) -> ParValues<'a, K, V> {
ParValues { ..*self }
}
}
impl<'a, K, V: fmt::Debug> fmt::Debug for ParValues<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::value_ref);
f.debug_list().entries(iter).finish()
}
}
impl<'a, K: Sync, V: Sync> ParallelIterator for ParValues<'a, K, V> {
type Item = &'a V;
parallel_iterator_methods!(Bucket::value_ref);
}
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParValues<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::value_ref);
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> IndexMap<K, V, S>
where K: Hash + Eq + Send,
V: Send,
S: BuildHasher,
{
/// Return a parallel iterator over mutable references to the values of the map
///
/// While parallel iterators can process items in any order, their relative order
/// in the map is still preserved for operations like `reduce` and `collect`.
pub fn par_values_mut(&mut self) -> ParValuesMut<K, V> {
ParValuesMut {
entries: self.as_entries_mut(),
}
}
/// Sort the map's key-value pairs in parallel, by the default ordering of the keys.
pub fn par_sort_keys(&mut self)
where K: Ord,
{
self.with_entries(|entries| {
entries.par_sort_by(|a, b| K::cmp(&a.key, &b.key));
});
}
/// Sort the map's key-value pairs in place and in parallel, using the comparison
/// function `compare`.
///
/// The comparison function receives two key and value pairs to compare (you
/// can sort by keys or values or their combination as needed).
pub fn par_sort_by<F>(&mut self, cmp: F)
where F: Fn(&K, &V, &K, &V) -> Ordering + Sync,
{
self.with_entries(|entries| {
entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value));
});
}
/// Sort the key-value pairs of the map in parallel and return a by value parallel
/// iterator of the key-value pairs with the result.
pub fn par_sorted_by<F>(self, cmp: F) -> IntoParIter<K, V>
where F: Fn(&K, &V, &K, &V) -> Ordering + Sync
{
let mut entries = self.into_entries();
entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value));
IntoParIter { entries }
}
}
/// A parallel mutable iterator over the values of a `IndexMap`.
///
/// This `struct` is created by the [`par_values_mut`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_values_mut`]: ../struct.IndexMap.html#method.par_values_mut
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParValuesMut<'a, K: 'a, V: 'a> {
entries: &'a mut [Bucket<K, V>],
}
impl<'a, K: Send, V: Send> ParallelIterator for ParValuesMut<'a, K, V> {
type Item = &'a mut V;
parallel_iterator_methods!(Bucket::value_mut);
}
impl<'a, K: Send, V: Send> IndexedParallelIterator for ParValuesMut<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::value_mut);
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> FromParallelIterator<(K, V)> for IndexMap<K, V, S>
where K: Eq + Hash + Send,
V: Send,
S: BuildHasher + Default + Send,
{
fn from_par_iter<I>(iter: I) -> Self
where I: IntoParallelIterator<Item = (K, V)>
{
let list = collect(iter);
let len = list.iter().map(Vec::len).sum();
let mut map = Self::with_capacity_and_hasher(len, S::default());
for vec in list {
map.extend(vec);
}
map
}
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> ParallelExtend<(K, V)> for IndexMap<K, V, S>
where K: Eq + Hash + Send,
V: Send,
S: BuildHasher + Send,
{
fn par_extend<I>(&mut self, iter: I)
where I: IntoParallelIterator<Item = (K, V)>
{
for vec in collect(iter) {
self.extend(vec);
}
}
}
/// Requires crate feature `"rayon"`.
impl<'a, K: 'a, V: 'a, S> ParallelExtend<(&'a K, &'a V)> for IndexMap<K, V, S>
where K: Copy + Eq + Hash + Send + Sync,
V: Copy + Send + Sync,
S: BuildHasher + Send,
{
fn par_extend<I>(&mut self, iter: I)
where I: IntoParallelIterator<Item = (&'a K, &'a V)>
{
for vec in collect(iter) {
self.extend(vec);
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn insert_order() {
let insert = [0, 4, 2, 12, 8, 7, 11, 5, 3, 17, 19, 22, 23];
let mut map = IndexMap::new();
for &elt in &insert {
map.insert(elt, ());
}
assert_eq!(map.par_keys().count(), map.len());
assert_eq!(map.par_keys().count(), insert.len());
insert.par_iter().zip(map.par_keys()).for_each(|(a, b)| {
assert_eq!(a, b);
});
(0..insert.len()).into_par_iter().zip(map.par_keys()).for_each(|(i, k)| {
assert_eq!(map.get_index(i).unwrap().0, k);
});
}
#[test]
fn partial_eq_and_eq() {
let mut map_a = IndexMap::new();
map_a.insert(1, "1");
map_a.insert(2, "2");
let mut map_b = map_a.clone();
assert!(map_a.par_eq(&map_b));
map_b.remove(&1);
assert!(!map_a.par_eq(&map_b));
map_b.insert(3, "3");
assert!(!map_a.par_eq(&map_b));
let map_c: IndexMap<_, String>
= map_b.into_par_iter().map(|(k, v)| (k, v.to_owned())).collect();
assert!(!map_a.par_eq(&map_c));
assert!(!map_c.par_eq(&map_a));
}
#[test]
fn extend() {
let mut map = IndexMap::new();
map.par_extend(vec![(&1, &2), (&3, &4)]);
map.par_extend(vec![(5, 6)]);
assert_eq!(map.into_par_iter().collect::<Vec<_>>(), vec![(1, 2), (3, 4), (5, 6)]);
}
#[test]
fn keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: IndexMap<_, _> = vec.into_par_iter().collect();
let keys: Vec<_> = map.par_keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: IndexMap<_, _> = vec.into_par_iter().collect();
let values: Vec<_> = map.par_values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn values_mut() {
let vec = vec![(1, 1), (2, 2), (3, 3)];
let mut map: IndexMap<_, _> = vec.into_par_iter().collect();
map.par_values_mut().for_each(|value| {
*value = (*value) * 2
});
let values: Vec<_> = map.par_values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&2));
assert!(values.contains(&4));
assert!(values.contains(&6));
}
}
//! Parallel iterator types for `IndexMap` with [rayon](https://docs.rs/rayon/1.0/rayon).
//!
//! You will rarely need to interact with this module directly unless you need to name one of the
//! iterator types.
//!
//! Requires crate feature `"rayon"`
use super::collect;
use super::rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer};
use super::rayon::prelude::*;
use std::cmp::Ordering;
use std::fmt;
use std::hash::BuildHasher;
use std::hash::Hash;
use Bucket;
use Entries;
use IndexMap;
/// Requires crate feature `"rayon"`.
impl<K, V, S> IntoParallelIterator for IndexMap<K, V, S>
where
K: Hash + Eq + Send,
V: Send,
S: BuildHasher,
{
type Item = (K, V);
type Iter = IntoParIter<K, V>;
fn into_par_iter(self) -> Self::Iter {
IntoParIter {
entries: self.into_entries(),
}
}
}
/// A parallel owning iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`into_par_iter`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelIterator` trait). See its documentation for more.
///
/// [`into_par_iter`]: ../struct.IndexMap.html#method.into_par_iter
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct IntoParIter<K, V> {
entries: Vec<Bucket<K, V>>,
}
impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoParIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::refs);
f.debug_list().entries(iter).finish()
}
}
impl<K: Send, V: Send> ParallelIterator for IntoParIter<K, V> {
type Item = (K, V);
parallel_iterator_methods!(Bucket::key_value);
}
impl<K: Send, V: Send> IndexedParallelIterator for IntoParIter<K, V> {
indexed_parallel_iterator_methods!(Bucket::key_value);
}
/// Requires crate feature `"rayon"`.
impl<'a, K, V, S> IntoParallelIterator for &'a IndexMap<K, V, S>
where
K: Hash + Eq + Sync,
V: Sync,
S: BuildHasher,
{
type Item = (&'a K, &'a V);
type Iter = ParIter<'a, K, V>;
fn into_par_iter(self) -> Self::Iter {
ParIter {
entries: self.as_entries(),
}
}
}
/// A parallel iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`par_iter`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelRefIterator` trait). See its documentation for more.
///
/// [`par_iter`]: ../struct.IndexMap.html#method.par_iter
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParIter<'a, K: 'a, V: 'a> {
entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParIter<'a, K, V> {
fn clone(&self) -> ParIter<'a, K, V> {
ParIter { ..*self }
}
}
impl<'a, K: fmt::Debug, V: fmt::Debug> fmt::Debug for ParIter<'a, K, V> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let iter = self.entries.iter().map(Bucket::refs);
f.debug_list().entries(iter).finish()
}
}
impl<'a, K: Sync, V: Sync> ParallelIterator for ParIter<'a, K, V> {
type Item = (&'a K, &'a V);
parallel_iterator_methods!(Bucket::refs);
}
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParIter<'a, K, V> {
indexed_parallel_iterator_methods!(Bucket::refs);
}
/// Requires crate feature `"rayon"`.
impl<'a, K, V, S> IntoParallelIterator for &'a mut IndexMap<K, V, S>
where
    K: Hash + Eq + Sync + Send,
    V: Send,
    S: BuildHasher,
{
    type Item = (&'a K, &'a mut V);
    type Iter = ParIterMut<'a, K, V>;
    // Keys stay shared (&K); only the values are handed out mutably.
    fn into_par_iter(self) -> Self::Iter {
        ParIterMut {
            entries: self.as_entries_mut(),
        }
    }
}
/// A parallel mutable iterator over the entries of a `IndexMap`.
///
/// This `struct` is created by the [`par_iter_mut`] method on [`IndexMap`]
/// (provided by rayon's `IntoParallelRefMutIterator` trait). See its documentation for more.
///
/// [`par_iter_mut`]: ../struct.IndexMap.html#method.par_iter_mut
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParIterMut<'a, K: 'a, V: 'a> {
    // Exclusive borrow of the map's bucket storage, in insertion order.
    entries: &'a mut [Bucket<K, V>],
}
// Yields `(&K, &mut V)`: keys shared, values mutable.
impl<'a, K: Sync + Send, V: Send> ParallelIterator for ParIterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    parallel_iterator_methods!(Bucket::ref_mut);
}
// Exact-length/indexed iteration support for `ParIterMut`.
impl<'a, K: Sync + Send, V: Send> IndexedParallelIterator for ParIterMut<'a, K, V> {
    indexed_parallel_iterator_methods!(Bucket::ref_mut);
}
/// Parallel iterator methods and other parallel methods.
///
/// The following methods **require crate feature `"rayon"`**.
///
/// See also the `IntoParallelIterator` implementations.
impl<K, V, S> IndexMap<K, V, S>
where
    K: Hash + Eq + Sync,
    V: Sync,
    S: BuildHasher,
{
    /// Return a parallel iterator over the keys of the map.
    ///
    /// While parallel iterators can process items in any order, their relative order
    /// in the map is still preserved for operations like `reduce` and `collect`.
    pub fn par_keys(&self) -> ParKeys<K, V> {
        let entries = self.as_entries();
        ParKeys { entries }
    }

    /// Return a parallel iterator over the values of the map.
    ///
    /// While parallel iterators can process items in any order, their relative order
    /// in the map is still preserved for operations like `reduce` and `collect`.
    pub fn par_values(&self) -> ParValues<K, V> {
        let entries = self.as_entries();
        ParValues { entries }
    }

    /// Returns `true` if `self` contains all of the same key-value pairs as `other`,
    /// regardless of each map's indexed order, determined in parallel.
    pub fn par_eq<V2, S2>(&self, other: &IndexMap<K, V2, S2>) -> bool
    where
        V: PartialEq<V2>,
        V2: Sync,
        S2: BuildHasher + Sync,
    {
        // Same length is a cheap necessary condition; only then scan in parallel.
        if self.len() != other.len() {
            return false;
        }
        self.par_iter().all(move |(key, value)| match other.get(key) {
            Some(v) => *value == *v,
            None => false,
        })
    }
}
/// A parallel iterator over the keys of a `IndexMap`.
///
/// This `struct` is created by the [`par_keys`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_keys`]: ../struct.IndexMap.html#method.par_keys
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParKeys<'a, K: 'a, V: 'a> {
    // Shared borrow of the map's bucket storage, in insertion order.
    entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParKeys<'a, K, V> {
    /// Cheap copy: duplicates only the shared slice reference.
    fn clone(&self) -> ParKeys<'a, K, V> {
        ParKeys {
            entries: self.entries,
        }
    }
}
impl<'a, K: fmt::Debug, V> fmt::Debug for ParKeys<'a, K, V> {
    /// Renders the remaining keys as a debug list.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_list()
            .entries(self.entries.iter().map(Bucket::key_ref))
            .finish()
    }
}
// Yields `&K` in map order.
impl<'a, K: Sync, V: Sync> ParallelIterator for ParKeys<'a, K, V> {
    type Item = &'a K;
    parallel_iterator_methods!(Bucket::key_ref);
}
// Exact-length/indexed iteration support for `ParKeys`.
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParKeys<'a, K, V> {
    indexed_parallel_iterator_methods!(Bucket::key_ref);
}
/// A parallel iterator over the values of a `IndexMap`.
///
/// This `struct` is created by the [`par_values`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_values`]: ../struct.IndexMap.html#method.par_values
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParValues<'a, K: 'a, V: 'a> {
    // Shared borrow of the map's bucket storage, in insertion order.
    entries: &'a [Bucket<K, V>],
}
impl<'a, K, V> Clone for ParValues<'a, K, V> {
    /// Cheap copy: duplicates only the shared slice reference.
    fn clone(&self) -> ParValues<'a, K, V> {
        ParValues {
            entries: self.entries,
        }
    }
}
impl<'a, K, V: fmt::Debug> fmt::Debug for ParValues<'a, K, V> {
    /// Renders the remaining values as a debug list.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_list()
            .entries(self.entries.iter().map(Bucket::value_ref))
            .finish()
    }
}
// Yields `&V` in map order.
impl<'a, K: Sync, V: Sync> ParallelIterator for ParValues<'a, K, V> {
    type Item = &'a V;
    parallel_iterator_methods!(Bucket::value_ref);
}
// Exact-length/indexed iteration support for `ParValues`.
impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParValues<'a, K, V> {
    indexed_parallel_iterator_methods!(Bucket::value_ref);
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> IndexMap<K, V, S>
where
    K: Hash + Eq + Send,
    V: Send,
    S: BuildHasher,
{
    /// Return a parallel iterator over mutable references to the values of the map
    ///
    /// While parallel iterators can process items in any order, their relative order
    /// in the map is still preserved for operations like `reduce` and `collect`.
    pub fn par_values_mut(&mut self) -> ParValuesMut<K, V> {
        ParValuesMut {
            entries: self.as_entries_mut(),
        }
    }
    /// Sort the map's key-value pairs in parallel, by the default ordering of the keys.
    pub fn par_sort_keys(&mut self)
    where
        K: Ord,
    {
        // `with_entries` re-establishes the hash index after the slice is reordered.
        self.with_entries(|entries| {
            entries.par_sort_by(|a, b| K::cmp(&a.key, &b.key));
        });
    }
    /// Sort the map's key-value pairs in place and in parallel, using the comparison
    /// function `compare`.
    ///
    /// The comparison function receives two key and value pairs to compare (you
    /// can sort by keys or values or their combination as needed).
    pub fn par_sort_by<F>(&mut self, cmp: F)
    where
        F: Fn(&K, &V, &K, &V) -> Ordering + Sync,
    {
        self.with_entries(|entries| {
            entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value));
        });
    }
    /// Sort the key-value pairs of the map in parallel and return a by value parallel
    /// iterator of the key-value pairs with the result.
    pub fn par_sorted_by<F>(self, cmp: F) -> IntoParIter<K, V>
    where
        F: Fn(&K, &V, &K, &V) -> Ordering + Sync,
    {
        // Consumes the map, so there is no hash index to rebuild afterwards.
        let mut entries = self.into_entries();
        entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value));
        IntoParIter { entries }
    }
}
/// A parallel mutable iterator over the values of a `IndexMap`.
///
/// This `struct` is created by the [`par_values_mut`] method on [`IndexMap`]. See its
/// documentation for more.
///
/// [`par_values_mut`]: ../struct.IndexMap.html#method.par_values_mut
/// [`IndexMap`]: ../struct.IndexMap.html
pub struct ParValuesMut<'a, K: 'a, V: 'a> {
    // Exclusive borrow of the map's bucket storage, in insertion order.
    entries: &'a mut [Bucket<K, V>],
}
// Yields `&mut V` in map order.
impl<'a, K: Send, V: Send> ParallelIterator for ParValuesMut<'a, K, V> {
    type Item = &'a mut V;
    parallel_iterator_methods!(Bucket::value_mut);
}
// Exact-length/indexed iteration support for `ParValuesMut`.
impl<'a, K: Send, V: Send> IndexedParallelIterator for ParValuesMut<'a, K, V> {
    indexed_parallel_iterator_methods!(Bucket::value_mut);
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> FromParallelIterator<(K, V)> for IndexMap<K, V, S>
where
    K: Eq + Hash + Send,
    V: Send,
    S: BuildHasher + Default + Send,
{
    /// Builds the map by collecting order-preserving chunks in parallel,
    /// then merging them sequentially on the calling thread.
    fn from_par_iter<I>(iter: I) -> Self
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        let chunks = collect(iter);
        // Pre-size the map so the sequential merge never reallocates.
        let total: usize = chunks.iter().map(Vec::len).sum();
        let mut map = Self::with_capacity_and_hasher(total, S::default());
        for chunk in chunks {
            map.extend(chunk);
        }
        map
    }
}
/// Requires crate feature `"rayon"`.
impl<K, V, S> ParallelExtend<(K, V)> for IndexMap<K, V, S>
where
    K: Eq + Hash + Send,
    V: Send,
    S: BuildHasher + Send,
{
    fn par_extend<I>(&mut self, iter: I)
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        // Collect in parallel (order-preserving), then insert sequentially.
        for vec in collect(iter) {
            self.extend(vec);
        }
    }
}
/// Requires crate feature `"rayon"`.
impl<'a, K: 'a, V: 'a, S> ParallelExtend<(&'a K, &'a V)> for IndexMap<K, V, S>
where
    K: Copy + Eq + Hash + Send + Sync,
    V: Copy + Send + Sync,
    S: BuildHasher + Send,
{
    // `Copy` bounds let the sequential `extend` turn `(&K, &V)` into owned pairs.
    fn par_extend<I>(&mut self, iter: I)
    where
        I: IntoParallelIterator<Item = (&'a K, &'a V)>,
    {
        for vec in collect(iter) {
            self.extend(vec);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Parallel key iteration must preserve insertion order and agree with
    // indexed access via `get_index`.
    #[test]
    fn insert_order() {
        let insert = [0, 4, 2, 12, 8, 7, 11, 5, 3, 17, 19, 22, 23];
        let mut map = IndexMap::new();
        for &elt in &insert {
            map.insert(elt, ());
        }
        assert_eq!(map.par_keys().count(), map.len());
        assert_eq!(map.par_keys().count(), insert.len());
        insert.par_iter().zip(map.par_keys()).for_each(|(a, b)| {
            assert_eq!(a, b);
        });
        (0..insert.len())
            .into_par_iter()
            .zip(map.par_keys())
            .for_each(|(i, k)| {
                assert_eq!(map.get_index(i).unwrap().0, k);
            });
    }

    // `par_eq` must ignore order but notice removed, extra, or re-typed entries.
    #[test]
    fn partial_eq_and_eq() {
        let mut map_a = IndexMap::new();
        map_a.insert(1, "1");
        map_a.insert(2, "2");
        let mut map_b = map_a.clone();
        assert!(map_a.par_eq(&map_b));
        map_b.swap_remove(&1);
        assert!(!map_a.par_eq(&map_b));
        map_b.insert(3, "3");
        assert!(!map_a.par_eq(&map_b));
        // Different value types (&str vs String) compare via `PartialEq<V2>`.
        let map_c: IndexMap<_, String> = map_b
            .into_par_iter()
            .map(|(k, v)| (k, v.to_owned()))
            .collect();
        assert!(!map_a.par_eq(&map_c));
        assert!(!map_c.par_eq(&map_a));
    }

    // `par_extend` accepts both borrowed (Copy) and owned pairs; order is kept.
    #[test]
    fn extend() {
        let mut map = IndexMap::new();
        map.par_extend(vec![(&1, &2), (&3, &4)]);
        map.par_extend(vec![(5, 6)]);
        assert_eq!(
            map.into_par_iter().collect::<Vec<_>>(),
            vec![(1, 2), (3, 4), (5, 6)]
        );
    }

    #[test]
    fn keys() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: IndexMap<_, _> = vec.into_par_iter().collect();
        let keys: Vec<_> = map.par_keys().cloned().collect();
        assert_eq!(keys.len(), 3);
        assert!(keys.contains(&1));
        assert!(keys.contains(&2));
        assert!(keys.contains(&3));
    }

    #[test]
    fn values() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: IndexMap<_, _> = vec.into_par_iter().collect();
        let values: Vec<_> = map.par_values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&'a'));
        assert!(values.contains(&'b'));
        assert!(values.contains(&'c'));
    }

    // Mutation through `par_values_mut` must be visible afterwards.
    #[test]
    fn values_mut() {
        let vec = vec![(1, 1), (2, 2), (3, 3)];
        let mut map: IndexMap<_, _> = vec.into_par_iter().collect();
        map.par_values_mut().for_each(|value| *value *= 2);
        let values: Vec<_> = map.par_values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&2));
        assert!(values.contains(&4));
        assert!(values.contains(&6));
    }
}

155
third_party/rust/indexmap/src/rayon/mod.rs поставляемый
Просмотреть файл

@ -1,79 +1,76 @@
extern crate rayon;
use self::rayon::prelude::*;
use std::collections::LinkedList;
// generate `ParallelIterator` methods by just forwarding to the underlying
// self.entries and mapping its elements.
macro_rules! parallel_iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
($map_elt:expr) => {
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where C: UnindexedConsumer<Self::Item>
{
self.entries.into_par_iter()
.map($map_elt)
.drive_unindexed(consumer)
}
// NB: This allows indexed collection, e.g. directly into a `Vec`, but the
// underlying iterator must really be indexed. We should remove this if we
// start having tombstones that must be filtered out.
fn opt_len(&self) -> Option<usize> {
Some(self.entries.len())
}
}
}
// generate `IndexedParallelIterator` methods by just forwarding to the underlying
// self.entries and mapping its elements.
macro_rules! indexed_parallel_iterator_methods {
// $map_elt is the mapping function from the underlying iterator's element
($map_elt:expr) => {
fn drive<C>(self, consumer: C) -> C::Result
where C: Consumer<Self::Item>
{
self.entries.into_par_iter()
.map($map_elt)
.drive(consumer)
}
fn len(&self) -> usize {
self.entries.len()
}
fn with_producer<CB>(self, callback: CB) -> CB::Output
where CB: ProducerCallback<Self::Item>
{
self.entries.into_par_iter()
.map($map_elt)
.with_producer(callback)
}
}
}
pub mod map;
pub mod set;
// This form of intermediate collection is also how Rayon collects `HashMap`.
// Note that the order will also be preserved!
fn collect<I: IntoParallelIterator>(iter: I) -> LinkedList<Vec<I::Item>> {
iter.into_par_iter()
.fold(Vec::new, |mut vec, elem| {
vec.push(elem);
vec
})
.map(|vec| {
let mut list = LinkedList::new();
list.push_back(vec);
list
})
.reduce(LinkedList::new, |mut list1, mut list2| {
list1.append(&mut list2);
list1
})
}
extern crate rayon;
use self::rayon::prelude::*;
use std::collections::LinkedList;
// generate `ParallelIterator` methods by just forwarding to the underlying
// self.entries and mapping its elements.
// Expands inside an `impl ParallelIterator for …` block; the iterator types
// above all have an `entries` field of buckets.
macro_rules! parallel_iterator_methods {
    // $map_elt is the mapping function from the underlying iterator's element
    ($map_elt:expr) => {
        fn drive_unindexed<C>(self, consumer: C) -> C::Result
            where C: UnindexedConsumer<Self::Item>
        {
            self.entries.into_par_iter()
                .map($map_elt)
                .drive_unindexed(consumer)
        }
        // NB: This allows indexed collection, e.g. directly into a `Vec`, but the
        // underlying iterator must really be indexed. We should remove this if we
        // start having tombstones that must be filtered out.
        fn opt_len(&self) -> Option<usize> {
            Some(self.entries.len())
        }
    }
}
// generate `IndexedParallelIterator` methods by just forwarding to the underlying
// self.entries and mapping its elements.
macro_rules! indexed_parallel_iterator_methods {
    // $map_elt is the mapping function from the underlying iterator's element
    ($map_elt:expr) => {
        fn drive<C>(self, consumer: C) -> C::Result
            where C: Consumer<Self::Item>
        {
            self.entries.into_par_iter()
                .map($map_elt)
                .drive(consumer)
        }
        // Exact length: the entry slice length is the iterator length.
        fn len(&self) -> usize {
            self.entries.len()
        }
        fn with_producer<CB>(self, callback: CB) -> CB::Output
            where CB: ProducerCallback<Self::Item>
        {
            self.entries.into_par_iter()
                .map($map_elt)
                .with_producer(callback)
        }
    }
}
pub mod map;
pub mod set;
// This form of intermediate collection is also how Rayon collects `HashMap`.
// Note that the order will also be preserved!
fn collect<I: IntoParallelIterator>(iter: I) -> LinkedList<Vec<I::Item>> {
    iter.into_par_iter()
        // Each rayon job accumulates its items into a local Vec...
        .fold(Vec::new, |mut vec, elem| {
            vec.push(elem);
            vec
        })
        // ...each Vec becomes a one-element linked list...
        .map(|vec| {
            let mut list = LinkedList::new();
            list.push_back(vec);
            list
        })
        // ...and lists concatenate left-to-right: O(1) appends, order preserved.
        .reduce(LinkedList::new, |mut list1, mut list2| {
            list1.append(&mut list2);
            list1
        })
}

1293
third_party/rust/indexmap/src/rayon/set.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

314
third_party/rust/indexmap/src/serde.rs поставляемый
Просмотреть файл

@ -1,149 +1,165 @@
extern crate serde;
use self::serde::ser::{Serialize, Serializer, SerializeMap, SerializeSeq};
use self::serde::de::{Deserialize, Deserializer, Error, IntoDeserializer, MapAccess, SeqAccess, Visitor};
use self::serde::de::value::{MapDeserializer, SeqDeserializer};
use std::fmt::{self, Formatter};
use std::hash::{BuildHasher, Hash};
use std::marker::PhantomData;
use IndexMap;
/// Requires crate feature `"serde-1"`
impl<K, V, S> Serialize for IndexMap<K, V, S>
where K: Serialize + Hash + Eq,
V: Serialize,
S: BuildHasher
{
fn serialize<T>(&self, serializer: T) -> Result<T::Ok, T::Error>
where T: Serializer
{
let mut map_serializer = try!(serializer.serialize_map(Some(self.len())));
for (key, value) in self {
try!(map_serializer.serialize_entry(key, value));
}
map_serializer.end()
}
}
struct OrderMapVisitor<K, V, S>(PhantomData<(K, V, S)>);
impl<'de, K, V, S> Visitor<'de> for OrderMapVisitor<K, V, S>
where K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
S: Default + BuildHasher
{
type Value = IndexMap<K, V, S>;
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
write!(formatter, "a map")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where A: MapAccess<'de>
{
let mut values = IndexMap::with_capacity_and_hasher(map.size_hint().unwrap_or(0), S::default());
while let Some((key, value)) = try!(map.next_entry()) {
values.insert(key, value);
}
Ok(values)
}
}
/// Requires crate feature `"serde-1"`
impl<'de, K, V, S> Deserialize<'de> for IndexMap<K, V, S>
where K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
S: Default + BuildHasher
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
deserializer.deserialize_map(OrderMapVisitor(PhantomData))
}
}
impl<'de, K, V, S, E> IntoDeserializer<'de, E> for IndexMap<K, V, S>
where K: IntoDeserializer<'de, E> + Eq + Hash,
V: IntoDeserializer<'de, E>,
S: BuildHasher,
E: Error,
{
type Deserializer = MapDeserializer<'de, <Self as IntoIterator>::IntoIter, E>;
fn into_deserializer(self) -> Self::Deserializer {
MapDeserializer::new(self.into_iter())
}
}
use IndexSet;
/// Requires crate feature `"serde-1"`
impl<T, S> Serialize for IndexSet<T, S>
where T: Serialize + Hash + Eq,
S: BuildHasher
{
fn serialize<Se>(&self, serializer: Se) -> Result<Se::Ok, Se::Error>
where Se: Serializer
{
let mut set_serializer = try!(serializer.serialize_seq(Some(self.len())));
for value in self {
try!(set_serializer.serialize_element(value));
}
set_serializer.end()
}
}
struct OrderSetVisitor<T, S>(PhantomData<(T, S)>);
impl<'de, T, S> Visitor<'de> for OrderSetVisitor<T, S>
where T: Deserialize<'de> + Eq + Hash,
S: Default + BuildHasher
{
type Value = IndexSet<T, S>;
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
write!(formatter, "a set")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where A: SeqAccess<'de>
{
let mut values = IndexSet::with_capacity_and_hasher(seq.size_hint().unwrap_or(0), S::default());
while let Some(value) = try!(seq.next_element()) {
values.insert(value);
}
Ok(values)
}
}
/// Requires crate feature `"serde-1"`
impl<'de, T, S> Deserialize<'de> for IndexSet<T, S>
where T: Deserialize<'de> + Eq + Hash,
S: Default + BuildHasher
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
deserializer.deserialize_seq(OrderSetVisitor(PhantomData))
}
}
impl<'de, T, S, E> IntoDeserializer<'de, E> for IndexSet<T, S>
where T: IntoDeserializer<'de, E> + Eq + Hash,
S: BuildHasher,
E: Error,
{
type Deserializer = SeqDeserializer<<Self as IntoIterator>::IntoIter, E>;
fn into_deserializer(self) -> Self::Deserializer {
SeqDeserializer::new(self.into_iter())
}
}
extern crate serde;
use self::serde::de::value::{MapDeserializer, SeqDeserializer};
use self::serde::de::{
Deserialize, Deserializer, Error, IntoDeserializer, MapAccess, SeqAccess, Visitor,
};
use self::serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
use std::fmt::{self, Formatter};
use std::hash::{BuildHasher, Hash};
use std::marker::PhantomData;
use IndexMap;
/// Requires crate feature `"serde-1"`
impl<K, V, S> Serialize for IndexMap<K, V, S>
where
    K: Serialize + Hash + Eq,
    V: Serialize,
    S: BuildHasher,
{
    // Serializes as a map, preserving the IndexMap's insertion order.
    fn serialize<T>(&self, serializer: T) -> Result<T::Ok, T::Error>
    where
        T: Serializer,
    {
        let mut map_serializer = serializer.serialize_map(Some(self.len()))?;
        for (key, value) in self {
            map_serializer.serialize_entry(key, value)?;
        }
        map_serializer.end()
    }
}
struct OrderMapVisitor<K, V, S>(PhantomData<(K, V, S)>);
impl<'de, K, V, S> Visitor<'de> for OrderMapVisitor<K, V, S>
where
    K: Deserialize<'de> + Eq + Hash,
    V: Deserialize<'de>,
    S: Default + BuildHasher,
{
    type Value = IndexMap<K, V, S>;
    fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
        write!(formatter, "a map")
    }
    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        // size_hint may be absent; fall back to 0 and let the map grow.
        let mut values =
            IndexMap::with_capacity_and_hasher(map.size_hint().unwrap_or(0), S::default());
        while let Some((key, value)) = map.next_entry()? {
            values.insert(key, value);
        }
        Ok(values)
    }
}
/// Requires crate feature `"serde-1"`
impl<'de, K, V, S> Deserialize<'de> for IndexMap<K, V, S>
where
    K: Deserialize<'de> + Eq + Hash,
    V: Deserialize<'de>,
    S: Default + BuildHasher,
{
    // Delegates to `OrderMapVisitor`, which preserves input order.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_map(OrderMapVisitor(PhantomData))
    }
}
// Lets an `IndexMap` itself act as a serde deserializer (e.g. to re-deserialize
// already-parsed data into a typed struct).
impl<'de, K, V, S, E> IntoDeserializer<'de, E> for IndexMap<K, V, S>
where
    K: IntoDeserializer<'de, E> + Eq + Hash,
    V: IntoDeserializer<'de, E>,
    S: BuildHasher,
    E: Error,
{
    type Deserializer = MapDeserializer<'de, <Self as IntoIterator>::IntoIter, E>;
    fn into_deserializer(self) -> Self::Deserializer {
        MapDeserializer::new(self.into_iter())
    }
}
use IndexSet;
/// Requires crate feature `"serde-1"`
impl<T, S> Serialize for IndexSet<T, S>
where
    T: Serialize + Hash + Eq,
    S: BuildHasher,
{
    // Serializes as a sequence, preserving the IndexSet's insertion order.
    fn serialize<Se>(&self, serializer: Se) -> Result<Se::Ok, Se::Error>
    where
        Se: Serializer,
    {
        let mut set_serializer = serializer.serialize_seq(Some(self.len()))?;
        for value in self {
            set_serializer.serialize_element(value)?;
        }
        set_serializer.end()
    }
}
struct OrderSetVisitor<T, S>(PhantomData<(T, S)>);
impl<'de, T, S> Visitor<'de> for OrderSetVisitor<T, S>
where
    T: Deserialize<'de> + Eq + Hash,
    S: Default + BuildHasher,
{
    type Value = IndexSet<T, S>;
    fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
        write!(formatter, "a set")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // size_hint may be absent; fall back to 0 and let the set grow.
        let mut values =
            IndexSet::with_capacity_and_hasher(seq.size_hint().unwrap_or(0), S::default());
        while let Some(value) = seq.next_element()? {
            values.insert(value);
        }
        Ok(values)
    }
}
/// Requires crate feature `"serde-1"`
impl<'de, T, S> Deserialize<'de> for IndexSet<T, S>
where
    T: Deserialize<'de> + Eq + Hash,
    S: Default + BuildHasher,
{
    // Delegates to `OrderSetVisitor`, which preserves input order.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_seq(OrderSetVisitor(PhantomData))
    }
}
// Lets an `IndexSet` itself act as a serde sequence deserializer.
impl<'de, T, S, E> IntoDeserializer<'de, E> for IndexSet<T, S>
where
    T: IntoDeserializer<'de, E> + Eq + Hash,
    S: BuildHasher,
    E: Error,
{
    type Deserializer = SeqDeserializer<<Self as IntoIterator>::IntoIter, E>;
    fn into_deserializer(self) -> Self::Deserializer {
        SeqDeserializer::new(self.into_iter())
    }
}

2838
third_party/rust/indexmap/src/set.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

36
third_party/rust/indexmap/src/util.rs поставляемый
Просмотреть файл

@ -1,17 +1,19 @@
use std::iter::Enumerate;
use std::mem::size_of;
pub fn third<A, B, C>(t: (A, B, C)) -> C { t.2 }
pub fn enumerate<I>(iterable: I) -> Enumerate<I::IntoIter>
where I: IntoIterator
{
iterable.into_iter().enumerate()
}
/// return the number of steps from a to b
pub fn ptrdistance<T>(a: *const T, b: *const T) -> usize {
debug_assert!(a as usize <= b as usize);
(b as usize - a as usize) / size_of::<T>()
}
use std::iter::Enumerate;
use std::mem::size_of;
/// Extract the third component of a 3-tuple, consuming it.
pub fn third<A, B, C>(t: (A, B, C)) -> C {
    let (_, _, c) = t;
    c
}
/// Pair each item of `iterable` with its 0-based position.
pub fn enumerate<I>(iterable: I) -> Enumerate<I::IntoIter>
where
    I: IntoIterator,
{
    let iter = iterable.into_iter();
    iter.enumerate()
}
/// return the number of steps from a to b
pub fn ptrdistance<T>(a: *const T, b: *const T) -> usize {
debug_assert!(a as usize <= b as usize);
(b as usize - a as usize) / size_of::<T>()
}

Просмотреть файл

@ -1,55 +1,55 @@
#[macro_use] extern crate indexmap;
use indexmap::Equivalent;
use std::hash::Hash;
#[derive(Debug, Hash)]
pub struct Pair<A, B>(pub A, pub B);
impl<A, B, C, D> PartialEq<(A, B)> for Pair<C, D>
where C: PartialEq<A>,
D: PartialEq<B>,
{
fn eq(&self, rhs: &(A, B)) -> bool {
self.0 == rhs.0 &&
self.1 == rhs.1 &&
true
}
}
impl<A, B, X> Equivalent<X> for Pair<A, B>
where Pair<A, B>: PartialEq<X>,
A: Hash + Eq,
B: Hash + Eq,
{
fn equivalent(&self, other: &X) -> bool {
*self == *other
}
}
#[test]
fn test_lookup() {
let s = String::from;
let map = indexmap! {
(s("a"), s("b")) => 1,
(s("a"), s("x")) => 2,
};
assert!(map.contains_key(&Pair("a", "b")));
assert!(!map.contains_key(&Pair("b", "a")));
}
#[test]
fn test_string_str() {
let s = String::from;
let mut map = indexmap! {
s("a") => 1, s("b") => 2,
s("x") => 3, s("y") => 4,
};
assert!(map.contains_key("a"));
assert!(!map.contains_key("z"));
assert_eq!(map.remove("b"), Some(2));
}
#[macro_use]
extern crate indexmap;
use indexmap::Equivalent;
use std::hash::Hash;
#[derive(Debug, Hash)]
pub struct Pair<A, B>(pub A, pub B);

/// Allow a `Pair` to be compared component-wise against a plain 2-tuple.
impl<A, B, C, D> PartialEq<(A, B)> for Pair<C, D>
where
    C: PartialEq<A>,
    D: PartialEq<B>,
{
    fn eq(&self, rhs: &(A, B)) -> bool {
        let (ref a, ref b) = *rhs;
        self.0 == *a && self.1 == *b
    }
}
// Let `Pair` be used for map lookups: it is `Equivalent` to any key type `X`
// it can be compared with via `PartialEq`.
impl<A, B, X> Equivalent<X> for Pair<A, B>
where
    Pair<A, B>: PartialEq<X>,
    A: Hash + Eq,
    B: Hash + Eq,
{
    fn equivalent(&self, other: &X) -> bool {
        *self == *other
    }
}
// A `Pair` of borrowed strs can look up a key that is a tuple of owned Strings.
#[test]
fn test_lookup() {
    let s = String::from;
    let map = indexmap! {
        (s("a"), s("b")) => 1,
        (s("a"), s("x")) => 2,
    };
    assert!(map.contains_key(&Pair("a", "b")));
    assert!(!map.contains_key(&Pair("b", "a")));
}
// `&str` lookups against `String` keys via the built-in `Equivalent` impl.
#[test]
fn test_string_str() {
    let s = String::from;
    let mut map = indexmap! {
        s("a") => 1, s("b") => 2,
        s("x") => 3, s("y") => 4,
    };
    assert!(map.contains_key("a"));
    assert!(!map.contains_key("z"));
    assert_eq!(map.swap_remove("b"), Some(2));
}

19
third_party/rust/indexmap/tests/macros_full_path.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,19 @@
// The macro must work through its full path, without `#[macro_use]`
// (Rust 2018 style macro import).
#[test]
fn test_create_map() {
    let _m = indexmap::indexmap! {
        1 => 2,
        7 => 1,
        2 => 2,
        3 => 3,
    };
}
// Same full-path check for the set macro.
#[test]
fn test_create_set() {
    let _s = indexmap::indexset! {
        1,
        7,
        2,
        3,
    };
}

767
third_party/rust/indexmap/tests/quick.rs поставляемый
Просмотреть файл

@ -1,368 +1,399 @@
extern crate indexmap;
extern crate itertools;
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate fnv;
use indexmap::IndexMap;
use itertools::Itertools;
use quickcheck::Arbitrary;
use quickcheck::Gen;
use rand::Rng;
use fnv::FnvHasher;
use std::hash::{BuildHasher, BuildHasherDefault};
type FnvBuilder = BuildHasherDefault<FnvHasher>;
type OrderMapFnv<K, V> = IndexMap<K, V, FnvBuilder>;
use std::collections::HashSet;
use std::collections::HashMap;
use std::iter::FromIterator;
use std::hash::Hash;
use std::fmt::Debug;
use std::ops::Deref;
use std::cmp::min;
use indexmap::map::Entry as OEntry;
use std::collections::hash_map::Entry as HEntry;
fn set<'a, T: 'a, I>(iter: I) -> HashSet<T>
where I: IntoIterator<Item=&'a T>,
T: Copy + Hash + Eq
{
iter.into_iter().cloned().collect()
}
fn indexmap<'a, T: 'a, I>(iter: I) -> IndexMap<T, ()>
where I: IntoIterator<Item=&'a T>,
T: Copy + Hash + Eq,
{
IndexMap::from_iter(iter.into_iter().cloned().map(|k| (k, ())))
}
quickcheck! {
fn contains(insert: Vec<u32>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
insert.iter().all(|&key| map.get(&key).is_some())
}
fn contains_not(insert: Vec<u8>, not: Vec<u8>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
let nots = &set(&not) - &set(&insert);
nots.iter().all(|&key| map.get(&key).is_none())
}
fn insert_remove(insert: Vec<u8>, remove: Vec<u8>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
for &key in &remove {
map.swap_remove(&key);
}
let elements = &set(&insert) - &set(&remove);
map.len() == elements.len() && map.iter().count() == elements.len() &&
elements.iter().all(|k| map.get(k).is_some())
}
fn insertion_order(insert: Vec<u32>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
itertools::assert_equal(insert.iter().unique(), map.keys());
true
}
fn pop(insert: Vec<u8>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
let mut pops = Vec::new();
while let Some((key, _v)) = map.pop() {
pops.push(key);
}
pops.reverse();
itertools::assert_equal(insert.iter().unique(), &pops);
true
}
fn with_cap(cap: usize) -> bool {
let map: IndexMap<u8, u8> = IndexMap::with_capacity(cap);
println!("wish: {}, got: {} (diff: {})", cap, map.capacity(), map.capacity() as isize - cap as isize);
map.capacity() >= cap
}
fn drain(insert: Vec<u8>) -> bool {
let mut map = IndexMap::new();
for &key in &insert {
map.insert(key, ());
}
let mut clone = map.clone();
let drained = clone.drain(..);
for (key, _) in drained {
map.remove(&key);
}
map.is_empty()
}
}
use Op::*;
#[derive(Copy, Clone, Debug)]
enum Op<K, V> {
Add(K, V),
Remove(K),
AddEntry(K, V),
RemoveEntry(K),
}
impl<K, V> Arbitrary for Op<K, V>
where K: Arbitrary,
V: Arbitrary,
{
fn arbitrary<G: Gen>(g: &mut G) -> Self {
match g.gen::<u32>() % 4 {
0 => Add(K::arbitrary(g), V::arbitrary(g)),
1 => AddEntry(K::arbitrary(g), V::arbitrary(g)),
2 => Remove(K::arbitrary(g)),
_ => RemoveEntry(K::arbitrary(g)),
}
}
}
fn do_ops<K, V, S>(ops: &[Op<K, V>], a: &mut IndexMap<K, V, S>, b: &mut HashMap<K, V>)
where K: Hash + Eq + Clone,
V: Clone,
S: BuildHasher,
{
for op in ops {
match *op {
Add(ref k, ref v) => {
a.insert(k.clone(), v.clone());
b.insert(k.clone(), v.clone());
}
AddEntry(ref k, ref v) => {
a.entry(k.clone()).or_insert(v.clone());
b.entry(k.clone()).or_insert(v.clone());
}
Remove(ref k) => {
a.swap_remove(k);
b.remove(k);
}
RemoveEntry(ref k) => {
match a.entry(k.clone()) {
OEntry::Occupied(ent) => { ent.remove_entry(); },
_ => { }
}
match b.entry(k.clone()) {
HEntry::Occupied(ent) => { ent.remove_entry(); },
_ => { }
}
}
}
//println!("{:?}", a);
}
}
fn assert_maps_equivalent<K, V>(a: &IndexMap<K, V>, b: &HashMap<K, V>) -> bool
where K: Hash + Eq + Debug,
V: Eq + Debug,
{
assert_eq!(a.len(), b.len());
assert_eq!(a.iter().next().is_some(), b.iter().next().is_some());
for key in a.keys() {
assert!(b.contains_key(key), "b does not contain {:?}", key);
}
for key in b.keys() {
assert!(a.get(key).is_some(), "a does not contain {:?}", key);
}
for key in a.keys() {
assert_eq!(a[key], b[key]);
}
true
}
quickcheck! {
fn operations_i8(ops: Large<Vec<Op<i8, i8>>>) -> bool {
let mut map = IndexMap::new();
let mut reference = HashMap::new();
do_ops(&ops, &mut map, &mut reference);
assert_maps_equivalent(&map, &reference)
}
fn operations_string(ops: Vec<Op<Alpha, i8>>) -> bool {
let mut map = IndexMap::new();
let mut reference = HashMap::new();
do_ops(&ops, &mut map, &mut reference);
assert_maps_equivalent(&map, &reference)
}
fn keys_values(ops: Large<Vec<Op<i8, i8>>>) -> bool {
let mut map = IndexMap::new();
let mut reference = HashMap::new();
do_ops(&ops, &mut map, &mut reference);
let mut visit = IndexMap::new();
for (k, v) in map.keys().zip(map.values()) {
assert_eq!(&map[k], v);
assert!(!visit.contains_key(k));
visit.insert(*k, *v);
}
assert_eq!(visit.len(), reference.len());
true
}
fn keys_values_mut(ops: Large<Vec<Op<i8, i8>>>) -> bool {
let mut map = IndexMap::new();
let mut reference = HashMap::new();
do_ops(&ops, &mut map, &mut reference);
let mut visit = IndexMap::new();
let keys = Vec::from_iter(map.keys().cloned());
for (k, v) in keys.iter().zip(map.values_mut()) {
assert_eq!(&reference[k], v);
assert!(!visit.contains_key(k));
visit.insert(*k, *v);
}
assert_eq!(visit.len(), reference.len());
true
}
fn equality(ops1: Vec<Op<i8, i8>>, removes: Vec<usize>) -> bool {
let mut map = IndexMap::new();
let mut reference = HashMap::new();
do_ops(&ops1, &mut map, &mut reference);
let mut ops2 = ops1.clone();
for &r in &removes {
if !ops2.is_empty() {
let i = r % ops2.len();
ops2.remove(i);
}
}
let mut map2 = OrderMapFnv::default();
let mut reference2 = HashMap::new();
do_ops(&ops2, &mut map2, &mut reference2);
assert_eq!(map == map2, reference == reference2);
true
}
fn retain_ordered(keys: Large<Vec<i8>>, remove: Large<Vec<i8>>) -> () {
let mut map = indexmap(keys.iter());
let initial_map = map.clone(); // deduplicated in-order input
let remove_map = indexmap(remove.iter());
let keys_s = set(keys.iter());
let remove_s = set(remove.iter());
let answer = &keys_s - &remove_s;
map.retain(|k, _| !remove_map.contains_key(k));
// check the values
assert_eq!(map.len(), answer.len());
for key in &answer {
assert!(map.contains_key(key));
}
// check the order
itertools::assert_equal(map.keys(), initial_map.keys().filter(|&k| !remove_map.contains_key(k)));
}
fn sort_1(keyvals: Large<Vec<(i8, i8)>>) -> () {
let mut map: IndexMap<_, _> = IndexMap::from_iter(keyvals.to_vec());
let mut answer = keyvals.0;
answer.sort_by_key(|t| t.0);
// reverse dedup: Because IndexMap::from_iter keeps the last value for
// identical keys
answer.reverse();
answer.dedup_by_key(|t| t.0);
answer.reverse();
map.sort_by(|k1, _, k2, _| Ord::cmp(k1, k2));
// check it contains all the values it should
for &(key, val) in &answer {
assert_eq!(map[&key], val);
}
// check the order
let mapv = Vec::from_iter(map);
assert_eq!(answer, mapv);
}
fn sort_2(keyvals: Large<Vec<(i8, i8)>>) -> () {
let mut map: IndexMap<_, _> = IndexMap::from_iter(keyvals.to_vec());
map.sort_by(|_, v1, _, v2| Ord::cmp(v1, v2));
assert_sorted_by_key(map, |t| t.1);
}
}
fn assert_sorted_by_key<I, Key, X>(iterable: I, key: Key)
where I: IntoIterator,
I::Item: Ord + Clone + Debug,
Key: Fn(&I::Item) -> X,
X: Ord,
{
let input = Vec::from_iter(iterable);
let mut sorted = input.clone();
sorted.sort_by_key(key);
assert_eq!(input, sorted);
}
/// Short ASCII-lowercase string used as a map key in the string-keyed
/// quickcheck properties.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct Alpha(String);
impl Deref for Alpha {
    type Target = String;
    // Test-only convenience: lets `Alpha` reuse `String`/`str` methods directly.
    fn deref(&self) -> &String { &self.0 }
}
// `'static` is implied for references in `const` items, so spell it `&[u8]`.
const ALPHABET: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
impl Arbitrary for Alpha {
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        // Draw a length bounded by the generator's size, then cap it at 16.
        let len = g.next_u32() % g.size() as u32;
        let len = min(len, 16);
        Alpha((0..len).map(|_| {
            ALPHABET[g.next_u32() as usize % ALPHABET.len()] as char
        }).collect())
    }
    // `dyn` makes the trait object explicit; bare `Box<Iterator<..>>` is
    // deprecated trait-object syntax (warned since Rust 2018).
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        Box::new((**self).shrink().map(Alpha))
    }
}
/// quickcheck Arbitrary adaptor -- make a larger vec
#[derive(Clone, Debug)]
struct Large<T>(T);
impl<T> Deref for Large<T> {
type Target = T;
fn deref(&self) -> &T { &self.0 }
}
impl<T> Arbitrary for Large<Vec<T>>
where T: Arbitrary
{
fn arbitrary<G: Gen>(g: &mut G) -> Self {
let len = g.next_u32() % (g.size() * 10) as u32;
Large((0..len).map(|_| T::arbitrary(g)).collect())
}
fn shrink(&self) -> Box<Iterator<Item=Self>> {
Box::new((**self).shrink().map(Large))
}
}
extern crate indexmap;
extern crate itertools;
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate fnv;
use indexmap::IndexMap;
use itertools::Itertools;
use quickcheck::Arbitrary;
use quickcheck::Gen;
use rand::Rng;
use fnv::FnvHasher;
use std::hash::{BuildHasher, BuildHasherDefault};
// Hasher aliases used to exercise IndexMap with a non-default BuildHasher.
type FnvBuilder = BuildHasherDefault<FnvHasher>;
type OrderMapFnv<K, V> = IndexMap<K, V, FnvBuilder>;
use std::cmp::min;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::FromIterator;
use std::ops::Deref;
use indexmap::map::Entry as OEntry;
use std::collections::hash_map::Entry as HEntry;
/// Collect the referenced items of `iter` into a `HashSet`, copying each one.
fn set<'a, T: 'a, I>(iter: I) -> HashSet<T>
where
    I: IntoIterator<Item = &'a T>,
    T: Copy + Hash + Eq,
{
    iter.into_iter().copied().collect()
}
fn indexmap<'a, T: 'a, I>(iter: I) -> IndexMap<T, ()>
where
I: IntoIterator<Item = &'a T>,
T: Copy + Hash + Eq,
{
IndexMap::from_iter(iter.into_iter().cloned().map(|k| (k, ())))
}
quickcheck! {
    // Every key inserted must be retrievable with `get`.
    fn contains(insert: Vec<u32>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        insert.iter().all(|&key| map.get(&key).is_some())
    }
    // Keys that were never inserted (`not - insert`) must not be found.
    fn contains_not(insert: Vec<u8>, not: Vec<u8>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        let nots = &set(&not) - &set(&insert);
        nots.iter().all(|&key| map.get(&key).is_none())
    }
    // After removals, length, iteration count and membership must all agree
    // with the set difference `insert - remove`.
    fn insert_remove(insert: Vec<u8>, remove: Vec<u8>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        for &key in &remove {
            map.swap_remove(&key);
        }
        let elements = &set(&insert) - &set(&remove);
        map.len() == elements.len() && map.iter().count() == elements.len() &&
            elements.iter().all(|k| map.get(k).is_some())
    }
    // Iteration order equals the first-insertion order of the unique keys.
    fn insertion_order(insert: Vec<u32>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        itertools::assert_equal(insert.iter().unique(), map.keys());
        true
    }
    // `pop` removes from the back, so popping everything and reversing the
    // result reproduces insertion order.
    fn pop(insert: Vec<u8>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        let mut pops = Vec::new();
        while let Some((key, _v)) = map.pop() {
            pops.push(key);
        }
        pops.reverse();
        itertools::assert_equal(insert.iter().unique(), &pops);
        true
    }
    // `with_capacity(cap)` must reserve room for at least `cap` entries.
    fn with_cap(cap: usize) -> bool {
        let map: IndexMap<u8, u8> = IndexMap::with_capacity(cap);
        println!("wish: {}, got: {} (diff: {})", cap, map.capacity(), map.capacity() as isize - cap as isize);
        map.capacity() >= cap
    }
    // `drain(..)` yields every entry: removing each drained key from the
    // original map must leave it empty.
    fn drain(insert: Vec<u8>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        let mut clone = map.clone();
        let drained = clone.drain(..);
        for (key, _) in drained {
            map.swap_remove(&key);
        }
        map.is_empty()
    }
    // `shift_remove` must preserve the relative order of the remaining keys.
    fn shift_remove(insert: Vec<u8>, remove: Vec<u8>) -> bool {
        let mut map = IndexMap::new();
        for &key in &insert {
            map.insert(key, ());
        }
        for &key in &remove {
            map.shift_remove(&key);
        }
        let elements = &set(&insert) - &set(&remove);
        // Check that order is preserved after removals
        let mut iter = map.keys();
        for &key in insert.iter().unique() {
            if elements.contains(&key) {
                assert_eq!(Some(key), iter.next().cloned());
            }
        }
        map.len() == elements.len() && map.iter().count() == elements.len() &&
            elements.iter().all(|k| map.get(k).is_some())
    }
}
use Op::*;
/// A single randomized mutation, applied identically to both map
/// implementations under test.
#[derive(Copy, Clone, Debug)]
enum Op<K, V> {
    Add(K, V),
    Remove(K),
    AddEntry(K, V),
    RemoveEntry(K),
}
impl<K, V> Arbitrary for Op<K, V>
where
    K: Arbitrary,
    V: Arbitrary,
{
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        // Draw one u32 and use its residue mod 4 to pick an operation.
        let choice = g.gen::<u32>() % 4;
        if choice == 0 {
            Add(K::arbitrary(g), V::arbitrary(g))
        } else if choice == 1 {
            AddEntry(K::arbitrary(g), V::arbitrary(g))
        } else if choice == 2 {
            Remove(K::arbitrary(g))
        } else {
            RemoveEntry(K::arbitrary(g))
        }
    }
}
/// Apply the same sequence of operations to an `IndexMap` and a `HashMap`,
/// so the two can afterwards be compared for equivalence.
fn do_ops<K, V, S>(ops: &[Op<K, V>], a: &mut IndexMap<K, V, S>, b: &mut HashMap<K, V>)
where
    K: Hash + Eq + Clone,
    V: Clone,
    S: BuildHasher,
{
    for op in ops {
        match op {
            Add(k, v) => {
                a.insert(k.clone(), v.clone());
                b.insert(k.clone(), v.clone());
            }
            AddEntry(k, v) => {
                a.entry(k.clone()).or_insert_with(|| v.clone());
                b.entry(k.clone()).or_insert_with(|| v.clone());
            }
            Remove(k) => {
                a.swap_remove(k);
                b.remove(k);
            }
            RemoveEntry(k) => {
                // Remove through the entry API on both maps.
                if let OEntry::Occupied(ent) = a.entry(k.clone()) {
                    ent.swap_remove_entry();
                }
                if let HEntry::Occupied(ent) = b.entry(k.clone()) {
                    ent.remove_entry();
                }
            }
        }
        //println!("{:?}", a);
    }
}
/// Check that `a` and `b` hold exactly the same key/value pairs, panicking
/// with a descriptive message on the first mismatch; returns `true` so it
/// can be used as a quickcheck property body.
fn assert_maps_equivalent<K, V>(a: &IndexMap<K, V>, b: &HashMap<K, V>) -> bool
where
    K: Hash + Eq + Debug,
    V: Eq + Debug,
{
    assert_eq!(a.len(), b.len());
    assert_eq!(a.iter().next().is_some(), b.iter().next().is_some());
    // Keys must agree in both directions before the values are compared.
    for k in a.keys() {
        assert!(b.contains_key(k), "b does not contain {:?}", k);
    }
    for k in b.keys() {
        assert!(a.get(k).is_some(), "a does not contain {:?}", k);
    }
    for k in a.keys() {
        assert_eq!(a[k], b[k]);
    }
    true
}
quickcheck! {
    // Random op sequences on i8 keys leave IndexMap equivalent to HashMap.
    fn operations_i8(ops: Large<Vec<Op<i8, i8>>>) -> bool {
        let mut map = IndexMap::new();
        let mut reference = HashMap::new();
        do_ops(&ops, &mut map, &mut reference);
        assert_maps_equivalent(&map, &reference)
    }
    // Same property with string (Alpha) keys.
    fn operations_string(ops: Vec<Op<Alpha, i8>>) -> bool {
        let mut map = IndexMap::new();
        let mut reference = HashMap::new();
        do_ops(&ops, &mut map, &mut reference);
        assert_maps_equivalent(&map, &reference)
    }
    // `keys()` and `values()` iterate the same entries, in lock step.
    fn keys_values(ops: Large<Vec<Op<i8, i8>>>) -> bool {
        let mut map = IndexMap::new();
        let mut reference = HashMap::new();
        do_ops(&ops, &mut map, &mut reference);
        let mut visit = IndexMap::new();
        for (k, v) in map.keys().zip(map.values()) {
            assert_eq!(&map[k], v);
            assert!(!visit.contains_key(k));
            visit.insert(*k, *v);
        }
        assert_eq!(visit.len(), reference.len());
        true
    }
    // `values_mut()` visits each entry exactly once, matching `keys()`.
    fn keys_values_mut(ops: Large<Vec<Op<i8, i8>>>) -> bool {
        let mut map = IndexMap::new();
        let mut reference = HashMap::new();
        do_ops(&ops, &mut map, &mut reference);
        let mut visit = IndexMap::new();
        let keys = Vec::from_iter(map.keys().cloned());
        for (k, v) in keys.iter().zip(map.values_mut()) {
            assert_eq!(&reference[k], v);
            assert!(!visit.contains_key(k));
            visit.insert(*k, *v);
        }
        assert_eq!(visit.len(), reference.len());
        true
    }
    // Map equality must agree with HashMap equality for two op sequences
    // (the second derived by deleting random ops from the first), even when
    // the two IndexMaps use different hashers.
    fn equality(ops1: Vec<Op<i8, i8>>, removes: Vec<usize>) -> bool {
        let mut map = IndexMap::new();
        let mut reference = HashMap::new();
        do_ops(&ops1, &mut map, &mut reference);
        let mut ops2 = ops1.clone();
        for &r in &removes {
            if !ops2.is_empty() {
                let i = r % ops2.len();
                ops2.remove(i);
            }
        }
        let mut map2 = OrderMapFnv::default();
        let mut reference2 = HashMap::new();
        do_ops(&ops2, &mut map2, &mut reference2);
        assert_eq!(map == map2, reference == reference2);
        true
    }
    // `retain` drops exactly the requested keys and preserves order.
    fn retain_ordered(keys: Large<Vec<i8>>, remove: Large<Vec<i8>>) -> () {
        let mut map = indexmap(keys.iter());
        let initial_map = map.clone(); // deduplicated in-order input
        let remove_map = indexmap(remove.iter());
        let keys_s = set(keys.iter());
        let remove_s = set(remove.iter());
        let answer = &keys_s - &remove_s;
        map.retain(|k, _| !remove_map.contains_key(k));
        // check the values
        assert_eq!(map.len(), answer.len());
        for key in &answer {
            assert!(map.contains_key(key));
        }
        // check the order
        itertools::assert_equal(map.keys(), initial_map.keys().filter(|&k| !remove_map.contains_key(k)));
    }
    // `sort_by` on keys yields ascending keys with last-write-wins values.
    fn sort_1(keyvals: Large<Vec<(i8, i8)>>) -> () {
        let mut map: IndexMap<_, _> = IndexMap::from_iter(keyvals.to_vec());
        let mut answer = keyvals.0;
        answer.sort_by_key(|t| t.0);
        // reverse dedup: Because IndexMap::from_iter keeps the last value for
        // identical keys
        answer.reverse();
        answer.dedup_by_key(|t| t.0);
        answer.reverse();
        map.sort_by(|k1, _, k2, _| Ord::cmp(k1, k2));
        // check it contains all the values it should
        for &(key, val) in &answer {
            assert_eq!(map[&key], val);
        }
        // check the order
        let mapv = Vec::from_iter(map);
        assert_eq!(answer, mapv);
    }
    // `sort_by` on values leaves iteration sorted by value.
    fn sort_2(keyvals: Large<Vec<(i8, i8)>>) -> () {
        let mut map: IndexMap<_, _> = IndexMap::from_iter(keyvals.to_vec());
        map.sort_by(|_, v1, _, v2| Ord::cmp(v1, v2));
        assert_sorted_by_key(map, |t| t.1);
    }
}
/// Assert that `iterable` yields its items in nondecreasing order of `key`:
/// a stably re-sorted copy must equal the sequence as seen.
fn assert_sorted_by_key<I, Key, X>(iterable: I, key: Key)
where
    I: IntoIterator,
    I::Item: Ord + Clone + Debug,
    Key: Fn(&I::Item) -> X,
    X: Ord,
{
    let seen: Vec<_> = iterable.into_iter().collect();
    let mut expected = seen.clone();
    expected.sort_by_key(key);
    assert_eq!(seen, expected);
}
/// Short ASCII-lowercase string used as a map key in the string-keyed
/// quickcheck properties.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
struct Alpha(String);
impl Deref for Alpha {
    type Target = String;
    // Test-only convenience: lets `Alpha` reuse `String`/`str` methods.
    fn deref(&self) -> &String {
        &self.0
    }
}
const ALPHABET: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
impl Arbitrary for Alpha {
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        // Draw a length bounded by the generator's size, clamped to 16.
        let len = min(g.next_u32() % g.size() as u32, 16);
        let letters = (0..len).map(|_| {
            let slot = g.next_u32() as usize % ALPHABET.len();
            ALPHABET[slot] as char
        });
        Alpha(letters.collect())
    }
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        Box::new((**self).shrink().map(Alpha))
    }
}
/// quickcheck Arbitrary adaptor -- make a larger vec
#[derive(Clone, Debug)]
struct Large<T>(T);
impl<T> Deref for Large<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}
impl<T> Arbitrary for Large<Vec<T>>
where
    T: Arbitrary,
{
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        // Up to 10x the generator's usual size bound, to stress larger maps.
        let bound = (g.size() * 10) as u32;
        let len = g.next_u32() % bound;
        Large((0..len).map(|_| T::arbitrary(g)).collect())
    }
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        Box::new((**self).shrink().map(Large))
    }
}

134
third_party/rust/indexmap/tests/serde.rs поставляемый
Просмотреть файл

@ -1,59 +1,75 @@
#![cfg(feature = "serde-1")]
#[macro_use]
extern crate indexmap;
extern crate serde_test;
extern crate fnv;
use serde_test::{Token, assert_tokens};
/// An `indexmap!` literal must serialize as a map, in insertion order.
#[test]
fn test_serde() {
    let map = indexmap! { 1 => 2, 3 => 4 };
    let expected = [
        Token::Map { len: Some(2) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::MapEnd,
    ];
    assert_tokens(&map, &expected);
}
/// An `indexset!` literal must serialize as a sequence, in insertion order.
#[test]
fn test_serde_set() {
    let set = indexset! { 1, 2, 3, 4 };
    let expected = [
        Token::Seq { len: Some(4) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::SeqEnd,
    ];
    assert_tokens(&set, &expected);
}
/// Serde support must be independent of the hash builder (FNV here).
#[test]
fn test_serde_fnv_hasher() {
    let mut map: ::indexmap::IndexMap<i32, i32, ::fnv::FnvBuildHasher> = Default::default();
    map.insert(1, 2);
    map.insert(3, 4);
    let expected = [
        Token::Map { len: Some(2) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::MapEnd,
    ];
    assert_tokens(&map, &expected);
}
/// Same for sets with a custom hasher. NOTE(review): the name says "map"
/// but this exercises `IndexSet` -- consider renaming upstream.
#[test]
fn test_serde_map_fnv_hasher() {
    let mut set: ::indexmap::IndexSet<i32, ::fnv::FnvBuildHasher> = Default::default();
    set.extend(1..5);
    let expected = [
        Token::Seq { len: Some(4) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::SeqEnd,
    ];
    assert_tokens(&set, &expected);
}
#![cfg(feature = "serde-1")]
#[macro_use]
extern crate indexmap;
extern crate fnv;
extern crate serde_test;
use serde_test::{assert_tokens, Token};
// Maps serialize as maps, in insertion order.
#[test]
fn test_serde() {
    let map = indexmap! { 1 => 2, 3 => 4 };
    let tokens: &[Token] = &[
        Token::Map { len: Some(2) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::MapEnd,
    ];
    assert_tokens(&map, tokens);
}
// Sets serialize as sequences, in insertion order.
#[test]
fn test_serde_set() {
    let set = indexset! { 1, 2, 3, 4 };
    let tokens: &[Token] = &[
        Token::Seq { len: Some(4) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::SeqEnd,
    ];
    assert_tokens(&set, tokens);
}
// Serialization must not depend on the hash builder (FNV here).
#[test]
fn test_serde_fnv_hasher() {
    let mut map: ::indexmap::IndexMap<i32, i32, ::fnv::FnvBuildHasher> = Default::default();
    map.insert(1, 2);
    map.insert(3, 4);
    let tokens: &[Token] = &[
        Token::Map { len: Some(2) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::MapEnd,
    ];
    assert_tokens(&map, tokens);
}
// Same for sets with a custom hasher. NOTE(review): despite the name, this
// exercises `IndexSet`, not a map.
#[test]
fn test_serde_map_fnv_hasher() {
    let mut set: ::indexmap::IndexSet<i32, ::fnv::FnvBuildHasher> = Default::default();
    set.extend(1..5);
    let tokens: &[Token] = &[
        Token::Seq { len: Some(4) },
        Token::I32(1),
        Token::I32(2),
        Token::I32(3),
        Token::I32(4),
        Token::SeqEnd,
    ];
    assert_tokens(&set, tokens);
}

62
third_party/rust/indexmap/tests/tests.rs поставляемый
Просмотреть файл

@ -1,32 +1,30 @@
#[macro_use]
extern crate indexmap;
extern crate itertools;
/// `sorted_by` on values must yield the pairs in ascending value order.
#[test]
fn test_sort() {
    let m = indexmap! {
        1 => 2,
        7 => 1,
        2 => 2,
        3 => 3,
    };
    let by_value = m.sorted_by(|_k1, v1, _k2, v2| v1.cmp(v2));
    itertools::assert_equal(by_value, vec![(7, 1), (1, 2), (2, 2), (3, 3)]);
}
/// `sorted_by` on a set must yield its values in ascending order.
#[test]
fn test_sort_set() {
    let s = indexset! {
        1,
        7,
        2,
        3,
    };
    let ascending = s.sorted_by(|v1, v2| v1.cmp(v2));
    itertools::assert_equal(ascending, vec![1, 2, 3, 7]);
}
#[macro_use]
extern crate indexmap;
extern crate itertools;
// `sorted_by` on values returns pairs ordered by value.
#[test]
fn test_sort() {
    let m = indexmap! {
        1 => 2,
        7 => 1,
        2 => 2,
        3 => 3,
    };
    let sorted_pairs = m.sorted_by(|_k1, v1, _k2, v2| v1.cmp(v2));
    itertools::assert_equal(sorted_pairs, vec![(7, 1), (1, 2), (2, 2), (3, 3)]);
}
// `sorted_by` on a set returns the values in ascending order.
#[test]
fn test_sort_set() {
    let s = indexset! {
        1,
        7,
        2,
        3,
    };
    let sorted_values = s.sorted_by(|v1, v2| v1.cmp(v2));
    itertools::assert_equal(sorted_values, vec![1, 2, 3, 7]);
}

1
third_party/rust/rust_decimal/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{"CODE_OF_CONDUCT.md":"64765f10290cfce7191b4208cb21698b708a118568f5016602cccc304846a09a","CONTRIBUTING.md":"471d6281fb5038e17e32d3b4450aacf542a396709605aa170e07d3971d70b9c1","Cargo.toml":"5bcdb31d3230d6592b1940f0730bc6a0a07c05ef245a4a71ae9ff48b83cc5f38","LICENSE":"f8218253704e32441cafea1b9b3bcb2c6a3c51c5553cd8513d179290202bccb2","README.md":"2e6fc38c2289725da3fea1e2429fdc6482484e32b3e11d0216b719d871193fc5","VERSION.md":"172eea9bab41bd1493cd6a4a03a6df5cdfba66a9f02ec79b776fe71ad55d5be8","benches/lib_benches.rs":"39a5a691cd614aee08c0be202d715045dfe1d27e0a998fd983b8cc2ceaca7b55","rustfmt.toml":"f33bda44a494d17c95b7bc1b3dd88c203030b75be766f3a7f9b63ef45d960bb0","src/decimal.rs":"23b00c66f1024c7883f654d492fa6563173b47aa7ad26b4641315883a8278ea4","src/error.rs":"7f546cbfb6b1fdc6bb7bb3d6ef9f1a2462e30beba6f561e1890e7515c9bfb640","src/lib.rs":"104050f8a7d36317da0021dd4b42973e5f6cd928d748f3d0621f100d8d66fa6e","src/postgres.rs":"454630887e43403011dacee0682d163e92aed0071f3258ee616624ec11b82eb6","src/serde_types.rs":"9eadeca56538e69cd909853dd378bffecf2acc98c651ca2eec3192b81044b0a1","tests/decimal_tests.rs":"6c2d5a178a064e4a5e1131ed0d6c14527e9ac819f52379c0225872fa23788bcf"},"package":"95ba36e8c41bf675947e200af432325f332f60a0aea0ef2dc456636c2f6037d7"}

46
third_party/rust/rust_decimal/CODE_OF_CONDUCT.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at paul@form1.co.nz. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

40
third_party/rust/rust_decimal/CONTRIBUTING.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,40 @@
# Contributing to Rust Decimal
Rust Decimal welcomes contributions from everyone. Here are the guidelines if you are
thinking of helping us:
## Contributions
Contributions to Rust Decimal or its dependencies should be made in the form of GitHub
pull requests. Each pull request will be reviewed by a core contributor
(someone with permission to land patches) and either landed in the main tree or
given feedback for changes that would be required. All contributions should
follow this format, even those from core contributors.
Should you wish to work on an issue, please claim it first by commenting on
the GitHub issue that you want to work on it. This is to prevent duplicated
efforts from contributors on the same issue.
## Pull Request Checklist
- Branch from the master branch and, if needed, rebase to the current master
branch before submitting your pull request. If it doesn't merge cleanly with
master you may be asked to rebase your changes.
- If your patch is not getting reviewed or you need a specific person to review
it, you can @-reply a reviewer asking for a review in the pull request or inside a
comment.
- Add tests relevant to the fixed bug or new feature.
## Conduct
In all Rust Decimal related forums, we follow the [Rust Code of
Conduct](https://www.rust-lang.org/conduct.html). For escalation or moderation of
issues, please contact Paul (paul@form1.co.nz) instead of the Rust
moderation team.
## Communication
Opening tickets on the
[paupino/rust-decimal](https://github.com/paupino/rust-decimal) project is the preferred method of communication.

82
third_party/rust/rust_decimal/Cargo.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,82 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "rust_decimal"
version = "1.7.0"
authors = ["Paul Mason <paul@form1.co.nz>"]
description = "A Decimal Implementation written in pure Rust suitable for financial calculations."
documentation = "https://docs.rs/rust_decimal/"
readme = "./README.md"
keywords = ["decimal", "financial", "fixed", "precision"]
categories = ["science", "data-structures"]
license = "MIT"
repository = "https://github.com/paupino/rust-decimal"
[dependencies.byteorder]
version = "1.3"
optional = true
[dependencies.bytes]
version = "0.5"
optional = true
[dependencies.diesel]
version = "1.4"
features = ["postgres"]
optional = true
default-features = false
[dependencies.num-traits]
version = "0.2"
[dependencies.postgres]
version = "0.17"
optional = true
[dependencies.serde]
version = "1.0"
optional = true
[dependencies.tokio-postgres]
version = "0.5"
optional = true
[dev-dependencies.bincode]
version = "1.3"
[dev-dependencies.bytes]
version = "0.5"
[dev-dependencies.futures]
version = "0.3"
[dev-dependencies.rand]
version = "0.7"
[dev-dependencies.serde_derive]
version = "1.0"
[dev-dependencies.serde_json]
version = "1.0"
[dev-dependencies.tokio]
version = "0.2"
features = ["rt-threaded", "test-util", "macros"]
[features]
db-diesel-postgres = ["diesel"]
db-postgres = ["postgres", "bytes", "byteorder"]
db-tokio-postgres = ["postgres", "tokio-postgres", "bytes", "byteorder"]
default = ["serde"]
serde-bincode = ["serde"]
serde-float = ["serde"]
tokio-pg = ["db-tokio-postgres"]

21
third_party/rust/rust_decimal/LICENSE поставляемый Normal file
Просмотреть файл

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2016 Paul Mason
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

84
third_party/rust/rust_decimal/README.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,84 @@
# Decimal &emsp; [![Build Status]][actions] [![Latest Version]][crates.io]
[Build Status]: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpaupino%2Frust-decimal%2Fbadge&label=build&logo=none
[actions]: https://actions-badge.atrox.dev/paupino/rust-decimal/goto
[Latest Version]: https://img.shields.io/crates/v/rust-decimal.svg
[crates.io]: https://crates.io/crates/rust-decimal
A Decimal implementation written in pure Rust suitable for financial calculations that require significant integral and fractional digits with no round-off errors.
The binary representation consists of a 96 bit integer number, a scaling factor used to specify the decimal fraction and a 1 bit sign. Because of this representation, trailing zeros are preserved and may be exposed when in string form. These can be truncated using the `normalize` or `round_dp` functions.
[Documentation](https://docs.rs/rust_decimal/)
## Usage
Decimal numbers can be created in a few distinct ways. The easiest and most optimal method of creating a Decimal is to use the procedural macro within the `rust_decimal_macros` crate:
```rust
// Procedural macros need importing directly
use rust_decimal_macros::*;
let number = dec!(-1.23);
```
Alternatively you can also use one of the Decimal number convenience functions:
```rust
use rust_decimal::prelude::*;
// Using an integer followed by the decimal points
let scaled = Decimal::new(202, 2); // 2.02
// From a string representation
let from_string = Decimal::from_str("2.02").unwrap(); // 2.02
// Using the `Into` trait
let my_int : Decimal = 3i32.into();
// Using the raw decimal representation
// 3.1415926535897932384626433832
let pi = Decimal::from_parts(1102470952, 185874565, 1703060790, false, 28);
```
## Features
* [db-postgres](#db-postgres)
* [db-tokio-postgres](#db-tokio-postgres)
* [db-diesel-postgres](#db-diesel-postgres)
* [serde-float](#serde-float)
* [serde-bincode](#serde-bincode)
## `db-postgres`
This feature enables a PostgreSQL communication module. It allows for reading and writing the `Decimal`
type by transparently serializing/deserializing into the `NUMERIC` data type within PostgreSQL.
## `db-tokio-postgres`
Enables the tokio postgres module allowing for async communication with PostgreSQL.
## `db-diesel-postgres`
Enable `diesel` PostgreSQL support.
## `serde-float`
Enable this so that JSON serialization of Decimal types are sent as a float instead of a string (default).
e.g. with this turned on, JSON serialization would output:
```
{
"value": 1.234
}
```
## `serde-bincode`
Since `bincode` does not specify type information, we need to ensure that a type hint is provided in order to
correctly be able to deserialize. Enabling this feature on its own will force deserialization to use `deserialize_str`
instead of `deserialize_any`.
If, for some reason, you also have `serde-float` enabled then this will use `deserialize_f64` as a type hint. Because
converting to `f64` _loses_ precision, it's highly recommended that you do NOT enable this feature when working with
`bincode`. That being said, this will only use 8 bytes so is slightly more efficient in regards to storage size.

274
third_party/rust/rust_decimal/VERSION.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,274 @@
# Version History
## 1.7.0
* Enables `bincode` support via the feature `serde-bincode`. This provides a long term fix for a regression
that was introduced in version `0.6.5` (tests now cover this case!). [Issue 43](https://github.com/paupino/rust-decimal/issues/43).
* Fixes issue where `rescale` on zero would not have an effect. This was due to an early exit condition which failed to
set the new scale. [Issue 253](https://github.com/paupino/rust-decimal/issues/253).
* Add `min` and `max` functions, similar to what `f32` and `f64` provide. Thank you [@michalsieron](https://github.com/michalsieron).
* Updates documentation for `is_sign_positive` and `is_sign_negative` to specify that the sign bit is being checked.
Please note: feature naming conventions have been modified, however backwards compatible aliases have been created where
necessary. It's highly recommended that you move over to the new naming conventions as these aliases may be removed at a
later date.
## 1.6.0
* Fixes issue with PostgreSQL conversions whereby certain inputs would cause unexpected
outputs. [Issue 241](https://github.com/paupino/rust-decimal/issues/241).
* Fixes issue with `from_str_radix` whereby rounding logic would kick in too early,
especially with radix less than 10. [Issue 242](https://github.com/paupino/rust-decimal/issues/242).
* Fixes issue whereby `from_str` (implicitly `from_str_radix`) would panic when there was overflow
and overflow significant digit was < 5. [Issue 246](https://github.com/paupino/rust-decimal/issues/246).
* Make `bytes` and `byteorder` optional since they're only used in the `postgres` feature and tests.
* Fix edge case in `from_i128_with_scale` when `i128::MIN` was provided.
Thank you to [@serejkaaa512](https://github.com/serejkaaa512), [@AbsurdlySuspicious](https://github.com/AbsurdlySuspicious) and [@0e4ef622]((https://github.com/0e4ef622)) for your contributions!
## 1.5.0
* Added additional `RoundStrategy` abilities: `RoundUp` to always round up and `RoundDown` to always round down.
* Updated prelude to include expected structs and traits by default.
Special thank you to [@jean-airoldie](https://github.com/jean-airoldie) for adding the additional rounding strategies and to [@pfrenssen](https://github.com/pfrenssen) for fixing an
issue in the README.
## 1.4.1
* Performance improvements for `to_f64` when using a scale > 0.
Special thank you to [@hengchu](https://github.com/hengchu) who discovered and resolved the issue!
## 1.4.0
* Allow uppercase "E" in scientific notation.
* Allow scientific notation in `dec!` macro.
* Deprecate `set_sign` and replace with `set_sign_positive` and `set_sign_negative`. This is intended
to improve the readability of the API.
* Fixes precision issue when parsing `f64` values. The base 2 mantissa of the float was assuming guaranteed accuracy
of 53 bit precision, however 52 bit precision is more accurate (`f64` only).
* Removes deprecated usage of `Error::description`.
## 1.3.0
* Replace `num` dependency with `num_trait` - implemented `Signed` and `Num` traits.
## 1.2.1
* Fixes issue whereby overflow would occur reading from PostgreSQL with high precision. The library now
handles this by rounding high precision numbers as they're read as opposed to crashing (similar to other
underflow situations e.g. 1/3).
## 1.2.0
* Retain trailing zeros from PostgreSQL. This ensures that the scale is maintained when serialized into the Decimal type.
* Fixes issue where -0 != 0 (these are now equivalent - thank you @hengchu for discovering).
* Improve hashing function so that the following property is true: `k1 == k2 -> hash(k1) == hash(k2)`
* Update normalize function so that -0 normalizes to 0.
Special thanks to @hathawsh for their help in this release!
## 1.1.0
* Update to Postgres 0.17 and add postgres async/await support via `tokio-pg`
* Added option for serializing decimals as float via `serde-float`
Special thanks to @pimeys and @kaibyao!
## 1.0.3
Updates dependencies to prevent build issues.
## 1.0.2
Bug fix release:
* Fixes issue where scaling logic produced incorrect results when one arm was a high precision zero. Thank you @KonishchevDmitry!
## 1.0.1
Bug fix release:
* Fixes issue where `ToSql` was incorrectly calculating weight when whole portion = numeric portion.
* Fixes issue where `Decimal::new` incorrectly handled `i64::max_value()` and `i64::min_value()`.
* Fixes issue where `rem` operation incorrectly returned results when `scale` was required.
## 1.0.0
This release represents the start of semantic versioning and allows the library to start making fundamental improvements under
the guise of V2.0. Leading up to that I expect to release 1.x versions which will include adding
various mathematical functions such as `pow`, `ln`, `log10` etc.
Version `1.0.0` does come with some new features:
* Checked Operations! This implements `checked_add`, `checked_sub`, `checked_mul`, `checked_div` and `checked_rem`.
* Fixes overflow from `max_value()` and `min_value()` for `i32` and `i64`.
* Minor documentation improvements and test coverage.
Special thanks to @0e4ef622 for their help with this release!
## 0.11.3
* Add prelude to help num trait inclusion (`use rust_decimal::prelude::*`)
* Add `Default` trait to the library. This is equivalent to using `Decimal::zero()`
* Added assignment operators for references.
Special thanks to @jean-airoldie for his help with this release!
## 0.11.2
* Fall back to `from_scientific` when `from_str` fails during deserialization. Thanks @mattjbray!
* Added basic `Sum` trait implementation
## 0.11.1
* Fixes a bug in `floor` and `ceil` where negative numbers were incorrectly handled.
## 0.11.0
* Macros are now supported on stable. This does use a [hack](https://github.com/dtolnay/proc-macro-hack) for the meantime
so due diligence is required before usage.
* Fixes issue when parsing strings where an underscore preceded a decimal point.
* `const_fn` support via a feature flag. In the future this will be the default option however in order to support older
compiler versions is behind a feature flag.
## 0.10.2
* Macros (nightly) now output structural data as opposed to serialized data. This is fully backwards compatible and results in some minor performance improvements. Also, removed feature gate so that it can be compiled in stable.
* Fixes a string parsing bug when given highly significant numbers that require rounding.
## 0.10.1
* Bumped dependencies to remove some legacy serialization requirements.
## 0.10.0
Special thanks to @xilec, @snd and @AndrewSpeed for their help with this release.
* New rounding strategies introduced via `round_dp_with_strategy`. Previously default rounding support used bankers rounding by default whereas now you can choose to round the half way point either up or down.
* PostgreSQL write performance improved so that it is at least 3 times faster than the previous implementation.
* `Debug` trait now outputs the actual decimal number by default to make it more useful within consuming libraries (e.g. `criterion.rs`). To get something similar to the previous functionality you can use the `unpack` argument - this is likely for core `rust-decimal` library maintainers.
* Various other performance improvements for common operations such as `rescale`, `sub` and `div`.
## 0.9.1
* Performance optimization for `add`.
## 0.9.0
* Introduces the `Neg` trait to support the ability to use `-decimal_variable`.
* Fixes bug with underflow on addition.
## 0.8.1
This release updates the published documentation only and is a no-op for functionality.
## 0.8.0
* Introduces `from_scientific` allowing parsing of scientific notation into the Decimal type.
* Fixes a bug when formatting a number with leading zeros.
## 0.7.2
* Fixes bug in `rescale` whereby scaling which invoked rounding incorrectly set the new scale for the left/right sides.
## 0.7.1
* Fixes bug in `cmp` whereby two negatives would return an incorrect result.
* Further documentation examples
* Small improvements in division logic
* New `abs`, `floor` and `ceil` functions.
## 0.7.0
This is a minor version bump as we slowly build our way towards 1.0. Thank you for everyone's support and help as we get there! This has a few notable changes - also introducing a few new interfaces which is the reason for the version bump:
* `from_parts` function to allow effective creation of `Decimal`'s without requiring binary serialization. An example of this benefit is with the lazy static group initializers for Postgres.
* `normalize` function to allow stripping trailing zeros easily.
* `trunc` function allows truncation of a number without any rounding. This effectively "truncates" the fractional part of the number.
* `fract` function returns the fractional part of the number without the integral.
* Minor improvements in some iterator logic, utilizing the compiler for further optimizations.
* Fixes issue in string parsing logic whereby `_` would cause numbers to be incorrectly identified.
* Many improvements to `mul`. Numbers utilizing the `lo` portion of the decimal only will now be shortcut and bigger numbers will now correctly overflow. True overflows will still panic, however large underflows will now be rounded as necessary as opposed to panicing.
* `Hash` was implemented by convention in `0.6.5` however is reimplemented explicitly in `0.7.0` for effectiveness.
* PostgreSQL read performance improved by pre-caching groups and leveraging `normalize` (i.e. avoiding strings). Further optimizations can be made in write however require some `div` optimizations first.
* Added short circuit write improvement for zero in PostgreSQL writes.
* Benchmarks are now recorded per build so we can start tracking where slow downs have occurred. This does mean there is a performance hit on Travis builds however hopefully the pay off will make it worthwhile.
## 0.6.5
Fixes issue with rescale sometimes causing a silent overflow which led to incorrect results during addition, subtraction and compare. Consequently Decimal now rounds the most significant number so that these operations work successfully.
In addition, Decimal now derive's the `Hash` trait so that it can be used for indexing.
## 0.6.4
Fixes silent overflow errors when parsing highly significant strings. `from_str` will now round in these scenarios, similar to oleaut32 behavior.
## 0.6.3
Fixes a regression in ordering where by different scales would be rescaled towards losing precision instead of increasing precision. Have added numerous test suites to help cover more issues like this in the future.
Also fixes an issue in parsing invalid strings whereby the precision exceeded our maximum precision. Previously, this would work with unintended results however this now returns an Error returned from `FromStr`.
## 0.6.2
Fixes an issue with division of rational numbers allowing results greater than `MAX_PRECISION`. This would ultimately cause issues for future operations on this number.
In addition, in some cases transitive operations would not be equal due to overflow being lost.
## 0.6.1
This minor release is purely to expose `rust_decimal_macros` for use on the nightly channel. Documentation has been updated accordingly.
## 0.6.0
This release has a few major changes to the internal workings of the `Decimal` implementation and consequently comes with a number of performance improvements.
* Floats can now be parsed into a `Decimal` type using `from_f32` and `from_f64`.
* `add`, `sub`, `mul` run roughly 1500% faster than before.
* `div` run's roughly 1000% faster than before with room for future improvement.
* Also get significant speed improvements with `cmp`, `rescale`, `round_dp` and some string manipulations.
* Implemented `*Assign` traits for simpler usage.
* Removed `BigInt` and `BigUint` as being intermediary data types.
## 0.5.2
Minor bug fix to prevent a `panic` from overflow during comparison of high significant digit decimals.
## 0.5.1
Minor bug fix to prevent `panic` upon parsing an empty string.
## 0.5.0
* Removes postgres from default feature set.
* `bincode` support for serde
* Better support for format strings
* Benchmarks added to tests
## 0.4.2
Fixes bug in `cmp` whereby negative's were not being compared correctly.
## 0.4.1
Minor bug fix to support creating negative numbers using the default constructor.
## 0.4.0
This release is a stylistic cleanup however does include some minor changes that may break existing builds.
### Changed
* Serde is now optional. You can enable Serde support within `features` using the keyword `serde`.
* Serde now returns errors on invalid input as opposed to `0`.
* `f64` conversion support has been added.
* Update Postgres dependency to use v0.15.
## 0.3.1
This is a documentation release that should help with discoverability and usage.
## 0.3.0
### Changed
* Removed trait `ToDecimal` and replaced with builtin [`From`](https://doc.rust-lang.org/std/convert/trait.From.html) trait ([`#12`](https://github.com/paupino/rust-decimal/pull/12))

194
third_party/rust/rust_decimal/benches/lib_benches.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,194 @@
#![feature(test)]
extern crate test;
use rust_decimal::Decimal;
use std::str::FromStr;
// Generates a `#[bench]` named `$name` that times a single binary Decimal
// operation: applies `$op` between the fixed value 2.01 and the operand `$y`
// (given as its string form, parsed once outside the timed closure).
macro_rules! bench_decimal_op {
    ($name:ident, $op:tt, $y:expr) => {
        #[bench]
        fn $name(b: &mut ::test::Bencher) {
            // Operands are prepared up front so only the operator itself is measured.
            let x = Decimal::from_str("2.01").unwrap();
            let y = Decimal::from_str($y).unwrap();
            b.iter(|| {
                let result = x $op y;
                // black_box keeps the optimizer from eliding the computation.
                ::test::black_box(result);
            });
        }
    }
}
// Generates a `#[bench]` named `$name` that folds the operator `$op` over
// `$count` consecutive integer-valued Decimals, starting from the
// accumulator `$init`.
macro_rules! bench_fold_op {
    ($name:ident, $op:tt, $init:expr, $count:expr) => {
        #[bench]
        fn $name(b: &mut ::test::Bencher) {
            // Applies `$op` between the accumulator and every input in turn.
            fn fold(values: &[Decimal]) -> Decimal {
                let mut acc: Decimal = $init.into();
                for value in values {
                    acc = acc $op value;
                }
                acc
            }
            // The input vector is built once, outside the timed closure.
            let values: Vec<Decimal> = test::black_box((1..$count).map(|i| i.into()).collect());
            b.iter(|| {
                let result = fold(&values);
                ::test::black_box(result);
            });
        }
    }
}
// Benchmark instantiations: each line expands to one `#[bench]` function.
// Operand strings cover representative magnitudes, signs and scales.

/* Add */
bench_decimal_op!(add_one, +, "1");
bench_decimal_op!(add_two, +, "2");
bench_decimal_op!(add_one_hundred, +, "100");
bench_decimal_op!(add_point_zero_one, +, "0.01");
bench_decimal_op!(add_negative_point_five, +, "-0.5");
bench_decimal_op!(add_pi, +, "3.1415926535897932384626433832");
bench_decimal_op!(add_negative_pi, +, "-3.1415926535897932384626433832");
bench_fold_op!(add_10k, +, 0, 10_000);

/* Sub */
bench_decimal_op!(sub_one, -, "1");
bench_decimal_op!(sub_two, -, "2");
bench_decimal_op!(sub_one_hundred, -, "100");
bench_decimal_op!(sub_point_zero_one, -, "0.01");
bench_decimal_op!(sub_negative_point_five, -, "-0.5");
bench_decimal_op!(sub_pi, -, "3.1415926535897932384626433832");
bench_decimal_op!(sub_negative_pi, -, "-3.1415926535897932384626433832");
bench_fold_op!(sub_10k, -, 5_000_000, 10_000);

/* Mul */
bench_decimal_op!(mul_one, *, "1");
bench_decimal_op!(mul_two, *, "2");
bench_decimal_op!(mul_one_hundred, *, "100");
bench_decimal_op!(mul_point_zero_one, *, "0.01");
bench_decimal_op!(mul_negative_point_five, *, "-0.5");
bench_decimal_op!(mul_pi, *, "3.1415926535897932384626433832");
bench_decimal_op!(mul_negative_pi, *, "-3.1415926535897932384626433832");

/* Div */
bench_decimal_op!(div_one, /, "1");
bench_decimal_op!(div_two, /, "2");
bench_decimal_op!(div_one_hundred, /, "100");
bench_decimal_op!(div_point_zero_one, /, "0.01");
bench_decimal_op!(div_negative_point_five, /, "-0.5");
bench_decimal_op!(div_pi, /, "3.1415926535897932384626433832");
bench_decimal_op!(div_negative_pi, /, "-3.1415926535897932384626433832");
// Start the division fold from the largest representable value so repeated
// division stays in range.
bench_fold_op!(div_10k, /, Decimal::max_value(), 10_000);
/* Iteration */

/// Toy iterator used by the iteration benchmarks below: yields the
/// value 3.14 exactly five times, then terminates.
struct DecimalIterator {
    // Number of `next` calls made so far (including the current one).
    count: usize,
}
impl DecimalIterator {
fn new() -> DecimalIterator {
DecimalIterator { count: 0 }
}
}
impl Iterator for DecimalIterator {
    type Item = Decimal;

    /// Returns 3.14 for the first five calls, `None` afterwards.
    fn next(&mut self) -> Option<Decimal> {
        self.count += 1;
        match self.count {
            1..=5 => Some(Decimal::new(314, 2)),
            _ => None,
        }
    }
}
/// Times summing the toy iterator element by element with `+=`.
#[bench]
fn iterator_individual(b: &mut ::test::Bencher) {
    b.iter(|| {
        let mut total = Decimal::new(0, 0);
        for value in DecimalIterator::new() {
            total += value;
        }
        ::test::black_box(total);
    });
}
/// Times summing the toy iterator via the `Sum` trait for comparison
/// with the element-by-element loop above.
#[bench]
fn iterator_sum(b: &mut ::test::Bencher) {
    b.iter(|| {
        let total: Decimal = DecimalIterator::new().sum();
        ::test::black_box(total);
    });
}
/// Times string parsing across a spread of magnitudes, scales and signs.
#[bench]
fn decimal_from_str(b: &mut test::Bencher) {
    let inputs = &[
        "3950.123456",
        "3950",
        "0.1",
        "0.01",
        "0.001",
        "0.0001",
        "0.00001",
        "0.000001",
        "1",
        "-100",
        "-123.456",
        "119996.25",
        "1000000",
        "9999999.99999",
        "12340.56789",
    ];
    b.iter(|| {
        for input in inputs.iter() {
            test::black_box(Decimal::from_str(input).unwrap());
        }
    })
}
/// Times a full Postgres binary round trip: serialize each sample with
/// `ToSql`, then parse it back with `FromSql`.
#[cfg(feature = "postgres")]
#[bench]
fn to_from_sql(b: &mut ::test::Bencher) {
    use postgres::types::{FromSql, Kind, ToSql, Type};

    // Same value spread as `decimal_from_str`; parsed once, outside the loop.
    let samples_strs = &[
        "3950.123456",
        "3950",
        "0.1",
        "0.01",
        "0.001",
        "0.0001",
        "0.00001",
        "0.000001",
        "1",
        "-100",
        "-123.456",
        "119996.25",
        "1000000",
        "9999999.99999",
        "12340.56789",
    ];
    let samples: Vec<Decimal> = test::black_box(samples_strs.iter().map(|x| Decimal::from_str(x).unwrap()).collect());
    // A placeholder `Type` value; the Decimal converters ignore its metadata.
    // NOTE(review): `Type::_new` is an internal constructor of the postgres
    // crate — confirm it still exists when upgrading that dependency.
    let t = Type::_new("".into(), 0, Kind::Simple, "".into());
    // One scratch buffer reused across iterations so allocation isn't timed.
    let mut vec = Vec::<u8>::with_capacity(100);
    b.iter(|| {
        for _ in 0..100 {
            for sample in &samples {
                vec.clear();
                sample.to_sql(&t, &mut vec).unwrap();
                let result = Decimal::from_sql(&t, &vec).unwrap();
                ::test::black_box(result);
            }
        }
    });
}

1
third_party/rust/rust_decimal/rustfmt.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
max_width = 120

3161
third_party/rust/rust_decimal/src/decimal.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

31
third_party/rust/rust_decimal/src/error.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,31 @@
use std::{error, fmt};
/// Error type for the library.
///
/// Wraps a human-readable message describing what went wrong; the message is
/// surfaced through both `Display` and the `error::Error` impl below.
#[derive(Clone, Debug)]
pub struct Error {
    // Diagnostic text shown to the caller.
    message: String,
}
impl Error {
/// Instantiate an error with the specified error message.
///
/// This function is only available within the crate as there should never
/// be a need to create this error outside of the library.
pub(crate) fn new<S: Into<String>>(message: S) -> Error {
Error {
message: message.into(),
}
}
}
impl error::Error for Error {
    // `description` is deprecated in std, but is kept here so existing
    // callers still receive the message rather than the default text.
    fn description(&self) -> &str {
        self.message.as_str()
    }
}
impl fmt::Display for Error {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // `pad` honors any width/alignment flags supplied by the caller.
        formatter.pad(self.message.as_str())
    }
}

56
third_party/rust/rust_decimal/src/lib.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,56 @@
//!
//! A Decimal implementation written in pure Rust suitable
//! for financial calculations that require significant integral
//! and fractional digits with no round-off errors.
//!
//! The binary representation consists of a 96 bit integer number,
//! a scaling factor used to specify the decimal fraction and a 1
//! bit sign. Because of this representation, trailing zeros are
//! preserved and may be exposed when in string form. These can be
//! truncated using the `normalize` or `round_dp` functions.
//!
//! ## Usage
//!
//! Decimal numbers can be created in a few distinct ways, depending
//! on the rust compiler version you're targeting.
//!
//! The stable version of rust requires you to create a Decimal number
//! using one of its convenience methods.
//!
//! ```rust
//! use rust_decimal::prelude::*;
//!
//! // Using an integer followed by the decimal points
//! let scaled = Decimal::new(202, 2); // 2.02
//!
//! // From a string representation
//! let from_string = Decimal::from_str("2.02").unwrap(); // 2.02
//!
//! // Using the `Into` trait
//! let my_int : Decimal = 3i32.into();
//!
//! // Using the raw decimal representation
//! // 3.1415926535897932384626433832
//! let pi = Decimal::from_parts(1102470952, 185874565, 1703060790, false, 28);
//! ```
//!
mod decimal;
mod error;
// Shared by both the raw `postgres` driver and the `diesel` backend.
#[cfg(any(feature = "postgres", feature = "diesel"))]
mod postgres;
#[cfg(feature = "serde")]
mod serde_types;

pub use decimal::{Decimal, RoundingStrategy};
pub use error::Error;

/// Convenience re-exports: the core types plus the numeric and parsing
/// traits most callers need (`use rust_decimal::prelude::*`).
pub mod prelude {
    pub use crate::{Decimal, RoundingStrategy};
    pub use num_traits::{FromPrimitive, One, ToPrimitive, Zero};
    pub use std::str::FromStr;
}

// Diesel's derive machinery requires the macros to be imported at crate root.
#[cfg(feature = "diesel")]
#[macro_use]
extern crate diesel;

856
third_party/rust/rust_decimal/src/postgres.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,856 @@
use num_traits::Zero;
use crate::Decimal;
use std::{convert::TryInto, error, fmt, result::*};
use crate::decimal::{div_by_u32, is_all_zero, mul_by_u32, MAX_PRECISION};
/// Error returned when a Postgres NUMERIC value cannot be converted
/// to or from a `Decimal`.
#[derive(Debug, Clone)]
pub struct InvalidDecimal {
    // Optional detail describing why the conversion failed.
    inner: Option<String>,
}
impl fmt::Display for InvalidDecimal {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Append the detail message only when one was recorded.
        match self.inner {
            Some(ref msg) => write!(fmt, "Invalid Decimal: {}", msg),
            None => fmt.write_str("Invalid Decimal"),
        }
    }
}
// Marker impl: the `Display` and `Debug` impls above are sufficient for
// use as a boxed error.
impl error::Error for InvalidDecimal {}
/// Intermediate form mirroring Postgres' NUMERIC wire layout
/// (base-10000 digit groups plus sign, weight and scale).
struct PostgresDecimal<D> {
    // True when the value is negative.
    neg: bool,
    // Power-of-10000 weight of the first digit group.
    weight: i16,
    // Number of base-10 digits after the decimal point (Postgres "dscale").
    scale: u16,
    // Base-10000 digit groups, most significant first.
    digits: D,
}
impl Decimal {
    /// Reconstructs a `Decimal` from Postgres' base-10000 NUMERIC
    /// representation, rounding when the value exceeds our precision.
    fn from_postgres<D: ExactSizeIterator<Item = u16>>(
        PostgresDecimal {
            neg,
            scale,
            digits,
            weight,
        }: PostgresDecimal<D>,
    ) -> Result<Self, InvalidDecimal> {
        let mut digits = digits.into_iter().collect::<Vec<_>>();

        // Split the digit groups into integral and fractional parts based on
        // the weight of the first group.
        let fractionals_part_count = digits.len() as i32 + (-weight as i32) - 1;
        let integers_part_count = weight as i32 + 1;

        let mut result = Decimal::zero();
        // adding integer part
        if integers_part_count > 0 {
            let (start_integers, last) = if integers_part_count > digits.len() as i32 {
                // Trailing whole groups of zeros are implied by the weight.
                (integers_part_count - digits.len() as i32, digits.len() as i32)
            } else {
                (0, integers_part_count)
            };
            let integers: Vec<_> = digits.drain(..last as usize).collect();
            for digit in integers {
                // Each successive group is worth 10^4 of the previous one.
                result *= Decimal::from_i128_with_scale(10i128.pow(4), 0);
                result += Decimal::new(digit as i64, 0);
            }
            result *= Decimal::from_i128_with_scale(10i128.pow(4 * start_integers as u32), 0);
        }
        // adding fractional part
        if fractionals_part_count > 0 {
            let dec: Vec<_> = digits.into_iter().collect();
            // A negative weight implies leading zero groups after the point.
            let start_fractionals = if weight < 0 { (-weight as u32) - 1 } else { 0 };
            for (i, digit) in dec.into_iter().enumerate() {
                let fract_pow = 4 * (i as u32 + 1 + start_fractionals);
                if fract_pow <= MAX_PRECISION {
                    result += Decimal::new(digit as i64, 0) / Decimal::from_i128_with_scale(10i128.pow(fract_pow), 0);
                } else if fract_pow == MAX_PRECISION + 4 {
                    // rounding last digit: the first group past our precision
                    // decides whether to bump the least significant digit.
                    if digit >= 5000 {
                        result +=
                            Decimal::new(1 as i64, 0) / Decimal::from_i128_with_scale(10i128.pow(MAX_PRECISION), 0);
                    }
                }
            }
        }

        result.set_sign_negative(neg);
        // Rescale to the postgres value, automatically rounding as needed.
        result.rescale(scale as u32);
        Ok(result)
    }

    /// Converts this `Decimal` into Postgres' base-10000 NUMERIC layout.
    fn to_postgres(self) -> PostgresDecimal<Vec<i16>> {
        if self.is_zero() {
            // Canonical zero: a single zero group, no scale, positive sign.
            return PostgresDecimal {
                neg: false,
                weight: 0,
                scale: 0,
                digits: vec![0],
            };
        }
        let scale = self.scale() as u16;

        let groups_diff = scale & 0x3; // groups_diff = scale % 4
        let mut mantissa = self.mantissa_array4();

        if groups_diff > 0 {
            // Pad the mantissa so fractional digits fill whole groups of 4.
            let remainder = 4 - groups_diff;
            let power = 10u32.pow(u32::from(remainder));
            mul_by_u32(&mut mantissa, power);
        }

        // array to store max mantissa of Decimal in Postgres decimal format
        const MAX_GROUP_COUNT: usize = 8;
        let mut digits = Vec::with_capacity(MAX_GROUP_COUNT);

        // Peel off base-10000 groups least-significant first, then reverse to
        // get the most-significant-first order Postgres expects.
        while !is_all_zero(&mantissa) {
            let digit = div_by_u32(&mut mantissa, 10000) as u16;
            digits.push(digit.try_into().unwrap());
        }
        digits.reverse();

        let digits_after_decimal = (scale + 3) as u16 / 4;
        let weight = digits.len() as i16 - digits_after_decimal as i16 - 1;

        // Drop trailing zero groups after the decimal point; Postgres can
        // reconstruct them from the scale.
        let unnecessary_zeroes = if weight >= 0 {
            let index_of_decimal = (weight + 1) as usize;
            digits
                .get(index_of_decimal..)
                .expect("enough digits exist")
                .iter()
                .rev()
                .take_while(|i| **i == 0)
                .count()
        } else {
            0
        };
        let relevant_digits = digits.len() - unnecessary_zeroes;
        digits.truncate(relevant_digits);

        PostgresDecimal {
            neg: self.is_sign_negative(),
            digits,
            scale,
            weight,
        }
    }
}
#[cfg(feature = "diesel")]
mod diesel {
    //! Conversions between `Decimal` and Diesel's `PgNumeric` wire type.
    use super::*;
    use ::diesel::{
        deserialize::{self, FromSql},
        pg::data_types::PgNumeric,
        pg::Pg,
        serialize::{self, Output, ToSql},
        sql_types::Numeric,
    };
    use ::std::{
        convert::{TryFrom, TryInto},
        io::Write,
    };

    impl<'a> TryFrom<&'a PgNumeric> for Decimal {
        type Error = Box<dyn error::Error + Send + Sync>;

        fn try_from(numeric: &'a PgNumeric) -> deserialize::Result<Self> {
            // Unpack the sign variant; NaN has no Decimal equivalent.
            let (neg, weight, scale, digits) = match *numeric {
                PgNumeric::Positive {
                    weight,
                    scale,
                    ref digits,
                } => (false, weight, scale, digits),
                PgNumeric::Negative {
                    weight,
                    scale,
                    ref digits,
                } => (true, weight, scale, digits),
                PgNumeric::NaN => return Err(Box::from("NaN is not supported in Decimal")),
            };

            Ok(Self::from_postgres(PostgresDecimal {
                neg,
                weight,
                scale,
                digits: digits.iter().copied().map(|v| v.try_into().unwrap()),
            })
            .map_err(Box::new)?)
        }
    }

    impl TryFrom<PgNumeric> for Decimal {
        type Error = Box<dyn error::Error + Send + Sync>;

        fn try_from(numeric: PgNumeric) -> deserialize::Result<Self> {
            // Delegate to the by-reference conversion above.
            (&numeric).try_into()
        }
    }

    impl<'a> From<&'a Decimal> for PgNumeric {
        // NOTE(clippy): Clippy suggests to replace the `.take_while(|i| i.is_zero())`
        // with `.take_while(Zero::is_zero)`, but that's a false positive.
        // The closure gets an `&&i16` due to autoderef `<i16 as Zero>::is_zero(&self) -> bool`
        // is called. There is no impl for `&i16` that would work with this closure.
        #[allow(clippy::assign_op_pattern, clippy::redundant_closure)]
        fn from(decimal: &'a Decimal) -> Self {
            let PostgresDecimal {
                neg,
                weight,
                scale,
                digits,
            } = decimal.to_postgres();

            let digits = digits.into_iter().map(|v| v.try_into().unwrap()).collect();

            // Diesel encodes the sign in the enum variant rather than a flag.
            if neg {
                PgNumeric::Negative { digits, scale, weight }
            } else {
                PgNumeric::Positive { digits, scale, weight }
            }
        }
    }

    impl From<Decimal> for PgNumeric {
        fn from(bigdecimal: Decimal) -> Self {
            (&bigdecimal).into()
        }
    }

    impl ToSql<Numeric, Pg> for Decimal {
        fn to_sql<W: Write>(&self, out: &mut Output<W, Pg>) -> serialize::Result {
            // Convert to PgNumeric and reuse Diesel's own serializer.
            let numeric = PgNumeric::from(self);
            ToSql::<Numeric, Pg>::to_sql(&numeric, out)
        }
    }

    impl FromSql<Numeric, Pg> for Decimal {
        fn from_sql(numeric: Option<&[u8]>) -> deserialize::Result<Self> {
            PgNumeric::from_sql(numeric)?.try_into()
        }
    }

    #[cfg(test)]
    mod pg_tests {
        use super::*;
        use std::str::FromStr;

        #[test]
        fn test_unnecessary_zeroes() {
            fn extract(value: &str) -> Decimal {
                Decimal::from_str(value).unwrap()
            }

            // Round-tripping through PgNumeric must preserve each value
            // exactly, including trailing zeros.
            let tests = &[
                ("0.000001660"),
                ("41.120255926293000"),
                ("0.5538973300"),
                ("08883.55986854293100"),
                ("0.0000_0000_0016_6000_00"),
                ("0.00000166650000"),
                ("1666500000000"),
                ("1666500000000.0000054500"),
                ("8944.000000000000"),
            ];

            for &value in tests {
                let value = extract(value);
                let pg = PgNumeric::from(value);
                let dec = Decimal::try_from(pg).unwrap();
                assert_eq!(dec, value);
            }
        }

        #[test]
        fn decimal_to_pgnumeric_converts_digits_to_base_10000() {
            let decimal = Decimal::from_str("1").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 0,
                digits: vec![1],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("10").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 0,
                digits: vec![10],
            };
            assert_eq!(expected, decimal.into());

            // 10000 crosses into a second base-10000 group: weight becomes 1.
            let decimal = Decimal::from_str("10000").unwrap();
            let expected = PgNumeric::Positive {
                weight: 1,
                scale: 0,
                digits: vec![1, 0],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("10001").unwrap();
            let expected = PgNumeric::Positive {
                weight: 1,
                scale: 0,
                digits: vec![1, 1],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("100000000").unwrap();
            let expected = PgNumeric::Positive {
                weight: 2,
                scale: 0,
                digits: vec![1, 0, 0],
            };
            assert_eq!(expected, decimal.into());
        }

        #[test]
        fn decimal_to_pg_numeric_properly_adjusts_scale() {
            let decimal = Decimal::from_str("1").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 0,
                digits: vec![1],
            };
            assert_eq!(expected, decimal.into());

            // Trailing zeros in the input increase the scale but not the digits.
            let decimal = Decimal::from_str("1.0").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 1,
                digits: vec![1],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("1.1").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 1,
                digits: vec![1, 1000],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("1.10").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 2,
                digits: vec![1, 1000],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("100000000.0001").unwrap();
            let expected = PgNumeric::Positive {
                weight: 2,
                scale: 4,
                digits: vec![1, 0, 0, 1],
            };
            assert_eq!(expected, decimal.into());

            // Purely fractional values get a negative weight.
            let decimal = Decimal::from_str("0.1").unwrap();
            let expected = PgNumeric::Positive {
                weight: -1,
                scale: 1,
                digits: vec![1000],
            };
            assert_eq!(expected, decimal.into());
        }

        #[test]
        #[cfg(feature = "unstable")]
        fn decimal_to_pg_numeric_retains_sign() {
            let decimal = Decimal::from_str("123.456").unwrap();
            let expected = PgNumeric::Positive {
                weight: 0,
                scale: 3,
                digits: vec![123, 4560],
            };
            assert_eq!(expected, decimal.into());

            let decimal = Decimal::from_str("-123.456").unwrap();
            let expected = PgNumeric::Negative {
                weight: 0,
                scale: 3,
                digits: vec![123, 4560],
            };
            assert_eq!(expected, decimal.into());
        }

        #[test]
        fn pg_numeric_to_decimal_works() {
            let expected = Decimal::from_str("50").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 0,
                digits: vec![50],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res, expected);

            let expected = Decimal::from_str("123.456").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 3,
                digits: vec![123, 4560],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res, expected);

            let expected = Decimal::from_str("-56.78").unwrap();
            let pg_numeric = PgNumeric::Negative {
                weight: 0,
                scale: 2,
                digits: vec![56, 7800],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res, expected);

            // Verify no trailing zeroes are lost.
            let expected = Decimal::from_str("1.100").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 3,
                digits: vec![1, 1000],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());

            // To represent 5.00, Postgres can return either [5, 0] or just
            // [5] as the list of digits; both forms must parse identically.
            let expected = Decimal::from_str("5.00").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 2,
                digits: vec![5, 0],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());

            // To represent 5.00, Postgres can return [5] as the list of digits.
            let expected = Decimal::from_str("5.00").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 2,
                digits: vec![5],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());

            // Values beyond our precision are rounded on the way in.
            let expected = Decimal::from_str("3.1415926535897932384626433833").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 30,
                digits: vec![3, 1415, 9265, 3589, 7932, 3846, 2643, 3832, 7950, 2800],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());

            let expected = Decimal::from_str("3.1415926535897932384626433833").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 34,
                digits: vec![3, 1415, 9265, 3589, 7932, 3846, 2643, 3832, 7950, 2800],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());

            let expected = Decimal::from_str("1.2345678901234567890123456790").unwrap();
            let pg_numeric = PgNumeric::Positive {
                weight: 0,
                scale: 34,
                digits: vec![1, 2345, 6789, 0123, 4567, 8901, 2345, 6789, 5000, 0],
            };
            let res: Decimal = pg_numeric.try_into().unwrap();
            assert_eq!(res.to_string(), expected.to_string());
        }
    }
}
#[cfg(feature = "postgres")]
mod postgres {
use super::*;
use ::byteorder::{BigEndian, ReadBytesExt};
use ::bytes::{BufMut, BytesMut};
use ::postgres::types::*;
use ::std::io::Cursor;
impl<'a> FromSql<'a> for Decimal {
    // PostgreSQL's binary NUMERIC representation:
    //
    // Header:
    //   u16 numGroups
    //   i16 weightFirstGroup (10000^weight)
    //   u16 sign (0x0000 = positive, 0x4000 = negative, 0xC000 = NaN)
    //   i16 dscale. Number of digits (in base 10) to print after decimal separator
    //
    // Pseudo code:
    //   const Decimals [
    //     0.0000000000000000000000000001,
    //     0.000000000000000000000001,
    //     0.00000000000000000001,
    //     0.0000000000000001,
    //     0.000000000001,
    //     0.00000001,
    //     0.0001,
    //     1,
    //     10000,
    //     100000000,
    //     1000000000000,
    //     10000000000000000,
    //     100000000000000000000,
    //     1000000000000000000000000,
    //     10000000000000000000000000000
    //   ]
    //   overflow = false
    //   result = 0
    //   for i = 0, weight = weightFirstGroup + 7; i < numGroups; i++, weight--
    //     group = read.u16
    //     if weight < 0 or weight > MaxNum
    //       overflow = true
    //     else
    //       result += Decimals[weight] * group
    //   sign == 0x4000 ? -result : result
    //
    // Worked example for 3950.123456:
    //
    // Stored on disk:
    //   00 03 00 00 00 00 00 06 0F 6E 04 D2 15 E0
    //
    //   Number of groups: 00 03
    //   Weight of first group: 00 00
    //   Sign: 00 00
    //   DScale: 00 06
    //
    //   0F 6E = 3950  ->  result = result + 3950 * 1
    //   04 D2 = 1234  ->  result = result + 1234 * 0.0001
    //   15 E0 = 5600  ->  result = result + 5600 * 0.00000001
    fn from_sql(_: &Type, raw: &[u8]) -> Result<Decimal, Box<dyn error::Error + 'static + Sync + Send>> {
        let mut raw = Cursor::new(raw);
        let num_groups = raw.read_u16::<BigEndian>()?;
        let weight = raw.read_i16::<BigEndian>()?; // 10000^weight
        // Sign: 0x0000 = positive, 0x4000 = negative, 0xC000 = NaN
        let sign = raw.read_u16::<BigEndian>()?;
        // Number of digits (in base 10) to print after decimal separator
        let scale = raw.read_u16::<BigEndian>()?;
        // Read all of the base-10000 digit groups. The count is known up
        // front, so preallocate instead of growing the Vec while pushing.
        let mut groups = Vec::with_capacity(num_groups as usize);
        for _ in 0..num_groups {
            groups.push(raw.read_u16::<BigEndian>()?);
        }
        Ok(Self::from_postgres(PostgresDecimal {
            neg: sign == 0x4000,
            weight,
            scale,
            digits: groups.into_iter(),
        })
        .map_err(Box::new)?)
    }

    /// Only `NUMERIC` columns can be decoded into a `Decimal`.
    fn accepts(ty: &Type) -> bool {
        matches!(*ty, Type::NUMERIC)
    }
}
impl ToSql for Decimal {
    /// Serializes the decimal into PostgreSQL's binary `NUMERIC` wire format:
    /// an 8-byte header (group count, weight, sign, dscale) followed by the
    /// base-10000 digit groups, all big-endian.
    fn to_sql(
        &self,
        _: &Type,
        out: &mut BytesMut,
    ) -> Result<IsNull, Box<dyn error::Error + 'static + Sync + Send>> {
        let PostgresDecimal {
            neg,
            weight,
            scale,
            digits,
        } = self.to_postgres();
        let num_digits = digits.len();
        // Reserve exactly what will be written: 8 header bytes + 2 per group.
        out.reserve(8 + num_digits * 2);
        // Number of groups
        out.put_u16(num_digits.try_into().unwrap());
        // Weight of first group
        out.put_i16(weight);
        // Sign
        out.put_u16(if neg { 0x4000 } else { 0x0000 });
        // DScale
        out.put_u16(scale);
        // Now write the digit groups. Iterate the vector directly: the
        // original `digits[0..num_digits]` re-slice was redundant since
        // `num_digits == digits.len()`.
        for digit in digits.iter() {
            out.put_i16(*digit);
        }
        Ok(IsNull::No)
    }

    /// Only `NUMERIC` columns can be encoded from a `Decimal`.
    fn accepts(ty: &Type) -> bool {
        matches!(*ty, Type::NUMERIC)
    }

    to_sql_checked!();
}
#[cfg(test)]
mod test {
use super::*;
use ::postgres::{Client, NoTls};
use std::str::FromStr;
/// Returns the PostgreSQL connection URL used by the tests.
/// Reads the `POSTGRES_URL` environment variable, falling back to
/// `"postgres://postgres@localhost"` when it is unset (or not valid Unicode).
fn get_postgres_url() -> String {
    std::env::var("POSTGRES_URL").unwrap_or_else(|_| "postgres://postgres@localhost".to_string())
}
/// Shared fixtures for the NUMERIC round-trip tests below. Each entry is
/// (precision, scale, sent, expected): the `NUMERIC(precision, scale)` type
/// used in the query, the literal sent to PostgreSQL, and the string the
/// parsed `Decimal` is expected to render as.
pub static TEST_DECIMALS: &[(u32, u32, &str, &str)] = &[
    // precision, scale, sent, expected
    (35, 6, "3950.123456", "3950.123456"),
    (35, 2, "3950.123456", "3950.12"),
    (35, 2, "3950.1256", "3950.13"),
    (10, 2, "3950.123456", "3950.12"),
    (35, 6, "3950", "3950.000000"),
    (4, 0, "3950", "3950"),
    (35, 6, "0.1", "0.100000"),
    (35, 6, "0.01", "0.010000"),
    (35, 6, "0.001", "0.001000"),
    (35, 6, "0.0001", "0.000100"),
    (35, 6, "0.00001", "0.000010"),
    (35, 6, "0.000001", "0.000001"),
    (35, 6, "1", "1.000000"),
    (35, 6, "-100", "-100.000000"),
    (35, 6, "-123.456", "-123.456000"),
    (35, 6, "119996.25", "119996.250000"),
    (35, 6, "1000000", "1000000.000000"),
    (35, 6, "9999999.99999", "9999999.999990"),
    (35, 6, "12340.56789", "12340.567890"),
    // Scale is only 28 since that is the maximum we can represent.
    (65, 30, "1.2", "1.2000000000000000000000000000"),
    // Pi - rounded at scale 28
    (
        65,
        30,
        "3.141592653589793238462643383279",
        "3.1415926535897932384626433833",
    ),
    (
        65,
        34,
        "3.1415926535897932384626433832795028",
        "3.1415926535897932384626433833",
    ),
    // Unrounded number
    (
        65,
        34,
        "1.234567890123456789012345678950000",
        "1.2345678901234567890123456790",
    ),
    (
        65,
        34, // No rounding due to 49999 after significant digits
        "1.234567890123456789012345678949999",
        "1.2345678901234567890123456789",
    ),
    // 0xFFFF_FFFF_FFFF_FFFF_FFFF_FFFF (96 bit)
    (35, 0, "79228162514264337593543950335", "79228162514264337593543950335"),
    // 0x0FFF_FFFF_FFFF_FFFF_FFFF_FFFF (95 bit)
    (35, 1, "4951760157141521099596496895", "4951760157141521099596496895.0"),
    // 0x1000_0000_0000_0000_0000_0000
    (35, 1, "4951760157141521099596496896", "4951760157141521099596496896.0"),
    (35, 6, "18446744073709551615", "18446744073709551615.000000"),
    (35, 6, "-18446744073709551615", "-18446744073709551615.000000"),
    (35, 6, "0.10001", "0.100010"),
    (35, 6, "0.12345", "0.123450"),
];
/// A SQL `NULL::numeric` must deserialize to `None`.
#[test]
fn test_null() {
    let mut client =
        Client::connect(&get_postgres_url(), NoTls).unwrap_or_else(|err| panic!("{:#?}", err));
    // Test NULL
    let rows = client
        .query("SELECT NULL::numeric", &[])
        .unwrap_or_else(|err| panic!("{:#?}", err));
    let result: Option<Decimal> = rows.iter().next().unwrap().get(0);
    assert_eq!(None, result);
}
/// Async variant of `test_null` using the tokio-postgres client.
#[tokio::test]
#[cfg(feature = "tokio-pg")]
async fn async_test_null() {
    use ::futures::future::FutureExt;
    use ::tokio_postgres::connect;
    let (client, connection) = connect(&get_postgres_url(), NoTls).await.unwrap();
    // Drive the connection on a background task so queries make progress.
    tokio::spawn(connection.map(|e| e.unwrap()));
    let stmt = client.prepare(&"SELECT NULL::numeric").await.unwrap();
    let rows = client.query(&stmt, &[]).await.unwrap();
    let got: Option<Decimal> = rows.iter().next().unwrap().get(0);
    assert_eq!(None, got);
}
/// Casts each fixture literal to NUMERIC server-side and checks the string
/// form of the decoded `Decimal` (exercises `FromSql`).
#[test]
fn read_numeric_type() {
    let mut client =
        Client::connect(&get_postgres_url(), NoTls).unwrap_or_else(|err| panic!("{:#?}", err));
    for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
        let sql = format!("SELECT {}::NUMERIC({}, {})", sent, precision, scale);
        let result: Decimal = match client.query(&*sql, &[]) {
            Ok(rows) => rows.iter().next().unwrap().get(0),
            Err(err) => panic!("SELECT {}::NUMERIC({}, {}), error - {:#?}", sent, precision, scale, err),
        };
        assert_eq!(
            expected,
            result.to_string(),
            "NUMERIC({}, {}) sent: {}",
            precision,
            scale,
            sent
        );
    }
}
/// Async variant of `read_numeric_type`.
#[tokio::test]
#[cfg(feature = "tokio-pg")]
async fn async_read_numeric_type() {
    use ::futures::future::FutureExt;
    use ::tokio_postgres::connect;
    let (client, connection) = connect(&get_postgres_url(), NoTls).await.unwrap();
    tokio::spawn(connection.map(|e| e.unwrap()));
    for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
        // Each fixture inlines its literal, so prepare a fresh statement.
        let sql = format!("SELECT {}::NUMERIC({}, {})", sent, precision, scale);
        let stmt = client.prepare(&*sql).await.unwrap();
        let rows = client.query(&stmt, &[]).await.unwrap();
        let got: Decimal = rows.iter().next().unwrap().get(0);
        assert_eq!(expected, got.to_string(), "NUMERIC({}, {})", precision, scale);
    }
}
/// Sends each fixture as a bound parameter (exercises `ToSql`) and checks
/// the value PostgreSQL echoes back.
#[test]
fn write_numeric_type() {
    let mut client =
        Client::connect(&get_postgres_url(), NoTls).unwrap_or_else(|err| panic!("{:#?}", err));
    for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
        let number = Decimal::from_str(sent).unwrap();
        let sql = format!("SELECT $1::NUMERIC({}, {})", precision, scale);
        let result: Decimal = match client.query(&*sql, &[&number]) {
            Ok(rows) => rows.iter().next().unwrap().get(0),
            Err(err) => panic!("{:#?}", err),
        };
        assert_eq!(expected, result.to_string(), "NUMERIC({}, {})", precision, scale);
    }
}
/// Async variant of `write_numeric_type`.
#[tokio::test]
#[cfg(feature = "tokio-pg")]
async fn async_write_numeric_type() {
    use ::futures::future::FutureExt;
    use ::tokio_postgres::connect;
    let (client, connection) = connect(&get_postgres_url(), NoTls).await.unwrap();
    tokio::spawn(connection.map(|e| e.unwrap()));
    for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
        let sql = format!("SELECT $1::NUMERIC({}, {})", precision, scale);
        let stmt = client.prepare(&*sql).await.unwrap();
        let number = Decimal::from_str(sent).unwrap();
        let rows = client.query(&stmt, &[&number]).await.unwrap();
        let got: Decimal = rows.iter().next().unwrap().get(0);
        assert_eq!(expected, got.to_string(), "NUMERIC({}, {})", precision, scale);
    }
}
/// A literal with more digits than `NUMERIC(p, s)` allows must be rejected by
/// the server with SQLSTATE 22003 (numeric value out of range).
#[test]
fn numeric_overflow() {
    let tests = [(4, 4, "3950.1234")];
    let mut client =
        Client::connect(&get_postgres_url(), NoTls).unwrap_or_else(|err| panic!("{:#?}", err));
    for &(precision, scale, sent) in tests.iter() {
        let sql = format!("SELECT {}::NUMERIC({}, {})", sent, precision, scale);
        match client.query(&*sql, &[]) {
            Ok(_) => panic!(
                "Expected numeric overflow for {}::NUMERIC({}, {})",
                sent, precision, scale
            ),
            Err(err) => {
                assert_eq!("22003", err.code().unwrap().code(), "Unexpected error code");
            }
        };
    }
}
/// Async variant of `numeric_overflow`.
#[tokio::test]
#[cfg(feature = "tokio-pg")]
async fn async_numeric_overflow() {
    use ::futures::future::FutureExt;
    use ::tokio_postgres::connect;
    let tests = [(4, 4, "3950.1234")];
    let (client, connection) = connect(&get_postgres_url(), NoTls).await.unwrap();
    tokio::spawn(connection.map(|e| e.unwrap()));
    for &(precision, scale, sent) in tests.iter() {
        let sql = format!("SELECT {}::NUMERIC({}, {})", sent, precision, scale);
        let stmt = client.prepare(&*sql).await.unwrap();
        match client.query(&stmt, &[]).await {
            Ok(_) => panic!(
                "Expected numeric overflow for {}::NUMERIC({}, {})",
                sent, precision, scale
            ),
            Err(err) => assert_eq!("22003", err.code().unwrap().code(), "Unexpected error code"),
        }
    }
}
}
}

218
third_party/rust/rust_decimal/src/serde_types.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,218 @@
use crate::Decimal;
use num_traits::FromPrimitive;
use serde::{self, de::Unexpected};
use std::{fmt, str::FromStr};
#[cfg(not(feature = "serde-bincode"))]
impl<'de> serde::Deserialize<'de> for Decimal {
    /// Deserializes from any self-describing format: strings, integers and
    /// floats are all routed through `DecimalVisitor`.
    fn deserialize<D>(deserializer: D) -> Result<Decimal, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let visitor = DecimalVisitor;
        deserializer.deserialize_any(visitor)
    }
}
#[cfg(all(feature = "serde-bincode", not(feature = "serde-float")))]
impl<'de> serde::Deserialize<'de> for Decimal {
    /// Bincode is not self-describing, so the value is expected to have been
    /// encoded as a string; hint `deserialize_str` to the format.
    fn deserialize<D>(deserializer: D) -> Result<Decimal, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let visitor = DecimalVisitor;
        deserializer.deserialize_str(visitor)
    }
}
#[cfg(all(feature = "serde-bincode", feature = "serde-float"))]
impl<'de> serde::Deserialize<'de> for Decimal {
    /// With both bincode and float features enabled the value was encoded as
    /// an `f64`, so hint `deserialize_f64` to the format.
    fn deserialize<D>(deserializer: D) -> Result<Decimal, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let visitor = DecimalVisitor;
        deserializer.deserialize_f64(visitor)
    }
}
struct DecimalVisitor;

impl<'de> serde::de::Visitor<'de> for DecimalVisitor {
    type Value = Decimal;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "a Decimal type representing a fixed-point number")
    }

    fn visit_i64<E>(self, value: i64) -> Result<Decimal, E>
    where
        E: serde::de::Error,
    {
        Decimal::from_i64(value).ok_or_else(|| E::invalid_value(Unexpected::Signed(value), &self))
    }

    fn visit_u64<E>(self, value: u64) -> Result<Decimal, E>
    where
        E: serde::de::Error,
    {
        Decimal::from_u64(value).ok_or_else(|| E::invalid_value(Unexpected::Unsigned(value), &self))
    }

    // Floats are parsed via their string form rather than converted directly.
    fn visit_f64<E>(self, value: f64) -> Result<Decimal, E>
    where
        E: serde::de::Error,
    {
        let rendered = value.to_string();
        Decimal::from_str(&rendered).map_err(|_| E::invalid_value(Unexpected::Float(value), &self))
    }

    // Plain decimal strings are tried first, then scientific notation.
    fn visit_str<E>(self, value: &str) -> Result<Decimal, E>
    where
        E: serde::de::Error,
    {
        Decimal::from_str(value)
            .or_else(|_| Decimal::from_scientific(value))
            .map_err(|_| E::invalid_value(Unexpected::Str(value), &self))
    }
}
#[cfg(not(feature = "serde-float"))]
impl serde::Serialize for Decimal {
    /// Serializes the decimal as its exact string representation, preserving
    /// the full scale without floating-point conversion.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let rendered = self.to_string();
        serializer.serialize_str(&rendered)
    }
}
#[cfg(feature = "serde-float")]
impl serde::Serialize for Decimal {
    /// Serializes the decimal as an `f64` via `to_f64`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use num_traits::ToPrimitive;
        let as_float = self.to_f64().unwrap();
        serializer.serialize_f64(as_float)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use serde_derive::{Deserialize, Serialize};

    // Wrapper type so the round-trips below exercise `Decimal` as a struct
    // field, the way downstream users typically embed it.
    #[derive(Serialize, Deserialize, Debug)]
    struct Record {
        amount: Decimal,
    }

    // JSON strings, integers, floats and scientific-notation strings must all
    // deserialize (via `deserialize_any`) into the expected decimal value.
    #[test]
    #[cfg(not(feature = "serde-bincode"))]
    fn deserialize_valid_decimal() {
        let data = [
            ("{\"amount\":\"1.234\"}", "1.234"),
            ("{\"amount\":1234}", "1234"),
            ("{\"amount\":1234.56}", "1234.56"),
            ("{\"amount\":\"1.23456e3\"}", "1234.56"),
        ];
        for &(serialized, value) in data.iter() {
            let result = serde_json::from_str(serialized);
            assert_eq!(
                true,
                result.is_ok(),
                "expected successful deseralization for {}. Error: {:?}",
                serialized,
                result.err().unwrap()
            );
            let record: Record = result.unwrap();
            assert_eq!(
                value,
                record.amount.to_string(),
                "expected: {}, actual: {}",
                value,
                record.amount.to_string()
            );
        }
    }

    // A non-numeric string must fail deserialization (the unwrap panics).
    #[test]
    #[should_panic]
    fn deserialize_invalid_decimal() {
        let serialized = "{\"amount\":\"foo\"}";
        let _: Record = serde_json::from_str(serialized).unwrap();
    }

    // Default (string) serialization keeps the exact scale: 1234 * 10^-3.
    #[test]
    #[cfg(not(feature = "serde-float"))]
    fn serialize_decimal() {
        let record = Record {
            amount: Decimal::new(1234, 3),
        };
        let serialized = serde_json::to_string(&record).unwrap();
        assert_eq!("{\"amount\":\"1.234\"}", serialized);
    }

    // With `serde-float` the same value serializes as a bare JSON number.
    #[test]
    #[cfg(feature = "serde-float")]
    fn serialize_decimal() {
        let record = Record {
            amount: Decimal::new(1234, 3),
        };
        let serialized = serde_json::to_string(&record).unwrap();
        assert_eq!("{\"amount\":1.234}", serialized);
    }

    // String-based bincode round-trip: lossless; encoded size is an 8-byte
    // length prefix plus the string bytes.
    #[test]
    #[cfg(all(feature = "serde-bincode", not(feature = "serde-float")))]
    fn bincode_serialization() {
        use bincode::{deserialize, serialize};
        let data = [
            "0",
            "0.00",
            "3.14159",
            "-3.14159",
            "1234567890123.4567890",
            "-1234567890123.4567890",
        ];
        for &raw in data.iter() {
            let value = Decimal::from_str(raw).unwrap();
            let encoded = serialize(&value).unwrap();
            let decoded: Decimal = deserialize(&encoded[..]).unwrap();
            assert_eq!(value, decoded);
            assert_eq!(8usize + raw.len(), encoded.len());
        }
    }

    // Float-based bincode round-trip: fixed 8-byte f64 payload; long
    // fractions lose precision (see the 4568 expectations below).
    #[test]
    #[cfg(all(feature = "serde-bincode", feature = "serde-float"))]
    fn bincode_serialization() {
        use bincode::{deserialize, serialize};
        let data = [
            ("0", "0"),
            ("0.00", "0.00"),
            ("3.14159", "3.14159"),
            ("-3.14159", "-3.14159"),
            ("1234567890123.4567890", "1234567890123.4568"),
            ("-1234567890123.4567890", "-1234567890123.4568"),
        ];
        for &(value, expected) in data.iter() {
            let value = Decimal::from_str(value).unwrap();
            let expected = Decimal::from_str(expected).unwrap();
            let encoded = serialize(&value).unwrap();
            let decoded: Decimal = deserialize(&encoded[..]).unwrap();
            assert_eq!(expected, decoded);
            assert_eq!(8usize, encoded.len());
        }
    }
}

1633
third_party/rust/rust_decimal/tests/decimal_tests.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

1
third_party/rust/sfv/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{"Cargo.toml":"6c1a426ff00326697cc6561be64161485eb93a8a7210f8c41b95399716515570","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"e02dc4a10eb762cd776e653a08ddb2fc28b69e6bc8ce35b00b7985fe9314a3a1","benches/bench.rs":"bbc60db4b542abb3738eba80f5c7c54ac39301ed5e48e2ae2a94cecfdb42e33f","src/lib.rs":"e0853ae0a080e73876e5657acb4f25b41c1455105b2fa4d94b4161dac4df468b","src/parser.rs":"4de9bc1e04b536357d4c635350ba0dc1fbafae4b5741f6cd47dffd904468c251","src/ref_serializer.rs":"c21367c69f1a0ac1414e5fcfcdefbcd87a823a375e814d1e29c3520014966068","src/serializer.rs":"5d7a4d18a4508d433993b6a7ee405285ed2b33cbc6b84101cc4720c897f5586e","src/test_parser.rs":"7a2728e7cbdcb1f3bb42e009045ec0dcfca241316a2aee4905925d4b1ce0bb3a","src/test_serializer.rs":"2419279c9a9a4f48952836d63f3822281c18691d86c146749a573c52a41d6ff0","src/utils.rs":"94c8f79f4747973819b9da2c1a9f6246bf3b5ea7450b376a98eb055f6acf8e73"},"package":"83166498beeaadbb6ddf69e7ed7b2b009e2b2d4e827aae762d2d310d4f648a3b"}

46
third_party/rust/sfv/Cargo.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,46 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "sfv"
version = "0.4.0"
authors = ["Tania Batieva <yalyna.ts@gmail.com>"]
exclude = ["tests/**", ".github/*"]
description = "Structured HTTP field values parser.\nImplementation of IETF draft https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html"
documentation = "https://docs.rs/sfv"
readme = "README.md"
keywords = ["http-header", "structured-header"]
license = "MIT/Apache-2.0"
repository = "https://github.com/undef1nd/sfv"
[[bench]]
name = "bench"
harness = false
[dependencies.data-encoding]
version = "2.2.1"
[dependencies.indexmap]
version = "1.3.2"
[dependencies.rust_decimal]
version = "1.6.0"
[dev-dependencies.criterion]
version = "0.3.3"
[dev-dependencies.serde]
version = "1.0"
features = ["derive"]
[dev-dependencies.serde_json]
version = "1.0"
features = ["preserve_order"]

373
third_party/rust/sfv/LICENSE поставляемый Normal file
Просмотреть файл

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

10
third_party/rust/sfv/README.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,10 @@
![Build Status](https://github.com/undef1nd/structured-headers/workflows/CI/badge.svg)
[![Version](https://img.shields.io/crates/v/sfv.svg)](https://crates.io/crates/sfv)
# Structured Field Values for HTTP
[Documentation](https://docs.rs/sfv/0.2.0/sfv/)
The `sfv` crate is an implementation of the IETF draft [Structured Field Values for HTTP](https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html) for parsing and serializing HTTP field values (also known as "structured headers" or "structured trailers").
It also exposes a set of types that might be useful for defining new structured fields.

171
third_party/rust/sfv/benches/bench.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,171 @@
#[macro_use]
extern crate criterion;
use criterion::{BenchmarkId, Criterion};
use rust_decimal::prelude::FromPrimitive;
use sfv::{BareItem, Decimal, Parser, SerializeValue};
use sfv::{RefBareItem, RefDictSerializer, RefItemSerializer, RefListSerializer};
// Entry point: registers the three benchmark groups with Criterion's harness.
criterion_main!(parsing, serializing, ref_serializing);
// Benchmarks that measure parsing of each structured field value type.
criterion_group!(parsing, parsing_item, parsing_list, parsing_dict);
/// Benchmarks parsing of a long Item field value.
fn parsing_item(c: &mut Criterion) {
    let input =
        "c29tZXZlcnlsb25nc3RyaW5ndmFsdWVyZXByZXNlbnRlZGFzYnl0ZXNhbnNvbWVvdGhlcmxvbmdsaW5l";
    c.bench_with_input(
        BenchmarkId::new("parsing_item", input),
        &input,
        |bench, &data| bench.iter(|| Parser::parse_item(data.as_bytes()).unwrap()),
    );
}
/// Benchmarks parsing of a List field value mixing items and an inner list.
fn parsing_list(c: &mut Criterion) {
    let input = "a, abcdefghigklmnoprst, 123456785686457, 99999999999.999, (), (\"somelongstringvalue\" \"anotherlongstringvalue\";key=:c29tZXZlciBsb25nc3RyaW5ndmFsdWVyZXByZXNlbnRlZGFzYnl0ZXM: 145)";
    c.bench_with_input(
        BenchmarkId::new("parsing_list", input),
        &input,
        |bench, &data| bench.iter(|| Parser::parse_list(data.as_bytes()).unwrap()),
    );
}
/// Benchmarks parsing of a Dictionary field value with a parameterized inner list.
fn parsing_dict(c: &mut Criterion) {
    let input = "a, dict_key2=abcdefghigklmnoprst, dict_key3=123456785686457, dict_key4=(\"inner-list-member\" :aW5uZXItbGlzdC1tZW1iZXI=:);key=aW5uZXItbGlzdC1wYXJhbWV0ZXJz";
    c.bench_with_input(
        BenchmarkId::new("parsing_dict", input),
        &input,
        |bench, &data| bench.iter(|| Parser::parse_dictionary(data.as_bytes()).unwrap()),
    );
}
// Benchmarks that measure serialization of already-parsed (owned) values.
criterion_group!(
    serializing,
    serializing_item,
    serializing_list,
    serializing_dict
);
/// Benchmarks serializing a parsed Item; parsing happens once, outside the timed loop.
fn serializing_item(c: &mut Criterion) {
    let input =
        "c29tZXZlcnlsb25nc3RyaW5ndmFsdWVyZXByZXNlbnRlZGFzYnl0ZXNhbnNvbWVvdGhlcmxvbmdsaW5l";
    c.bench_with_input(
        BenchmarkId::new("serializing_item", input),
        &input,
        |bench, &data| {
            // Parse up front so only serialization is measured.
            let item = Parser::parse_item(data.as_bytes()).unwrap();
            bench.iter(|| item.serialize_value().unwrap());
        },
    );
}
/// Benchmarks serializing a parsed List; parsing happens once, outside the timed loop.
fn serializing_list(c: &mut Criterion) {
    let input = "a, abcdefghigklmnoprst, 123456785686457, 99999999999.999, (), (\"somelongstringvalue\" \"anotherlongstringvalue\";key=:c29tZXZlciBsb25nc3RyaW5ndmFsdWVyZXByZXNlbnRlZGFzYnl0ZXM: 145)";
    c.bench_with_input(
        BenchmarkId::new("serializing_list", input),
        &input,
        |bench, &data| {
            // Parse up front so only serialization is measured.
            let list = Parser::parse_list(data.as_bytes()).unwrap();
            bench.iter(|| list.serialize_value().unwrap());
        },
    );
}
/// Benchmarks serializing a parsed Dictionary; parsing happens once, outside the timed loop.
fn serializing_dict(c: &mut Criterion) {
    let input = "a, dict_key2=abcdefghigklmnoprst, dict_key3=123456785686457, dict_key4=(\"inner-list-member\" :aW5uZXItbGlzdC1tZW1iZXI=:);key=aW5uZXItbGlzdC1wYXJhbWV0ZXJz";
    c.bench_with_input(
        BenchmarkId::new("serializing_dict", input),
        &input,
        |bench, &data| {
            // Parse up front so only serialization is measured.
            let dict = Parser::parse_dictionary(data.as_bytes()).unwrap();
            bench.iter(|| dict.serialize_value().unwrap());
        },
    );
}
// Benchmarks that measure the borrowing ("Ref") serializers, which write
// directly into a caller-provided String without building owned values first.
criterion_group!(
    ref_serializing,
    serializing_ref_item,
    serializing_ref_list,
    serializing_ref_dict
);
/// Benchmarks the reference Item serializer writing a byte sequence.
fn serializing_ref_item(c: &mut Criterion) {
    let input =
        "c29tZXZlcnlsb25nc3RyaW5ndmFsdWVyZXByZXNlbnRlZGFzYnl0ZXNhbnNvbWVvdGhlcmxvbmdsaW5l";
    c.bench_with_input(
        BenchmarkId::new("serializing_ref_item", input),
        &input,
        |bench, &data| {
            bench.iter(|| {
                // Each iteration serializes into a fresh output buffer.
                let mut out = String::new();
                RefItemSerializer::new(&mut out)
                    .bare_item(&RefBareItem::ByteSeq(data.as_bytes()))
                    .unwrap();
            });
        },
    );
}
/// Benchmarks the reference List serializer building the same list as the
/// `parsing_list`/`serializing_list` fixtures, member by member.
fn serializing_ref_list(c: &mut Criterion) {
    c.bench_function("serializing_ref_list", move |bench| {
        bench.iter(|| {
            // Fresh output buffer per iteration; the serializer appends to it.
            let mut output = String::new();
            let ser = RefListSerializer::new(&mut output);
            // Each call writes one list member; the type-state API returns the
            // next serializer stage, so the full list is one method chain.
            ser.bare_item(&RefBareItem::Token("a"))
                .unwrap()
                .bare_item(&RefBareItem::Token("abcdefghigklmnoprst"))
                .unwrap()
                .bare_item(&RefBareItem::Integer(123456785686457))
                .unwrap()
                .bare_item(&RefBareItem::Decimal(
                    Decimal::from_f64(99999999999.999).unwrap(),
                ))
                .unwrap()
                // Empty inner list: "()".
                .open_inner_list()
                .close_inner_list()
                // Inner list with two strings, a parameter, and an integer.
                .open_inner_list()
                .inner_list_bare_item(&RefBareItem::String("somelongstringvalue"))
                .unwrap()
                .inner_list_bare_item(&RefBareItem::String("anotherlongstringvalue"))
                .unwrap()
                .inner_list_parameter(
                    "key",
                    &RefBareItem::ByteSeq("somever longstringvaluerepresentedasbytes".as_bytes()),
                )
                .unwrap()
                .inner_list_bare_item(&RefBareItem::Integer(145))
                .unwrap()
                .close_inner_list();
        });
    });
}
/// Benchmarks the reference Dictionary serializer building the same dictionary
/// as the `parsing_dict`/`serializing_dict` fixtures, member by member.
fn serializing_ref_dict(c: &mut Criterion) {
    c.bench_function("serializing_ref_dict", move |bench| {
        bench.iter(|| {
            // Fresh output buffer per iteration; the serializer appends to it.
            let mut output = String::new();
            RefDictSerializer::new(&mut output)
                // Three simple name=value members.
                .bare_item_member("a", &RefBareItem::Boolean(true))
                .unwrap()
                .bare_item_member("dict_key2", &RefBareItem::Token("abcdefghigklmnoprst"))
                .unwrap()
                .bare_item_member("dict_key3", &RefBareItem::Integer(123456785686457))
                .unwrap()
                // One member whose value is an inner list with a trailing parameter.
                .open_inner_list("dict_key4")
                .unwrap()
                .inner_list_bare_item(&RefBareItem::String("inner-list-member"))
                .unwrap()
                .inner_list_bare_item(&RefBareItem::ByteSeq("inner-list-member".as_bytes()))
                .unwrap()
                .close_inner_list()
                .parameter("key", &RefBareItem::Token("aW5uZXItbGlzdC1wYXJhbWV0ZXJz"))
                .unwrap();
        });
    });
}

387
third_party/rust/sfv/src/lib.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,387 @@
/*!
`sfv` crate is an implementation of IETF draft [Structured Field Values for HTTP](https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html)
for parsing and serializing structured HTTP field values.
It also exposes a set of types that might be useful for defining new structured fields.
# Data Structures
There are three types of structured fields:
- `Item` - can be an `Integer`, `Decimal`, `String`, `Token`, `Byte Sequence`, or `Boolean`. It can have associated `Parameters`.
- `List` - array of zero or more members, each of which can be an `Item` or an `InnerList`, both of which can be `Parameterized`.
- `Dictionary` - ordered map of name-value pairs, where the names are short textual strings and the values are `Items` or arrays of `Items` (represented with `InnerList`), both of which can be `Parameterized`. There can be zero or more members, and their names are unique in the scope of the `Dictionary` they occur within.
There are also a few primitive types used to construct structured field values:
- `BareItem` used as `Item`'s value or as a parameter value in `Parameters`.
- `Parameters` are an ordered map of key-value pairs that are associated with an `Item` or `InnerList`. The keys are unique within the scope the `Parameters` they occur within, and the values are `BareItem`.
- `InnerList` is an array of zero or more `Items`. Can have `Parameters`.
- `ListEntry` represents either `Item` or `InnerList` as a member of `List` or as member-value in `Dictionary`.
# Examples
### Parsing
```
use sfv::Parser;
// Parsing structured field value of Item type
let item_header_input = "12.445;foo=bar";
let item = Parser::parse_item(item_header_input.as_bytes());
assert!(item.is_ok());
println!("{:#?}", item);
// Parsing structured field value of List type
let list_header_input = "1;a=tok, (\"foo\" \"bar\");baz, ()";
let list = Parser::parse_list(list_header_input.as_bytes());
assert!(list.is_ok());
println!("{:#?}", list);
// Parsing structured field value of Dictionary type
let dict_header_input = "a=?0, b, c; foo=bar, rating=1.5, fruits=(apple pear)";
let dict = Parser::parse_dictionary(dict_header_input.as_bytes());
assert!(dict.is_ok());
println!("{:#?}", dict);
```
### Value Creation and Serialization
Create `Item` with empty parameters:
```
use sfv::{Item, BareItem, SerializeValue};
let str_item = Item::new(BareItem::String(String::from("foo")));
assert_eq!(str_item.serialize_value().unwrap(), "\"foo\"");
```
Create `Item` field value with parameters:
```
use sfv::{Item, BareItem, SerializeValue, Parameters, Decimal, FromPrimitive};
let mut params = Parameters::new();
let decimal = Decimal::from_f64(13.45655).unwrap();
params.insert("key".into(), BareItem::Decimal(decimal));
let int_item = Item::with_params(BareItem::Integer(99), params);
assert_eq!(int_item.serialize_value().unwrap(), "99;key=13.457");
```
Create `List` field value with `Item` and parametrized `InnerList` as members:
```
use sfv::{Item, BareItem, InnerList, List, SerializeValue, Parameters};
// Create Item
let tok_item = BareItem::Token("tok".into());
// Create InnerList members
let str_item = Item::new(BareItem::String(String::from("foo")));
let mut int_item_params = Parameters::new();
int_item_params.insert("key".into(), BareItem::Boolean(false));
let int_item = Item::with_params(BareItem::Integer(99), int_item_params);
// Create InnerList
let mut inner_list_params = Parameters::new();
inner_list_params.insert("bar".into(), BareItem::Boolean(true));
let inner_list = InnerList::with_params(vec![int_item, str_item], inner_list_params);
let list: List = vec![Item::new(tok_item).into(), inner_list.into()];
assert_eq!(
list.serialize_value().unwrap(),
"tok, (99;key=?0 \"foo\");bar"
);
```
Create `Dictionary` field value:
```
use sfv::{Parser, Item, BareItem, SerializeValue, ParseValue, Dictionary};
let member_value1 = Item::new(BareItem::String(String::from("apple")));
let member_value2 = Item::new(BareItem::Boolean(true));
let member_value3 = Item::new(BareItem::Boolean(false));
let mut dict = Dictionary::new();
dict.insert("key1".into(), member_value1.into());
dict.insert("key2".into(), member_value2.into());
dict.insert("key3".into(), member_value3.into());
assert_eq!(
dict.serialize_value().unwrap(),
"key1=\"apple\", key2, key3=?0"
);
```
*/
mod parser;
mod ref_serializer;
mod serializer;
mod utils;
#[cfg(test)]
mod test_parser;
#[cfg(test)]
mod test_serializer;
use indexmap::IndexMap;
pub use rust_decimal::{
prelude::{FromPrimitive, FromStr},
Decimal,
};
pub use parser::{ParseMore, ParseValue, Parser};
pub use ref_serializer::{RefDictSerializer, RefItemSerializer, RefListSerializer};
pub use serializer::SerializeValue;
// Crate-wide result alias; errors are static, human-readable description strings.
type SFVResult<T> = std::result::Result<T, &'static str>;
/// Represents `Item` type structured field value.
/// Can be used as a member of `List` or `Dictionary`.
// sf-item = bare-item parameters
// bare-item = sf-integer / sf-decimal / sf-string / sf-token
//           / sf-binary / sf-boolean
#[derive(Debug, PartialEq, Clone)]
pub struct Item {
    /// Value of `Item`.
    pub bare_item: BareItem,
    /// `Item`'s associated parameters. Can be empty.
    /// Parameter insertion order is preserved (backed by `IndexMap`).
    pub params: Parameters,
}
impl Item {
/// Returns new `Item` with empty `Parameters`.
pub fn new(bare_item: BareItem) -> Item {
Item {
bare_item,
params: Parameters::new(),
}
}
/// Returns new `Item` with specified `Parameters`.
pub fn with_params(bare_item: BareItem, params: Parameters) -> Item {
Item { bare_item, params }
}
}
/// Represents `Dictionary` type structured field value.
/// Member insertion order is preserved (backed by `IndexMap`).
// sf-dictionary = dict-member *( OWS "," OWS dict-member )
// dict-member = member-name [ "=" member-value ]
// member-name = key
// member-value = sf-item / inner-list
pub type Dictionary = IndexMap<String, ListEntry>;
/// Represents `List` type structured field value.
// sf-list = list-member *( OWS "," OWS list-member )
// list-member = sf-item / inner-list
pub type List = Vec<ListEntry>;
/// Parameters of `Item` or `InnerList`.
/// Key insertion order is preserved (backed by `IndexMap`).
// parameters = *( ";" *SP parameter )
// parameter = param-name [ "=" param-value ]
// param-name = key
// key = ( lcalpha / "*" )
//       *( lcalpha / DIGIT / "_" / "-" / "." / "*" )
// lcalpha = %x61-7A ; a-z
// param-value = bare-item
pub type Parameters = IndexMap<String, BareItem>;
/// Represents a member of `List` or `Dictionary` structured field value.
/// Constructed conveniently via the `From<Item>` / `From<InnerList>` impls.
#[derive(Debug, PartialEq, Clone)]
pub enum ListEntry {
    /// Member of `Item` type.
    Item(Item),
    /// Member of `InnerList` (array of `Items`) type.
    InnerList(InnerList),
}
impl From<Item> for ListEntry {
fn from(item: Item) -> Self {
ListEntry::Item(item)
}
}
impl From<InnerList> for ListEntry {
fn from(item: InnerList) -> Self {
ListEntry::InnerList(item)
}
}
/// Array of `Items` with associated `Parameters`.
// inner-list = "(" *SP [ sf-item *( 1*SP sf-item ) *SP ] ")"
//              parameters
#[derive(Debug, PartialEq, Clone)]
pub struct InnerList {
    /// `Items` that `InnerList` contains. Can be empty.
    pub items: Vec<Item>,
    /// `InnerList`'s associated parameters. Can be empty.
    pub params: Parameters,
}
impl InnerList {
/// Returns new `InnerList` with empty `Parameters`.
pub fn new(items: Vec<Item>) -> InnerList {
InnerList {
items,
params: Parameters::new(),
}
}
/// Returns new `InnerList` with specified `Parameters`.
pub fn with_params(items: Vec<Item>, params: Parameters) -> InnerList {
InnerList { items, params }
}
}
/// `BareItem` type is used to construct `Items` or `Parameters` values.
#[derive(Debug, PartialEq, Clone)]
pub enum BareItem {
    /// Decimal number
    // sf-decimal = ["-"] 1*12DIGIT "." 1*3DIGIT
    Decimal(Decimal),
    /// Integer number
    // sf-integer = ["-"] 1*15DIGIT
    Integer(i64),
    /// Quoted string of printable ASCII characters.
    // sf-string = DQUOTE *chr DQUOTE
    // chr = unescaped / escaped
    // unescaped = %x20-21 / %x23-5B / %x5D-7E
    // escaped = "\" ( DQUOTE / "\" )
    String(String),
    /// Byte sequence; base64-encoded between colons on the wire.
    // ":" *(base64) ":"
    // base64 = ALPHA / DIGIT / "+" / "/" / "="
    ByteSeq(Vec<u8>),
    /// Boolean value.
    // sf-boolean = "?" boolean
    // boolean = "0" / "1"
    Boolean(bool),
    /// Token: an unquoted short textual value.
    // sf-token = ( ALPHA / "*" ) *( tchar / ":" / "/" )
    Token(String),
}
impl BareItem {
    /// If `BareItem` is a decimal, returns `Decimal`, otherwise returns `None`.
    /// ```
    /// # use sfv::{BareItem, Decimal, FromPrimitive};
    /// let decimal_number = Decimal::from_f64(415.566).unwrap();
    /// let bare_item: BareItem = decimal_number.into();
    /// assert_eq!(bare_item.as_decimal().unwrap(), decimal_number);
    /// ```
    pub fn as_decimal(&self) -> Option<Decimal> {
        if let BareItem::Decimal(val) = self {
            Some(*val)
        } else {
            None
        }
    }
    /// If `BareItem` is an integer, returns `i64`, otherwise returns `None`.
    /// ```
    /// # use sfv::BareItem;
    /// let bare_item: BareItem = 100.into();
    /// assert_eq!(bare_item.as_int().unwrap(), 100);
    /// ```
    pub fn as_int(&self) -> Option<i64> {
        if let BareItem::Integer(val) = self {
            Some(*val)
        } else {
            None
        }
    }
    /// If `BareItem` is `String`, returns `&str`, otherwise returns `None`.
    /// ```
    /// # use sfv::BareItem;
    /// let bare_item = BareItem::String("foo".into());
    /// assert_eq!(bare_item.as_str().unwrap(), "foo");
    /// ```
    pub fn as_str(&self) -> Option<&str> {
        if let BareItem::String(val) = self {
            Some(val)
        } else {
            None
        }
    }
    /// If `BareItem` is a `ByteSeq`, returns `&Vec<u8>`, otherwise returns `None`.
    /// ```
    /// # use sfv::BareItem;
    /// let bare_item = BareItem::ByteSeq("foo".to_owned().into_bytes());
    /// assert_eq!(bare_item.as_byte_seq().unwrap().as_slice(), "foo".as_bytes());
    /// ```
    pub fn as_byte_seq(&self) -> Option<&Vec<u8>> {
        if let BareItem::ByteSeq(val) = self {
            Some(val)
        } else {
            None
        }
    }
    /// If `BareItem` is a `Boolean`, returns `bool`, otherwise returns `None`.
    /// ```
    /// # use sfv::BareItem;
    /// let bare_item = BareItem::Boolean(true);
    /// assert_eq!(bare_item.as_bool().unwrap(), true);
    /// ```
    pub fn as_bool(&self) -> Option<bool> {
        if let BareItem::Boolean(val) = self {
            Some(*val)
        } else {
            None
        }
    }
    /// If `BareItem` is a `Token`, returns `&str`, otherwise returns `None`.
    /// ```
    /// use sfv::BareItem;
    ///
    /// let bare_item = BareItem::Token("*bar".into());
    /// assert_eq!(bare_item.as_token().unwrap(), "*bar");
    /// ```
    pub fn as_token(&self) -> Option<&str> {
        if let BareItem::Token(val) = self {
            Some(val)
        } else {
            None
        }
    }
}
impl From<i64> for BareItem {
/// Convert `i64` into `BareItem::Integer`
/// ```
/// # use sfv::BareItem;
/// let bare_item: BareItem = 456.into();
/// assert_eq!(bare_item.as_int().unwrap(), 456);
/// ```
fn from(item: i64) -> Self {
BareItem::Integer(item)
}
}
impl From<Decimal> for BareItem {
/// Convert `Decimal` into `BareItem::Decimal`
/// ```
/// # use sfv::{BareItem, Decimal, FromPrimitive};
/// let decimal_number = Decimal::from_f64(48.01).unwrap();
/// let bare_item: BareItem = decimal_number.into();
/// assert_eq!(bare_item.as_decimal().unwrap(), decimal_number);
/// ```
fn from(item: Decimal) -> Self {
BareItem::Decimal(item)
}
}
/// Internal result of number parsing: a parsed value is either a decimal or an integer.
#[derive(Debug, PartialEq)]
pub(crate) enum Num {
    Decimal(Decimal),
    Integer(i64),
}
/// Borrowed counterpart of `BareItem`, used by the reference ("Ref")
/// serializers to serialize values without taking ownership of their data.
#[derive(Debug, PartialEq, Clone)]
pub enum RefBareItem<'a> {
    /// Integer number.
    Integer(i64),
    /// Decimal number.
    Decimal(Decimal),
    /// Borrowed string value.
    String(&'a str),
    /// Borrowed byte sequence.
    ByteSeq(&'a [u8]),
    /// Boolean value.
    Boolean(bool),
    /// Borrowed token value.
    Token(&'a str),
}
impl BareItem {
    /// Produces a `RefBareItem` view that borrows this item's contents.
    fn to_ref_bare_item(&self) -> RefBareItem {
        match *self {
            // Copy types are copied out; owned data is lent by reference.
            BareItem::Integer(v) => RefBareItem::Integer(v),
            BareItem::Decimal(v) => RefBareItem::Decimal(v),
            BareItem::Boolean(v) => RefBareItem::Boolean(v),
            BareItem::String(ref v) => RefBareItem::String(v),
            BareItem::ByteSeq(ref v) => RefBareItem::ByteSeq(v.as_slice()),
            BareItem::Token(ref v) => RefBareItem::Token(v),
        }
    }
}

477
third_party/rust/sfv/src/parser.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,477 @@
use crate::utils;
use crate::{
BareItem, Decimal, Dictionary, FromStr, InnerList, Item, List, ListEntry, Num, Parameters,
SFVResult,
};
use std::iter::Peekable;
use std::str::{from_utf8, Chars};
/// Implements parsing logic for each structured field value type.
pub trait ParseValue {
    /// Parses one structured field value from the given character stream.
    /// This method should not be used for parsing input into structured field value.
    /// Use `Parser::parse_item`, `Parser::parse_list` or `Parser::parse_dictionary` for that.
    fn parse(input_chars: &mut Peekable<Chars>) -> SFVResult<Self>
    where
        Self: Sized;
}
/// If structured field value of List or Dictionary type is split into multiple lines,
/// allows to parse more lines and merge them into already existing structured field value.
pub trait ParseMore {
    /// If structured field value is split across lines,
    /// parses and merges next line into a single structured field value.
    /// # Examples
    /// ```
    /// # use sfv::{Parser, SerializeValue, ParseMore};
    ///
    /// let mut list_field = Parser::parse_list("11, (12 13)".as_bytes()).unwrap();
    /// list_field.parse_more("\"foo\", \"bar\"".as_bytes()).unwrap();
    ///
    /// assert_eq!(list_field.serialize_value().unwrap(), "11, (12 13), \"foo\", \"bar\"");
    /// ```
    fn parse_more(&mut self, input_bytes: &[u8]) -> SFVResult<()>
    where
        Self: Sized;
}
impl ParseValue for Item {
    /// Parses an item: its bare value followed by optional parameters.
    fn parse(input_chars: &mut Peekable<Chars>) -> SFVResult<Item> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-item
        let bare_item = Parser::parse_bare_item(input_chars)?;
        Ok(Item {
            bare_item,
            params: Parser::parse_parameters(input_chars)?,
        })
    }
}
impl ParseValue for List {
    fn parse(input_chars: &mut Peekable<Chars>) -> SFVResult<List> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-list
        // List represents an array of (item_or_inner_list, parameters)
        let mut members = vec![];
        while input_chars.peek().is_some() {
            members.push(Parser::parse_list_entry(input_chars)?);
            utils::consume_ows_chars(input_chars);
            // Input exhausted right after a member: the list is complete.
            if input_chars.peek().is_none() {
                return Ok(members);
            }
            // Otherwise a member must be followed by a comma separator.
            if let Some(c) = input_chars.next() {
                if c != ',' {
                    return Err("parse_list: trailing characters after list member");
                }
            }
            utils::consume_ows_chars(input_chars);
            // A comma with nothing after it is an error per spec.
            if input_chars.peek().is_none() {
                return Err("parse_list: trailing comma");
            }
        }
        Ok(members)
    }
}
impl ParseValue for Dictionary {
    fn parse(input_chars: &mut Peekable<Chars>) -> SFVResult<Dictionary> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-dictionary
        let mut dict = Dictionary::new();
        while input_chars.peek().is_some() {
            let this_key = Parser::parse_key(input_chars)?;
            if let Some('=') = input_chars.peek() {
                // "key=value" member: value is an item or inner list.
                input_chars.next();
                let member = Parser::parse_list_entry(input_chars)?;
                dict.insert(this_key, member);
            } else {
                // Bare "key" member: implicitly a Boolean true item,
                // which may still carry parameters.
                let value = true;
                let params = Parser::parse_parameters(input_chars)?;
                let member = Item {
                    bare_item: BareItem::Boolean(value),
                    params,
                };
                // Duplicate keys: `insert` keeps only the last occurrence.
                dict.insert(this_key, member.into());
            }
            utils::consume_ows_chars(input_chars);
            // Input exhausted right after a member: the dictionary is complete.
            if input_chars.peek().is_none() {
                return Ok(dict);
            }
            // Otherwise a member must be followed by a comma separator.
            if let Some(c) = input_chars.next() {
                if c != ',' {
                    return Err("parse_dict: trailing characters after dictionary member");
                }
            }
            utils::consume_ows_chars(input_chars);
            // A comma with nothing after it is an error per spec.
            if input_chars.peek().is_none() {
                return Err("parse_dict: trailing comma");
            }
        }
        Ok(dict)
    }
}
impl ParseMore for List {
    /// Parses `input_bytes` as a List and appends its members to `self`.
    fn parse_more(&mut self, input_bytes: &[u8]) -> SFVResult<()> {
        self.extend(Parser::parse_list(input_bytes)?);
        Ok(())
    }
}
impl ParseMore for Dictionary {
    /// Parses `input_bytes` as a Dictionary and merges its members into `self`.
    fn parse_more(&mut self, input_bytes: &[u8]) -> SFVResult<()> {
        self.extend(Parser::parse_dictionary(input_bytes)?);
        Ok(())
    }
}
/// Exposes methods for parsing input into structured field value.
pub struct Parser;
impl Parser {
    /// Parses input into structured field value of Dictionary type
    pub fn parse_dictionary(input_bytes: &[u8]) -> SFVResult<Dictionary> {
        Self::parse::<Dictionary>(input_bytes)
    }
    /// Parses input into structured field value of List type
    pub fn parse_list(input_bytes: &[u8]) -> SFVResult<List> {
        Self::parse::<List>(input_bytes)
    }
    /// Parses input into structured field value of Item type
    pub fn parse_item(input_bytes: &[u8]) -> SFVResult<Item> {
        Self::parse::<Item>(input_bytes)
    }
    // Generic parse method for checking input before parsing
    // and handling trailing text error
    fn parse<T: ParseValue>(input_bytes: &[u8]) -> SFVResult<T> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#text-parse
        if !input_bytes.is_ascii() {
            return Err("parse: non-ascii characters in input");
        }
        // Conversion cannot fail after the ASCII check above; the error path is defensive.
        let mut input_chars = from_utf8(input_bytes)
            .map_err(|_| "parse: conversion from bytes to str failed")?
            .chars()
            .peekable();
        // Leading SP characters are discarded per spec.
        utils::consume_sp_chars(&mut input_chars);
        let output = T::parse(&mut input_chars)?;
        // Trailing SP characters are discarded; anything else is an error.
        utils::consume_sp_chars(&mut input_chars);
        if input_chars.next().is_some() {
            return Err("parse: trailing characters after parsed value");
        };
        Ok(output)
    }
    /// Parses one list/dictionary member: an item or a parenthesized inner list.
    fn parse_list_entry(input_chars: &mut Peekable<Chars>) -> SFVResult<ListEntry> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-item-or-list
        // ListEntry represents a tuple (item_or_inner_list, parameters)
        match input_chars.peek() {
            // '(' opens an inner list; anything else must be an item.
            Some('(') => {
                let parsed = Self::parse_inner_list(input_chars)?;
                Ok(ListEntry::InnerList(parsed))
            }
            _ => {
                let parsed = Item::parse(input_chars)?;
                Ok(ListEntry::Item(parsed))
            }
        }
    }
    /// Parses a "(...)" inner list of items plus its trailing parameters.
    pub(crate) fn parse_inner_list(input_chars: &mut Peekable<Chars>) -> SFVResult<InnerList> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-innerlist
        if Some('(') != input_chars.next() {
            return Err("parse_inner_list: input does not start with '('");
        }
        let mut inner_list = Vec::new();
        while input_chars.peek().is_some() {
            utils::consume_sp_chars(input_chars);
            // ')' terminates the inner list; parameters may follow immediately.
            if Some(&')') == input_chars.peek() {
                input_chars.next();
                let params = Self::parse_parameters(input_chars)?;
                return Ok(InnerList {
                    items: inner_list,
                    params,
                });
            }
            let parsed_item = Item::parse(input_chars)?;
            inner_list.push(parsed_item);
            // Items must be separated by SP or terminated by ')'.
            if let Some(c) = input_chars.peek() {
                if c != &' ' && c != &')' {
                    return Err("parse_inner_list: bad delimitation");
                }
            }
        }
        Err("parse_inner_list: the end of the inner list was not found")
    }
    /// Parses a bare item, dispatching on its first character.
    pub(crate) fn parse_bare_item(mut input_chars: &mut Peekable<Chars>) -> SFVResult<BareItem> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-bare-item
        if input_chars.peek().is_none() {
            return Err("parse_bare_item: empty item");
        }
        // The leading character uniquely identifies the item type.
        match input_chars.peek() {
            Some(&'?') => Ok(BareItem::Boolean(Self::parse_bool(&mut input_chars)?)),
            Some(&'"') => Ok(BareItem::String(Self::parse_string(&mut input_chars)?)),
            Some(&':') => Ok(BareItem::ByteSeq(Self::parse_byte_sequence(
                &mut input_chars,
            )?)),
            Some(&c) if c == '*' || c.is_ascii_alphabetic() => {
                Ok(BareItem::Token(Self::parse_token(&mut input_chars)?))
            }
            Some(&c) if c == '-' || c.is_ascii_digit() => {
                match Self::parse_number(&mut input_chars)? {
                    Num::Decimal(val) => Ok(BareItem::Decimal(val)),
                    Num::Integer(val) => Ok(BareItem::Integer(val)),
                }
            }
            _ => Err("parse_bare_item: item type can't be identified"),
        }
    }
    /// Parses a boolean: "?0" or "?1".
    pub(crate) fn parse_bool(input_chars: &mut Peekable<Chars>) -> SFVResult<bool> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-boolean
        if input_chars.next() != Some('?') {
            return Err("parse_bool: first character is not '?'");
        }
        match input_chars.next() {
            Some('0') => Ok(false),
            Some('1') => Ok(true),
            _ => Err("parse_bool: invalid variant"),
        }
    }
    /// Parses a quoted string, handling '\\' escapes for '"' and '\\'.
    pub(crate) fn parse_string(input_chars: &mut Peekable<Chars>) -> SFVResult<String> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-string
        if input_chars.next() != Some('\"') {
            return Err("parse_string: first character is not '\"'");
        }
        let mut output_string = String::from("");
        while let Some(curr_char) = input_chars.next() {
            match curr_char {
                // Closing quote ends the string.
                '\"' => return Ok(output_string),
                // Control characters are not allowed inside strings.
                '\x7f' | '\x00'..='\x1f' => return Err("parse_string: not a visible character"),
                // Only '\"' and '\\' may be escaped.
                '\\' => match input_chars.next() {
                    Some(c) if c == '\\' || c == '\"' => {
                        output_string.push(c);
                    }
                    None => return Err("parse_string: last input character is '\\'"),
                    _ => return Err("parse_string: disallowed character after '\\'"),
                },
                _ => output_string.push(curr_char),
            }
        }
        Err("parse_string: no closing '\"'")
    }
    /// Parses a token: ALPHA or '*' first, then tchar / ':' / '/' characters.
    pub(crate) fn parse_token(input_chars: &mut Peekable<Chars>) -> SFVResult<String> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-token
        if let Some(first_char) = input_chars.peek() {
            if !first_char.is_ascii_alphabetic() && first_char != &'*' {
                return Err("parse_token: first character is not ALPHA or '*'");
            }
        } else {
            return Err("parse_token: empty input string");
        }
        let mut output_string = String::from("");
        while let Some(curr_char) = input_chars.peek() {
            // Any character outside the token alphabet ends the token
            // without being consumed.
            if !utils::is_tchar(*curr_char) && curr_char != &':' && curr_char != &'/' {
                return Ok(output_string);
            }
            match input_chars.next() {
                Some(c) => output_string.push(c),
                // Unreachable in practice: peek() just returned Some.
                None => return Err("parse_token: end of the string"),
            }
        }
        Ok(output_string)
    }
    /// Parses a ":base64:" byte sequence and decodes its content.
    pub(crate) fn parse_byte_sequence(input_chars: &mut Peekable<Chars>) -> SFVResult<Vec<u8>> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-binary
        if input_chars.next() != Some(':') {
            return Err("parse_byte_seq: first char is not ':'");
        }
        // Look ahead (on a cloned iterator, without consuming) for the closing ':'.
        if !input_chars.clone().any(|c| c == ':') {
            return Err("parse_byte_seq: no closing ':'");
        }
        let b64_content = input_chars.take_while(|c| c != &':').collect::<String>();
        if !b64_content.chars().all(utils::is_allowed_b64_content) {
            return Err("parse_byte_seq: invalid char in byte sequence");
        }
        match utils::base64()?.decode(b64_content.as_bytes()) {
            Ok(content) => Ok(content),
            Err(_) => Err("parse_byte_seq: decoding error"),
        }
    }
    /// Parses an integer or decimal, enforcing the spec's digit-count limits.
    pub(crate) fn parse_number(input_chars: &mut Peekable<Chars>) -> SFVResult<Num> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-number
        let mut sign = 1;
        if let Some('-') = input_chars.peek() {
            sign = -1;
            input_chars.next();
        }
        // A sign (if any) must be followed by at least one digit.
        match input_chars.peek() {
            Some(c) if !c.is_ascii_digit() => {
                return Err("parse_number: input number does not start with a digit")
            }
            None => return Err("parse_number: input number lacks a digit"),
            _ => (),
        }
        // Get number from input as a string and identify whether it's a decimal or integer
        let (is_integer, input_number) = Self::extract_digits(input_chars)?;
        // Parse input_number from string into integer
        if is_integer {
            let output_number = input_number
                .parse::<i64>()
                .map_err(|_err| "parse_number: parsing i64 failed")?
                * sign;
            // Spec limits integers to 15 digits: +/- 999,999,999,999,999.
            let (min_int, max_int) = (-999_999_999_999_999_i64, 999_999_999_999_999_i64);
            if !(min_int <= output_number && output_number <= max_int) {
                return Err("parse_number: integer number is out of range");
            }
            return Ok(Num::Integer(output_number));
        }
        // Parse input_number from string into decimal
        let chars_after_dot = input_number
            .find('.')
            .map(|dot_pos| input_number.len() - dot_pos - 1);
        match chars_after_dot {
            Some(0) => Err("parse_number: decimal ends with '.'"),
            // Spec allows 1 to 3 fractional digits.
            Some(1..=3) => {
                // NOTE(review): the error message says "f64" but this parses a Decimal.
                let mut output_number = Decimal::from_str(&input_number)
                    .map_err(|_err| "parse_number: parsing f64 failed")?;
                // The '-' sign was consumed before digit extraction; reapply it here.
                if sign == -1 {
                    output_number.set_sign_negative(true)
                }
                Ok(Num::Decimal(output_number))
            }
            _ => Err("parse_number: invalid decimal fraction length"),
        }
    }
    /// Consumes the digit (and at most one '.') run from the stream,
    /// returning whether it is an integer and the collected text (sign excluded).
    fn extract_digits(input_chars: &mut Peekable<Chars>) -> SFVResult<(bool, String)> {
        let mut is_integer = true;
        let mut input_number = String::from("");
        while let Some(curr_char) = input_chars.peek() {
            if curr_char.is_ascii_digit() {
                input_number.push(*curr_char);
                input_chars.next();
            } else if curr_char == &'.' && is_integer {
                // At most 12 digits may precede the decimal point per spec.
                if input_number.len() > 12 {
                    return Err(
                        "parse_number: decimal too long, illegal position for decimal point",
                    );
                }
                input_number.push(*curr_char);
                is_integer = false;
                input_chars.next();
            } else {
                // Any other character ends the number without being consumed.
                break;
            }
            // Enforce overall length limits as we go: 15 digits for integers,
            // 16 characters (including the '.') for decimals.
            if is_integer && input_number.len() > 15 {
                return Err("parse_number: integer too long, length > 15");
            }
            if !is_integer && input_number.len() > 16 {
                return Err("parse_number: decimal too long, length > 16");
            }
        }
        Ok((is_integer, input_number))
    }
    /// Parses zero or more ";key[=value]" parameters.
    pub(crate) fn parse_parameters(input_chars: &mut Peekable<Chars>) -> SFVResult<Parameters> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#parse-param
        let mut params = Parameters::new();
        while let Some(curr_char) = input_chars.peek() {
            // Each parameter starts with ';'; anything else ends the parameter list.
            if curr_char == &';' {
                input_chars.next();
            } else {
                break;
            }
            utils::consume_sp_chars(input_chars);
            let param_name = Self::parse_key(input_chars)?;
            // A key without '=' means an implicit Boolean true value.
            let param_value = match input_chars.peek() {
                Some('=') => {
                    input_chars.next();
                    Self::parse_bare_item(input_chars)?
                }
                _ => BareItem::Boolean(true),
            };
            // If parameters already contains a name param_name (comparing character-for-character), overwrite its value.
            // Note that when duplicate Parameter keys are encountered, this has the effect of ignoring all but the last instance.
            params.insert(param_name, param_value);
        }
        Ok(params)
    }
    /// Parses a key: lcalpha or '*' first, then lcalpha / DIGIT / "_-*.".
    pub(crate) fn parse_key(input_chars: &mut Peekable<Chars>) -> SFVResult<String> {
        match input_chars.peek() {
            Some(c) if c == &'*' || c.is_ascii_lowercase() => (),
            _ => return Err("parse_key: first character is not lcalpha or '*'"),
        }
        let mut output = String::new();
        while let Some(curr_char) = input_chars.peek() {
            // Any character outside the key alphabet ends the key
            // without being consumed.
            if !curr_char.is_ascii_lowercase()
                && !curr_char.is_ascii_digit()
                && !"_-*.".contains(*curr_char)
            {
                return Ok(output);
            }
            output.push(*curr_char);
            input_chars.next();
        }
        Ok(output)
    }
}

237
third_party/rust/sfv/src/ref_serializer.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,237 @@
use crate::serializer::Serializer;
use crate::{Decimal, List, Num, RefBareItem, SFVResult};
use std::marker::PhantomData;
/// Serializes a single Item (bare value plus optional parameters) directly
/// into a borrowed output buffer, avoiding intermediate owned structures.
#[derive(Debug)]
pub struct RefItemSerializer<'a> {
    pub buffer: &'a mut String,
}

impl<'a> RefItemSerializer<'a> {
    /// Creates a serializer that appends to `buffer`.
    pub fn new(buffer: &'a mut String) -> Self {
        RefItemSerializer { buffer }
    }

    /// Serializes the item's bare value and returns a parameter serializer,
    /// so the type system enforces "bare item first, parameters after".
    pub fn bare_item(self, bare_item: &RefBareItem) -> SFVResult<RefParameterSerializer<'a>> {
        Serializer::serialize_ref_bare_item(bare_item, self.buffer)?;
        Ok(RefParameterSerializer {
            buffer: self.buffer,
        })
    }
}
/// Appends `;name[=value]` parameters after a serialized bare item.
#[derive(Debug)]
pub struct RefParameterSerializer<'a> {
    buffer: &'a mut String,
}

impl<'a> RefParameterSerializer<'a> {
    /// Writes a single parameter and hands `self` back for chaining.
    pub fn parameter(self, name: &str, value: &RefBareItem) -> SFVResult<Self> {
        Serializer::serialize_ref_parameter(name, value, self.buffer).map(|()| self)
    }
}
/// Builds a serialized List member-by-member into a borrowed buffer.
#[derive(Debug)]
pub struct RefListSerializer<'a> {
    buffer: &'a mut String,
}

impl<'a> RefListSerializer<'a> {
    /// Creates a serializer that appends to `buffer`.
    pub fn new(buffer: &'a mut String) -> Self {
        RefListSerializer { buffer }
    }

    /// Appends a bare item as the next list member, preceded by ", "
    /// whenever the buffer already holds earlier output.
    pub fn bare_item(self, bare_item: &RefBareItem) -> SFVResult<Self> {
        if !self.buffer.is_empty() {
            self.buffer.push_str(", ");
        }
        Serializer::serialize_ref_bare_item(bare_item, self.buffer)?;
        Ok(RefListSerializer {
            buffer: self.buffer,
        })
    }

    /// Appends a parameter to the most recent member; errors when nothing
    /// has been serialized yet (a parameter cannot lead a list).
    pub fn parameter(self, name: &str, value: &RefBareItem) -> SFVResult<Self> {
        if self.buffer.is_empty() {
            return Err("parameters must be serialized after bare item or inner list");
        }
        Serializer::serialize_ref_parameter(name, value, self.buffer)?;
        Ok(RefListSerializer {
            buffer: self.buffer,
        })
    }

    /// Opens an inner list ("("), separated by ", " from earlier members.
    /// Members are then written via the returned serializer until
    /// `close_inner_list()` returns control to this list.
    pub fn open_inner_list(self) -> RefInnerListSerializer<'a, Self> {
        if !self.buffer.is_empty() {
            self.buffer.push_str(", ");
        }
        self.buffer.push('(');
        RefInnerListSerializer::<RefListSerializer> {
            buffer: self.buffer,
            caller_type: PhantomData,
        }
    }
}
/// Builds a serialized Dictionary member-by-member into a borrowed buffer.
#[derive(Debug)]
pub struct RefDictSerializer<'a> {
    buffer: &'a mut String,
}

impl<'a> RefDictSerializer<'a> {
    /// Creates a serializer that appends to `buffer`.
    pub fn new(buffer: &'a mut String) -> Self {
        RefDictSerializer { buffer }
    }

    /// Appends "name[=value]". A boolean-true value is omitted on the wire:
    /// only the key (and, later, its parameters) is written.
    pub fn bare_item_member(self, name: &str, value: &RefBareItem) -> SFVResult<Self> {
        if !self.buffer.is_empty() {
            self.buffer.push_str(", ");
        }
        Serializer::serialize_key(name, self.buffer)?;
        if value != &RefBareItem::Boolean(true) {
            self.buffer.push('=');
            Serializer::serialize_ref_bare_item(value, self.buffer)?;
        }
        Ok(self)
    }

    /// Appends a parameter to the most recent member; errors when nothing
    /// has been serialized yet.
    pub fn parameter(self, name: &str, value: &RefBareItem) -> SFVResult<Self> {
        if self.buffer.is_empty() {
            return Err("parameters must be serialized after bare item or inner list");
        }
        Serializer::serialize_ref_parameter(name, value, self.buffer)?;
        Ok(RefDictSerializer {
            buffer: self.buffer,
        })
    }

    /// Opens an inner-list member: writes "name=(" and returns an
    /// inner-list serializer that comes back to this type on
    /// `close_inner_list()`.
    pub fn open_inner_list(self, name: &str) -> SFVResult<RefInnerListSerializer<'a, Self>> {
        if !self.buffer.is_empty() {
            self.buffer.push_str(", ");
        }
        Serializer::serialize_key(name, self.buffer)?;
        self.buffer.push_str("=(");
        Ok(RefInnerListSerializer::<RefDictSerializer> {
            buffer: self.buffer,
            caller_type: PhantomData,
        })
    }
}
/// Serializes the members of an inner list. `T` is the serializer type
/// (list or dict) that opened the inner list, tracked at the type level
/// via `PhantomData` so `close_inner_list` returns control to the caller.
#[derive(Debug)]
pub struct RefInnerListSerializer<'a, T> {
    buffer: &'a mut String,
    caller_type: PhantomData<T>,
}

impl<'a, T: Container<'a>> RefInnerListSerializer<'a, T> {
    /// Appends a bare item, space-separated from a preceding member.
    pub fn inner_list_bare_item(self, bare_item: &RefBareItem) -> SFVResult<Self> {
        // No separator right after the opening '(' or at the very start.
        // Short-circuiting `&&` replaces the original bitwise `&`; both
        // operands are cheap bools, so behavior is unchanged.
        if !self.buffer.is_empty() && !self.buffer.ends_with('(') {
            self.buffer.push(' ');
        }
        Serializer::serialize_ref_bare_item(bare_item, self.buffer)?;
        Ok(RefInnerListSerializer {
            buffer: self.buffer,
            caller_type: PhantomData,
        })
    }

    /// Appends a parameter to the most recent inner-list member; errors
    /// when nothing has been serialized yet.
    pub fn inner_list_parameter(self, name: &str, value: &RefBareItem) -> SFVResult<Self> {
        if self.buffer.is_empty() {
            return Err("parameters must be serialized after bare item or inner list");
        }
        Serializer::serialize_ref_parameter(name, value, self.buffer)?;
        Ok(RefInnerListSerializer {
            buffer: self.buffer,
            caller_type: PhantomData,
        })
    }

    /// Closes the inner list (")") and resumes the enclosing serializer.
    pub fn close_inner_list(self) -> T {
        self.buffer.push(')');
        T::new(self.buffer)
    }
}
/// Factory used by `RefInnerListSerializer::close_inner_list` to re-create
/// the serializer (list or dict) that opened the inner list.
pub trait Container<'a> {
    // Rebuilds the caller-side serializer over the shared output buffer.
    fn new(buffer: &'a mut String) -> Self;
}
impl<'a> Container<'a> for RefListSerializer<'a> {
    /// Resumes list serialization over the shared buffer.
    // Lifetime written out explicitly so the signature visibly matches the
    // trait's `fn new(buffer: &'a mut String) -> Self` (the original elided it).
    fn new(buffer: &'a mut String) -> RefListSerializer<'a> {
        RefListSerializer { buffer }
    }
}
impl<'a> Container<'a> for RefDictSerializer<'a> {
    /// Resumes dictionary serialization over the shared buffer.
    // Lifetime written out explicitly so the signature visibly matches the
    // trait's `fn new(buffer: &'a mut String) -> Self` (the original elided it).
    fn new(buffer: &'a mut String) -> RefDictSerializer<'a> {
        RefDictSerializer { buffer }
    }
}
#[cfg(test)]
mod alternative_serializer_tests {
    use super::*;
    use crate::{BareItem, FromPrimitive};

    // Item: bare token plus a boolean-true parameter (value omitted on wire).
    #[test]
    fn test_fast_serialize_item() -> SFVResult<()> {
        let mut output = String::new();
        let ser = RefItemSerializer::new(&mut output);
        ser.bare_item(&RefBareItem::Token("hello"))?
            .parameter("abc", &RefBareItem::Boolean(true))?;
        assert_eq!("hello;abc", output);
        Ok(())
    }

    // List: parametrized members plus an inner list carrying its own params.
    #[test]
    fn test_fast_serialize_list() -> SFVResult<()> {
        let mut output = String::new();
        let ser = RefListSerializer::new(&mut output);
        ser.bare_item(&RefBareItem::Token("hello"))?
            .parameter("key1", &RefBareItem::Boolean(true))?
            .parameter("key2", &RefBareItem::Boolean(false))?
            .open_inner_list()
            .inner_list_bare_item(&RefBareItem::String("some_string"))?
            .inner_list_bare_item(&RefBareItem::Integer(12))?
            .inner_list_parameter("inner-member-key", &RefBareItem::Boolean(true))?
            .close_inner_list()
            .parameter("inner-list-param", &RefBareItem::Token("*"))?;
        assert_eq!(
            "hello;key1;key2=?0, (\"some_string\" 12;inner-member-key);inner-list-param=*",
            output
        );
        Ok(())
    }

    // Dictionary: mixed member kinds — decimals round to 3 fractional digits,
    // byte sequences appear base64-encoded between colons, boolean-true
    // members serialize as the bare key.
    #[test]
    fn test_fast_serialize_dict() -> SFVResult<()> {
        let mut output = String::new();
        let ser = RefDictSerializer::new(&mut output);
        ser.bare_item_member("member1", &RefBareItem::Token("hello"))?
            .parameter("key1", &RefBareItem::Boolean(true))?
            .parameter("key2", &RefBareItem::Boolean(false))?
            .bare_item_member("member2", &RefBareItem::Boolean(true))?
            .parameter(
                "key3",
                &RefBareItem::Decimal(Decimal::from_f64(45.4586).unwrap()),
            )?
            .parameter("key4", &RefBareItem::String("str"))?
            .open_inner_list("key5")?
            .inner_list_bare_item(&RefBareItem::Integer(45))?
            .inner_list_bare_item(&RefBareItem::Integer(0))?
            .close_inner_list()
            .bare_item_member("key6", &RefBareItem::String("foo"))?
            .open_inner_list("key7")?
            .inner_list_bare_item(&RefBareItem::ByteSeq("some_string".as_bytes()))?
            .inner_list_bare_item(&RefBareItem::ByteSeq("other_string".as_bytes()))?
            .close_inner_list()
            .parameter("lparam", &RefBareItem::Integer(10))?
            .bare_item_member("key8", &RefBareItem::Boolean(true))?;
        assert_eq!(
            "member1=hello;key1;key2=?0, member2;key3=45.459;key4=\"str\", key5=(45 0), key6=\"foo\", key7=(:c29tZV9zdHJpbmc=: :b3RoZXJfc3RyaW5n:);lparam=10, key8",
            output
        );
        Ok(())
    }
}

320
third_party/rust/sfv/src/serializer.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,320 @@
use crate::utils;
use crate::{
BareItem, Decimal, Dictionary, InnerList, Item, List, ListEntry, Parameters, RefBareItem,
SFVResult,
};
use data_encoding::BASE64;
use rust_decimal::prelude::Zero;
/// Serializes structured field value into String.
pub trait SerializeValue {
    /// Serializes structured field value into String.
    ///
    /// Returns `Err` when the value cannot be represented on the wire,
    /// e.g. an empty list/dictionary or an out-of-range number.
    /// # Examples
    /// ```
    /// # use sfv::{Parser, SerializeValue, ParseValue};
    ///
    /// let parsed_list_field = Parser::parse_list("\"london\", \t\t\"berlin\"".as_bytes());
    /// assert!(parsed_list_field.is_ok());
    ///
    /// assert_eq!(
    ///     parsed_list_field.unwrap().serialize_value().unwrap(),
    ///     "\"london\", \"berlin\""
    /// );
    /// ```
    fn serialize_value(&self) -> SFVResult<String>;
}
impl SerializeValue for Dictionary {
    /// Serializes this dictionary into its textual field-value form.
    fn serialize_value(&self) -> SFVResult<String> {
        let mut serialized = String::new();
        Serializer::serialize_dict(self, &mut serialized).map(|()| serialized)
    }
}
impl SerializeValue for List {
    /// Serializes this list into its textual field-value form.
    fn serialize_value(&self) -> SFVResult<String> {
        let mut serialized = String::new();
        Serializer::serialize_list(self, &mut serialized).map(|()| serialized)
    }
}
impl SerializeValue for Item {
    /// Serializes this item into its textual field-value form.
    fn serialize_value(&self) -> SFVResult<String> {
        let mut serialized = String::new();
        Serializer::serialize_item(self, &mut serialized).map(|()| serialized)
    }
}
/// Container serialization functions implementing the algorithms of the
/// Structured Field Values draft (draft-ietf-httpbis-header-structure).
pub(crate) struct Serializer;

impl Serializer {
    /// Serializes an `Item`: its bare value followed by its parameters.
    pub(crate) fn serialize_item(input_item: &Item, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-item
        Self::serialize_bare_item(&input_item.bare_item, output)?;
        Self::serialize_parameters(&input_item.params, output)?;
        Ok(())
    }

    /// Serializes a `List`; members are separated by ", ".
    /// Serializing an empty field is an error per the specification.
    #[deny(clippy::ptr_arg)]
    pub(crate) fn serialize_list(input_list: &List, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-list
        if input_list.is_empty() {
            return Err("serialize_list: serializing empty field is not allowed");
        }
        for (idx, member) in input_list.iter().enumerate() {
            match member {
                ListEntry::Item(item) => {
                    Self::serialize_item(item, output)?;
                }
                ListEntry::InnerList(inner_list) => {
                    Self::serialize_inner_list(inner_list, output)?;
                }
            };
            // If more items remain in input_list, append ", " to output.
            if idx < input_list.len() - 1 {
                output.push_str(", ");
            }
        }
        Ok(())
    }

    /// Serializes a `Dictionary`; members are "key[=value]" separated by
    /// ", ". A member whose value is boolean true serializes as the bare
    /// key plus its parameters. Serializing an empty field is an error.
    pub(crate) fn serialize_dict(input_dict: &Dictionary, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-dictionary
        if input_dict.is_empty() {
            return Err("serialize_dictionary: serializing empty field is not allowed");
        }
        for (idx, (member_name, member_value)) in input_dict.iter().enumerate() {
            Serializer::serialize_key(member_name, output)?;
            match member_value {
                ListEntry::Item(item) => {
                    // If dict member is boolean true, no need to serialize it:
                    // only its params must be serialized.
                    // Otherwise serialize entire item with its params.
                    if item.bare_item == BareItem::Boolean(true) {
                        Self::serialize_parameters(&item.params, output)?;
                    } else {
                        output.push('=');
                        Self::serialize_item(item, output)?;
                    }
                }
                ListEntry::InnerList(inner_list) => {
                    output.push('=');
                    Self::serialize_inner_list(inner_list, output)?;
                }
            }
            // If more members remain in input_dict, append ", " to output.
            if idx < input_dict.len() - 1 {
                output.push_str(", ");
            }
        }
        Ok(())
    }

    /// Serializes an inner list: "(" items separated by SP ")" params.
    fn serialize_inner_list(input_inner_list: &InnerList, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-innerlist
        let items = &input_inner_list.items;
        let inner_list_parameters = &input_inner_list.params;
        output.push('(');
        for (idx, item) in items.iter().enumerate() {
            Self::serialize_item(item, output)?;
            // If more values remain in inner_list, append a single SP to output.
            if idx < items.len() - 1 {
                output.push(' ');
            }
        }
        output.push(')');
        Self::serialize_parameters(inner_list_parameters, output)?;
        Ok(())
    }

    /// Serializes an owned `BareItem` by delegating to the by-reference form.
    pub(crate) fn serialize_bare_item(
        input_bare_item: &BareItem,
        output: &mut String,
    ) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-bare-item
        let ref_bare_item = input_bare_item.to_ref_bare_item();
        Self::serialize_ref_bare_item(&ref_bare_item, output)
    }

    /// Dispatches serialization on the bare-item variant.
    pub(crate) fn serialize_ref_bare_item(
        value: &RefBareItem,
        output: &mut String,
    ) -> SFVResult<()> {
        match value {
            RefBareItem::Boolean(value) => Self::serialize_bool(*value, output)?,
            RefBareItem::String(value) => Self::serialize_string(value, output)?,
            RefBareItem::ByteSeq(value) => Self::serialize_byte_sequence(value, output)?,
            RefBareItem::Token(value) => Self::serialize_token(value, output)?,
            RefBareItem::Integer(value) => Self::serialize_integer(*value, output)?,
            RefBareItem::Decimal(value) => Self::serialize_decimal(*value, output)?,
        };
        Ok(())
    }

    /// Serializes all parameters as ";name[=value]" pairs, in order.
    pub(crate) fn serialize_parameters(
        input_params: &Parameters,
        output: &mut String,
    ) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-params
        for (param_name, param_value) in input_params.iter() {
            Self::serialize_ref_parameter(param_name, &param_value.to_ref_bare_item(), output)?;
        }
        Ok(())
    }

    /// Serializes one ";name[=value]" parameter. A boolean-true value is
    /// omitted on the wire: only the key is written.
    pub(crate) fn serialize_ref_parameter(
        name: &str,
        value: &RefBareItem,
        output: &mut String,
    ) -> SFVResult<()> {
        output.push(';');
        Self::serialize_key(name, output)?;
        if value != &RefBareItem::Boolean(true) {
            output.push('=');
            Self::serialize_ref_bare_item(value, output)?;
        }
        Ok(())
    }

    /// Validates and appends a key: first character lcalpha or '*', the
    /// rest lcalpha / DIGIT / "_" / "-" / "*" / ".".
    pub(crate) fn serialize_key(input_key: &str, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-key
        let disallowed_chars =
            |c: char| !(c.is_ascii_lowercase() || c.is_ascii_digit() || "_-*.".contains(c));
        if input_key.chars().any(disallowed_chars) {
            return Err("serialize_key: disallowed character in input");
        }
        if let Some(char) = input_key.chars().next() {
            if !(char.is_ascii_lowercase() || char == '*') {
                return Err("serialize_key: first character is not lcalpha or '*'");
            }
        }
        output.push_str(input_key);
        Ok(())
    }

    /// Serializes an integer; the spec limits values to 15 decimal digits.
    pub(crate) fn serialize_integer(value: i64, output: &mut String) -> SFVResult<()> {
        //https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-integer
        const MIN_INT: i64 = -999_999_999_999_999;
        const MAX_INT: i64 = 999_999_999_999_999;
        if !(MIN_INT..=MAX_INT).contains(&value) {
            return Err("serialize_integer: integer is out of range");
        }
        output.push_str(&value.to_string());
        Ok(())
    }

    /// Serializes a decimal rounded to at most three fractional digits;
    /// the integer component is limited to 12 digits.
    pub(crate) fn serialize_decimal(value: Decimal, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-decimal
        let integer_comp_length = 12;
        let fraction_length = 3;
        let decimal = value.round_dp(fraction_length);
        let int_comp = decimal.trunc();
        let fract_comp = decimal.fract();
        // TODO: Replace the string-length digit count with > 999_999_999_999_u64
        if int_comp.abs().to_string().len() > integer_comp_length {
            return Err("serialize_decimal: integer component > 12 digits");
        }
        if fract_comp.is_zero() {
            // The wire format requires at least one fractional digit ("x.0").
            output.push_str(&int_comp.to_string());
            output.push_str(".0");
        } else {
            output.push_str(&decimal.to_string());
        }
        Ok(())
    }

    /// Serializes a quoted string; only printable ASCII is allowed, and
    /// '\' and '"' are backslash-escaped.
    pub(crate) fn serialize_string(value: &str, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-string
        if !value.is_ascii() {
            return Err("serialize_string: non-ascii character");
        }
        // DEL (0x7f) and the C0 controls (0x00-0x1f) are neither VCHAR nor SP.
        let is_not_vchar_or_sp = |char| char == '\x7f' || (char >= '\x00' && char <= '\x1f');
        if value.chars().any(is_not_vchar_or_sp) {
            return Err("serialize_string: not a visible character");
        }
        output.push('\"');
        for char in value.chars() {
            if char == '\\' || char == '\"' {
                output.push('\\');
            }
            output.push(char);
        }
        output.push('\"');
        Ok(())
    }

    /// Serializes a token: first character ALPHA or '*', the rest
    /// tchar / ':' / '/'.
    pub(crate) fn serialize_token(value: &str, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-token
        if !value.is_ascii() {
            // NOTE(review): the message names "serialize_string" — looks like
            // a copy/paste from serialize_string; fix the text together with
            // any tests asserting it.
            return Err("serialize_string: non-ascii character");
        }
        let mut chars = value.chars();
        if let Some(char) = chars.next() {
            if !(char.is_ascii_alphabetic() || char == '*') {
                return Err("serialise_token: first character is not ALPHA or '*'");
            }
        }
        // `chars` already passed the first character; check the remainder.
        if chars.any(|c| !(utils::is_tchar(c) || c == ':' || c == '/')) {
            return Err("serialise_token: disallowed character");
        }
        output.push_str(value);
        Ok(())
    }

    /// Serializes a byte sequence as ":<base64>:".
    pub(crate) fn serialize_byte_sequence(value: &[u8], output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-binary
        output.push(':');
        output.push_str(&BASE64.encode(value));
        output.push(':');
        Ok(())
    }

    /// Serializes a boolean as "?1" (true) or "?0" (false).
    pub(crate) fn serialize_bool(value: bool, output: &mut String) -> SFVResult<()> {
        // https://httpwg.org/http-extensions/draft-ietf-httpbis-header-structure.html#ser-boolean
        let val = if value { "?1" } else { "?0" };
        output.push_str(val);
        Ok(())
    }
}

850
third_party/rust/sfv/src/test_parser.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,850 @@
use crate::FromStr;
use crate::{BareItem, Decimal, Dictionary, InnerList, Item, List, Num, Parameters};
use crate::{ParseMore, ParseValue, Parser};
use std::error::Error;
use std::iter::FromIterator;
// Parses a quoted string item and a decimal item with a boolean parameter.
#[test]
fn parse() -> Result<(), Box<dyn Error>> {
    let input = "\"some_value\"".as_bytes();
    let parsed_item = Item::new(BareItem::String("some_value".to_owned()));
    let expected = parsed_item;
    assert_eq!(expected, Parser::parse_item(input)?);
    let input = "12.35;a ".as_bytes();
    let params = Parameters::from_iter(vec![("a".to_owned(), BareItem::Boolean(true))]);
    let expected = Item::with_params(Decimal::from_str("12.35")?.into(), params);
    assert_eq!(expected, Parser::parse_item(input)?);
    Ok(())
}

// Non-ascii input, trailing text, and empty input are all rejected.
#[test]
fn parse_errors() -> Result<(), Box<dyn Error>> {
    let input = "\"some_value¢\"".as_bytes();
    assert_eq!(
        Err("parse: non-ascii characters in input"),
        Parser::parse_item(input)
    );
    let input = "\"some_value\" trailing_text".as_bytes();
    assert_eq!(
        Err("parse: trailing characters after parsed value"),
        Parser::parse_item(input)
    );
    assert_eq!(
        Err("parse_bare_item: empty item"),
        Parser::parse_item("".as_bytes())
    );
    Ok(())
}

// Comma-separated integers parse into a two-item list.
#[test]
fn parse_list_of_numbers() -> Result<(), Box<dyn Error>> {
    let mut input = "1,42".chars().peekable();
    let item1 = Item::new(1.into());
    let item2 = Item::new(42.into());
    let expected_list: List = vec![item1.into(), item2.into()];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// OWS around the list separator is ignored.
#[test]
fn parse_list_with_multiple_spaces() -> Result<(), Box<dyn Error>> {
    let mut input = "1 , 42".chars().peekable();
    let item1 = Item::new(1.into());
    let item2 = Item::new(42.into());
    let expected_list: List = vec![item1.into(), item2.into()];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// Two parenthesized groups parse into two inner lists.
#[test]
fn parse_list_of_lists() -> Result<(), Box<dyn Error>> {
    let mut input = "(1 2), (42 43)".chars().peekable();
    let item1 = Item::new(1.into());
    let item2 = Item::new(2.into());
    let item3 = Item::new(42.into());
    let item4 = Item::new(43.into());
    let inner_list_1 = InnerList::new(vec![item1, item2]);
    let inner_list_2 = InnerList::new(vec![item3, item4]);
    let expected_list: List = vec![inner_list_1.into(), inner_list_2.into()];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// "()" is a valid, empty inner list.
#[test]
fn parse_list_empty_inner_list() -> Result<(), Box<dyn Error>> {
    let mut input = "()".chars().peekable();
    let inner_list = InnerList::new(vec![]);
    let expected_list: List = vec![inner_list.into()];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// Empty input parses into an empty list (parsing, unlike serializing, allows it).
#[test]
fn parse_list_empty() -> Result<(), Box<dyn Error>> {
    let mut input = "".chars().peekable();
    let expected_list: List = vec![];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// Inner-list parameters and spaces inside the parentheses are handled.
#[test]
fn parse_list_of_lists_with_param_and_spaces() -> Result<(), Box<dyn Error>> {
    let mut input = "( 1 42 ); k=*".chars().peekable();
    let item1 = Item::new(1.into());
    let item2 = Item::new(42.into());
    let inner_list_param =
        Parameters::from_iter(vec![("k".to_owned(), BareItem::Token("*".to_owned()))]);
    let inner_list = InnerList::with_params(vec![item1, item2], inner_list_param);
    let expected_list: List = vec![inner_list.into()];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}
// A list can mix items, a parametrized inner list, and an empty inner list.
#[test]
fn parse_list_of_items_and_lists_with_param() -> Result<(), Box<dyn Error>> {
    let mut input = "12, 14, (a b); param=\"param_value_1\", ()"
        .chars()
        .peekable();
    let item1 = Item::new(12.into());
    let item2 = Item::new(14.into());
    let item3 = Item::new(BareItem::Token("a".to_owned()));
    let item4 = Item::new(BareItem::Token("b".to_owned()));
    let inner_list_param = Parameters::from_iter(vec![(
        "param".to_owned(),
        BareItem::String("param_value_1".to_owned()),
    )]);
    let inner_list = InnerList::with_params(vec![item3, item4], inner_list_param);
    let empty_inner_list = InnerList::new(vec![]);
    let expected_list: List = vec![
        item1.into(),
        item2.into(),
        inner_list.into(),
        empty_inner_list.into(),
    ];
    assert_eq!(expected_list, List::parse(&mut input)?);
    Ok(())
}

// Leading commas, trailing commas/characters, and unterminated inner lists fail.
#[test]
fn parse_list_errors() -> Result<(), Box<dyn Error>> {
    let mut input = ",".chars().peekable();
    assert_eq!(
        Err("parse_bare_item: item type can't be identified"),
        List::parse(&mut input)
    );
    let mut input = "a, b c".chars().peekable();
    assert_eq!(
        Err("parse_list: trailing characters after list member"),
        List::parse(&mut input)
    );
    let mut input = "a,".chars().peekable();
    assert_eq!(Err("parse_list: trailing comma"), List::parse(&mut input));
    let mut input = "a , ".chars().peekable();
    assert_eq!(Err("parse_list: trailing comma"), List::parse(&mut input));
    let mut input = "a\t \t ,\t ".chars().peekable();
    assert_eq!(Err("parse_list: trailing comma"), List::parse(&mut input));
    let mut input = "a\t\t,\t\t\t".chars().peekable();
    assert_eq!(Err("parse_list: trailing comma"), List::parse(&mut input));
    let mut input = "(a b),".chars().peekable();
    assert_eq!(Err("parse_list: trailing comma"), List::parse(&mut input));
    let mut input = "(1, 2, (a b)".chars().peekable();
    assert_eq!(
        Err("parse_inner_list: bad delimitation"),
        List::parse(&mut input)
    );
    Ok(())
}

// Inner lists must start with '('.
#[test]
fn parse_inner_list_errors() -> Result<(), Box<dyn Error>> {
    let mut input = "c b); a=1".chars().peekable();
    assert_eq!(
        Err("parse_inner_list: input does not start with '('"),
        Parser::parse_inner_list(&mut input)
    );
    Ok(())
}

// Inner-list members and trailing parameters are parsed together.
#[test]
fn parse_inner_list_with_param_and_spaces() -> Result<(), Box<dyn Error>> {
    let mut input = "(c b); a=1".chars().peekable();
    let inner_list_param = Parameters::from_iter(vec![("a".to_owned(), 1.into())]);
    let item1 = Item::new(BareItem::Token("c".to_owned()));
    let item2 = Item::new(BareItem::Token("b".to_owned()));
    let expected = InnerList::with_params(vec![item1, item2], inner_list_param);
    assert_eq!(expected, Parser::parse_inner_list(&mut input)?);
    Ok(())
}

// Trailing space after an integer item is tolerated.
#[test]
fn parse_item_int_with_space() -> Result<(), Box<dyn Error>> {
    let mut input = "12 ".chars().peekable();
    assert_eq!(Item::new(12.into()), Item::parse(&mut input)?);
    Ok(())
}

// Decimal item with a valueless (boolean-true) parameter.
#[test]
fn parse_item_decimal_with_bool_param_and_space() -> Result<(), Box<dyn Error>> {
    let mut input = "12.35;a ".chars().peekable();
    let param = Parameters::from_iter(vec![("a".to_owned(), BareItem::Boolean(true))]);
    assert_eq!(
        Item::with_params(Decimal::from_str("12.35")?.into(), param),
        Item::parse(&mut input)?
    );
    Ok(())
}

// Quoted "12.35" stays a string item; the parameter holds a token value.
#[test]
fn parse_item_number_with_param() -> Result<(), Box<dyn Error>> {
    let param = Parameters::from_iter(vec![("a1".to_owned(), BareItem::Token("*".to_owned()))]);
    assert_eq!(
        Item::with_params(BareItem::String("12.35".to_owned()), param),
        Item::parse(&mut "\"12.35\";a1=*".chars().peekable())?
    );
    Ok(())
}

// An item cannot be empty.
#[test]
fn parse_item_errors() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Err("parse_bare_item: empty item"),
        Item::parse(&mut "".chars().peekable())
    );
    Ok(())
}

// Empty input yields an empty dictionary.
#[test]
fn parse_dict_empty() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Dictionary::new(),
        Dictionary::parse(&mut "".chars().peekable())?
    );
    Ok(())
}

// Trailing characters and trailing commas after dictionary members fail.
#[test]
fn parse_dict_errors() -> Result<(), Box<dyn Error>> {
    let mut input = "abc=123;a=1;b=2 def".chars().peekable();
    assert_eq!(
        Err("parse_dict: trailing characters after dictionary member"),
        Dictionary::parse(&mut input)
    );
    let mut input = "abc=123;a=1,".chars().peekable();
    assert_eq!(
        Err("parse_dict: trailing comma"),
        Dictionary::parse(&mut input)
    );
    Ok(())
}
// Dictionary with parametrized members and mixed spacing.
#[test]
fn parse_dict_with_spaces_and_params() -> Result<(), Box<dyn Error>> {
    let mut input = "abc=123;a=1;b=2, def=456, ghi=789;q=9;r=\"+w\""
        .chars()
        .peekable();
    let item1_params =
        Parameters::from_iter(vec![("a".to_owned(), 1.into()), ("b".to_owned(), 2.into())]);
    let item3_params = Parameters::from_iter(vec![
        ("q".to_owned(), 9.into()),
        ("r".to_owned(), BareItem::String("+w".to_owned())),
    ]);
    let item1 = Item::with_params(123.into(), item1_params);
    let item2 = Item::new(456.into());
    let item3 = Item::with_params(789.into(), item3_params);
    let expected_dict = Dictionary::from_iter(vec![
        ("abc".to_owned(), item1.into()),
        ("def".to_owned(), item2.into()),
        ("ghi".to_owned(), item3.into()),
    ]);
    assert_eq!(expected_dict, Dictionary::parse(&mut input)?);
    Ok(())
}

// A member's value can be an empty inner list.
#[test]
fn parse_dict_empty_value() -> Result<(), Box<dyn Error>> {
    let mut input = "a=()".chars().peekable();
    let inner_list = InnerList::new(vec![]);
    let expected_dict = Dictionary::from_iter(vec![("a".to_owned(), inner_list.into())]);
    assert_eq!(expected_dict, Dictionary::parse(&mut input)?);
    Ok(())
}

// A member without '=' is boolean true and may still carry parameters.
#[test]
fn parse_dict_with_token_param() -> Result<(), Box<dyn Error>> {
    let mut input = "a=1, b;foo=*, c=3".chars().peekable();
    let item2_params =
        Parameters::from_iter(vec![("foo".to_owned(), BareItem::Token("*".to_owned()))]);
    let item1 = Item::new(1.into());
    let item2 = Item::with_params(BareItem::Boolean(true), item2_params);
    let item3 = Item::new(3.into());
    let expected_dict = Dictionary::from_iter(vec![
        ("a".to_owned(), item1.into()),
        ("b".to_owned(), item2.into()),
        ("c".to_owned(), item3.into()),
    ]);
    assert_eq!(expected_dict, Dictionary::parse(&mut input)?);
    Ok(())
}

// Different OWS between members parses identically.
#[test]
fn parse_dict_multiple_spaces() -> Result<(), Box<dyn Error>> {
    // input1, input2, input3 must be parsed into the same structure
    let item1 = Item::new(1.into());
    let item2 = Item::new(2.into());
    let expected_dict = Dictionary::from_iter(vec![
        ("a".to_owned(), item1.into()),
        ("b".to_owned(), item2.into()),
    ]);
    let mut input1 = "a=1 , b=2".chars().peekable();
    let mut input2 = "a=1\t,\tb=2".chars().peekable();
    let mut input3 = "a=1, b=2".chars().peekable();
    assert_eq!(expected_dict, Dictionary::parse(&mut input1)?);
    assert_eq!(expected_dict, Dictionary::parse(&mut input2)?);
    assert_eq!(expected_dict, Dictionary::parse(&mut input3)?);
    Ok(())
}

// Bare items: every variant is recognized from its leading character.
#[test]
fn parse_bare_item() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        BareItem::Boolean(false),
        Parser::parse_bare_item(&mut "?0".chars().peekable())?
    );
    assert_eq!(
        BareItem::String("test string".to_owned()),
        Parser::parse_bare_item(&mut "\"test string\"".chars().peekable())?
    );
    assert_eq!(
        BareItem::Token("*token".to_owned()),
        Parser::parse_bare_item(&mut "*token".chars().peekable())?
    );
    assert_eq!(
        BareItem::ByteSeq("base_64 encoding test".to_owned().into_bytes()),
        Parser::parse_bare_item(&mut ":YmFzZV82NCBlbmNvZGluZyB0ZXN0:".chars().peekable())?
    );
    assert_eq!(
        BareItem::Decimal(Decimal::from_str("-3.55")?),
        Parser::parse_bare_item(&mut "-3.55".chars().peekable())?
    );
    Ok(())
}

// Characters that start no known bare-item type are rejected.
#[test]
fn parse_bare_item_errors() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Err("parse_bare_item: item type can't be identified"),
        Parser::parse_bare_item(&mut "!?0".chars().peekable())
    );
    assert_eq!(
        Err("parse_bare_item: item type can't be identified"),
        Parser::parse_bare_item(&mut "_11abc".chars().peekable())
    );
    assert_eq!(
        Err("parse_bare_item: item type can't be identified"),
        Parser::parse_bare_item(&mut " ".chars().peekable())
    );
    Ok(())
}

// Booleans stop consuming right after the variant digit.
#[test]
fn parse_bool() -> Result<(), Box<dyn Error>> {
    let mut input = "?0gk".chars().peekable();
    assert_eq!(false, Parser::parse_bool(&mut input)?);
    assert_eq!(input.collect::<String>(), "gk");
    assert_eq!(false, Parser::parse_bool(&mut "?0".chars().peekable())?);
    assert_eq!(true, Parser::parse_bool(&mut "?1".chars().peekable())?);
    Ok(())
}

// Booleans require '?' followed by a valid variant digit.
#[test]
fn parse_bool_errors() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Err("parse_bool: first character is not '?'"),
        Parser::parse_bool(&mut "".chars().peekable())
    );
    assert_eq!(
        Err("parse_bool: invalid variant"),
        Parser::parse_bool(&mut "?".chars().peekable())
    );
    Ok(())
}
// Strings stop at the closing quote; backslash escapes are decoded.
#[test]
fn parse_string() -> Result<(), Box<dyn Error>> {
    let mut input = "\"some string\" ;not string".chars().peekable();
    assert_eq!("some string".to_owned(), Parser::parse_string(&mut input)?);
    assert_eq!(input.collect::<String>(), " ;not string");
    assert_eq!(
        "test".to_owned(),
        Parser::parse_string(&mut "\"test\"".chars().peekable())?
    );
    assert_eq!(
        r#"te\st"#.to_owned(),
        Parser::parse_string(&mut "\"te\\\\st\"".chars().peekable())?
    );
    assert_eq!(
        "".to_owned(),
        Parser::parse_string(&mut "\"\"".chars().peekable())?
    );
    assert_eq!(
        "some string".to_owned(),
        Parser::parse_string(&mut "\"some string\"".chars().peekable())?
    );
    Ok(())
}

// Unquoted input, bad escapes, control chars, and a missing close quote fail.
#[test]
fn parse_string_errors() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Err("parse_string: first character is not '\"'"),
        Parser::parse_string(&mut "test".chars().peekable())
    );
    assert_eq!(
        Err("parse_string: last input character is '\\'"),
        Parser::parse_string(&mut "\"\\".chars().peekable())
    );
    assert_eq!(
        Err("parse_string: disallowed character after '\\'"),
        Parser::parse_string(&mut "\"\\l\"".chars().peekable())
    );
    assert_eq!(
        Err("parse_string: not a visible character"),
        Parser::parse_string(&mut "\"\u{1f}\"".chars().peekable())
    );
    assert_eq!(
        Err("parse_string: no closing '\"'"),
        Parser::parse_string(&mut "\"smth".chars().peekable())
    );
    Ok(())
}

// Tokens stop at the first non-token character, which stays in the input.
#[test]
fn parse_token() -> Result<(), Box<dyn Error>> {
    let mut input = "*some:token}not token".chars().peekable();
    assert_eq!("*some:token".to_owned(), Parser::parse_token(&mut input)?);
    assert_eq!(input.collect::<String>(), "}not token");
    assert_eq!(
        "token".to_owned(),
        Parser::parse_token(&mut "token".chars().peekable())?
    );
    assert_eq!(
        "a_b-c.d3:f%00/*".to_owned(),
        Parser::parse_token(&mut "a_b-c.d3:f%00/*".chars().peekable())?
    );
    assert_eq!(
        "TestToken".to_owned(),
        Parser::parse_token(&mut "TestToken".chars().peekable())?
    );
    assert_eq!(
        "some".to_owned(),
        Parser::parse_token(&mut "some@token".chars().peekable())?
    );
    assert_eq!(
        "*TestToken*".to_owned(),
        Parser::parse_token(&mut "*TestToken*".chars().peekable())?
    );
    assert_eq!(
        "*".to_owned(),
        Parser::parse_token(&mut "*[@:token".chars().peekable())?
    );
    assert_eq!(
        "test".to_owned(),
        Parser::parse_token(&mut "test token".chars().peekable())?
    );
    Ok(())
}

// Tokens must begin with ALPHA or '*'; errors leave the input unconsumed.
#[test]
fn parse_token_errors() -> Result<(), Box<dyn Error>> {
    let mut input = "765token".chars().peekable();
    assert_eq!(
        Err("parse_token: first character is not ALPHA or '*'"),
        Parser::parse_token(&mut input)
    );
    assert_eq!(input.collect::<String>(), "765token");
    assert_eq!(
        Err("parse_token: first character is not ALPHA or '*'"),
        Parser::parse_token(&mut "7token".chars().peekable())
    );
    assert_eq!(
        Err("parse_token: empty input string"),
        Parser::parse_token(&mut "".chars().peekable())
    );
    Ok(())
}

// Base64 byte sequences decode between the colons; padding is accepted.
#[test]
fn parse_byte_sequence() -> Result<(), Box<dyn Error>> {
    let mut input = ":aGVsbG8:rest_of_str".chars().peekable();
    assert_eq!(
        "hello".to_owned().into_bytes(),
        Parser::parse_byte_sequence(&mut input)?
    );
    assert_eq!("rest_of_str", input.collect::<String>());
    assert_eq!(
        "hello".to_owned().into_bytes(),
        Parser::parse_byte_sequence(&mut ":aGVsbG8:".chars().peekable())?
    );
    assert_eq!(
        "test_encode".to_owned().into_bytes(),
        Parser::parse_byte_sequence(&mut ":dGVzdF9lbmNvZGU:".chars().peekable())?
    );
    assert_eq!(
        "new:year tree".to_owned().into_bytes(),
        Parser::parse_byte_sequence(&mut ":bmV3OnllYXIgdHJlZQ==:".chars().peekable())?
    );
    assert_eq!(
        "".to_owned().into_bytes(),
        Parser::parse_byte_sequence(&mut "::".chars().peekable())?
    );
    Ok(())
}

// Byte sequences need both delimiting colons and valid base64 content.
#[test]
fn parse_byte_sequence_errors() -> Result<(), Box<dyn Error>> {
    assert_eq!(
        Err("parse_byte_seq: first char is not ':'"),
        Parser::parse_byte_sequence(&mut "aGVsbG8".chars().peekable())
    );
    assert_eq!(
        Err("parse_byte_seq: invalid char in byte sequence"),
        Parser::parse_byte_sequence(&mut ":aGVsb G8=:".chars().peekable())
    );
    assert_eq!(
        Err("parse_byte_seq: no closing ':'"),
        Parser::parse_byte_sequence(&mut ":aGVsbG8=".chars().peekable())
    );
    Ok(())
}
// Integers: sign, leading zeros, the 15-digit limits, and stop characters.
#[test]
fn parse_number_int() -> Result<(), Box<dyn Error>> {
    let mut input = "-733333333332d.14".chars().peekable();
    assert_eq!(
        Num::Integer(-733333333332),
        Parser::parse_number(&mut input)?
    );
    assert_eq!("d.14", input.collect::<String>());
    assert_eq!(
        Num::Integer(42),
        Parser::parse_number(&mut "42".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(-42),
        Parser::parse_number(&mut "-42".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(-42),
        Parser::parse_number(&mut "-042".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(0),
        Parser::parse_number(&mut "0".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(0),
        Parser::parse_number(&mut "00".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(123456789012345),
        Parser::parse_number(&mut "123456789012345".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(-123456789012345),
        Parser::parse_number(&mut "-123456789012345".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(2),
        Parser::parse_number(&mut "2,3".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(4),
        Parser::parse_number(&mut "4-2".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(-999999999999999),
        Parser::parse_number(&mut "-999999999999999".chars().peekable())?
    );
    assert_eq!(
        Num::Integer(999999999999999),
        Parser::parse_number(&mut "999999999999999".chars().peekable())?
    );
    Ok(())
}

// Decimals: leading zeros trimmed; parsing stops at a second '.' or non-digit.
#[test]
fn parse_number_decimal() -> Result<(), Box<dyn Error>> {
    let mut input = "00.42 test string".chars().peekable();
    assert_eq!(
        Num::Decimal(Decimal::from_str("0.42")?),
        Parser::parse_number(&mut input)?
    );
    assert_eq!(" test string", input.collect::<String>());
    assert_eq!(
        Num::Decimal(Decimal::from_str("1.5")?),
        Parser::parse_number(&mut "1.5.4.".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("1.8")?),
        Parser::parse_number(&mut "1.8.".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("1.7")?),
        Parser::parse_number(&mut "1.7.0".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("3.14")?),
        Parser::parse_number(&mut "3.14".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("-3.14")?),
        Parser::parse_number(&mut "-3.14".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("123456789012.1")?),
        Parser::parse_number(&mut "123456789012.1".chars().peekable())?
    );
    assert_eq!(
        Num::Decimal(Decimal::from_str("1234567890.112")?),
        Parser::parse_number(&mut "1234567890.112".chars().peekable())?
    );
    Ok(())
}
#[test]
fn parse_number_errors() -> Result<(), Box<dyn Error>> {
    // A non-numeric prefix must fail without consuming any input.
    let mut input = ":aGVsbG8:rest".chars().peekable();
    assert_eq!(
        Err("parse_number: input number does not start with a digit"),
        Parser::parse_number(&mut input)
    );
    assert_eq!(":aGVsbG8:rest", input.collect::<String>());
    // An over-long fraction fails after the number; the tail stays intact.
    let mut input = "-11.5555 test string".chars().peekable();
    assert_eq!(
        Err("parse_number: invalid decimal fraction length"),
        Parser::parse_number(&mut input)
    );
    assert_eq!(" test string", input.collect::<String>());
    // (expected error, input) pairs; the "- 42" case appears twice in the
    // upstream suite and is kept as-is.
    let cases = [
        (
            "parse_number: input number does not start with a digit",
            "--0",
        ),
        (
            "parse_number: decimal too long, illegal position for decimal point",
            "1999999999999.1",
        ),
        ("parse_number: decimal ends with '.'", "19888899999."),
        (
            "parse_number: integer too long, length > 15",
            "1999999999999999",
        ),
        (
            "parse_number: decimal too long, length > 16",
            "19999999999.99991",
        ),
        (
            "parse_number: input number does not start with a digit",
            "- 42",
        ),
        (
            "parse_number: input number does not start with a digit",
            "- 42",
        ),
        ("parse_number: decimal ends with '.'", "1..4"),
        ("parse_number: input number lacks a digit", "-"),
        ("parse_number: decimal ends with '.'", "-5. 14"),
        ("parse_number: decimal ends with '.'", "7. 1"),
        (
            "parse_number: invalid decimal fraction length",
            "-7.3333333333",
        ),
        (
            "parse_number: decimal too long, illegal position for decimal point",
            "-7333333333323.12",
        ),
    ];
    for (expected_err, input) in cases.iter() {
        assert_eq!(
            Err(*expected_err),
            Parser::parse_number(&mut input.chars().peekable())
        );
    }
    Ok(())
}
#[test]
fn parse_params_string() -> Result<(), Box<dyn Error>> {
    // A single string-valued parameter parses into one map entry.
    let expected = Parameters::from_iter(vec![(
        "b".to_owned(),
        BareItem::String("param_val".to_owned()),
    )]);
    assert_eq!(
        expected,
        Parser::parse_parameters(&mut ";b=\"param_val\"".chars().peekable())?
    );
    Ok(())
}
#[test]
fn parse_params_bool() -> Result<(), Box<dyn Error>> {
    // Valueless parameters default to boolean true; insertion order is kept.
    let expected = Parameters::from_iter(vec![
        ("b".to_owned(), BareItem::Boolean(true)),
        ("a".to_owned(), BareItem::Boolean(true)),
    ]);
    assert_eq!(
        expected,
        Parser::parse_parameters(&mut ";b;a".chars().peekable())?
    );
    Ok(())
}
#[test]
fn parse_params_mixed_types() -> Result<(), Box<dyn Error>> {
    // Boolean and decimal parameter values in one parameter list.
    let expected = Parameters::from_iter(vec![
        ("key1".to_owned(), BareItem::Boolean(false)),
        ("key2".to_owned(), Decimal::from_str("746.15")?.into()),
    ]);
    assert_eq!(
        expected,
        Parser::parse_parameters(&mut ";key1=?0;key2=746.15".chars().peekable())?
    );
    Ok(())
}
#[test]
fn parse_params_with_spaces() -> Result<(), Box<dyn Error>> {
    // Spaces after each ';' are tolerated before a parameter key.
    let expected = Parameters::from_iter(vec![
        ("key1".to_owned(), BareItem::Boolean(false)),
        ("key2".to_owned(), 11111.into()),
    ]);
    assert_eq!(
        expected,
        Parser::parse_parameters(&mut "; key1=?0; key2=11111".chars().peekable())?
    );
    Ok(())
}
#[test]
fn parse_params_empty() -> Result<(), Box<dyn Error>> {
    // Any input that does not start with ';' yields an empty parameter map
    // (the empty input is exercised twice, matching the upstream suite's
    // &str and String variants).
    let inputs = [" key1=?0; key2=11111", "", "[;a=1", ""];
    for input in inputs.iter() {
        assert_eq!(
            Parameters::new(),
            Parser::parse_parameters(&mut input.chars().peekable())?
        );
    }
    Ok(())
}
#[test]
fn parse_key() -> Result<(), Box<dyn Error>> {
    // Key parsing stops at the first character that is not a key character
    // (e.g. '=' or '['); the pairs are (expected key, input).
    let cases = [
        ("a", "a=1"),
        ("a1", "a1=10"),
        ("*1", "*1=10"),
        ("f", "f[f=10"),
    ];
    for (expected, input) in cases.iter() {
        assert_eq!(
            *expected,
            Parser::parse_key(&mut input.chars().peekable())?
        );
    }
    Ok(())
}
#[test]
fn parse_key_errors() -> Result<(), Box<dyn Error>> {
    // Keys must begin with a lowercase letter or '*'.
    let result = Parser::parse_key(&mut "[*f=10".chars().peekable());
    assert_eq!(
        Err("parse_key: first character is not lcalpha or '*'"),
        result
    );
    Ok(())
}
#[test]
fn parse_more_list() -> Result<(), Box<dyn Error>> {
    // A list header split across two field lines must merge into one list.
    let mut list = Parser::parse_list("(1 2)".as_bytes())?;
    let _ = list.parse_more("42".as_bytes())?;
    let inner = InnerList::new(vec![Item::new(1.into()), Item::new(2.into())]);
    let expected: List = vec![inner.into(), Item::new(42.into()).into()];
    assert_eq!(expected, list);
    Ok(())
}
#[test]
fn parse_more_dict() -> Result<(), Box<dyn Error>> {
    // A dictionary header split across two field lines merges into one dict;
    // trailing tabs on the first line are tolerated.
    let mut dict = Parser::parse_dictionary("a=1, b;foo=*\t\t".as_bytes())?;
    let _ = dict.parse_more(" c=3".as_bytes())?;
    let b_params =
        Parameters::from_iter(vec![("foo".to_owned(), BareItem::Token("*".to_owned()))]);
    let expected = Dictionary::from_iter(vec![
        ("a".to_owned(), Item::new(1.into()).into()),
        (
            "b".to_owned(),
            Item::with_params(BareItem::Boolean(true), b_params).into(),
        ),
        ("c".to_owned(), Item::new(3.into()).into()),
    ]);
    assert_eq!(expected, dict);
    Ok(())
}
#[test]
fn parse_more_errors() -> Result<(), Box<dyn Error>> {
    // Continuation input that is itself malformed must fail the whole parse.
    assert!(Parser::parse_dictionary("a=1, b;foo=*".as_bytes())?
        .parse_more(",a".as_bytes())
        .is_err());
    assert!(Parser::parse_list("a, b;foo=*".as_bytes())?
        .parse_more("(a, 2)".as_bytes())
        .is_err());
    Ok(())
}

531
third_party/rust/sfv/src/test_serializer.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,531 @@
use crate::serializer::Serializer;
use crate::FromStr;
use crate::SerializeValue;
use crate::{BareItem, Decimal, Dictionary, InnerList, Item, List, Parameters};
use std::error::Error;
use std::iter::FromIterator;
#[test]
fn serialize_value_empty_dict() -> Result<(), Box<dyn Error>> {
    // An empty dictionary cannot be serialized into a field value.
    assert_eq!(
        Err("serialize_dictionary: serializing empty field is not allowed"),
        Dictionary::new().serialize_value()
    );
    Ok(())
}
#[test]
fn serialize_value_empty_list() -> Result<(), Box<dyn Error>> {
    // An empty list cannot be serialized into a field value.
    assert_eq!(
        Err("serialize_list: serializing empty field is not allowed"),
        List::new().serialize_value()
    );
    Ok(())
}
#[test]
fn serialize_value_list_mixed_members_with_params() -> Result<(), Box<dyn Error>> {
    // A list mixing bare items and a parameterized inner list. The expected
    // output also checks decimal rounding (42.4568 -> 42.457), string
    // escaping ("valu\e" -> "valu\\e"), and byte-sequence base64 encoding.
    let decimal_member = Item::new(Decimal::from_str("42.4568")?.into());
    let int_member_params =
        Parameters::from_iter(vec![("itm2_p".to_owned(), BareItem::Boolean(true))]);
    let int_member = Item::with_params(17.into(), int_member_params);
    let str_item_params =
        Parameters::from_iter(vec![("in1_p".to_owned(), BareItem::Boolean(false))]);
    let str_item =
        Item::with_params(BareItem::String("str1".to_owned()), str_item_params);
    let tok_item_params = Parameters::from_iter(vec![(
        "in2_p".to_owned(),
        BareItem::String("valu\\e".to_owned()),
    )]);
    let tok_item =
        Item::with_params(BareItem::Token("str2".to_owned()), tok_item_params);
    let inner_params = Parameters::from_iter(vec![(
        "inner_list_param".to_owned(),
        BareItem::ByteSeq("weather".as_bytes().to_vec()),
    )]);
    let inner_member = InnerList::with_params(vec![str_item, tok_item], inner_params);
    let list: List = vec![decimal_member.into(), int_member.into(), inner_member.into()];
    let expected = "42.457, 17;itm2_p, (\"str1\";in1_p=?0 str2;in2_p=\"valu\\\\e\");inner_list_param=:d2VhdGhlcg==:";
    assert_eq!(expected, list.serialize_value()?);
    Ok(())
}
#[test]
fn serialize_value_errors() -> Result<(), Box<dyn Error>> {
    // Non-ASCII string content is rejected.
    let non_ascii_item = Item::new(BareItem::String("non-ascii text 🐹".into()));
    assert_eq!(
        Err("serialize_string: non-ascii character"),
        non_ascii_item.serialize_value()
    );
    // Decimals with more than 12 integer digits are rejected.
    let wide_decimal_item = Item::new(Decimal::from_str("12345678912345.123")?.into());
    assert_eq!(
        Err("serialize_decimal: integer component > 12 digits"),
        wide_decimal_item.serialize_value()
    );
    // Parameter keys must start with a lowercase letter or '*'.
    let bad_key_params = Parameters::from_iter(vec![("_key".to_owned(), 13.into())]);
    let bad_key_item = Item::with_params(12.into(), bad_key_params);
    assert_eq!(
        Err("serialize_key: first character is not lcalpha or '*'"),
        bad_key_item.serialize_value()
    );
    Ok(())
}
#[test]
fn serialize_item_byteseq_with_param() -> Result<(), Box<dyn Error>> {
    // Byte sequences are base64-encoded between colons; token params follow.
    let params = Parameters::from_iter(vec![(
        "a".to_owned(),
        BareItem::Token("*ab_1".into()),
    )]);
    let item = Item::with_params(BareItem::ByteSeq("parser".as_bytes().to_vec()), params);
    let mut output = String::new();
    Serializer::serialize_item(&item, &mut output)?;
    assert_eq!(":cGFyc2Vy:;a=*ab_1", &output);
    Ok(())
}
#[test]
fn serialize_item_without_params() -> Result<(), Box<dyn Error>> {
    // A bare integer item serializes to just its digits.
    let mut output = String::new();
    Serializer::serialize_item(&Item::new(1.into()), &mut output)?;
    assert_eq!("1", &output);
    Ok(())
}
#[test]
fn serialize_item_with_bool_true_param() -> Result<(), Box<dyn Error>> {
    // A boolean-true parameter is written as bare ";a" (no "=?1").
    let params = Parameters::from_iter(vec![("a".to_owned(), BareItem::Boolean(true))]);
    let item = Item::with_params(Decimal::from_str("12.35")?.into(), params);
    let mut output = String::new();
    Serializer::serialize_item(&item, &mut output)?;
    assert_eq!("12.35;a", &output);
    Ok(())
}
#[test]
fn serialize_item_with_token_param() -> Result<(), Box<dyn Error>> {
    // String bare items are quoted; token parameter values are not.
    let params =
        Parameters::from_iter(vec![("a1".to_owned(), BareItem::Token("*tok".to_owned()))]);
    let item = Item::with_params(BareItem::String("12.35".to_owned()), params);
    let mut output = String::new();
    Serializer::serialize_item(&item, &mut output)?;
    assert_eq!("\"12.35\";a1=*tok", &output);
    Ok(())
}
#[test]
fn serialize_integer() -> Result<(), Box<dyn Error>> {
    // (input, expected) pairs covering sign, zero, and the 15-digit bounds.
    let cases: &[(i64, &str)] = &[
        (-12, "-12"),
        (0, "0"),
        (999_999_999_999_999, "999999999999999"),
        (-999_999_999_999_999, "-999999999999999"),
    ];
    for &(input, expected) in cases {
        let mut buf = String::new();
        Serializer::serialize_integer(input, &mut buf)?;
        assert_eq!(expected, &buf);
    }
    Ok(())
}
#[test]
fn serialize_integer_errors() -> Result<(), Box<dyn Error>> {
    // Values outside +/-999_999_999_999_999 are rejected.
    let mut buf = String::new();
    for &out_of_range in [1_000_000_000_000_000i64, -1_000_000_000_000_000].iter() {
        assert_eq!(
            Err("serialize_integer: integer is out of range"),
            Serializer::serialize_integer(out_of_range, &mut buf)
        );
        buf.clear();
    }
    Ok(())
}
#[test]
fn serialize_decimal() -> Result<(), Box<dyn Error>> {
    // (input, expected) pairs: the fractional part is limited to three digits
    // on output (137121212112.1238 -> .124); redundant leading zeros in the
    // integer component are dropped.
    let cases = [
        ("-99.1346897", "-99.135"),
        ("-1.00", "-1.0"),
        ("-00000000000000000000000099.1346897", "-99.135"),
        ("100.13", "100.13"),
        ("-100.130", "-100.130"),
        ("-137.0", "-137.0"),
        ("137121212112.123", "137121212112.123"),
        ("137121212112.1238", "137121212112.124"),
    ];
    for (input, expected) in cases.iter() {
        let mut buf = String::new();
        Serializer::serialize_decimal(Decimal::from_str(input)?, &mut buf)?;
        assert_eq!(*expected, &buf);
    }
    Ok(())
}
#[test]
fn serialize_decimal_errors() -> Result<(), Box<dyn Error>> {
    // More than 12 digits before the decimal point cannot be serialized.
    let mut buf = String::new();
    let result = Serializer::serialize_decimal(Decimal::from_str("1371212121121.1")?, &mut buf);
    assert_eq!(Err("serialize_decimal: integer component > 12 digits"), result);
    Ok(())
}
#[test]
fn serialize_string() -> Result<(), Box<dyn Error>> {
    // Strings are wrapped in DQUOTEs with '\' and '"' backslash-escaped;
    // the last two cases cover one- and two-space inputs.
    let cases = [
        ("1.1 text", "\"1.1 text\""),
        ("hello \"name\"", "\"hello \\\"name\\\"\""),
        ("something\\nothing", "\"something\\\\nothing\""),
        ("", "\"\""),
        (" ", "\" \""),
        ("  ", "\"  \""),
    ];
    for (input, expected) in cases.iter() {
        let mut buf = String::new();
        Serializer::serialize_string(input, &mut buf)?;
        assert_eq!(*expected, &buf);
    }
    Ok(())
}
#[test]
fn serialize_string_errors() -> Result<(), Box<dyn Error>> {
    // Control characters (NUL, 0x1f) and DEL (0x7f) are not "visible";
    // any non-ASCII content is rejected outright.
    let mut buf = String::new();
    let cases = [
        ("text \x00", "serialize_string: not a visible character"),
        ("text \x1f", "serialize_string: not a visible character"),
        ("text \x7f", "serialize_string: not a visible character"),
        ("рядок", "serialize_string: non-ascii character"),
    ];
    for (input, expected_err) in cases.iter() {
        assert_eq!(
            Err(*expected_err),
            Serializer::serialize_string(input, &mut buf)
        );
    }
    Ok(())
}
#[test]
fn serialize_token() -> Result<(), Box<dyn Error>> {
    // Valid tokens are emitted verbatim (no quoting or escaping), so each
    // input doubles as its own expected output.
    let tokens = ["*", "abc", "abc:de", "smth/#!else"];
    for token in tokens.iter() {
        let mut buf = String::new();
        Serializer::serialize_token(token, &mut buf)?;
        assert_eq!(*token, &buf);
    }
    Ok(())
}
#[test]
fn serialize_token_errors() -> Result<(), Box<dyn Error>> {
    // The error messages below use the library's "serialise" spelling.
    let mut buf = String::new();
    let cases = [
        ("#some", "serialise_token: first character is not ALPHA or '*'"),
        ("s ", "serialise_token: disallowed character"),
        ("abc:de\t", "serialise_token: disallowed character"),
    ];
    for (input, expected_err) in cases.iter() {
        assert_eq!(
            Err(*expected_err),
            Serializer::serialize_token(input, &mut buf)
        );
    }
    Ok(())
}
#[test]
fn serialize_byte_sequence() -> Result<(), Box<dyn Error>> {
    // Byte sequences become padded base64 wrapped in colons; the "…asure."
    // family covers every padding length, plus the empty input.
    let cases = [
        ("hello", ":aGVsbG8=:"),
        ("test_encode", ":dGVzdF9lbmNvZGU=:"),
        ("", "::"),
        ("pleasure.", ":cGxlYXN1cmUu:"),
        ("leasure.", ":bGVhc3VyZS4=:"),
        ("easure.", ":ZWFzdXJlLg==:"),
        ("asure.", ":YXN1cmUu:"),
        ("sure.", ":c3VyZS4=:"),
    ];
    for (input, expected) in cases.iter() {
        let mut buf = String::new();
        Serializer::serialize_byte_sequence(input.as_bytes(), &mut buf)?;
        assert_eq!(*expected, &buf);
    }
    Ok(())
}
#[test]
fn serialize_bool() -> Result<(), Box<dyn Error>> {
    // Booleans serialize as "?1" and "?0".
    let cases = [(true, "?1"), (false, "?0")];
    for &(input, expected) in cases.iter() {
        let mut buf = String::new();
        Serializer::serialize_bool(input, &mut buf)?;
        assert_eq!(expected, &buf);
    }
    Ok(())
}
#[test]
fn serialize_params_bool() -> Result<(), Box<dyn Error>> {
    // Boolean-true parameters serialize as bare ";key" with no value part.
    let params = Parameters::from_iter(vec![
        ("*b".to_owned(), BareItem::Boolean(true)),
        ("a.a".to_owned(), BareItem::Boolean(true)),
    ]);
    let mut output = String::new();
    Serializer::serialize_parameters(&params, &mut output)?;
    assert_eq!(";*b;a.a", &output);
    Ok(())
}
#[test]
fn serialize_params_string() -> Result<(), Box<dyn Error>> {
    // String parameter values are quoted on output.
    let params = Parameters::from_iter(vec![(
        "b".to_owned(),
        BareItem::String("param_val".to_owned()),
    )]);
    let mut output = String::new();
    Serializer::serialize_parameters(&params, &mut output)?;
    assert_eq!(";b=\"param_val\"", &output);
    Ok(())
}
#[test]
fn serialize_params_numbers() -> Result<(), Box<dyn Error>> {
    // Decimal and integer parameter values serialize after '='.
    let params = Parameters::from_iter(vec![
        ("key1".to_owned(), Decimal::from_str("746.15")?.into()),
        ("key2".to_owned(), 11111.into()),
    ]);
    let mut output = String::new();
    Serializer::serialize_parameters(&params, &mut output)?;
    assert_eq!(";key1=746.15;key2=11111", &output);
    Ok(())
}
#[test]
fn serialize_params_mixed_types() -> Result<(), Box<dyn Error>> {
    // Decimal parameter values are limited to three fractional digits
    // on output (1354.091878 -> 1354.092).
    let params = Parameters::from_iter(vec![
        ("key1".to_owned(), BareItem::Boolean(false)),
        ("key2".to_owned(), Decimal::from_str("1354.091878")?.into()),
    ]);
    let mut output = String::new();
    Serializer::serialize_parameters(&params, &mut output)?;
    assert_eq!(";key1=?0;key2=1354.092", &output);
    Ok(())
}
#[test]
fn serialize_key() -> Result<(), Box<dyn Error>> {
    // Valid keys pass through unchanged, so each input is its own expectation.
    let keys = ["*a_fg", "*a_fg*", "key1", "ke-y.1"];
    for key in keys.iter() {
        let mut buf = String::new();
        Serializer::serialize_key(key, &mut buf)?;
        assert_eq!(*key, &buf);
    }
    Ok(())
}
#[test]
fn serialize_key_erros() -> Result<(), Box<dyn Error>> {
    // NOTE(review): the fn name carries an upstream "erros" typo; kept so
    // `cargo test` name filters targeting it keep working.
    let mut buf = String::new();
    let cases = [
        ("AND", "serialize_key: disallowed character in input"),
        ("_key", "serialize_key: first character is not lcalpha or '*'"),
        ("7key", "serialize_key: first character is not lcalpha or '*'"),
    ];
    for (key, expected_err) in cases.iter() {
        assert_eq!(Err(*expected_err), Serializer::serialize_key(key, &mut buf));
    }
    Ok(())
}
#[test]
fn serialize_list_of_items_and_inner_list() -> Result<(), Box<dyn Error>> {
    // Mixed list: two integers followed by an inner list with a string param.
    let inner_params = Parameters::from_iter(vec![(
        "param".to_owned(),
        BareItem::String("param_value_1".to_owned()),
    )]);
    let inner = InnerList::with_params(
        vec![
            Item::new(BareItem::Token("a".to_owned())),
            Item::new(BareItem::Token("b".to_owned())),
        ],
        inner_params,
    );
    let list: List = vec![
        Item::new(12.into()).into(),
        Item::new(14.into()).into(),
        inner.into(),
    ];
    let mut output = String::new();
    Serializer::serialize_list(&list, &mut output)?;
    assert_eq!("12, 14, (a b);param=\"param_value_1\"", &output);
    Ok(())
}
#[test]
fn serialize_list_of_lists() -> Result<(), Box<dyn Error>> {
    // Inner lists are parenthesized, space-separated inside, comma-joined.
    let first = InnerList::new(vec![Item::new(1.into()), Item::new(2.into())]);
    let second = InnerList::new(vec![Item::new(42.into()), Item::new(43.into())]);
    let list: List = vec![first.into(), second.into()];
    let mut output = String::new();
    Serializer::serialize_list(&list, &mut output)?;
    assert_eq!("(1 2), (42 43)", &output);
    Ok(())
}
#[test]
fn serialize_list_with_bool_item_and_bool_params() -> Result<(), Box<dyn Error>> {
    // False values keep "=?0"; a true param value drops its "=?1" part.
    let flag_params = Parameters::from_iter(vec![
        ("a".to_owned(), BareItem::Boolean(true)),
        ("b".to_owned(), BareItem::Boolean(false)),
    ]);
    let flag_item = Item::with_params(BareItem::Boolean(false), flag_params);
    let token_item = Item::new(BareItem::Token("cde_456".to_owned()));
    let list: List = vec![flag_item.into(), token_item.into()];
    let mut output = String::new();
    Serializer::serialize_list(&list, &mut output)?;
    assert_eq!("?0;a;b=?0, cde_456", &output);
    Ok(())
}
#[test]
fn serialize_dictionary_with_params() -> Result<(), Box<dyn Error>> {
    // Dictionary members serialize in insertion order; each member's params
    // are appended to its value (empty params add nothing).
    let abc_params = Parameters::from_iter(vec![
        ("a".to_owned(), 1.into()),
        ("b".to_owned(), BareItem::Boolean(true)),
    ]);
    let ghi_params = Parameters::from_iter(vec![
        ("q".to_owned(), BareItem::Boolean(false)),
        ("r".to_owned(), BareItem::String("+w".to_owned())),
    ]);
    let dict = Dictionary::from_iter(vec![
        (
            "abc".to_owned(),
            Item::with_params(123.into(), abc_params).into(),
        ),
        (
            "def".to_owned(),
            Item::with_params(456.into(), Parameters::new()).into(),
        ),
        (
            "ghi".to_owned(),
            Item::with_params(789.into(), ghi_params).into(),
        ),
    ]);
    let mut output = String::new();
    Serializer::serialize_dict(&dict, &mut output)?;
    assert_eq!("abc=123;a=1;b, def=456, ghi=789;q=?0;r=\"+w\"", &output);
    Ok(())
}
#[test]
fn serialize_dict_empty_member_value() -> Result<(), Box<dyn Error>> {
    // An empty inner list is a valid member value and serializes as "()".
    let dict = Dictionary::from_iter(vec![("a".to_owned(), InnerList::new(vec![]).into())]);
    let mut output = String::new();
    Serializer::serialize_dict(&dict, &mut output)?;
    assert_eq!("a=()", &output);
    Ok(())
}

44
third_party/rust/sfv/src/utils.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,44 @@
use data_encoding::{Encoding, Specification};
use std::iter::Peekable;
use std::str::Chars;
pub(crate) fn base64() -> Result<Encoding, &'static str> {
    // Hand-built base64 codec over the standard alphabet, configured so that
    // '=' characters are ignored (instead of enforced as padding) and
    // trailing bits are not validated — a permissive decoder.
    // NOTE(review): this builds a fresh Encoding on every call; callers on a
    // hot path may want to cache the returned value.
    let mut spec = Specification::new();
    spec.symbols
        .push_str("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/");
    spec.padding = None;
    spec.ignore = "=".to_owned();
    spec.check_trailing_bits = false;
    spec.encoding()
        .map_err(|_err| "invalid base64 specification")
}
// Reports whether `c` is a `tchar` as defined by RFC 7230 section 3.2.6:
// any ASCII alphanumeric, or one of the listed punctuation characters.
pub(crate) fn is_tchar(c: char) -> bool {
    match c {
        '!' | '#' | '$' | '%' | '&' | '\'' | '*' | '+' | '-' | '.' | '^' | '_' | '`' | '|'
        | '~' => true,
        _ => c.is_ascii_alphanumeric(),
    }
}
// Reports whether `c` may appear inside a byte-sequence body: the base64
// alphabet (alphanumerics, '+', '/') plus the '=' padding character.
pub(crate) fn is_allowed_b64_content(c: char) -> bool {
    c.is_ascii_alphanumeric() || "+=/".contains(c)
}
// Advances the iterator past optional whitespace (OWS = SP / HTAB, per
// RFC 7230), leaving the first non-whitespace character unconsumed.
pub(crate) fn consume_ows_chars(input_chars: &mut Peekable<Chars>) {
    loop {
        match input_chars.peek() {
            Some(&' ') | Some(&'\t') => {
                input_chars.next();
            }
            _ => break,
        }
    }
}
// Advances the iterator past SP characters only — unlike `consume_ows_chars`,
// a horizontal tab stops the scan and is left unconsumed.
pub(crate) fn consume_sp_chars(input_chars: &mut Peekable<Chars>) {
    loop {
        match input_chars.peek() {
            Some(&' ') => {
                input_chars.next();
            }
            _ => break,
        }
    }
}