bug 1529278: mozrunner: vendor plist crate; r=ato

This commit is contained in:
Nupur Baghel 2019-03-15 15:06:59 +00:00 коммит произвёл Andreas Tolfsen
Родитель 2c34e28fb4
Коммит 133813a888
48 изменённых файлов: 9133 добавлений и 0 удалений

1
third_party/rust/base64-0.9.3/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@@ -0,0 +1 @@
{"files":{"Cargo.toml":"c155ba2131467bbda05a4c8aca4cf40c4a62d2ddce183a79695513948470aee7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0dd882e53de11566d50f8e8e2d5a651bcf3fabee4987d70f306233cf39094ba7","README.md":"f4319521a3683eebd5bf96d166b4d5a8c52fb8480b8b7724661ac38a9b89d7aa","RELEASE-NOTES.md":"a9eb191e951b968e230a9499e955f22056687a184f05fb7f138b30cbc4669f5a","benches/benchmarks.rs":"bfc990dfa1716acf5bbd7189b3d7438145a9441833d6e4a796d2ce4428e55e14","examples/make_tables.rs":"e7c3e665874faa10ff1906420eb5182d1b725bcd30ff0d6da719bb368683e197","src/chunked_encoder.rs":"3d92a8241ed6d1a402ad1c1a8c7aa9c736b62cecc4071f44bc29b1b7cc70488c","src/decode.rs":"968c9c13746338ea307d3ff0b528df7a4fa20c7a1071c6adc1fb23c573741b57","src/display.rs":"d8ec9fa3f2b03640b026bbdfd817cd76454e1bb9b6ae53bd793334cd0a4ac62e","src/encode.rs":"c38c64582105ea698c5d1ec1596f5f1d99cfc1fb894226b79c5c8e96f73e6541","src/lib.rs":"4aeed1db6057b03532b20c34dbb822c1edcb0b5a2c21cdd50e7ac6b06858843a","src/line_wrap.rs":"fd136457629b803417bd25eb191fd3665bb57b8726fc4d74e7325e499e4e88af","src/tables.rs":"10b09997ed9765dc46c2d1f0df21b915d88c4f47a292b5c7df5c1851e724fb03","src/tests.rs":"cf0ddeb01b2149041b7ea99a881b6f149b1630110c5c2eedee6b1f44e873ea6c","tests/decode.rs":"c06a7595a0ac708961a180d4a8393f6d377b56028f75b29e6e34fd8451d84226","tests/encode.rs":"2eb3c8f81a4f807be69d1dfb0857625130017b0fa7037f7168f434c886cc851b","tests/helpers.rs":"a76015e4a4e8f98213bdbaa592cd9574ccdc95a28e1b1f835a2753e09fa6037f","tests/tests.rs":"13929406ba13504ff49d1a16a2578a87c560834fba0eaad78edecf0793261b0e"},"package":"489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643"}

32
third_party/rust/base64-0.9.3/Cargo.toml поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,32 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "base64"
version = "0.9.3"
authors = ["Alice Maz <alice@alicemaz.com>", "Marshall Pierce <marshall@mpierce.org>"]
description = "encodes and decodes base64 as bytes or utf8"
documentation = "https://github.com/alicemaz/rust-base64/blob/master/README.md"
readme = "README.md"
keywords = ["base64", "utf8", "encode", "decode"]
categories = ["encoding"]
license = "MIT/Apache-2.0"
repository = "https://github.com/alicemaz/rust-base64"
[profile.bench]
debug = true
[dependencies.byteorder]
version = "1.1.0"
[dependencies.safemem]
version = "0.3"
[dev-dependencies.rand]
version = "0.4"

201
third_party/rust/base64-0.9.3/LICENSE-APACHE поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
third_party/rust/base64-0.9.3/LICENSE-MIT поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Alice Maz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

102
third_party/rust/base64-0.9.3/README.md поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,102 @@
[base64](https://crates.io/crates/base64)
===
[![](https://img.shields.io/crates/v/base64.svg)](https://crates.io/crates/base64) [![Docs](https://docs.rs/base64/badge.svg)](https://docs.rs/base64)
It's base64. What more could anyone want?
Example
---
```rust
extern crate base64;
use base64::{encode, decode};
fn main() {
let a = b"hello world";
let b = "aGVsbG8gd29ybGQ=";
assert_eq!(encode(a), b);
assert_eq!(a, &decode(b).unwrap()[..]);
}
```
See the [docs](https://docs.rs/base64) for all the details.
Purpose
---
I have a fondness for small dependency footprints, ecosystems where you can pick and choose what functionality you need, and no more. Unix philosophy sort of thing I guess, many tiny utilities interoperating across a common interface. One time making a Twitter bot, I ran into the need to correctly pluralize arbitrary words. I found on npm a module that did nothing but pluralize words. Nothing else, just a couple of functions. I'd like for this to be that "just a couple of functions."
Developing
---
Benchmarks are in `benches/`. Running them requires nightly rust, but `rustup` makes it easy:
```
rustup run nightly cargo bench
```
Decoding is aided by some pre-calculated tables, which are generated by:
```
cargo run --example make_tables > src/tables.rs.tmp && mv src/tables.rs.tmp src/tables.rs
```
Profiling
---
On Linux, you can use [perf](https://perf.wiki.kernel.org/index.php/Main_Page) for profiling. Then compile the benchmarks with `rustup run nightly cargo bench --no-run`.
Run the benchmark binary with `perf` (shown here filtering to one particular benchmark, which will make the results easier to read). `perf` is only available to the root user on most systems as it fiddles with event counters in your CPU, so use `sudo`. We need to run the actual benchmark binary, hence the path into `target`. You can see the actual full path with `rustup run nightly cargo bench -v`; it will print out the commands it runs. If you use the exact path that `bench` outputs, make sure you get the one that's for the benchmarks, not the tests. You may also want to `cargo clean` so you have only one `benchmarks-` binary (they tend to accumulate).
```
sudo perf record target/release/deps/benchmarks-* --bench decode_10mib_reuse
```
Then analyze the results, again with perf:
```
sudo perf annotate -l
```
You'll see a bunch of interleaved rust source and assembly like this. The section with `lib.rs:327` is telling us that 4.02% of samples saw the `movzbl` aka bit shift as the active instruction. However, this percentage is not as exact as it seems due to a phenomenon called *skid*. Basically, a consequence of how fancy modern CPUs are is that this sort of instruction profiling is inherently inaccurate, especially in branch-heavy code.
```
lib.rs:322 0.70 : 10698: mov %rdi,%rax
2.82 : 1069b: shr $0x38,%rax
: if morsel == decode_tables::INVALID_VALUE {
: bad_byte_index = input_index;
: break;
: };
: accum = (morsel as u64) << 58;
lib.rs:327 4.02 : 1069f: movzbl (%r9,%rax,1),%r15d
: // fast loop of 8 bytes at a time
: while input_index < length_of_full_chunks {
: let mut accum: u64;
:
: let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
: morsel = decode_table[(input_chunk >> 56) as usize];
lib.rs:322 3.68 : 106a4: cmp $0xff,%r15
: if morsel == decode_tables::INVALID_VALUE {
0.00 : 106ab: je 1090e <base64::decode_config_buf::hbf68a45fefa299c1+0x46e>
```
Fuzzing
---
This uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). See `fuzz/fuzzers` for the available fuzzing scripts. To run, use an invocation like these:
```
cargo +nightly fuzz run roundtrip
cargo +nightly fuzz run roundtrip_no_pad
cargo +nightly fuzz run roundtrip_mime -- -max_len=10240
cargo +nightly fuzz run roundtrip_random_config -- -max_len=10240
```
License
---
This project is dual-licensed under MIT and Apache 2.0.

62
third_party/rust/base64-0.9.3/RELEASE-NOTES.md поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,62 @@
# 0.9.3
- Update safemem
# 0.9.2
- Derive `Clone` for `DecodeError`.
# 0.9.1
- Add support for `crypt(3)`'s base64 variant.
# 0.9.0
- `decode_config_slice` function for no-allocation decoding, analogous to `encode_config_slice`
- Decode performance optimization
# 0.8.0
- `encode_config_slice` function for no-allocation encoding
# 0.7.0
- `STANDARD_NO_PAD` config
- `Base64Display` heap-free wrapper for use in format strings, etc
# 0.6.0
- Decode performance improvements
- Use `unsafe` in fewer places
- Added fuzzers
# 0.5.2
- Avoid usize overflow when calculating length
- Better line wrapping performance
# 0.5.1
- Temporarily disable line wrapping
- Add Apache 2.0 license
# 0.5.0
- MIME support, including configurable line endings and line wrapping
- Removed `decode_ws`
- Renamed `Base64Error` to `DecodeError`
# 0.4.1
- Allow decoding a `AsRef<[u8]>` instead of just a `&str`
# 0.4.0
- Configurable padding
- Encode performance improvements
# 0.3.0
- Added encode/decode functions that do not allocate their own storage
- Decode performance improvements
- Extraneous padding bytes are no longer ignored. Now, an error will be returned.

345
third_party/rust/base64-0.9.3/benches/benchmarks.rs поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,345 @@
#![feature(test)]
extern crate base64;
extern crate rand;
extern crate test;
use base64::display;
use base64::{decode, decode_config_buf, decode_config_slice, encode, encode_config_buf,
encode_config_slice, Config, MIME, STANDARD};
use rand::Rng;
use test::Bencher;
#[bench]
fn encode_3b(b: &mut Bencher) {
do_encode_bench(b, 3)
}
#[bench]
fn encode_3b_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 3, STANDARD)
}
#[bench]
fn encode_3b_slice(b: &mut Bencher) {
do_encode_bench_slice(b, 3, STANDARD)
}
#[bench]
fn encode_50b(b: &mut Bencher) {
do_encode_bench(b, 50)
}
#[bench]
fn encode_50b_display(b: &mut Bencher) {
do_encode_bench_display(b, 50)
}
#[bench]
fn encode_50b_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 50, STANDARD)
}
#[bench]
fn encode_50b_slice(b: &mut Bencher) {
do_encode_bench_slice(b, 50, STANDARD)
}
#[bench]
fn encode_100b(b: &mut Bencher) {
do_encode_bench(b, 100)
}
#[bench]
fn encode_100b_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 100, STANDARD)
}
#[bench]
fn encode_500b(b: &mut Bencher) {
do_encode_bench(b, 500)
}
#[bench]
fn encode_500b_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 500, STANDARD)
}
#[bench]
fn encode_500b_reuse_buf_mime(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 500, MIME)
}
#[bench]
fn encode_3kib(b: &mut Bencher) {
do_encode_bench(b, 3 * 1024)
}
#[bench]
fn encode_3kib_display(b: &mut Bencher) {
do_encode_bench_display(b, 3 * 1024)
}
#[bench]
fn encode_3kib_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 3 * 1024, STANDARD)
}
#[bench]
fn encode_3kib_slice(b: &mut Bencher) {
do_encode_bench_slice(b, 3 * 1024, STANDARD)
}
#[bench]
fn encode_3kib_reuse_buf_mime(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 3 * 1024, MIME)
}
#[bench]
fn encode_3mib(b: &mut Bencher) {
do_encode_bench(b, 3 * 1024 * 1024)
}
#[bench]
fn encode_3mib_display(b: &mut Bencher) {
do_encode_bench_display(b, 3 * 1024 * 1024)
}
#[bench]
fn encode_3mib_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 3 * 1024 * 1024, STANDARD)
}
#[bench]
fn encode_3mib_slice(b: &mut Bencher) {
do_encode_bench_slice(b, 3 * 1024 * 1024, STANDARD)
}
#[bench]
fn encode_10mib(b: &mut Bencher) {
do_encode_bench(b, 10 * 1024 * 1024)
}
#[bench]
fn encode_10mib_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 10 * 1024 * 1024, STANDARD)
}
#[bench]
fn encode_30mib(b: &mut Bencher) {
do_encode_bench(b, 30 * 1024 * 1024)
}
#[bench]
fn encode_30mib_reuse_buf(b: &mut Bencher) {
do_encode_bench_reuse_buf(b, 30 * 1024 * 1024, STANDARD)
}
#[bench]
fn encode_30mib_slice(b: &mut Bencher) {
do_encode_bench_slice(b, 30 * 1024 * 1024, STANDARD)
}
#[bench]
fn decode_3b(b: &mut Bencher) {
do_decode_bench(b, 3)
}
#[bench]
fn decode_3b_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 3)
}
#[bench]
fn decode_3b_slice(b: &mut Bencher) {
do_decode_bench_slice(b, 3)
}
#[bench]
fn decode_50b(b: &mut Bencher) {
do_decode_bench(b, 50)
}
#[bench]
fn decode_50b_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 50)
}
#[bench]
fn decode_50b_slice(b: &mut Bencher) {
do_decode_bench_slice(b, 50)
}
#[bench]
fn decode_100b(b: &mut Bencher) {
do_decode_bench(b, 100)
}
#[bench]
fn decode_100b_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 100)
}
#[bench]
fn decode_500b(b: &mut Bencher) {
do_decode_bench(b, 500)
}
#[bench]
fn decode_500b_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 500)
}
#[bench]
fn decode_3kib(b: &mut Bencher) {
do_decode_bench(b, 3 * 1024)
}
#[bench]
fn decode_3kib_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 3 * 1024)
}
#[bench]
fn decode_3kib_slice(b: &mut Bencher) {
do_decode_bench_slice(b, 3 * 1024)
}
#[bench]
fn decode_3mib(b: &mut Bencher) {
do_decode_bench(b, 3 * 1024 * 1024)
}
#[bench]
fn decode_3mib_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 3 * 1024 * 1024)
}
#[bench]
fn decode_3mib_slice(b: &mut Bencher) {
do_decode_bench_slice(b, 3 * 1024 * 1024)
}
#[bench]
fn decode_10mib(b: &mut Bencher) {
do_decode_bench(b, 10 * 1024 * 1024)
}
#[bench]
fn decode_10mib_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 10 * 1024 * 1024)
}
#[bench]
fn decode_30mib(b: &mut Bencher) {
do_decode_bench(b, 30 * 1024 * 1024)
}
#[bench]
fn decode_30mib_reuse_buf(b: &mut Bencher) {
do_decode_bench_reuse_buf(b, 30 * 1024 * 1024)
}
#[bench]
fn decode_30mib_slice(b: &mut Bencher) {
do_decode_bench_slice(b, 30 * 1024 * 1024)
}
fn do_decode_bench(b: &mut Bencher, size: usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
b.bytes = encoded.len() as u64;
b.iter(|| {
let orig = decode(&encoded);
test::black_box(&orig);
});
}
fn do_decode_bench_reuse_buf(b: &mut Bencher, size: usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
let mut buf = Vec::new();
b.bytes = encoded.len() as u64;
b.iter(|| {
decode_config_buf(&encoded, STANDARD, &mut buf).unwrap();
test::black_box(&buf);
buf.clear();
});
}
fn do_decode_bench_slice(b: &mut Bencher, size: usize) {
let mut v: Vec<u8> = Vec::with_capacity(size * 3 / 4);
fill(&mut v);
let encoded = encode(&v);
let mut buf = Vec::new();
buf.resize(size, 0);
b.bytes = encoded.len() as u64;
b.iter(|| {
decode_config_slice(&encoded, STANDARD, &mut buf).unwrap();
test::black_box(&buf);
});
}
fn do_encode_bench(b: &mut Bencher, size: usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.bytes = v.len() as u64;
b.iter(|| {
let e = encode(&v);
test::black_box(&e);
});
}
fn do_encode_bench_display(b: &mut Bencher, size: usize) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
b.bytes = v.len() as u64;
b.iter(|| {
let e = format!("{}", display::Base64Display::standard(&v));
test::black_box(&e);
});
}
fn do_encode_bench_reuse_buf(b: &mut Bencher, size: usize, config: Config) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = String::new();
b.bytes = v.len() as u64;
b.iter(|| {
encode_config_buf(&v, config, &mut buf);
buf.clear();
});
}
fn do_encode_bench_slice(b: &mut Bencher, size: usize, config: Config) {
let mut v: Vec<u8> = Vec::with_capacity(size);
fill(&mut v);
let mut buf = Vec::new();
b.bytes = v.len() as u64;
// conservative estimate of encoded size
buf.resize(size * 2, 0);
b.iter(|| {
encode_config_slice(&v, config, &mut buf);
});
}
fn fill(v: &mut Vec<u8>) {
let cap = v.capacity();
// weak randomness is plenty; we just want to not be completely friendly to the branch predictor
let mut r = rand::weak_rng();
while v.len() < cap {
v.push(r.gen::<u8>());
}
}

116
third_party/rust/base64-0.9.3/examples/make_tables.rs поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,116 @@
use std::collections::HashMap;
use std::iter::Iterator;
fn main() {
println!("pub const INVALID_VALUE: u8 = 255;");
// A-Z
let standard_alphabet: Vec<u8> = (0x41..0x5B)
// a-z
.chain(0x61..0x7B)
// 0-9
.chain(0x30..0x3A)
// +
.chain(0x2B..0x2C)
// /
.chain(0x2F..0x30)
.collect();
print_encode_table(&standard_alphabet, "STANDARD_ENCODE", 0);
print_decode_table(&standard_alphabet, "STANDARD_DECODE", 0);
// A-Z
let url_alphabet: Vec<u8> = (0x41..0x5B)
// a-z
.chain(0x61..0x7B)
// 0-9
.chain(0x30..0x3A)
// -
.chain(0x2D..0x2E)
// _s
.chain(0x5F..0x60)
.collect();
print_encode_table(&url_alphabet, "URL_SAFE_ENCODE", 0);
print_decode_table(&url_alphabet, "URL_SAFE_DECODE", 0);
// ./0123456789
let crypt_alphabet: Vec<u8> = (b'.'..(b'9'+1))
// A-Z
.chain(b'A'..(b'Z'+1))
// a-z
.chain(b'a'..(b'z'+1))
.collect();
print_encode_table(&crypt_alphabet, "CRYPT_ENCODE", 0);
print_decode_table(&crypt_alphabet, "CRYPT_DECODE", 0);
}
fn print_encode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) {
println!("#[cfg_attr(rustfmt, rustfmt_skip)]");
println!(
"{:width$}pub const {}: &'static [u8; 64] = &[",
"",
const_name,
width = indent_depth
);
for (i, b) in alphabet.iter().enumerate() {
println!(
"{:width$}{}, // input {} (0x{:X}) => '{}' (0x{:X})",
"",
b,
i,
i,
String::from_utf8(vec![*b as u8]).unwrap(),
b,
width = indent_depth + 4
);
}
println!("{:width$}];", "", width = indent_depth);
}
fn print_decode_table(alphabet: &[u8], const_name: &str, indent_depth: usize) {
// map of alphabet bytes to 6-bit morsels
let mut input_to_morsel = HashMap::<u8, u8>::new();
// standard base64 alphabet bytes, in order
for (morsel, ascii_byte) in alphabet.iter().enumerate() {
// truncation cast is fine here
let _ = input_to_morsel.insert(*ascii_byte, morsel as u8);
}
println!("#[cfg_attr(rustfmt, rustfmt_skip)]");
println!(
"{:width$}pub const {}: &'static [u8; 256] = &[",
"",
const_name,
width = indent_depth
);
for ascii_byte in 0..256 {
let (value, comment) = match input_to_morsel.get(&(ascii_byte as u8)) {
None => (
"INVALID_VALUE".to_string(),
format!("input {} (0x{:X})", ascii_byte, ascii_byte),
),
Some(v) => (
format!("{}", *v),
format!(
"input {} (0x{:X} char '{}') => {} (0x{:X})",
ascii_byte,
ascii_byte,
String::from_utf8(vec![ascii_byte as u8]).unwrap(),
*v,
*v
),
),
};
println!(
"{:width$}{}, // {}",
"",
value,
comment,
width = indent_depth + 4
);
}
println!("{:width$}];", "", width = indent_depth);
}

509
third_party/rust/base64-0.9.3/src/chunked_encoder.rs поставляемый Normal file
Просмотреть файл

@@ -0,0 +1,509 @@
use encode::{add_padding, encode_to_slice};
use line_wrap::line_wrap;
use std::cmp;
use {Config, LineEnding, LineWrap};
/// The output mechanism for ChunkedEncoder's encoded bytes.
pub trait Sink {
type Error;
/// Handle a chunk of encoded base64 data (as UTF-8 bytes)
fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error>;
}
#[derive(Debug, PartialEq)]
pub enum ChunkedEncoderError {
/// If wrapping is configured, the line length must be a multiple of 4, and must not be absurdly
/// large (see BUF_SIZE).
InvalidLineLength,
}
const BUF_SIZE: usize = 1024;
/// A base64 encoder that emits encoded bytes in chunks without heap allocation.
pub struct ChunkedEncoder {
config: Config,
max_input_chunk_len: usize,
}
impl ChunkedEncoder {
pub fn new(config: Config) -> Result<ChunkedEncoder, ChunkedEncoderError> {
Ok(ChunkedEncoder {
config,
max_input_chunk_len: max_input_length(BUF_SIZE, &config)?,
})
}
pub fn encode<S: Sink>(&self, bytes: &[u8], sink: &mut S) -> Result<(), S::Error> {
let mut encode_buf: [u8; BUF_SIZE] = [0; BUF_SIZE];
let encode_table = self.config.char_set.encode_table();
let mut input_index = 0;
while input_index < bytes.len() {
// either the full input chunk size, or it's the last iteration
let input_chunk_len = cmp::min(self.max_input_chunk_len, bytes.len() - input_index);
let chunk = &bytes[input_index..(input_index + input_chunk_len)];
let mut b64_bytes_written = encode_to_slice(chunk, &mut encode_buf, encode_table);
input_index += input_chunk_len;
let more_input_left = input_index < bytes.len();
if self.config.pad && !more_input_left {
// no more input, add padding if needed. Buffer will have room because
// max_input_length leaves room for it.
b64_bytes_written += add_padding(bytes.len(), &mut encode_buf[b64_bytes_written..]);
}
let line_ending_bytes = match self.config.line_wrap {
LineWrap::NoWrap => 0,
LineWrap::Wrap(line_len, line_ending) => {
let initial_line_ending_bytes =
line_wrap(&mut encode_buf, b64_bytes_written, line_len, line_ending);
if more_input_left {
assert_eq!(input_chunk_len, self.max_input_chunk_len);
// If there are more bytes of input, then we know we didn't just do the
// last chunk. line_wrap() doesn't put an ending after the last line, so we
// append one more line ending here. Since the chunk just encoded was not
// the last one, it was multiple of the line length (max_input_chunk_len),
// and therefore we can just put the line ending bytes at the end of the
// contents of the buffer.
match line_ending {
LineEnding::LF => {
encode_buf[b64_bytes_written + initial_line_ending_bytes] = b'\n';
initial_line_ending_bytes + 1
}
LineEnding::CRLF => {
encode_buf[b64_bytes_written + initial_line_ending_bytes] = b'\r';
encode_buf[b64_bytes_written + initial_line_ending_bytes + 1] =
b'\n';
initial_line_ending_bytes + 2
}
}
} else {
initial_line_ending_bytes
}
}
};
let total_bytes_written = b64_bytes_written + line_ending_bytes;
sink.write_encoded_bytes(&encode_buf[0..total_bytes_written])?;
}
Ok(())
}
}
/// Calculate the longest input that can be encoded for the given output buffer size.
///
/// If config requires line wrap, the calculated input length will be the maximum number of input
/// lines that can fit in the output buffer after each line has had its line ending appended.
///
/// If the config requires padding, two bytes of buffer space will be set aside so that the last
/// chunk of input can be encoded safely.
///
/// The input length will always be a multiple of 3 so that no encoding state has to be carried over
/// between chunks.
///
/// If the configured line length is not divisible by 4 (and therefore would require carrying
/// encoder state between chunks), or if the line length is too big for the buffer, an error will be
/// returned.
///
/// Note that the last overall line of input should *not* have an ending appended, but this will
/// conservatively calculate space as if it should because encoding is done in chunks, and all the
/// chunks before the last one will need a line ending after the last encoded line in that chunk.
fn max_input_length(encoded_buf_len: usize, config: &Config) -> Result<usize, ChunkedEncoderError> {
let effective_buf_len = if config.pad {
// make room for padding
encoded_buf_len
.checked_sub(2)
.expect("Don't use a tiny buffer")
} else {
encoded_buf_len
};
match config.line_wrap {
// No wrapping, no padding, so just normal base64 expansion.
LineWrap::NoWrap => Ok((effective_buf_len / 4) * 3),
LineWrap::Wrap(line_len, line_ending) => {
// To avoid complicated encode buffer shuffling, only allow line lengths that are
// multiples of 4 (which map to input lengths that are multiples of 3).
// line_len is never 0.
if line_len % 4 != 0 {
return Err(ChunkedEncoderError::InvalidLineLength);
}
let single_encoded_full_line_with_ending_len = line_len
.checked_add(line_ending.len())
.expect("Encoded line length with ending exceeds usize");
// max number of complete lines with endings that will fit in buffer
let num_encoded_wrapped_lines_in_buffer =
effective_buf_len / single_encoded_full_line_with_ending_len;
if num_encoded_wrapped_lines_in_buffer == 0 {
// line + ending is longer than can fit into encode buffer; give up
Err(ChunkedEncoderError::InvalidLineLength)
} else {
let input_len_for_line_len = (line_len / 4) * 3;
let input_len = input_len_for_line_len
.checked_mul(num_encoded_wrapped_lines_in_buffer)
.expect("Max input size exceeds usize");
assert!(input_len % 3 == 0 && input_len > 1);
Ok(input_len)
}
}
}
}
#[cfg(test)]
pub mod tests {
extern crate rand;
use super::*;
use tests::random_config;
use *;
use std::str;
use self::rand::distributions::{IndependentSample, Range};
use self::rand::Rng;
#[test]
fn chunked_encode_empty() {
assert_eq!("", chunked_encode_str(&[], STANDARD));
}
#[test]
fn chunked_encode_intermediate_fast_loop() {
// > 8 bytes input, will enter the pretty fast loop
assert_eq!(
"Zm9vYmFyYmF6cXV4",
chunked_encode_str(b"foobarbazqux", STANDARD)
);
}
#[test]
fn chunked_encode_fast_loop() {
// > 32 bytes input, will enter the uber fast loop
assert_eq!(
"Zm9vYmFyYmF6cXV4cXV1eGNvcmdlZ3JhdWx0Z2FycGx5eg==",
chunked_encode_str(b"foobarbazquxquuxcorgegraultgarplyz", STANDARD)
);
}
#[test]
fn chunked_encode_slow_loop_only() {
// < 8 bytes input, slow loop only
assert_eq!("Zm9vYmFy", chunked_encode_str(b"foobar", STANDARD));
}
#[test]
fn chunked_encode_line_wrap_padding() {
// < 8 bytes input, slow loop only
let config = config_wrap(true, 4, LineEnding::LF);
assert_eq!(
"Zm9v\nYmFy\nZm9v\nYmFy\nZg==",
chunked_encode_str(b"foobarfoobarf", config)
);
}
#[test]
fn chunked_encode_longer_than_one_buffer_adds_final_line_wrap_lf() {
// longest line len possible
let config = config_wrap(false, 1020, LineEnding::LF);
let input = vec![0xFF; 768];
let encoded = chunked_encode_str(&input, config);
// got a line wrap
assert_eq!(1024 + 1, encoded.len());
for &b in encoded.as_bytes()[0..1020].iter() {
// ascii /
assert_eq!(47, b);
}
assert_eq!(10, encoded.as_bytes()[1020]);
for &b in encoded.as_bytes()[1021..].iter() {
// ascii /
assert_eq!(47, b);
}
}
#[test]
fn chunked_encode_longer_than_one_buffer_adds_final_line_wrap_crlf() {
// longest line len possible
let config = config_wrap(false, 1020, LineEnding::CRLF);
let input = vec![0xFF; 768];
let encoded = chunked_encode_str(&input, config);
// got a line wrap
assert_eq!(1024 + 2, encoded.len());
for &b in encoded.as_bytes()[0..1020].iter() {
// ascii /
assert_eq!(47, b);
}
assert_eq!(13, encoded.as_bytes()[1020]);
assert_eq!(10, encoded.as_bytes()[1021]);
for &b in encoded.as_bytes()[1022..].iter() {
// ascii /
assert_eq!(47, b);
}
}
#[test]
fn chunked_encode_matches_normal_encode_random_string_sink() {
let helper = StringSinkTestHelper;
chunked_encode_matches_normal_encode_random(&helper);
}
#[test]
fn max_input_length_no_wrap_no_pad() {
let config = config_no_wrap(false);
assert_eq!(768, max_input_length(1024, &config).unwrap());
}
#[test]
fn max_input_length_no_wrap_with_pad_decrements_one_triple() {
let config = config_no_wrap(true);
assert_eq!(765, max_input_length(1024, &config).unwrap());
}
#[test]
fn max_input_length_no_wrap_with_pad_one_byte_short() {
let config = config_no_wrap(true);
assert_eq!(765, max_input_length(1025, &config).unwrap());
}
#[test]
fn max_input_length_no_wrap_with_pad_fits_exactly() {
let config = config_no_wrap(true);
assert_eq!(768, max_input_length(1026, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_fits_exactly_no_pad() {
// 10 * (72 + 1) = 730. 54 input bytes = 72 encoded bytes, + 1 for LF.
let config = config_wrap(false, 72, LineEnding::LF);
assert_eq!(540, max_input_length(730, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_fits_one_spare_byte_no_pad() {
// 10 * (72 + 1) = 730. 54 input bytes = 72 encoded bytes, + 1 for LF.
let config = config_wrap(false, 72, LineEnding::LF);
assert_eq!(540, max_input_length(731, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_size_one_byte_short_of_another_line_no_pad() {
// 10 * (72 + 1) = 730. 54 input bytes = 72 encoded bytes, + 1 for LF.
// 73 * 11 = 803
let config = config_wrap(false, 72, LineEnding::LF);
assert_eq!(540, max_input_length(802, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_size_another_line_no_pad() {
// 10 * (72 + 1) = 730. 54 input bytes = 72 encoded bytes, + 1 for LF.
// 73 * 11 = 803
let config = config_wrap(false, 72, LineEnding::LF);
assert_eq!(594, max_input_length(803, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_one_byte_short_with_pad() {
// one fewer input line
let config = config_wrap(true, 72, LineEnding::LF);
assert_eq!(486, max_input_length(731, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_lf_fits_exactly_with_pad() {
// 10 * (72 + 1) = 730. 54 input bytes = 72 encoded bytes, + 1 for LF.
let config = config_wrap(true, 72, LineEnding::LF);
assert_eq!(540, max_input_length(732, &config).unwrap());
}
#[test]
fn max_input_length_wrap_line_len_wont_fit_one_line_lf() {
// 300 bytes is 400 encoded, + 1 for LF
let config = config_wrap(false, 400, LineEnding::LF);
assert_eq!(
ChunkedEncoderError::InvalidLineLength,
max_input_length(400, &config).unwrap_err()
);
}
#[test]
fn max_input_length_wrap_line_len_just_fits_one_line_lf() {
// 300 bytes is 400 encoded, + 1 for LF
let config = Config::new(
CharacterSet::Standard,
false,
false,
LineWrap::Wrap(400, LineEnding::LF),
);
assert_eq!(300, max_input_length(401, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_crlf_fits_exactly_no_pad() {
// 10 * (72 + 2) = 740. 54 input bytes = 72 encoded bytes, + 2 for CRLF.
let config = config_wrap(false, 72, LineEnding::CRLF);
assert_eq!(540, max_input_length(740, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_crlf_fits_one_spare_byte_no_pad() {
// 10 * (72 + 2) = 740. 54 input bytes = 72 encoded bytes, + 2 for CRLF.
let config = config_wrap(false, 72, LineEnding::CRLF);
assert_eq!(540, max_input_length(741, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_crlf_size_one_byte_short_of_another_line_no_pad() {
// 10 * (72 + 2) = 740. 54 input bytes = 72 encoded bytes, + 2 for CRLF.
// 74 * 11 = 814
let config = config_wrap(false, 72, LineEnding::CRLF);
assert_eq!(540, max_input_length(813, &config).unwrap());
}
#[test]
fn max_input_length_wrap_with_crlf_size_another_line_no_pad() {
// 10 * (72 + 2) = 740. 54 input bytes = 72 encoded bytes, + 2 for CRLF.
// 74 * 11 = 814
let config = config_wrap(false, 72, LineEnding::CRLF);
assert_eq!(594, max_input_length(814, &config).unwrap());
}
#[test]
fn max_input_length_wrap_line_len_not_multiple_of_4_rejected() {
let config = config_wrap(false, 41, LineEnding::LF);
assert_eq!(
ChunkedEncoderError::InvalidLineLength,
max_input_length(400, &config).unwrap_err()
);
}
pub fn chunked_encode_matches_normal_encode_random<S: SinkTestHelper>(sink_test_helper: &S) {
let mut input_buf: Vec<u8> = Vec::new();
let mut output_buf = String::new();
let mut rng = rand::weak_rng();
let line_len_range = Range::new(1, 1020);
let input_len_range = Range::new(1, 10_000);
for _ in 0..5_000 {
input_buf.clear();
output_buf.clear();
let buf_len = input_len_range.ind_sample(&mut rng);
for _ in 0..buf_len {
input_buf.push(rng.gen());
}
let config = random_config_for_chunked_encoder(&mut rng, &line_len_range);
let chunk_encoded_string = sink_test_helper.encode_to_string(config, &input_buf);
encode_config_buf(&input_buf, config, &mut output_buf);
assert_eq!(
output_buf, chunk_encoded_string,
"input len={}, config: pad={}, wrap={:?}",
buf_len, config.pad, config.line_wrap
);
}
}
fn chunked_encode_str(bytes: &[u8], config: Config) -> String {
let mut sink = StringSink::new();
{
let encoder = ChunkedEncoder::new(config).unwrap();
encoder.encode(bytes, &mut sink).unwrap();
}
return sink.string;
}
fn random_config_for_chunked_encoder<R: Rng>(
rng: &mut R,
line_len_range: &Range<usize>,
) -> Config {
loop {
let config = random_config(rng, line_len_range);
// only use a config with line_len that is divisible by 4
match config.line_wrap {
LineWrap::NoWrap => return config,
LineWrap::Wrap(line_len, _) => if line_len % 4 == 0 {
return config;
},
}
}
}
fn config_no_wrap(pad: bool) -> Config {
Config::new(CharacterSet::Standard, pad, false, LineWrap::NoWrap)
}
fn config_wrap(pad: bool, line_len: usize, line_ending: LineEnding) -> Config {
Config::new(
CharacterSet::Standard,
pad,
false,
LineWrap::Wrap(line_len, line_ending),
)
}
// An abstraction around sinks so that we can have tests that easily to any sink implementation
pub trait SinkTestHelper {
fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String;
}
// A really simple sink that just appends to a string for testing
struct StringSink {
string: String,
}
impl StringSink {
fn new() -> StringSink {
StringSink {
string: String::new(),
}
}
}
impl Sink for StringSink {
type Error = ();
fn write_encoded_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
self.string.push_str(str::from_utf8(s).unwrap());
Ok(())
}
}
struct StringSinkTestHelper;
impl SinkTestHelper for StringSinkTestHelper {
fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String {
let encoder = ChunkedEncoder::new(config).unwrap();
let mut sink = StringSink::new();
encoder.encode(bytes, &mut sink).unwrap();
sink.string
}
}
}

710
third_party/rust/base64-0.9.3/src/decode.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,710 @@
use byteorder::{BigEndian, ByteOrder};
use {tables, CharacterSet, Config, STANDARD};
use std::{error, fmt, str};
// decode logic operates on chunks of 8 input bytes without padding
const INPUT_CHUNK_LEN: usize = 8;
const DECODED_CHUNK_LEN: usize = 6;
// we read a u64 and write a u64, but a u64 of input only yields 6 bytes of output, so the last
// 2 bytes of any output u64 should not be counted as written to (but must be available in a
// slice).
const DECODED_CHUNK_SUFFIX: usize = 2;
// how many u64's of input to handle at a time
const CHUNKS_PER_FAST_LOOP_BLOCK: usize = 4;
const INPUT_BLOCK_LEN: usize = CHUNKS_PER_FAST_LOOP_BLOCK * INPUT_CHUNK_LEN;
// includes the trailing 2 bytes for the final u64 write
const DECODED_BLOCK_LEN: usize =
CHUNKS_PER_FAST_LOOP_BLOCK * DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX;
/// Errors that can occur while decoding.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum DecodeError {
/// An invalid byte was found in the input. The offset and offending byte are provided.
InvalidByte(usize, u8),
/// The length of the input is invalid.
InvalidLength,
}
impl fmt::Display for DecodeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
DecodeError::InvalidByte(index, byte) => {
write!(f, "Invalid byte {}, offset {}.", byte, index)
}
DecodeError::InvalidLength => write!(f, "Encoded text cannot have a 6-bit remainder."),
}
}
}
impl error::Error for DecodeError {
fn description(&self) -> &str {
match *self {
DecodeError::InvalidByte(_, _) => "invalid byte",
DecodeError::InvalidLength => "invalid length",
}
}
fn cause(&self) -> Option<&error::Error> {
None
}
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///Convenience `decode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let bytes = base64::decode("aGVsbG8gd29ybGQ=").unwrap();
/// println!("{:?}", bytes);
///}
///```
pub fn decode<T: ?Sized + AsRef<[u8]>>(input: &T) -> Result<Vec<u8>, DecodeError> {
decode_config(input, STANDARD)
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let bytes = base64::decode_config("aGVsbG8gd29ybGR+Cg==", base64::STANDARD).unwrap();
/// println!("{:?}", bytes);
///
/// let bytes_url = base64::decode_config("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE).unwrap();
/// println!("{:?}", bytes_url);
///}
///```
pub fn decode_config<T: ?Sized + AsRef<[u8]>>(
input: &T,
config: Config,
) -> Result<Vec<u8>, DecodeError> {
let mut buffer = Vec::<u8>::with_capacity(input.as_ref().len() * 4 / 3);
decode_config_buf(input, config, &mut buffer).map(|_| buffer)
}
///Decode from string reference as octets.
///Writes into the supplied buffer to avoid allocation.
///Returns a Result containing an empty tuple, aka ().
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let mut buffer = Vec::<u8>::new();
/// base64::decode_config_buf("aGVsbG8gd29ybGR+Cg==", base64::STANDARD, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///
/// buffer.clear();
///
/// base64::decode_config_buf("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE, &mut buffer)
/// .unwrap();
/// println!("{:?}", buffer);
///}
///```
pub fn decode_config_buf<T: ?Sized + AsRef<[u8]>>(
input: &T,
config: Config,
buffer: &mut Vec<u8>,
) -> Result<(), DecodeError> {
let input_copy;
let input_bytes = if config.strip_whitespace {
input_copy = copy_without_whitespace(input.as_ref());
input_copy.as_ref()
} else {
input.as_ref()
};
let starting_output_len = buffer.len();
let num_chunks = num_chunks(input_bytes);
let decoded_len_estimate = num_chunks
.checked_mul(DECODED_CHUNK_LEN)
.and_then(|p| p.checked_add(starting_output_len))
.expect("Overflow when calculating output buffer length");
buffer.resize(decoded_len_estimate, 0);
let bytes_written;
{
let buffer_slice = &mut buffer.as_mut_slice()[starting_output_len..];
bytes_written = decode_helper(input_bytes, num_chunks, &config.char_set, buffer_slice)?;
}
buffer.truncate(starting_output_len + bytes_written);
Ok(())
}
/// Decode the input into the provided output slice.
///
/// This will not write any bytes past exactly what is decoded (no stray garbage bytes at the end).
///
/// If you don't know ahead of time what the decoded length should be, size your buffer with a
/// conservative estimate for the decoded length of an input: 3 bytes of output for every 4 bytes of
/// input, rounded up, or in other words `(input_len + 3) / 4 * 3`.
///
/// If the slice is not large enough, this will panic.
pub fn decode_config_slice<T: ?Sized + AsRef<[u8]>>(
input: &T,
config: Config,
output: &mut [u8],
) -> Result<usize, DecodeError> {
let input_copy;
let input_bytes = if config.strip_whitespace {
input_copy = copy_without_whitespace(input.as_ref());
input_copy.as_ref()
} else {
input.as_ref()
};
decode_helper(
input_bytes,
num_chunks(input_bytes),
&config.char_set,
output,
)
}
/// Return the number of input chunks (including a possibly partial final chunk) in the input
fn num_chunks(input: &[u8]) -> usize {
input
.len()
.checked_add(INPUT_CHUNK_LEN - 1)
.expect("Overflow when calculating number of chunks in input") / INPUT_CHUNK_LEN
}
fn copy_without_whitespace(input: &[u8]) -> Vec<u8> {
let mut input_copy = Vec::<u8>::with_capacity(input.len());
input_copy.extend(input.iter().filter(|b| !b" \n\t\r\x0b\x0c".contains(b)));
input_copy
}
/// Helper to avoid duplicating num_chunks calculation, which is costly on short inputs.
/// Returns the number of bytes written, or an error.
// We're on the fragile edge of compiler heuristics here. If this is not inlined, slow. If this is
// inlined(always), a different slow. plain ol' inline makes the benchmarks happiest at the moment,
// but this is fragile and the best setting changes with only minor code modifications.
#[inline]
fn decode_helper(
input: &[u8],
num_chunks: usize,
char_set: &CharacterSet,
output: &mut [u8],
) -> Result<usize, DecodeError> {
let decode_table = char_set.decode_table();
let remainder_len = input.len() % INPUT_CHUNK_LEN;
// Because the fast decode loop writes in groups of 8 bytes (unrolled to
// CHUNKS_PER_FAST_LOOP_BLOCK times 8 bytes, where possible) and outputs 8 bytes at a time (of
// which only 6 are valid data), we need to be sure that we stop using the fast decode loop
// soon enough that there will always be 2 more bytes of valid data written after that loop.
let trailing_bytes_to_skip = match remainder_len {
// if input is a multiple of the chunk size, ignore the last chunk as it may have padding,
// and the fast decode logic cannot handle padding
0 => INPUT_CHUNK_LEN,
// 1 and 5 trailing bytes are illegal: can't decode 6 bits of input into a byte
1 | 5 => return Err(DecodeError::InvalidLength),
// This will decode to one output byte, which isn't enough to overwrite the 2 extra bytes
// written by the fast decode loop. So, we have to ignore both these 2 bytes and the
// previous chunk.
2 => INPUT_CHUNK_LEN + 2,
// If this is 3 unpadded chars, then it would actually decode to 2 bytes. However, if this
// is an erroneous 2 chars + 1 pad char that would decode to 1 byte, then it should fail
// with an error, not panic from going past the bounds of the output slice, so we let it
// use stage 3 + 4.
3 => INPUT_CHUNK_LEN + 3,
// This can also decode to one output byte because it may be 2 input chars + 2 padding
// chars, which would decode to 1 byte.
4 => INPUT_CHUNK_LEN + 4,
// Everything else is a legal decode len (given that we don't require padding), and will
// decode to at least 2 bytes of output.
_ => remainder_len,
};
// rounded up to include partial chunks
let mut remaining_chunks = num_chunks;
let mut input_index = 0;
let mut output_index = 0;
{
let length_of_fast_decode_chunks = input.len().saturating_sub(trailing_bytes_to_skip);
// Fast loop, stage 1
// manual unroll to CHUNKS_PER_FAST_LOOP_BLOCK of u64s to amortize slice bounds checks
if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_BLOCK_LEN) {
while input_index <= max_start_index {
let input_slice = &input[input_index..(input_index + INPUT_BLOCK_LEN)];
let output_slice = &mut output[output_index..(output_index + DECODED_BLOCK_LEN)];
decode_chunk(
&input_slice[0..],
input_index,
decode_table,
&mut output_slice[0..],
)?;
decode_chunk(
&input_slice[8..],
input_index + 8,
decode_table,
&mut output_slice[6..],
)?;
decode_chunk(
&input_slice[16..],
input_index + 16,
decode_table,
&mut output_slice[12..],
)?;
decode_chunk(
&input_slice[24..],
input_index + 24,
decode_table,
&mut output_slice[18..],
)?;
input_index += INPUT_BLOCK_LEN;
output_index += DECODED_BLOCK_LEN - DECODED_CHUNK_SUFFIX;
remaining_chunks -= CHUNKS_PER_FAST_LOOP_BLOCK;
}
}
// Fast loop, stage 2 (aka still pretty fast loop)
// 8 bytes at a time for whatever we didn't do in stage 1.
if let Some(max_start_index) = length_of_fast_decode_chunks.checked_sub(INPUT_CHUNK_LEN) {
while input_index < max_start_index {
decode_chunk(
&input[input_index..(input_index + INPUT_CHUNK_LEN)],
input_index,
decode_table,
&mut output
[output_index..(output_index + DECODED_CHUNK_LEN + DECODED_CHUNK_SUFFIX)],
)?;
output_index += DECODED_CHUNK_LEN;
input_index += INPUT_CHUNK_LEN;
remaining_chunks -= 1;
}
}
}
// Stage 3
// If input length was such that a chunk had to be deferred until after the fast loop
// because decoding it would have produced 2 trailing bytes that wouldn't then be
// overwritten, we decode that chunk here. This way is slower but doesn't write the 2
// trailing bytes.
// However, we still need to avoid the last chunk (partial or complete) because it could
// have padding, so we always do 1 fewer to avoid the last chunk.
for _ in 1..remaining_chunks {
decode_chunk_precise(
&input[input_index..],
input_index,
decode_table,
&mut output[output_index..(output_index + DECODED_CHUNK_LEN)],
)?;
input_index += INPUT_CHUNK_LEN;
output_index += DECODED_CHUNK_LEN;
}
// Stage 4
// Finally, decode any leftovers that aren't a complete input block of 8 bytes.
// Use a u64 as a stack-resident 8 byte buffer.
let mut leftover_bits: u64 = 0;
let mut morsels_in_leftover = 0;
let mut padding_bytes = 0;
let mut first_padding_index: usize = 0;
let start_of_leftovers = input_index;
for (i, b) in input[start_of_leftovers..].iter().enumerate() {
// '=' padding
if *b == 0x3D {
// There can be bad padding in a few ways:
// 1 - Padding with non-padding characters after it
// 2 - Padding after zero or one non-padding characters before it
// in the current quad.
// 3 - More than two characters of padding. If 3 or 4 padding chars
// are in the same quad, that implies it will be caught by #2.
// If it spreads from one quad to another, it will be caught by
// #2 in the second quad.
if i % 4 < 2 {
// Check for case #2.
let bad_padding_index = start_of_leftovers + if padding_bytes > 0 {
// If we've already seen padding, report the first padding index.
// This is to be consistent with the faster logic above: it will report an
// error on the first padding character (since it doesn't expect to see
// anything but actual encoded data).
first_padding_index
} else {
// haven't seen padding before, just use where we are now
i
};
return Err(DecodeError::InvalidByte(bad_padding_index, *b));
}
if padding_bytes == 0 {
first_padding_index = i;
}
padding_bytes += 1;
continue;
}
// Check for case #1.
// To make '=' handling consistent with the main loop, don't allow
// non-suffix '=' in trailing chunk either. Report error as first
// erroneous padding.
if padding_bytes > 0 {
return Err(DecodeError::InvalidByte(
start_of_leftovers + first_padding_index,
0x3D,
));
}
// can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
// To minimize shifts, pack the leftovers from left to right.
let shift = 64 - (morsels_in_leftover + 1) * 6;
// tables are all 256 elements, lookup with a u8 index always succeeds
let morsel = decode_table[*b as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(start_of_leftovers + i, *b));
}
leftover_bits |= (morsel as u64) << shift;
morsels_in_leftover += 1;
}
let leftover_bits_ready_to_append = match morsels_in_leftover {
0 => 0,
2 => 8,
3 => 16,
4 => 24,
6 => 32,
7 => 40,
8 => 48,
_ => unreachable!(
"Impossible: must only have 0 to 8 input bytes in last chunk, with no invalid lengths"
),
};
let mut leftover_bits_appended_to_buf = 0;
while leftover_bits_appended_to_buf < leftover_bits_ready_to_append {
// `as` simply truncates the higher bits, which is what we want here
let selected_bits = (leftover_bits >> (56 - leftover_bits_appended_to_buf)) as u8;
output[output_index] = selected_bits;
output_index += 1;
leftover_bits_appended_to_buf += 8;
}
Ok(output_index)
}
/// Decode 8 bytes of input into 6 bytes of output. 8 bytes of output will be written, but only the
/// first 6 of those contain meaningful data.
///
/// `input` is the bytes to decode, of which the first 8 bytes will be processed.
/// `index_at_start_of_input` is the offset in the overall input (used for reporting errors
/// accurately)
/// `decode_table` is the lookup table for the particular base64 alphabet.
/// `output` will have its first 8 bytes overwritten, of which only the first 6 are valid decoded
/// data.
// yes, really inline (worth 30-50% speedup)
#[inline(always)]
fn decode_chunk(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let mut accum: u64;
let morsel = decode_table[input[0] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(index_at_start_of_input, input[0]));
}
accum = (morsel as u64) << 58;
let morsel = decode_table[input[1] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 1,
input[1],
));
}
accum |= (morsel as u64) << 52;
let morsel = decode_table[input[2] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 2,
input[2],
));
}
accum |= (morsel as u64) << 46;
let morsel = decode_table[input[3] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 3,
input[3],
));
}
accum |= (morsel as u64) << 40;
let morsel = decode_table[input[4] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 4,
input[4],
));
}
accum |= (morsel as u64) << 34;
let morsel = decode_table[input[5] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 5,
input[5],
));
}
accum |= (morsel as u64) << 28;
let morsel = decode_table[input[6] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 6,
input[6],
));
}
accum |= (morsel as u64) << 22;
let morsel = decode_table[input[7] as usize];
if morsel == tables::INVALID_VALUE {
return Err(DecodeError::InvalidByte(
index_at_start_of_input + 7,
input[7],
));
}
accum |= (morsel as u64) << 16;
BigEndian::write_u64(output, accum);
Ok(())
}
/// Decode an 8-byte chunk, but only write the 6 bytes actually decoded instead of including 2
/// trailing garbage bytes.
#[inline]
fn decode_chunk_precise(
input: &[u8],
index_at_start_of_input: usize,
decode_table: &[u8; 256],
output: &mut [u8],
) -> Result<(), DecodeError> {
let mut tmp_buf = [0_u8; 8];
decode_chunk(
input,
index_at_start_of_input,
decode_table,
&mut tmp_buf[..],
)?;
output[0..6].copy_from_slice(&tmp_buf[0..6]);
Ok(())
}
#[cfg(test)]
mod tests {
extern crate rand;
use super::*;
use encode::encode_config_buf;
use tests::{assert_encode_sanity, random_config};
use self::rand::distributions::{IndependentSample, Range};
use self::rand::Rng;
#[test]
fn decode_chunk_precise_writes_only_6_bytes() {
let input = b"Zm9vYmFy"; // "foobar"
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
decode_chunk_precise(&input[..], 0, tables::STANDARD_DECODE, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 6, 7], &output);
}
#[test]
fn decode_chunk_writes_8_bytes() {
let input = b"Zm9vYmFy"; // "foobar"
let mut output = [0_u8, 1, 2, 3, 4, 5, 6, 7];
decode_chunk(&input[..], 0, tables::STANDARD_DECODE, &mut output).unwrap();
assert_eq!(&vec![b'f', b'o', b'o', b'b', b'a', b'r', 0, 0], &output);
}
#[test]
fn decode_into_nonempty_vec_doesnt_clobber_existing_prefix() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decoded_with_prefix = Vec::new();
let mut decoded_without_prefix = Vec::new();
let mut prefix = Vec::new();
let prefix_len_range = Range::new(0, 1000);
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decoded_with_prefix.clear();
decoded_without_prefix.clear();
prefix.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng, &line_len_range);
encode_config_buf(&orig_data, config, &mut encoded_data);
assert_encode_sanity(&encoded_data, &config, input_len);
let prefix_len = prefix_len_range.ind_sample(&mut rng);
// fill the buf with a prefix
for _ in 0..prefix_len {
prefix.push(rng.gen());
}
decoded_with_prefix.resize(prefix_len, 0);
decoded_with_prefix.copy_from_slice(&prefix);
// decode into the non-empty buf
decode_config_buf(&encoded_data, config, &mut decoded_with_prefix).unwrap();
// also decode into the empty buf
decode_config_buf(&encoded_data, config, &mut decoded_without_prefix).unwrap();
assert_eq!(
prefix_len + decoded_without_prefix.len(),
decoded_with_prefix.len()
);
assert_eq!(orig_data, decoded_without_prefix);
// append plain decode onto prefix
prefix.append(&mut decoded_without_prefix);
assert_eq!(prefix, decoded_with_prefix);
}
}
#[test]
fn decode_into_slice_doesnt_clobber_existing_prefix_or_suffix() {
let mut orig_data = Vec::new();
let mut encoded_data = String::new();
let mut decode_buf = Vec::new();
let mut decode_buf_copy: Vec<u8> = Vec::new();
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decode_buf.clear();
decode_buf_copy.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng, &line_len_range);
encode_config_buf(&orig_data, config, &mut encoded_data);
assert_encode_sanity(&encoded_data, &config, input_len);
// fill the buffer with random garbage, long enough to have some room before and after
for _ in 0..5000 {
decode_buf.push(rng.gen());
}
// keep a copy for later comparison
decode_buf_copy.extend(decode_buf.iter());
let offset = 1000;
// decode into the non-empty buf
let decode_bytes_written =
decode_config_slice(&encoded_data, config, &mut decode_buf[offset..]).unwrap();
assert_eq!(orig_data.len(), decode_bytes_written);
assert_eq!(
orig_data,
&decode_buf[offset..(offset + decode_bytes_written)]
);
assert_eq!(&decode_buf_copy[0..offset], &decode_buf[0..offset]);
assert_eq!(
&decode_buf_copy[offset + decode_bytes_written..],
&decode_buf[offset + decode_bytes_written..]
);
}
}
#[test]
fn decode_into_slice_fits_in_precisely_sized_slice() {
    // Property test: for random inputs and configs, decoding into a buffer sized
    // exactly to the original input length fills it completely and reproduces the
    // original bytes (no under- or over-write).
    let mut orig_data = Vec::new();
    let mut encoded_data = String::new();
    let mut decode_buf = Vec::new();
    let input_len_range = Range::new(0, 1000);
    let line_len_range = Range::new(1, 1000);
    let mut rng = rand::weak_rng();

    for _ in 0..10_000 {
        // Buffers are reused across iterations to avoid reallocation.
        orig_data.clear();
        encoded_data.clear();
        decode_buf.clear();

        let input_len = input_len_range.ind_sample(&mut rng);

        for _ in 0..input_len {
            orig_data.push(rng.gen());
        }

        let config = random_config(&mut rng, &line_len_range);
        encode_config_buf(&orig_data, config, &mut encoded_data);
        assert_encode_sanity(&encoded_data, &config, input_len);

        // The decode target is exactly input_len bytes: no slack before or after.
        decode_buf.resize(input_len, 0);

        // decode into the non-empty buf
        let decode_bytes_written =
            decode_config_slice(&encoded_data, config, &mut decode_buf[..]).unwrap();

        assert_eq!(orig_data.len(), decode_bytes_written);
        assert_eq!(orig_data, decode_buf);
    }
}
}

112
third_party/rust/base64-0.9.3/src/display.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,112 @@
//! Enables base64'd output anywhere you might use a `Display` implementation, like a format string.
//!
//! ```
//! use base64::display::Base64Display;
//!
//! let data = vec![0x0, 0x1, 0x2, 0x3];
//! let wrapper = Base64Display::standard(&data);
//!
//! assert_eq!("base64: AAECAw==", format!("base64: {}", wrapper));
//! ```
use super::chunked_encoder::{ChunkedEncoder, ChunkedEncoderError};
use super::Config;
use std::fmt::{Display, Formatter};
use std::{fmt, str};
// I'm not convinced that we should expose ChunkedEncoder or its error type since it's just an
// implementation detail, so use a different error type.
/// Errors that can occur initializing a Base64Display.
// Mirrors `ChunkedEncoderError::InvalidLineLength` (see `with_config` below) so the
// internal encoder's error type does not have to be exposed publicly.
#[derive(Debug, PartialEq)]
pub enum DisplayError {
    /// If wrapping is configured, the line length must be a multiple of 4, and must not be absurdly
    /// large (currently capped at 1024, subject to change).
    InvalidLineLength,
}
/// A convenience wrapper for base64'ing bytes into a format string without heap allocation.
pub struct Base64Display<'a> {
    // Raw input bytes; encoded on the fly each time `fmt` is invoked.
    bytes: &'a [u8],
    // Encoder pre-validated against the `Config` in `with_config`.
    chunked_encoder: ChunkedEncoder,
}
impl<'a> Base64Display<'a> {
    /// Create a `Base64Display` with the provided config.
    ///
    /// Fails with `DisplayError::InvalidLineLength` when the config's line wrap
    /// settings are rejected by the underlying chunked encoder.
    pub fn with_config(bytes: &[u8], config: Config) -> Result<Base64Display, DisplayError> {
        match ChunkedEncoder::new(config) {
            Ok(encoder) => Ok(Base64Display {
                bytes,
                chunked_encoder: encoder,
            }),
            Err(ChunkedEncoderError::InvalidLineLength) => Err(DisplayError::InvalidLineLength),
        }
    }

    /// Convenience method for creating a `Base64Display` with the `STANDARD` configuration.
    pub fn standard(bytes: &[u8]) -> Base64Display {
        Base64Display::with_config(bytes, super::STANDARD).expect("STANDARD is valid")
    }

    /// Convenience method for creating a `Base64Display` with the `URL_SAFE` configuration.
    pub fn url_safe(bytes: &[u8]) -> Base64Display {
        Base64Display::with_config(bytes, super::URL_SAFE).expect("URL_SAFE is valid")
    }
}
impl<'a> Display for Base64Display<'a> {
    // Streams the base64 encoding of `self.bytes` straight into the formatter via
    // a `Sink` adapter, without building an intermediate `String`.
    fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {
        let mut sink = FormatterSink { f: formatter };
        self.chunked_encoder.encode(self.bytes, &mut sink)
    }
}
// Adapter that lets `ChunkedEncoder` write encoded chunks directly into a `fmt::Formatter`.
struct FormatterSink<'a, 'b: 'a> {
    f: &'a mut Formatter<'b>,
}
impl<'a, 'b: 'a> super::chunked_encoder::Sink for FormatterSink<'a, 'b> {
    type Error = fmt::Error;

    // Forwards one encoded chunk to the formatter. The utf8 check should never
    // fail in practice (hence the `expect`), since encoded output is base64 text.
    fn write_encoded_bytes(&mut self, encoded: &[u8]) -> Result<(), Self::Error> {
        // Avoid unsafe. If max performance is needed, write your own display wrapper that uses
        // unsafe here to gain about 10-15%.
        self.f
            .write_str(str::from_utf8(encoded).expect("base64 data was not utf8"))
    }
}
#[cfg(test)]
mod tests {
    use super::super::chunked_encoder::tests::{chunked_encode_matches_normal_encode_random,
                                               SinkTestHelper};
    use super::super::*;
    use super::*;

    // Spot-check known encodings, with surrounding text to prove the wrapper
    // composes with other format-string content.
    #[test]
    fn basic_display() {
        assert_eq!(
            "~$Zm9vYmFy#*",
            format!("~${}#*", Base64Display::standard("foobar".as_bytes()))
        );
        assert_eq!(
            "~$Zm9vYmFyZg==#*",
            format!("~${}#*", Base64Display::standard("foobarf".as_bytes()))
        );
    }

    // Randomized equivalence against the plain (non-Display) encode path.
    #[test]
    fn display_encode_matches_normal_encode() {
        let helper = DisplaySinkTestHelper;
        chunked_encode_matches_normal_encode_random(&helper);
    }

    // Test helper that routes encoding through the `Display` impl.
    struct DisplaySinkTestHelper;

    impl SinkTestHelper for DisplaySinkTestHelper {
        fn encode_to_string(&self, config: Config, bytes: &[u8]) -> String {
            format!("{}", Base64Display::with_config(bytes, config).unwrap())
        }
    }
}

761
third_party/rust/base64-0.9.3/src/encode.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,761 @@
use byteorder::{BigEndian, ByteOrder};
use {line_wrap, line_wrap_parameters, Config, LineWrap, STANDARD};
///Encode arbitrary octets as base64.
///Returns a String.
///Convenience for `encode_config(input, base64::STANDARD);`.
///(`STANDARD` uses the standard alphabet, pads with `=`, and does not line wrap.)
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode(b"hello world");
///    println!("{}", b64);
///}
///```
pub fn encode<T: ?Sized + AsRef<[u8]>>(input: &T) -> String {
    encode_config(input, STANDARD)
}
///Encode arbitrary octets as base64 using the supplied `Config`.
///Returns a freshly allocated String.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode_config(b"hello world~", base64::STANDARD);
///    println!("{}", b64);
///
///    let b64_url = base64::encode_config(b"hello internet~", base64::URL_SAFE);
///    println!("{}", b64_url);
///}
///```
pub fn encode_config<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config) -> String {
    // Reserve the exact output size up front so the buf never has to grow.
    let capacity = encoded_size(input.as_ref().len(), &config)
        .expect("integer overflow when calculating buffer size");
    let mut buf = String::with_capacity(capacity);
    encode_config_buf(input, config, &mut buf);
    buf
}
///Encode arbitrary octets as base64.
///Writes into the supplied output buffer, which will grow the buffer if needed.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let mut buf = String::new();
///    base64::encode_config_buf(b"hello world~", base64::STANDARD, &mut buf);
///    println!("{}", buf);
///
///    buf.clear();
///    base64::encode_config_buf(b"hello internet~", base64::URL_SAFE, &mut buf);
///    println!("{}", buf);
///}
///```
pub fn encode_config_buf<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config, buf: &mut String) {
    let input_bytes = input.as_ref();

    let encoded_size = encoded_size(input_bytes.len(), &config)
        .expect("usize overflow when calculating buffer size");

    let orig_buf_len = buf.len();

    // SAFETY: we're only going to insert valid utf8 — base64 output (including
    // padding and line endings) is pure ASCII, and encode_with_padding_line_wrap
    // is documented to overwrite every byte of the appended region.
    let buf_bytes;
    unsafe {
        buf_bytes = buf.as_mut_vec();
    }

    // Append zeroed space for the encoded output; existing contents (a caller's
    // prefix) are left intact.
    buf_bytes.resize(
        orig_buf_len
            .checked_add(encoded_size)
            .expect("usize overflow when calculating expanded buffer size"),
        0,
    );

    // Encode only into the newly appended tail.
    let mut b64_output = &mut buf_bytes[orig_buf_len..];

    encode_with_padding_line_wrap(&input_bytes, &config, encoded_size, &mut b64_output);
}
/// Encode arbitrary octets as base64, writing into the supplied output buffer
/// and returning the number of bytes written.
///
/// This is useful if you wish to avoid allocation entirely (e.g. encoding into a stack-resident
/// or statically-allocated buffer).
///
/// # Panics
///
/// If `output` is too small to hold the encoded version of `input`, a panic will result.
///
/// # Example
///
/// ```rust
/// extern crate base64;
///
/// fn main() {
///     let s = b"hello internet!";
///     let mut buf = Vec::new();
///     // make sure we'll have a slice big enough for base64 + padding
///     buf.resize(s.len() * 4 / 3 + 4, 0);
///
///     let bytes_written = base64::encode_config_slice(s,
///                             base64::STANDARD, &mut buf);
///
///     // shorten our vec down to just what was written
///     buf.resize(bytes_written, 0);
///
///     assert_eq!(s, base64::decode(&buf).unwrap().as_slice());
/// }
/// ```
pub fn encode_config_slice<T: ?Sized + AsRef<[u8]>>(
    input: &T,
    config: Config,
    output: &mut [u8],
) -> usize {
    let bytes = input.as_ref();
    let b64_len = encoded_size(bytes.len(), &config)
        .expect("usize overflow when calculating buffer size");
    // The slice-index here is what panics when the caller's buffer is too small.
    let dst = &mut output[..b64_len];
    encode_with_padding_line_wrap(bytes, &config, b64_len, dst);
    b64_len
}
/// B64-encode, pad, and line wrap (if configured).
///
/// This helper exists to avoid recalculating encoded_size, which is relatively expensive on short
/// inputs.
///
/// `encoded_size` is the encoded size calculated for `input`.
///
/// `output` must be of size `encoded_size`.
///
/// All bytes in `output` will be written to since it is exactly the size of the output.
pub fn encode_with_padding_line_wrap(
    input: &[u8],
    config: &Config,
    encoded_size: usize,
    output: &mut [u8],
) {
    debug_assert_eq!(encoded_size, output.len());

    // Raw base64 first, then '=' padding appended in place if the config asks for it.
    let mut encoded_bytes = encode_to_slice(input, output, config.char_set.encode_table());
    if config.pad {
        encoded_bytes = encoded_bytes
            .checked_add(add_padding(input.len(), &mut output[encoded_bytes..]))
            .expect("usize overflow when calculating b64 length");
    }

    // Finally, shift the payload apart to insert line endings where configured.
    let line_ending_bytes = match config.line_wrap {
        LineWrap::Wrap(line_len, line_end) => line_wrap(output, encoded_bytes, line_len, line_end),
        LineWrap::NoWrap => 0,
    };

    debug_assert_eq!(encoded_size, encoded_bytes + line_ending_bytes);
}
/// Encode input bytes to utf8 base64 bytes. Does not pad or line wrap.
/// `output` must be long enough to hold the encoded `input` without padding or line wrapping.
/// Returns the number of bytes written.
#[inline]
pub fn encode_to_slice(input: &[u8], output: &mut [u8], encode_table: &[u8; 64]) -> usize {
    let mut input_index: usize = 0;

    const BLOCKS_PER_FAST_LOOP: usize = 4;
    const LOW_SIX_BITS: u64 = 0x3F;

    // we read 8 bytes at a time (u64) but only actually consume 6 of those bytes. Thus, we need
    // 2 trailing bytes to be available to read..
    let last_fast_index = input.len().saturating_sub(BLOCKS_PER_FAST_LOOP * 6 + 2);
    let mut output_index = 0;

    if last_fast_index > 0 {
        while input_index <= last_fast_index {
            // Major performance wins from letting the optimizer do the bounds check once, mostly
            // on the output side
            let input_chunk = &input[input_index..(input_index + (BLOCKS_PER_FAST_LOOP * 6 + 2))];
            let output_chunk = &mut output[output_index..(output_index + BLOCKS_PER_FAST_LOOP * 8)];

            // Hand-unrolling for 32 vs 16 or 8 bytes produces yields performance about equivalent
            // to unsafe pointer code on a Xeon E5-1650v3. 64 byte unrolling was slightly better for
            // large inputs but significantly worse for 50-byte input, unsurprisingly. I suspect
            // that it's a not uncommon use case to encode smallish chunks of data (e.g. a 64-byte
            // SHA-512 digest), so it would be nice if that fit in the unrolled loop at least once.
            // Plus, single-digit percentage performance differences might well be quite different
            // on different hardware.

            // Each read_u64 pulls 8 big-endian bytes; the top 48 bits carry the 6 input
            // bytes being encoded, sliced into eight 6-bit groups (shifts 58 down to 16).
            // Successive reads advance 6 bytes, so the 2 trailing slack bytes are re-read.
            let input_u64 = BigEndian::read_u64(&input_chunk[0..]);

            output_chunk[0] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
            output_chunk[1] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
            output_chunk[2] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
            output_chunk[3] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
            output_chunk[4] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
            output_chunk[5] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
            output_chunk[6] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
            output_chunk[7] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];

            let input_u64 = BigEndian::read_u64(&input_chunk[6..]);

            output_chunk[8] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
            output_chunk[9] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
            output_chunk[10] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
            output_chunk[11] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
            output_chunk[12] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
            output_chunk[13] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
            output_chunk[14] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
            output_chunk[15] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];

            let input_u64 = BigEndian::read_u64(&input_chunk[12..]);

            output_chunk[16] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
            output_chunk[17] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
            output_chunk[18] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
            output_chunk[19] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
            output_chunk[20] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
            output_chunk[21] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
            output_chunk[22] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
            output_chunk[23] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];

            let input_u64 = BigEndian::read_u64(&input_chunk[18..]);

            output_chunk[24] = encode_table[((input_u64 >> 58) & LOW_SIX_BITS) as usize];
            output_chunk[25] = encode_table[((input_u64 >> 52) & LOW_SIX_BITS) as usize];
            output_chunk[26] = encode_table[((input_u64 >> 46) & LOW_SIX_BITS) as usize];
            output_chunk[27] = encode_table[((input_u64 >> 40) & LOW_SIX_BITS) as usize];
            output_chunk[28] = encode_table[((input_u64 >> 34) & LOW_SIX_BITS) as usize];
            output_chunk[29] = encode_table[((input_u64 >> 28) & LOW_SIX_BITS) as usize];
            output_chunk[30] = encode_table[((input_u64 >> 22) & LOW_SIX_BITS) as usize];
            output_chunk[31] = encode_table[((input_u64 >> 16) & LOW_SIX_BITS) as usize];

            output_index += BLOCKS_PER_FAST_LOOP * 8;
            input_index += BLOCKS_PER_FAST_LOOP * 6;
        }
    }

    // Encode what's left after the fast loop.

    const LOW_SIX_BITS_U8: u8 = 0x3F;

    let rem = input.len() % 3;
    let start_of_rem = input.len() - rem;

    // start at the first index not handled by fast loop, which may be 0.
    while input_index < start_of_rem {
        let input_chunk = &input[input_index..(input_index + 3)];
        let output_chunk = &mut output[output_index..(output_index + 4)];

        output_chunk[0] = encode_table[(input_chunk[0] >> 2) as usize];
        output_chunk[1] =
            encode_table[((input_chunk[0] << 4 | input_chunk[1] >> 4) & LOW_SIX_BITS_U8) as usize];
        output_chunk[2] =
            encode_table[((input_chunk[1] << 2 | input_chunk[2] >> 6) & LOW_SIX_BITS_U8) as usize];
        output_chunk[3] = encode_table[(input_chunk[2] & LOW_SIX_BITS_U8) as usize];

        input_index += 3;
        output_index += 4;
    }

    // Partial final chunk: 2 leftover bytes -> 3 output chars, 1 -> 2 chars.
    if rem == 2 {
        output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize];
        output[output_index + 1] =
            encode_table[((input[start_of_rem] << 4 | input[start_of_rem + 1] >> 4)
                & LOW_SIX_BITS_U8) as usize];
        output[output_index + 2] =
            encode_table[((input[start_of_rem + 1] << 2) & LOW_SIX_BITS_U8) as usize];
        output_index += 3;
    } else if rem == 1 {
        output[output_index] = encode_table[(input[start_of_rem] >> 2) as usize];
        output[output_index + 1] =
            encode_table[((input[start_of_rem] << 4) & LOW_SIX_BITS_U8) as usize];
        output_index += 2;
    }

    output_index
}
/// Calculate the base64 encoded string size, including padding and line wraps if appropriate.
///
/// Returns `None` if the size would overflow `usize`.
pub fn encoded_size(bytes_len: usize, config: &Config) -> Option<usize> {
    let leftover = bytes_len % 3;
    // Every complete 3-byte input chunk produces 4 output chars.
    let full_chunks_len = (bytes_len / 3).checked_mul(4)?;

    let unwrapped_len = if leftover == 0 {
        full_chunks_len
    } else if config.pad {
        // A partial trailing chunk is padded out to a full 4-char group.
        full_chunks_len.checked_add(4)?
    } else {
        // Unpadded: 1 leftover byte -> 2 chars, 2 leftover bytes -> 3 chars.
        full_chunks_len.checked_add(leftover + 1)?
    };

    Some(match config.line_wrap {
        LineWrap::NoWrap => unwrapped_len,
        LineWrap::Wrap(line_len, line_ending) => {
            line_wrap_parameters(unwrapped_len, line_len, line_ending).total_len
        }
    })
}
/// Write padding characters.
/// `output` is the slice where padding should be written, of length at least 2.
///
/// Returns the number of padding bytes written.
pub fn add_padding(input_len: usize, output: &mut [u8]) -> usize {
    // A partial final chunk of 1 or 2 input bytes needs 2 or 1 '=' characters
    // respectively; a complete final chunk needs none.
    let pad_len = (3 - input_len % 3) % 3;
    let mut written = 0;
    while written < pad_len {
        output[written] = b'=';
        written += 1;
    }
    written
}
#[cfg(test)]
mod tests {
extern crate rand;
use super::*;
use decode::decode_config_buf;
use tests::{assert_encode_sanity, random_config};
use {CharacterSet, Config, LineEnding, LineWrap, MIME, STANDARD, URL_SAFE_NO_PAD};
use self::rand::distributions::{IndependentSample, Range};
use self::rand::Rng;
use std;
use std::str;
#[test]
fn encoded_size_correct_standard() {
assert_encoded_length(0, 0, STANDARD);
assert_encoded_length(1, 4, STANDARD);
assert_encoded_length(2, 4, STANDARD);
assert_encoded_length(3, 4, STANDARD);
assert_encoded_length(4, 8, STANDARD);
assert_encoded_length(5, 8, STANDARD);
assert_encoded_length(6, 8, STANDARD);
assert_encoded_length(7, 12, STANDARD);
assert_encoded_length(8, 12, STANDARD);
assert_encoded_length(9, 12, STANDARD);
assert_encoded_length(54, 72, STANDARD);
assert_encoded_length(55, 76, STANDARD);
assert_encoded_length(56, 76, STANDARD);
assert_encoded_length(57, 76, STANDARD);
assert_encoded_length(58, 80, STANDARD);
}
#[test]
fn encoded_size_correct_no_pad_no_wrap() {
assert_encoded_length(0, 0, URL_SAFE_NO_PAD);
assert_encoded_length(1, 2, URL_SAFE_NO_PAD);
assert_encoded_length(2, 3, URL_SAFE_NO_PAD);
assert_encoded_length(3, 4, URL_SAFE_NO_PAD);
assert_encoded_length(4, 6, URL_SAFE_NO_PAD);
assert_encoded_length(5, 7, URL_SAFE_NO_PAD);
assert_encoded_length(6, 8, URL_SAFE_NO_PAD);
assert_encoded_length(7, 10, URL_SAFE_NO_PAD);
assert_encoded_length(8, 11, URL_SAFE_NO_PAD);
assert_encoded_length(9, 12, URL_SAFE_NO_PAD);
assert_encoded_length(54, 72, URL_SAFE_NO_PAD);
assert_encoded_length(55, 74, URL_SAFE_NO_PAD);
assert_encoded_length(56, 75, URL_SAFE_NO_PAD);
assert_encoded_length(57, 76, URL_SAFE_NO_PAD);
assert_encoded_length(58, 78, URL_SAFE_NO_PAD);
}
#[test]
fn encoded_size_correct_mime() {
assert_encoded_length(0, 0, MIME);
assert_encoded_length(1, 4, MIME);
assert_encoded_length(2, 4, MIME);
assert_encoded_length(3, 4, MIME);
assert_encoded_length(4, 8, MIME);
assert_encoded_length(5, 8, MIME);
assert_encoded_length(6, 8, MIME);
assert_encoded_length(7, 12, MIME);
assert_encoded_length(8, 12, MIME);
assert_encoded_length(9, 12, MIME);
assert_encoded_length(54, 72, MIME);
assert_encoded_length(55, 76, MIME);
assert_encoded_length(56, 76, MIME);
assert_encoded_length(57, 76, MIME);
assert_encoded_length(58, 82, MIME);
assert_encoded_length(59, 82, MIME);
assert_encoded_length(60, 82, MIME);
}
#[test]
fn encoded_size_correct_lf_pad() {
let config = Config::new(
CharacterSet::Standard,
true,
false,
LineWrap::Wrap(76, LineEnding::LF),
);
assert_encoded_length(0, 0, config);
assert_encoded_length(1, 4, config);
assert_encoded_length(2, 4, config);
assert_encoded_length(3, 4, config);
assert_encoded_length(4, 8, config);
assert_encoded_length(5, 8, config);
assert_encoded_length(6, 8, config);
assert_encoded_length(7, 12, config);
assert_encoded_length(8, 12, config);
assert_encoded_length(9, 12, config);
assert_encoded_length(54, 72, config);
assert_encoded_length(55, 76, config);
assert_encoded_length(56, 76, config);
assert_encoded_length(57, 76, config);
// one fewer than MIME
assert_encoded_length(58, 81, config);
assert_encoded_length(59, 81, config);
assert_encoded_length(60, 81, config);
}
#[test]
fn encoded_size_overflow() {
assert_eq!(None, encoded_size(std::usize::MAX, &STANDARD));
}
#[test]
fn encode_config_buf_into_nonempty_buffer_doesnt_clobber_prefix() {
let mut orig_data = Vec::new();
let mut prefix = String::new();
let mut encoded_data_no_prefix = String::new();
let mut encoded_data_with_prefix = String::new();
let mut decoded = Vec::new();
let prefix_len_range = Range::new(0, 1000);
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
orig_data.clear();
prefix.clear();
encoded_data_no_prefix.clear();
encoded_data_with_prefix.clear();
decoded.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let prefix_len = prefix_len_range.ind_sample(&mut rng);
for _ in 0..prefix_len {
// getting convenient random single-byte printable chars that aren't base64 is
// annoying
prefix.push('#');
}
encoded_data_with_prefix.push_str(&prefix);
let config = random_config(&mut rng, &line_len_range);
encode_config_buf(&orig_data, config, &mut encoded_data_no_prefix);
encode_config_buf(&orig_data, config, &mut encoded_data_with_prefix);
assert_eq!(
encoded_data_no_prefix.len() + prefix_len,
encoded_data_with_prefix.len()
);
assert_encode_sanity(&encoded_data_no_prefix, &config, input_len);
assert_encode_sanity(&encoded_data_with_prefix[prefix_len..], &config, input_len);
// append plain encode onto prefix
prefix.push_str(&mut encoded_data_no_prefix);
assert_eq!(prefix, encoded_data_with_prefix);
decode_config_buf(&encoded_data_no_prefix, config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_config_slice_into_nonempty_buffer_doesnt_clobber_suffix() {
let mut orig_data = Vec::new();
let mut encoded_data = Vec::new();
let mut encoded_data_original_state = Vec::new();
let mut decoded = Vec::new();
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
encoded_data_original_state.clear();
decoded.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
// plenty of existing garbage in the encoded buffer
for _ in 0..10 * input_len {
encoded_data.push(rng.gen());
}
encoded_data_original_state.extend_from_slice(&encoded_data);
let config = random_config(&mut rng, &line_len_range);
let encoded_size = encoded_size(input_len, &config).unwrap();
assert_eq!(
encoded_size,
encode_config_slice(&orig_data, config, &mut encoded_data)
);
assert_encode_sanity(
std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
&config,
input_len,
);
assert_eq!(
&encoded_data[encoded_size..],
&encoded_data_original_state[encoded_size..]
);
decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_config_slice_fits_into_precisely_sized_slice() {
let mut orig_data = Vec::new();
let mut encoded_data = Vec::new();
let mut decoded = Vec::new();
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
orig_data.clear();
encoded_data.clear();
decoded.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
orig_data.push(rng.gen());
}
let config = random_config(&mut rng, &line_len_range);
let encoded_size = encoded_size(input_len, &config).unwrap();
encoded_data.resize(encoded_size, 0);
assert_eq!(
encoded_size,
encode_config_slice(&orig_data, config, &mut encoded_data)
);
assert_encode_sanity(
std::str::from_utf8(&encoded_data[0..encoded_size]).unwrap(),
&config,
input_len,
);
decode_config_buf(&encoded_data[0..encoded_size], config, &mut decoded).unwrap();
assert_eq!(orig_data, decoded);
}
}
#[test]
fn encode_to_slice_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let config = random_config(&mut rng, &line_len_range);
// fill up the output buffer with garbage
let encoded_size = encoded_size(input_len, &config).unwrap();
for _ in 0..encoded_size {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
let bytes_written =
encode_to_slice(&input, &mut output, config.char_set.encode_table());
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
#[test]
fn encode_with_padding_line_wrap_random_valid_utf8() {
let mut input = Vec::new();
let mut output = Vec::new();
let input_len_range = Range::new(0, 1000);
let line_len_range = Range::new(1, 1000);
let mut rng = rand::weak_rng();
for _ in 0..10_000 {
input.clear();
output.clear();
let input_len = input_len_range.ind_sample(&mut rng);
for _ in 0..input_len {
input.push(rng.gen());
}
let config = random_config(&mut rng, &line_len_range);
// fill up the output buffer with garbage
let encoded_size = encoded_size(input_len, &config).unwrap();
for _ in 0..encoded_size + 1000 {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
encode_with_padding_line_wrap(
&input,
&config,
encoded_size,
&mut output[0..encoded_size],
);
// make sure the part beyond b64 is the same garbage it was before
assert_eq!(orig_output_buf[encoded_size..], output[encoded_size..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..encoded_size]).unwrap();
}
}
#[test]
fn add_padding_random_valid_utf8() {
let mut output = Vec::new();
let mut rng = rand::weak_rng();
// cover our bases for length % 3
for input_len in 0..10 {
output.clear();
// fill output with random
for _ in 0..10 {
output.push(rng.gen());
}
let orig_output_buf = output.to_vec();
let bytes_written = add_padding(input_len, &mut output);
// make sure the part beyond bytes_written is the same garbage it was before
assert_eq!(orig_output_buf[bytes_written..], output[bytes_written..]);
// make sure the encoded bytes are UTF-8
let _ = str::from_utf8(&output[0..bytes_written]).unwrap();
}
}
fn assert_encoded_length(input_len: usize, encoded_len: usize, config: Config) {
assert_eq!(encoded_len, encoded_size(input_len, &config).unwrap());
let mut bytes: Vec<u8> = Vec::new();
let mut rng = rand::weak_rng();
for _ in 0..input_len {
bytes.push(rng.gen());
}
let encoded = encode_config(&bytes, config);
assert_encode_sanity(&encoded, &config, input_len);
assert_eq!(encoded_len, encoded.len());
}
}

220
third_party/rust/base64-0.9.3/src/lib.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,220 @@
//! # Configs
//!
//! There isn't just one type of Base64; that would be too simple. You need to choose a character
//! set (standard or URL-safe), padding suffix (yes/no), and line wrap (line length, line ending).
//! The `Config` struct encapsulates this info. There are some common configs included: `STANDARD`,
//! `MIME`, etc. You can also make your own `Config` if needed.
//!
//! The functions that don't have `config` in the name (e.g. `encode()` and `decode()`) use the
//! `STANDARD` config .
//!
//! The functions that write to a slice (the ones that end in `_slice`) are generally the fastest
//! because they don't need to resize anything. If it fits in your workflow and you care about
//! performance, keep using the same buffer (growing as need be) and use the `_slice` methods for
//! the best performance.
//!
//! # Encoding
//!
//! Several different encoding functions are available to you depending on your desire for
//! convenience vs performance.
//!
//! | Function | Output | Allocates |
//! | ----------------------- | ---------------------------- | ------------------------------ |
//! | `encode` | Returns a new `String` | Always |
//! | `encode_config` | Returns a new `String` | Always |
//! | `encode_config_buf` | Appends to provided `String` | Only if `String` needs to grow |
//! | `encode_config_slice` | Writes to provided `&[u8]` | Never |
//!
//! All of the encoding functions that take a `Config` will pad, line wrap, etc, as per the config.
//!
//! # Decoding
//!
//! Just as for encoding, there are different decoding functions available.
//!
//! Note that all decode functions that take a config will allocate a copy of the input if you
//! specify a config that requires whitespace to be stripped. If you care about speed, don't use
//! formats that line wrap and then require whitespace stripping.
//!
//! | Function | Output | Allocates |
//! | ----------------------- | ----------------------------- | ------------------------------ |
//! | `decode` | Returns a new `Vec<u8>` | Always |
//! | `decode_config` | Returns a new `Vec<u8>` | Always |
//! | `decode_config_buf` | Appends to provided `Vec<u8>` | Only if `Vec` needs to grow |
//! | `decode_config_slice` | Writes to provided `&[u8]` | Never |
//!
//! Unlike encoding, where all possible input is valid, decoding can fail (see `DecodeError`).
//!
//! Input can be invalid because it has invalid characters or invalid padding. (No padding at all is
//! valid, but excess padding is not.)
//!
//! Whitespace in the input is invalid unless `strip_whitespace` is enabled in the `Config` used.
//!
//! # Panics
//!
//! If length calculations result in overflowing `usize`, a panic will result.
//!
//! The `_slice` flavors of encode or decode will panic if the provided output slice is too small,
#![deny(
missing_docs, trivial_casts, trivial_numeric_casts, unused_extern_crates, unused_import_braces,
unused_results, variant_size_differences, warnings
)]
extern crate byteorder;
mod chunked_encoder;
pub mod display;
mod line_wrap;
mod tables;
use line_wrap::{line_wrap, line_wrap_parameters};
mod encode;
pub use encode::{encode, encode_config, encode_config_buf, encode_config_slice};
mod decode;
pub use decode::{decode, decode_config, decode_config_buf, decode_config_slice, DecodeError};
#[cfg(test)]
mod tests;
/// Available encoding character sets
// Selects which encode/decode lookup tables are used (see `encode_table`/`decode_table`).
#[derive(Clone, Copy, Debug)]
pub enum CharacterSet {
    /// The standard character set (uses `+` and `/`)
    Standard,
    /// The URL safe character set (uses `-` and `_`)
    UrlSafe,
    /// The `crypt(3)` character set (uses `./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`)
    Crypt,
}
impl CharacterSet {
    /// The 64-entry lookup table used when encoding with this alphabet.
    fn encode_table(&self) -> &'static [u8; 64] {
        match self {
            CharacterSet::Standard => tables::STANDARD_ENCODE,
            CharacterSet::UrlSafe => tables::URL_SAFE_ENCODE,
            CharacterSet::Crypt => tables::CRYPT_ENCODE,
        }
    }

    /// The 256-entry lookup table used when decoding this alphabet.
    fn decode_table(&self) -> &'static [u8; 256] {
        match self {
            CharacterSet::Standard => tables::STANDARD_DECODE,
            CharacterSet::UrlSafe => tables::URL_SAFE_DECODE,
            CharacterSet::Crypt => tables::CRYPT_DECODE,
        }
    }
}
/// Line ending used in optional line wrapping.
#[derive(Clone, Copy, Debug)]
pub enum LineEnding {
    /// Unix-style \n
    LF,
    /// Windows-style \r\n
    CRLF,
}
impl LineEnding {
    /// Byte length of this line ending: 1 for `\n`, 2 for `\r\n`.
    fn len(&self) -> usize {
        if let LineEnding::CRLF = self {
            2
        } else {
            1
        }
    }
}
/// Line wrap configuration.
#[derive(Clone, Copy, Debug)]
pub enum LineWrap {
    /// Don't wrap.
    NoWrap,
    /// Wrap lines with the specified length and line ending. The length must be > 0.
    // NOTE: `Config::new` normalizes a zero length to `NoWrap`.
    Wrap(usize, LineEnding),
}
/// Contains configuration parameters for base64 encoding
///
/// Construct via `Config::new` or use one of the provided consts
/// (`STANDARD`, `MIME`, `URL_SAFE`, etc.).
#[derive(Clone, Copy, Debug)]
pub struct Config {
    /// Character set to use
    char_set: CharacterSet,
    /// True to pad output with `=` characters
    pad: bool,
    /// Remove whitespace before decoding, at the cost of an allocation. Whitespace is defined
    /// according to POSIX-locale `isspace`, meaning \n \r \f \t \v and space.
    strip_whitespace: bool,
    /// ADT signifying whether to linewrap output, and if so by how many characters and with what
    /// ending
    line_wrap: LineWrap,
}
impl Config {
    /// Create a new `Config`.
    ///
    /// A requested wrap of zero characters per line is meaningless, so it is
    /// normalized to `LineWrap::NoWrap`.
    pub fn new(
        char_set: CharacterSet,
        pad: bool,
        strip_whitespace: bool,
        input_line_wrap: LineWrap,
    ) -> Config {
        let line_wrap = if let LineWrap::Wrap(0, _) = input_line_wrap {
            LineWrap::NoWrap
        } else {
            input_line_wrap
        };

        Config {
            char_set,
            pad,
            strip_whitespace,
            line_wrap,
        }
    }
}
/// Standard character set with padding.
pub const STANDARD: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// Standard character set without padding.
pub const STANDARD_NO_PAD: Config = Config {
    char_set: CharacterSet::Standard,
    pad: false,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// As per standards for MIME encoded messages
// 76-char lines with CRLF endings; whitespace is stripped before decoding.
pub const MIME: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: true,
    line_wrap: LineWrap::Wrap(76, LineEnding::CRLF),
};

/// URL-safe character set with padding
pub const URL_SAFE: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// URL-safe character set without padding
pub const URL_SAFE_NO_PAD: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: false,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// As per `crypt(3)` requirements
pub const CRYPT: Config = Config {
    char_set: CharacterSet::Crypt,
    pad: false,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

393
third_party/rust/base64-0.9.3/src/line_wrap.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,393 @@
extern crate safemem;
use super::*;
/// Precomputed geometry of a line-wrapped encoding (see `line_wrap_parameters`).
#[derive(Debug, PartialEq)]
pub struct LineWrapParameters {
    /// number of lines that need an ending
    pub lines_with_endings: usize,
    /// length of last line (which never needs an ending)
    pub last_line_len: usize,
    /// length of lines that need an ending (which are always full lines), with their endings
    pub total_full_wrapped_lines_len: usize,
    /// length of all lines, including endings for the ones that need them
    pub total_len: usize,
    /// length of the line endings only
    pub total_line_endings_len: usize,
}
/// Calculations about how many lines we'll get for a given line length, line ending, etc.
/// This assumes that the last line will not get an ending, even if it is the full line length.
pub fn line_wrap_parameters(
    input_len: usize,
    line_len: usize,
    line_ending: LineEnding,
) -> LineWrapParameters {
    let ending_len = line_ending.len();

    // Everything fits on one line: nothing to wrap, no endings needed.
    if input_len <= line_len {
        return LineWrapParameters {
            lines_with_endings: 0,
            last_line_len: input_len,
            total_full_wrapped_lines_len: 0,
            total_len: input_len,
            total_line_endings_len: 0,
        };
    };

    // Beyond this point both results are > 0.
    let remainder = input_len % line_len;
    let (wrapped_lines, trailing_len) = if remainder == 0 {
        // Every line is a full line, but the final one carries no ending.
        // input_len > line_len, so the subtraction cannot underflow.
        (input_len / line_len - 1, line_len)
    } else {
        // A partial line trails the full lines, so every full line gets an ending.
        (input_len / line_len, remainder)
    };

    // TODO should we expose exceeding usize via Result to be kind to 16-bit users? Or is that
    // always going to be a panic anyway in practice? If we choose to use a Result we could pull
    // line wrapping out of the normal encode path and have it be a separate step. Then only users
    // who need line wrapping would care about the possibility for error.

    // One full line together with its ending.
    let full_line_with_ending = line_len
        .checked_add(ending_len)
        .expect("Line length with ending exceeds usize");
    // All full lines, endings included.
    let wrapped_full_lines_len = wrapped_lines
        .checked_mul(full_line_with_ending)
        .expect("Full lines with endings length exceeds usize");
    // Everything: full lines with endings plus the trailing line.
    let total_wrapped_len = wrapped_full_lines_len
        .checked_add(trailing_len)
        .expect("All lines with endings length exceeds usize");
    // Just the ending bytes.
    let endings_only_len = wrapped_lines
        .checked_mul(ending_len)
        .expect("Total line endings length exceeds usize");

    LineWrapParameters {
        lines_with_endings: wrapped_lines,
        last_line_len: trailing_len,
        total_full_wrapped_lines_len: wrapped_full_lines_len,
        total_len: total_wrapped_len,
        total_line_endings_len: endings_only_len,
    }
}
/// Insert line endings into the encoded base64 after each complete line (except the last line, even
/// if it is complete).
/// The provided buffer must be large enough to handle the increased size after endings are
/// inserted.
/// `input_len` is the length of the encoded data in `encoded_buf`.
/// `line_len` is the width without line ending characters.
/// Returns the number of line ending bytes added.
///
/// Works in place by shifting lines towards the end of the buffer, last line
/// first, so that a source line is never overwritten before it has been moved.
pub fn line_wrap(
    encoded_buf: &mut [u8],
    input_len: usize,
    line_len: usize,
    line_ending: LineEnding,
) -> usize {
    let line_wrap_params = line_wrap_parameters(input_len, line_len, line_ending);

    // ptr.offset() is undefined if it wraps, and there is no checked_offset(). However, because
    // we perform this check up front to make sure we have enough capacity, we know that none of
    // the subsequent pointer operations (assuming they implement the desired behavior of course!)
    // will overflow.
    assert!(
        encoded_buf.len() >= line_wrap_params.total_len,
        "Buffer must be able to hold encoded data after line wrapping"
    );

    // Move the last line, either partial or full, by itself as it does not have a line ending
    // afterwards
    let last_line_start = line_wrap_params
        .lines_with_endings
        .checked_mul(line_len)
        .expect("Start of last line in input exceeds usize");
    // last line starts immediately after all the wrapped full lines
    let new_line_start = line_wrap_params.total_full_wrapped_lines_len;

    safemem::copy_over(
        encoded_buf,
        last_line_start,
        new_line_start,
        line_wrap_params.last_line_len,
    );

    let mut line_ending_bytes = 0;

    let line_ending_len = line_ending.len();

    // handle the full lines, back to front: line_num counts up, but
    // lines_before_this_line counts down, so later (higher-offset) lines are
    // relocated before the earlier lines whose old positions they would clobber.
    for line_num in 0..line_wrap_params.lines_with_endings {
        // doesn't underflow because line_num < lines_with_endings
        let lines_before_this_line = line_wrap_params.lines_with_endings - 1 - line_num;
        // where this line currently sits (unwrapped layout)
        let old_line_start = lines_before_this_line
            .checked_mul(line_len)
            .expect("Old line start index exceeds usize");
        // where it lands: shifted right by one ending per preceding line
        let new_line_start = lines_before_this_line
            .checked_mul(line_ending_len)
            .and_then(|i| i.checked_add(old_line_start))
            .expect("New line start index exceeds usize");

        safemem::copy_over(encoded_buf, old_line_start, new_line_start, line_len);

        let after_new_line = new_line_start
            .checked_add(line_len)
            .expect("Line ending index exceeds usize");

        // write the ending bytes directly after the relocated line
        match line_ending {
            LineEnding::LF => {
                encoded_buf[after_new_line] = b'\n';
                line_ending_bytes += 1;
            }
            LineEnding::CRLF => {
                encoded_buf[after_new_line] = b'\r';
                encoded_buf[after_new_line
                    .checked_add(1)
                    .expect("Line ending index exceeds usize")] = b'\n';
                line_ending_bytes += 2;
            }
        }
    }

    // sanity check: we added exactly the number of ending bytes we precomputed
    assert_eq!(line_wrap_params.total_line_endings_len, line_ending_bytes);

    line_ending_bytes
}
#[cfg(test)]
mod tests {
    extern crate rand;

    use super::*;

    use self::rand::distributions::{IndependentSample, Range};
    use self::rand::Rng;

    // Input divides evenly by the line length: the final full line gets no ending.
    #[test]
    fn line_params_perfect_multiple_of_line_length_lf() {
        let params = line_wrap_parameters(100, 20, LineEnding::LF);
        assert_eq!(
            LineWrapParameters {
                lines_with_endings: 4,
                last_line_len: 20,
                total_full_wrapped_lines_len: 84,
                total_len: 104,
                total_line_endings_len: 4,
            },
            params
        );
    }

    // A trailing partial line means every full line gets an ending.
    #[test]
    fn line_params_partial_last_line_crlf() {
        let params = line_wrap_parameters(103, 20, LineEnding::CRLF);
        assert_eq!(
            LineWrapParameters {
                lines_with_endings: 5,
                last_line_len: 3,
                total_full_wrapped_lines_len: 110,
                total_len: 113,
                total_line_endings_len: 10,
            },
            params
        );
    }

    // Degenerate line length of 1 with a 2-byte ending: each wrapped line occupies 3 bytes.
    #[test]
    fn line_params_line_len_1_crlf() {
        let params = line_wrap_parameters(100, 1, LineEnding::CRLF);
        assert_eq!(
            LineWrapParameters {
                lines_with_endings: 99,
                last_line_len: 1,
                total_full_wrapped_lines_len: 99 * 3,
                total_len: 99 * 3 + 1,
                total_line_endings_len: 99 * 2,
            },
            params
        );
    }

    // Input shorter than the line length: no wrapping at all.
    #[test]
    fn line_params_line_len_longer_than_input_crlf() {
        let params = line_wrap_parameters(100, 200, LineEnding::CRLF);
        assert_eq!(
            LineWrapParameters {
                lines_with_endings: 0,
                last_line_len: 100,
                total_full_wrapped_lines_len: 0,
                total_len: 100,
                total_line_endings_len: 0,
            },
            params
        );
    }

    // Input exactly one line long: still no ending, per the documented contract.
    #[test]
    fn line_params_line_len_same_as_input_crlf() {
        let params = line_wrap_parameters(100, 100, LineEnding::CRLF);
        assert_eq!(
            LineWrapParameters {
                lines_with_endings: 0,
                last_line_len: 100,
                total_full_wrapped_lines_len: 0,
                total_len: 100,
                total_line_endings_len: 0,
            },
            params
        );
    }

    #[test]
    fn line_wrap_length_1_lf() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4];

        assert_eq!(3, do_line_wrap(&mut buf, 1, LineEnding::LF));

        assert_eq!(vec![0x1, 0xA, 0x2, 0xA, 0x3, 0xA, 0x4], buf);
    }

    #[test]
    fn line_wrap_length_1_crlf() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4];

        assert_eq!(6, do_line_wrap(&mut buf, 1, LineEnding::CRLF));

        assert_eq!(vec![0x1, 0xD, 0xA, 0x2, 0xD, 0xA, 0x3, 0xD, 0xA, 0x4], buf);
    }

    #[test]
    fn line_wrap_length_2_lf_full_lines() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4];

        assert_eq!(1, do_line_wrap(&mut buf, 2, LineEnding::LF));

        assert_eq!(vec![0x1, 0x2, 0xA, 0x3, 0x4], buf);
    }

    #[test]
    fn line_wrap_length_2_crlf_full_lines() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4];

        assert_eq!(2, do_line_wrap(&mut buf, 2, LineEnding::CRLF));

        assert_eq!(vec![0x1, 0x2, 0xD, 0xA, 0x3, 0x4], buf);
    }

    #[test]
    fn line_wrap_length_2_lf_partial_line() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4, 0x5];

        assert_eq!(2, do_line_wrap(&mut buf, 2, LineEnding::LF));

        assert_eq!(vec![0x1, 0x2, 0xA, 0x3, 0x4, 0xA, 0x5], buf);
    }

    #[test]
    fn line_wrap_length_2_crlf_partial_line() {
        let mut buf = vec![0x1, 0x2, 0x3, 0x4, 0x5];

        assert_eq!(4, do_line_wrap(&mut buf, 2, LineEnding::CRLF));

        assert_eq!(vec![0x1, 0x2, 0xD, 0xA, 0x3, 0x4, 0xD, 0xA, 0x5], buf);
    }

    // Round-trip property test: wrap random buffers, strip the endings back out,
    // and check the payload bytes are unchanged.
    #[test]
    fn line_wrap_random() {
        let mut buf: Vec<u8> = Vec::new();
        let buf_range = Range::new(10, 1000);
        let line_range = Range::new(10, 100);
        let mut rng = rand::weak_rng();

        for _ in 0..10_000 {
            buf.clear();

            let buf_len = buf_range.ind_sample(&mut rng);
            let line_len = line_range.ind_sample(&mut rng);
            let line_ending = if rng.gen() {
                LineEnding::LF
            } else {
                LineEnding::CRLF
            };
            let line_ending_len = line_ending.len();

            for _ in 0..buf_len {
                buf.push(rng.gen());
            }

            let line_wrap_params = line_wrap_parameters(buf_len, line_len, line_ending);

            let not_wrapped_buf = buf.to_vec();

            let _ = do_line_wrap(&mut buf, line_len, line_ending);

            // remove the endings; after stripping k endings, the (k+1)th ending sits at
            // (k + 1) * line_len, so the offset formula stays valid as the buffer shrinks
            for line_ending_num in 0..line_wrap_params.lines_with_endings {
                let line_ending_offset = (line_ending_num + 1) * line_len;

                for _ in 0..line_ending_len {
                    let _ = buf.remove(line_ending_offset);
                }
            }

            assert_eq!(not_wrapped_buf, buf);
        }
    }

    // Helper: wrap `buf` (treated as `orig_len` bytes of payload plus generous random
    // padding), verify sizing invariants and that bytes past the wrapped region are
    // untouched, then truncate `buf` to the wrapped result. Returns ending bytes added.
    fn do_line_wrap(buf: &mut Vec<u8>, line_len: usize, line_ending: LineEnding) -> usize {
        let mut rng = rand::weak_rng();

        let orig_len = buf.len();

        // A 3x inflation is enough for the worst case: line length 1, crlf ending.
        // We add on extra bytes so we'll have un-wrapped bytes at the end that shouldn't get
        // modified..
        for _ in 0..(1000 + 2 * orig_len) {
            buf.push(rng.gen());
        }

        let mut before_line_wrap = buf.to_vec();

        let params = line_wrap_parameters(orig_len, line_len, line_ending);

        let bytes_written = line_wrap(&mut buf[..], orig_len, line_len, line_ending);
        assert_eq!(params.total_line_endings_len, bytes_written);
        assert_eq!(params.lines_with_endings * line_ending.len(), bytes_written);
        assert_eq!(params.total_len, orig_len + bytes_written);

        // make sure line_wrap didn't touch anything beyond what it should
        let start_of_untouched_data = orig_len + bytes_written;
        assert_eq!(
            before_line_wrap[start_of_untouched_data..],
            buf[start_of_untouched_data..]
        );

        // also make sure that line wrapping will fit into a slice no bigger than what it should
        // need
        let bytes_written_precise_fit = line_wrap(
            &mut before_line_wrap[0..(params.total_len)],
            orig_len,
            line_len,
            line_ending,
        );
        assert_eq!(bytes_written, bytes_written_precise_fit);
        assert_eq!(
            &buf[0..(params.total_len)],
            &before_line_wrap[0..(params.total_len)]
        );

        buf.truncate(orig_len + bytes_written);

        bytes_written
    }
}

979
third_party/rust/base64-0.9.3/src/tables.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,979 @@
/// Sentinel stored in the decode tables for input bytes that are not part of the alphabet.
pub const INVALID_VALUE: u8 = 255;
/// Maps each 6-bit value (0-63) to its ASCII byte in the standard base64 alphabet
/// (`A-Z`, `a-z`, `0-9`, `+`, `/`).
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const STANDARD_ENCODE: &'static [u8; 64] = &[
    65, // input 0 (0x0) => 'A' (0x41)
    66, // input 1 (0x1) => 'B' (0x42)
    67, // input 2 (0x2) => 'C' (0x43)
    68, // input 3 (0x3) => 'D' (0x44)
    69, // input 4 (0x4) => 'E' (0x45)
    70, // input 5 (0x5) => 'F' (0x46)
    71, // input 6 (0x6) => 'G' (0x47)
    72, // input 7 (0x7) => 'H' (0x48)
    73, // input 8 (0x8) => 'I' (0x49)
    74, // input 9 (0x9) => 'J' (0x4A)
    75, // input 10 (0xA) => 'K' (0x4B)
    76, // input 11 (0xB) => 'L' (0x4C)
    77, // input 12 (0xC) => 'M' (0x4D)
    78, // input 13 (0xD) => 'N' (0x4E)
    79, // input 14 (0xE) => 'O' (0x4F)
    80, // input 15 (0xF) => 'P' (0x50)
    81, // input 16 (0x10) => 'Q' (0x51)
    82, // input 17 (0x11) => 'R' (0x52)
    83, // input 18 (0x12) => 'S' (0x53)
    84, // input 19 (0x13) => 'T' (0x54)
    85, // input 20 (0x14) => 'U' (0x55)
    86, // input 21 (0x15) => 'V' (0x56)
    87, // input 22 (0x16) => 'W' (0x57)
    88, // input 23 (0x17) => 'X' (0x58)
    89, // input 24 (0x18) => 'Y' (0x59)
    90, // input 25 (0x19) => 'Z' (0x5A)
    97, // input 26 (0x1A) => 'a' (0x61)
    98, // input 27 (0x1B) => 'b' (0x62)
    99, // input 28 (0x1C) => 'c' (0x63)
    100, // input 29 (0x1D) => 'd' (0x64)
    101, // input 30 (0x1E) => 'e' (0x65)
    102, // input 31 (0x1F) => 'f' (0x66)
    103, // input 32 (0x20) => 'g' (0x67)
    104, // input 33 (0x21) => 'h' (0x68)
    105, // input 34 (0x22) => 'i' (0x69)
    106, // input 35 (0x23) => 'j' (0x6A)
    107, // input 36 (0x24) => 'k' (0x6B)
    108, // input 37 (0x25) => 'l' (0x6C)
    109, // input 38 (0x26) => 'm' (0x6D)
    110, // input 39 (0x27) => 'n' (0x6E)
    111, // input 40 (0x28) => 'o' (0x6F)
    112, // input 41 (0x29) => 'p' (0x70)
    113, // input 42 (0x2A) => 'q' (0x71)
    114, // input 43 (0x2B) => 'r' (0x72)
    115, // input 44 (0x2C) => 's' (0x73)
    116, // input 45 (0x2D) => 't' (0x74)
    117, // input 46 (0x2E) => 'u' (0x75)
    118, // input 47 (0x2F) => 'v' (0x76)
    119, // input 48 (0x30) => 'w' (0x77)
    120, // input 49 (0x31) => 'x' (0x78)
    121, // input 50 (0x32) => 'y' (0x79)
    122, // input 51 (0x33) => 'z' (0x7A)
    48, // input 52 (0x34) => '0' (0x30)
    49, // input 53 (0x35) => '1' (0x31)
    50, // input 54 (0x36) => '2' (0x32)
    51, // input 55 (0x37) => '3' (0x33)
    52, // input 56 (0x38) => '4' (0x34)
    53, // input 57 (0x39) => '5' (0x35)
    54, // input 58 (0x3A) => '6' (0x36)
    55, // input 59 (0x3B) => '7' (0x37)
    56, // input 60 (0x3C) => '8' (0x38)
    57, // input 61 (0x3D) => '9' (0x39)
    43, // input 62 (0x3E) => '+' (0x2B)
    47, // input 63 (0x3F) => '/' (0x2F)
];
/// Maps each input byte (0-255) back to its 6-bit value for the standard base64
/// alphabet; bytes outside the alphabet map to `INVALID_VALUE`.
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const STANDARD_DECODE: &'static [u8; 256] = &[
    INVALID_VALUE, // input 0 (0x0)
    INVALID_VALUE, // input 1 (0x1)
    INVALID_VALUE, // input 2 (0x2)
    INVALID_VALUE, // input 3 (0x3)
    INVALID_VALUE, // input 4 (0x4)
    INVALID_VALUE, // input 5 (0x5)
    INVALID_VALUE, // input 6 (0x6)
    INVALID_VALUE, // input 7 (0x7)
    INVALID_VALUE, // input 8 (0x8)
    INVALID_VALUE, // input 9 (0x9)
    INVALID_VALUE, // input 10 (0xA)
    INVALID_VALUE, // input 11 (0xB)
    INVALID_VALUE, // input 12 (0xC)
    INVALID_VALUE, // input 13 (0xD)
    INVALID_VALUE, // input 14 (0xE)
    INVALID_VALUE, // input 15 (0xF)
    INVALID_VALUE, // input 16 (0x10)
    INVALID_VALUE, // input 17 (0x11)
    INVALID_VALUE, // input 18 (0x12)
    INVALID_VALUE, // input 19 (0x13)
    INVALID_VALUE, // input 20 (0x14)
    INVALID_VALUE, // input 21 (0x15)
    INVALID_VALUE, // input 22 (0x16)
    INVALID_VALUE, // input 23 (0x17)
    INVALID_VALUE, // input 24 (0x18)
    INVALID_VALUE, // input 25 (0x19)
    INVALID_VALUE, // input 26 (0x1A)
    INVALID_VALUE, // input 27 (0x1B)
    INVALID_VALUE, // input 28 (0x1C)
    INVALID_VALUE, // input 29 (0x1D)
    INVALID_VALUE, // input 30 (0x1E)
    INVALID_VALUE, // input 31 (0x1F)
    INVALID_VALUE, // input 32 (0x20)
    INVALID_VALUE, // input 33 (0x21)
    INVALID_VALUE, // input 34 (0x22)
    INVALID_VALUE, // input 35 (0x23)
    INVALID_VALUE, // input 36 (0x24)
    INVALID_VALUE, // input 37 (0x25)
    INVALID_VALUE, // input 38 (0x26)
    INVALID_VALUE, // input 39 (0x27)
    INVALID_VALUE, // input 40 (0x28)
    INVALID_VALUE, // input 41 (0x29)
    INVALID_VALUE, // input 42 (0x2A)
    62, // input 43 (0x2B char '+') => 62 (0x3E)
    INVALID_VALUE, // input 44 (0x2C)
    INVALID_VALUE, // input 45 (0x2D)
    INVALID_VALUE, // input 46 (0x2E)
    63, // input 47 (0x2F char '/') => 63 (0x3F)
    52, // input 48 (0x30 char '0') => 52 (0x34)
    53, // input 49 (0x31 char '1') => 53 (0x35)
    54, // input 50 (0x32 char '2') => 54 (0x36)
    55, // input 51 (0x33 char '3') => 55 (0x37)
    56, // input 52 (0x34 char '4') => 56 (0x38)
    57, // input 53 (0x35 char '5') => 57 (0x39)
    58, // input 54 (0x36 char '6') => 58 (0x3A)
    59, // input 55 (0x37 char '7') => 59 (0x3B)
    60, // input 56 (0x38 char '8') => 60 (0x3C)
    61, // input 57 (0x39 char '9') => 61 (0x3D)
    INVALID_VALUE, // input 58 (0x3A)
    INVALID_VALUE, // input 59 (0x3B)
    INVALID_VALUE, // input 60 (0x3C)
    INVALID_VALUE, // input 61 (0x3D)
    INVALID_VALUE, // input 62 (0x3E)
    INVALID_VALUE, // input 63 (0x3F)
    INVALID_VALUE, // input 64 (0x40)
    0, // input 65 (0x41 char 'A') => 0 (0x0)
    1, // input 66 (0x42 char 'B') => 1 (0x1)
    2, // input 67 (0x43 char 'C') => 2 (0x2)
    3, // input 68 (0x44 char 'D') => 3 (0x3)
    4, // input 69 (0x45 char 'E') => 4 (0x4)
    5, // input 70 (0x46 char 'F') => 5 (0x5)
    6, // input 71 (0x47 char 'G') => 6 (0x6)
    7, // input 72 (0x48 char 'H') => 7 (0x7)
    8, // input 73 (0x49 char 'I') => 8 (0x8)
    9, // input 74 (0x4A char 'J') => 9 (0x9)
    10, // input 75 (0x4B char 'K') => 10 (0xA)
    11, // input 76 (0x4C char 'L') => 11 (0xB)
    12, // input 77 (0x4D char 'M') => 12 (0xC)
    13, // input 78 (0x4E char 'N') => 13 (0xD)
    14, // input 79 (0x4F char 'O') => 14 (0xE)
    15, // input 80 (0x50 char 'P') => 15 (0xF)
    16, // input 81 (0x51 char 'Q') => 16 (0x10)
    17, // input 82 (0x52 char 'R') => 17 (0x11)
    18, // input 83 (0x53 char 'S') => 18 (0x12)
    19, // input 84 (0x54 char 'T') => 19 (0x13)
    20, // input 85 (0x55 char 'U') => 20 (0x14)
    21, // input 86 (0x56 char 'V') => 21 (0x15)
    22, // input 87 (0x57 char 'W') => 22 (0x16)
    23, // input 88 (0x58 char 'X') => 23 (0x17)
    24, // input 89 (0x59 char 'Y') => 24 (0x18)
    25, // input 90 (0x5A char 'Z') => 25 (0x19)
    INVALID_VALUE, // input 91 (0x5B)
    INVALID_VALUE, // input 92 (0x5C)
    INVALID_VALUE, // input 93 (0x5D)
    INVALID_VALUE, // input 94 (0x5E)
    INVALID_VALUE, // input 95 (0x5F)
    INVALID_VALUE, // input 96 (0x60)
    26, // input 97 (0x61 char 'a') => 26 (0x1A)
    27, // input 98 (0x62 char 'b') => 27 (0x1B)
    28, // input 99 (0x63 char 'c') => 28 (0x1C)
    29, // input 100 (0x64 char 'd') => 29 (0x1D)
    30, // input 101 (0x65 char 'e') => 30 (0x1E)
    31, // input 102 (0x66 char 'f') => 31 (0x1F)
    32, // input 103 (0x67 char 'g') => 32 (0x20)
    33, // input 104 (0x68 char 'h') => 33 (0x21)
    34, // input 105 (0x69 char 'i') => 34 (0x22)
    35, // input 106 (0x6A char 'j') => 35 (0x23)
    36, // input 107 (0x6B char 'k') => 36 (0x24)
    37, // input 108 (0x6C char 'l') => 37 (0x25)
    38, // input 109 (0x6D char 'm') => 38 (0x26)
    39, // input 110 (0x6E char 'n') => 39 (0x27)
    40, // input 111 (0x6F char 'o') => 40 (0x28)
    41, // input 112 (0x70 char 'p') => 41 (0x29)
    42, // input 113 (0x71 char 'q') => 42 (0x2A)
    43, // input 114 (0x72 char 'r') => 43 (0x2B)
    44, // input 115 (0x73 char 's') => 44 (0x2C)
    45, // input 116 (0x74 char 't') => 45 (0x2D)
    46, // input 117 (0x75 char 'u') => 46 (0x2E)
    47, // input 118 (0x76 char 'v') => 47 (0x2F)
    48, // input 119 (0x77 char 'w') => 48 (0x30)
    49, // input 120 (0x78 char 'x') => 49 (0x31)
    50, // input 121 (0x79 char 'y') => 50 (0x32)
    51, // input 122 (0x7A char 'z') => 51 (0x33)
    INVALID_VALUE, // input 123 (0x7B)
    INVALID_VALUE, // input 124 (0x7C)
    INVALID_VALUE, // input 125 (0x7D)
    INVALID_VALUE, // input 126 (0x7E)
    INVALID_VALUE, // input 127 (0x7F)
    INVALID_VALUE, // input 128 (0x80)
    INVALID_VALUE, // input 129 (0x81)
    INVALID_VALUE, // input 130 (0x82)
    INVALID_VALUE, // input 131 (0x83)
    INVALID_VALUE, // input 132 (0x84)
    INVALID_VALUE, // input 133 (0x85)
    INVALID_VALUE, // input 134 (0x86)
    INVALID_VALUE, // input 135 (0x87)
    INVALID_VALUE, // input 136 (0x88)
    INVALID_VALUE, // input 137 (0x89)
    INVALID_VALUE, // input 138 (0x8A)
    INVALID_VALUE, // input 139 (0x8B)
    INVALID_VALUE, // input 140 (0x8C)
    INVALID_VALUE, // input 141 (0x8D)
    INVALID_VALUE, // input 142 (0x8E)
    INVALID_VALUE, // input 143 (0x8F)
    INVALID_VALUE, // input 144 (0x90)
    INVALID_VALUE, // input 145 (0x91)
    INVALID_VALUE, // input 146 (0x92)
    INVALID_VALUE, // input 147 (0x93)
    INVALID_VALUE, // input 148 (0x94)
    INVALID_VALUE, // input 149 (0x95)
    INVALID_VALUE, // input 150 (0x96)
    INVALID_VALUE, // input 151 (0x97)
    INVALID_VALUE, // input 152 (0x98)
    INVALID_VALUE, // input 153 (0x99)
    INVALID_VALUE, // input 154 (0x9A)
    INVALID_VALUE, // input 155 (0x9B)
    INVALID_VALUE, // input 156 (0x9C)
    INVALID_VALUE, // input 157 (0x9D)
    INVALID_VALUE, // input 158 (0x9E)
    INVALID_VALUE, // input 159 (0x9F)
    INVALID_VALUE, // input 160 (0xA0)
    INVALID_VALUE, // input 161 (0xA1)
    INVALID_VALUE, // input 162 (0xA2)
    INVALID_VALUE, // input 163 (0xA3)
    INVALID_VALUE, // input 164 (0xA4)
    INVALID_VALUE, // input 165 (0xA5)
    INVALID_VALUE, // input 166 (0xA6)
    INVALID_VALUE, // input 167 (0xA7)
    INVALID_VALUE, // input 168 (0xA8)
    INVALID_VALUE, // input 169 (0xA9)
    INVALID_VALUE, // input 170 (0xAA)
    INVALID_VALUE, // input 171 (0xAB)
    INVALID_VALUE, // input 172 (0xAC)
    INVALID_VALUE, // input 173 (0xAD)
    INVALID_VALUE, // input 174 (0xAE)
    INVALID_VALUE, // input 175 (0xAF)
    INVALID_VALUE, // input 176 (0xB0)
    INVALID_VALUE, // input 177 (0xB1)
    INVALID_VALUE, // input 178 (0xB2)
    INVALID_VALUE, // input 179 (0xB3)
    INVALID_VALUE, // input 180 (0xB4)
    INVALID_VALUE, // input 181 (0xB5)
    INVALID_VALUE, // input 182 (0xB6)
    INVALID_VALUE, // input 183 (0xB7)
    INVALID_VALUE, // input 184 (0xB8)
    INVALID_VALUE, // input 185 (0xB9)
    INVALID_VALUE, // input 186 (0xBA)
    INVALID_VALUE, // input 187 (0xBB)
    INVALID_VALUE, // input 188 (0xBC)
    INVALID_VALUE, // input 189 (0xBD)
    INVALID_VALUE, // input 190 (0xBE)
    INVALID_VALUE, // input 191 (0xBF)
    INVALID_VALUE, // input 192 (0xC0)
    INVALID_VALUE, // input 193 (0xC1)
    INVALID_VALUE, // input 194 (0xC2)
    INVALID_VALUE, // input 195 (0xC3)
    INVALID_VALUE, // input 196 (0xC4)
    INVALID_VALUE, // input 197 (0xC5)
    INVALID_VALUE, // input 198 (0xC6)
    INVALID_VALUE, // input 199 (0xC7)
    INVALID_VALUE, // input 200 (0xC8)
    INVALID_VALUE, // input 201 (0xC9)
    INVALID_VALUE, // input 202 (0xCA)
    INVALID_VALUE, // input 203 (0xCB)
    INVALID_VALUE, // input 204 (0xCC)
    INVALID_VALUE, // input 205 (0xCD)
    INVALID_VALUE, // input 206 (0xCE)
    INVALID_VALUE, // input 207 (0xCF)
    INVALID_VALUE, // input 208 (0xD0)
    INVALID_VALUE, // input 209 (0xD1)
    INVALID_VALUE, // input 210 (0xD2)
    INVALID_VALUE, // input 211 (0xD3)
    INVALID_VALUE, // input 212 (0xD4)
    INVALID_VALUE, // input 213 (0xD5)
    INVALID_VALUE, // input 214 (0xD6)
    INVALID_VALUE, // input 215 (0xD7)
    INVALID_VALUE, // input 216 (0xD8)
    INVALID_VALUE, // input 217 (0xD9)
    INVALID_VALUE, // input 218 (0xDA)
    INVALID_VALUE, // input 219 (0xDB)
    INVALID_VALUE, // input 220 (0xDC)
    INVALID_VALUE, // input 221 (0xDD)
    INVALID_VALUE, // input 222 (0xDE)
    INVALID_VALUE, // input 223 (0xDF)
    INVALID_VALUE, // input 224 (0xE0)
    INVALID_VALUE, // input 225 (0xE1)
    INVALID_VALUE, // input 226 (0xE2)
    INVALID_VALUE, // input 227 (0xE3)
    INVALID_VALUE, // input 228 (0xE4)
    INVALID_VALUE, // input 229 (0xE5)
    INVALID_VALUE, // input 230 (0xE6)
    INVALID_VALUE, // input 231 (0xE7)
    INVALID_VALUE, // input 232 (0xE8)
    INVALID_VALUE, // input 233 (0xE9)
    INVALID_VALUE, // input 234 (0xEA)
    INVALID_VALUE, // input 235 (0xEB)
    INVALID_VALUE, // input 236 (0xEC)
    INVALID_VALUE, // input 237 (0xED)
    INVALID_VALUE, // input 238 (0xEE)
    INVALID_VALUE, // input 239 (0xEF)
    INVALID_VALUE, // input 240 (0xF0)
    INVALID_VALUE, // input 241 (0xF1)
    INVALID_VALUE, // input 242 (0xF2)
    INVALID_VALUE, // input 243 (0xF3)
    INVALID_VALUE, // input 244 (0xF4)
    INVALID_VALUE, // input 245 (0xF5)
    INVALID_VALUE, // input 246 (0xF6)
    INVALID_VALUE, // input 247 (0xF7)
    INVALID_VALUE, // input 248 (0xF8)
    INVALID_VALUE, // input 249 (0xF9)
    INVALID_VALUE, // input 250 (0xFA)
    INVALID_VALUE, // input 251 (0xFB)
    INVALID_VALUE, // input 252 (0xFC)
    INVALID_VALUE, // input 253 (0xFD)
    INVALID_VALUE, // input 254 (0xFE)
    INVALID_VALUE, // input 255 (0xFF)
];
/// Maps each 6-bit value (0-63) to its ASCII byte in the URL-safe base64 alphabet
/// (`A-Z`, `a-z`, `0-9`, `-`, `_`).
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const URL_SAFE_ENCODE: &'static [u8; 64] = &[
    65, // input 0 (0x0) => 'A' (0x41)
    66, // input 1 (0x1) => 'B' (0x42)
    67, // input 2 (0x2) => 'C' (0x43)
    68, // input 3 (0x3) => 'D' (0x44)
    69, // input 4 (0x4) => 'E' (0x45)
    70, // input 5 (0x5) => 'F' (0x46)
    71, // input 6 (0x6) => 'G' (0x47)
    72, // input 7 (0x7) => 'H' (0x48)
    73, // input 8 (0x8) => 'I' (0x49)
    74, // input 9 (0x9) => 'J' (0x4A)
    75, // input 10 (0xA) => 'K' (0x4B)
    76, // input 11 (0xB) => 'L' (0x4C)
    77, // input 12 (0xC) => 'M' (0x4D)
    78, // input 13 (0xD) => 'N' (0x4E)
    79, // input 14 (0xE) => 'O' (0x4F)
    80, // input 15 (0xF) => 'P' (0x50)
    81, // input 16 (0x10) => 'Q' (0x51)
    82, // input 17 (0x11) => 'R' (0x52)
    83, // input 18 (0x12) => 'S' (0x53)
    84, // input 19 (0x13) => 'T' (0x54)
    85, // input 20 (0x14) => 'U' (0x55)
    86, // input 21 (0x15) => 'V' (0x56)
    87, // input 22 (0x16) => 'W' (0x57)
    88, // input 23 (0x17) => 'X' (0x58)
    89, // input 24 (0x18) => 'Y' (0x59)
    90, // input 25 (0x19) => 'Z' (0x5A)
    97, // input 26 (0x1A) => 'a' (0x61)
    98, // input 27 (0x1B) => 'b' (0x62)
    99, // input 28 (0x1C) => 'c' (0x63)
    100, // input 29 (0x1D) => 'd' (0x64)
    101, // input 30 (0x1E) => 'e' (0x65)
    102, // input 31 (0x1F) => 'f' (0x66)
    103, // input 32 (0x20) => 'g' (0x67)
    104, // input 33 (0x21) => 'h' (0x68)
    105, // input 34 (0x22) => 'i' (0x69)
    106, // input 35 (0x23) => 'j' (0x6A)
    107, // input 36 (0x24) => 'k' (0x6B)
    108, // input 37 (0x25) => 'l' (0x6C)
    109, // input 38 (0x26) => 'm' (0x6D)
    110, // input 39 (0x27) => 'n' (0x6E)
    111, // input 40 (0x28) => 'o' (0x6F)
    112, // input 41 (0x29) => 'p' (0x70)
    113, // input 42 (0x2A) => 'q' (0x71)
    114, // input 43 (0x2B) => 'r' (0x72)
    115, // input 44 (0x2C) => 's' (0x73)
    116, // input 45 (0x2D) => 't' (0x74)
    117, // input 46 (0x2E) => 'u' (0x75)
    118, // input 47 (0x2F) => 'v' (0x76)
    119, // input 48 (0x30) => 'w' (0x77)
    120, // input 49 (0x31) => 'x' (0x78)
    121, // input 50 (0x32) => 'y' (0x79)
    122, // input 51 (0x33) => 'z' (0x7A)
    48, // input 52 (0x34) => '0' (0x30)
    49, // input 53 (0x35) => '1' (0x31)
    50, // input 54 (0x36) => '2' (0x32)
    51, // input 55 (0x37) => '3' (0x33)
    52, // input 56 (0x38) => '4' (0x34)
    53, // input 57 (0x39) => '5' (0x35)
    54, // input 58 (0x3A) => '6' (0x36)
    55, // input 59 (0x3B) => '7' (0x37)
    56, // input 60 (0x3C) => '8' (0x38)
    57, // input 61 (0x3D) => '9' (0x39)
    45, // input 62 (0x3E) => '-' (0x2D)
    95, // input 63 (0x3F) => '_' (0x5F)
];
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const URL_SAFE_DECODE: &'static [u8; 256] = &[
INVALID_VALUE, // input 0 (0x0)
INVALID_VALUE, // input 1 (0x1)
INVALID_VALUE, // input 2 (0x2)
INVALID_VALUE, // input 3 (0x3)
INVALID_VALUE, // input 4 (0x4)
INVALID_VALUE, // input 5 (0x5)
INVALID_VALUE, // input 6 (0x6)
INVALID_VALUE, // input 7 (0x7)
INVALID_VALUE, // input 8 (0x8)
INVALID_VALUE, // input 9 (0x9)
INVALID_VALUE, // input 10 (0xA)
INVALID_VALUE, // input 11 (0xB)
INVALID_VALUE, // input 12 (0xC)
INVALID_VALUE, // input 13 (0xD)
INVALID_VALUE, // input 14 (0xE)
INVALID_VALUE, // input 15 (0xF)
INVALID_VALUE, // input 16 (0x10)
INVALID_VALUE, // input 17 (0x11)
INVALID_VALUE, // input 18 (0x12)
INVALID_VALUE, // input 19 (0x13)
INVALID_VALUE, // input 20 (0x14)
INVALID_VALUE, // input 21 (0x15)
INVALID_VALUE, // input 22 (0x16)
INVALID_VALUE, // input 23 (0x17)
INVALID_VALUE, // input 24 (0x18)
INVALID_VALUE, // input 25 (0x19)
INVALID_VALUE, // input 26 (0x1A)
INVALID_VALUE, // input 27 (0x1B)
INVALID_VALUE, // input 28 (0x1C)
INVALID_VALUE, // input 29 (0x1D)
INVALID_VALUE, // input 30 (0x1E)
INVALID_VALUE, // input 31 (0x1F)
INVALID_VALUE, // input 32 (0x20)
INVALID_VALUE, // input 33 (0x21)
INVALID_VALUE, // input 34 (0x22)
INVALID_VALUE, // input 35 (0x23)
INVALID_VALUE, // input 36 (0x24)
INVALID_VALUE, // input 37 (0x25)
INVALID_VALUE, // input 38 (0x26)
INVALID_VALUE, // input 39 (0x27)
INVALID_VALUE, // input 40 (0x28)
INVALID_VALUE, // input 41 (0x29)
INVALID_VALUE, // input 42 (0x2A)
INVALID_VALUE, // input 43 (0x2B)
INVALID_VALUE, // input 44 (0x2C)
62, // input 45 (0x2D char '-') => 62 (0x3E)
INVALID_VALUE, // input 46 (0x2E)
INVALID_VALUE, // input 47 (0x2F)
52, // input 48 (0x30 char '0') => 52 (0x34)
53, // input 49 (0x31 char '1') => 53 (0x35)
54, // input 50 (0x32 char '2') => 54 (0x36)
55, // input 51 (0x33 char '3') => 55 (0x37)
56, // input 52 (0x34 char '4') => 56 (0x38)
57, // input 53 (0x35 char '5') => 57 (0x39)
58, // input 54 (0x36 char '6') => 58 (0x3A)
59, // input 55 (0x37 char '7') => 59 (0x3B)
60, // input 56 (0x38 char '8') => 60 (0x3C)
61, // input 57 (0x39 char '9') => 61 (0x3D)
INVALID_VALUE, // input 58 (0x3A)
INVALID_VALUE, // input 59 (0x3B)
INVALID_VALUE, // input 60 (0x3C)
INVALID_VALUE, // input 61 (0x3D)
INVALID_VALUE, // input 62 (0x3E)
INVALID_VALUE, // input 63 (0x3F)
INVALID_VALUE, // input 64 (0x40)
0, // input 65 (0x41 char 'A') => 0 (0x0)
1, // input 66 (0x42 char 'B') => 1 (0x1)
2, // input 67 (0x43 char 'C') => 2 (0x2)
3, // input 68 (0x44 char 'D') => 3 (0x3)
4, // input 69 (0x45 char 'E') => 4 (0x4)
5, // input 70 (0x46 char 'F') => 5 (0x5)
6, // input 71 (0x47 char 'G') => 6 (0x6)
7, // input 72 (0x48 char 'H') => 7 (0x7)
8, // input 73 (0x49 char 'I') => 8 (0x8)
9, // input 74 (0x4A char 'J') => 9 (0x9)
10, // input 75 (0x4B char 'K') => 10 (0xA)
11, // input 76 (0x4C char 'L') => 11 (0xB)
12, // input 77 (0x4D char 'M') => 12 (0xC)
13, // input 78 (0x4E char 'N') => 13 (0xD)
14, // input 79 (0x4F char 'O') => 14 (0xE)
15, // input 80 (0x50 char 'P') => 15 (0xF)
16, // input 81 (0x51 char 'Q') => 16 (0x10)
17, // input 82 (0x52 char 'R') => 17 (0x11)
18, // input 83 (0x53 char 'S') => 18 (0x12)
19, // input 84 (0x54 char 'T') => 19 (0x13)
20, // input 85 (0x55 char 'U') => 20 (0x14)
21, // input 86 (0x56 char 'V') => 21 (0x15)
22, // input 87 (0x57 char 'W') => 22 (0x16)
23, // input 88 (0x58 char 'X') => 23 (0x17)
24, // input 89 (0x59 char 'Y') => 24 (0x18)
25, // input 90 (0x5A char 'Z') => 25 (0x19)
INVALID_VALUE, // input 91 (0x5B)
INVALID_VALUE, // input 92 (0x5C)
INVALID_VALUE, // input 93 (0x5D)
INVALID_VALUE, // input 94 (0x5E)
63, // input 95 (0x5F char '_') => 63 (0x3F)
INVALID_VALUE, // input 96 (0x60)
26, // input 97 (0x61 char 'a') => 26 (0x1A)
27, // input 98 (0x62 char 'b') => 27 (0x1B)
28, // input 99 (0x63 char 'c') => 28 (0x1C)
29, // input 100 (0x64 char 'd') => 29 (0x1D)
30, // input 101 (0x65 char 'e') => 30 (0x1E)
31, // input 102 (0x66 char 'f') => 31 (0x1F)
32, // input 103 (0x67 char 'g') => 32 (0x20)
33, // input 104 (0x68 char 'h') => 33 (0x21)
34, // input 105 (0x69 char 'i') => 34 (0x22)
35, // input 106 (0x6A char 'j') => 35 (0x23)
36, // input 107 (0x6B char 'k') => 36 (0x24)
37, // input 108 (0x6C char 'l') => 37 (0x25)
38, // input 109 (0x6D char 'm') => 38 (0x26)
39, // input 110 (0x6E char 'n') => 39 (0x27)
40, // input 111 (0x6F char 'o') => 40 (0x28)
41, // input 112 (0x70 char 'p') => 41 (0x29)
42, // input 113 (0x71 char 'q') => 42 (0x2A)
43, // input 114 (0x72 char 'r') => 43 (0x2B)
44, // input 115 (0x73 char 's') => 44 (0x2C)
45, // input 116 (0x74 char 't') => 45 (0x2D)
46, // input 117 (0x75 char 'u') => 46 (0x2E)
47, // input 118 (0x76 char 'v') => 47 (0x2F)
48, // input 119 (0x77 char 'w') => 48 (0x30)
49, // input 120 (0x78 char 'x') => 49 (0x31)
50, // input 121 (0x79 char 'y') => 50 (0x32)
51, // input 122 (0x7A char 'z') => 51 (0x33)
INVALID_VALUE, // input 123 (0x7B)
INVALID_VALUE, // input 124 (0x7C)
INVALID_VALUE, // input 125 (0x7D)
INVALID_VALUE, // input 126 (0x7E)
INVALID_VALUE, // input 127 (0x7F)
INVALID_VALUE, // input 128 (0x80)
INVALID_VALUE, // input 129 (0x81)
INVALID_VALUE, // input 130 (0x82)
INVALID_VALUE, // input 131 (0x83)
INVALID_VALUE, // input 132 (0x84)
INVALID_VALUE, // input 133 (0x85)
INVALID_VALUE, // input 134 (0x86)
INVALID_VALUE, // input 135 (0x87)
INVALID_VALUE, // input 136 (0x88)
INVALID_VALUE, // input 137 (0x89)
INVALID_VALUE, // input 138 (0x8A)
INVALID_VALUE, // input 139 (0x8B)
INVALID_VALUE, // input 140 (0x8C)
INVALID_VALUE, // input 141 (0x8D)
INVALID_VALUE, // input 142 (0x8E)
INVALID_VALUE, // input 143 (0x8F)
INVALID_VALUE, // input 144 (0x90)
INVALID_VALUE, // input 145 (0x91)
INVALID_VALUE, // input 146 (0x92)
INVALID_VALUE, // input 147 (0x93)
INVALID_VALUE, // input 148 (0x94)
INVALID_VALUE, // input 149 (0x95)
INVALID_VALUE, // input 150 (0x96)
INVALID_VALUE, // input 151 (0x97)
INVALID_VALUE, // input 152 (0x98)
INVALID_VALUE, // input 153 (0x99)
INVALID_VALUE, // input 154 (0x9A)
INVALID_VALUE, // input 155 (0x9B)
INVALID_VALUE, // input 156 (0x9C)
INVALID_VALUE, // input 157 (0x9D)
INVALID_VALUE, // input 158 (0x9E)
INVALID_VALUE, // input 159 (0x9F)
INVALID_VALUE, // input 160 (0xA0)
INVALID_VALUE, // input 161 (0xA1)
INVALID_VALUE, // input 162 (0xA2)
INVALID_VALUE, // input 163 (0xA3)
INVALID_VALUE, // input 164 (0xA4)
INVALID_VALUE, // input 165 (0xA5)
INVALID_VALUE, // input 166 (0xA6)
INVALID_VALUE, // input 167 (0xA7)
INVALID_VALUE, // input 168 (0xA8)
INVALID_VALUE, // input 169 (0xA9)
INVALID_VALUE, // input 170 (0xAA)
INVALID_VALUE, // input 171 (0xAB)
INVALID_VALUE, // input 172 (0xAC)
INVALID_VALUE, // input 173 (0xAD)
INVALID_VALUE, // input 174 (0xAE)
INVALID_VALUE, // input 175 (0xAF)
INVALID_VALUE, // input 176 (0xB0)
INVALID_VALUE, // input 177 (0xB1)
INVALID_VALUE, // input 178 (0xB2)
INVALID_VALUE, // input 179 (0xB3)
INVALID_VALUE, // input 180 (0xB4)
INVALID_VALUE, // input 181 (0xB5)
INVALID_VALUE, // input 182 (0xB6)
INVALID_VALUE, // input 183 (0xB7)
INVALID_VALUE, // input 184 (0xB8)
INVALID_VALUE, // input 185 (0xB9)
INVALID_VALUE, // input 186 (0xBA)
INVALID_VALUE, // input 187 (0xBB)
INVALID_VALUE, // input 188 (0xBC)
INVALID_VALUE, // input 189 (0xBD)
INVALID_VALUE, // input 190 (0xBE)
INVALID_VALUE, // input 191 (0xBF)
INVALID_VALUE, // input 192 (0xC0)
INVALID_VALUE, // input 193 (0xC1)
INVALID_VALUE, // input 194 (0xC2)
INVALID_VALUE, // input 195 (0xC3)
INVALID_VALUE, // input 196 (0xC4)
INVALID_VALUE, // input 197 (0xC5)
INVALID_VALUE, // input 198 (0xC6)
INVALID_VALUE, // input 199 (0xC7)
INVALID_VALUE, // input 200 (0xC8)
INVALID_VALUE, // input 201 (0xC9)
INVALID_VALUE, // input 202 (0xCA)
INVALID_VALUE, // input 203 (0xCB)
INVALID_VALUE, // input 204 (0xCC)
INVALID_VALUE, // input 205 (0xCD)
INVALID_VALUE, // input 206 (0xCE)
INVALID_VALUE, // input 207 (0xCF)
INVALID_VALUE, // input 208 (0xD0)
INVALID_VALUE, // input 209 (0xD1)
INVALID_VALUE, // input 210 (0xD2)
INVALID_VALUE, // input 211 (0xD3)
INVALID_VALUE, // input 212 (0xD4)
INVALID_VALUE, // input 213 (0xD5)
INVALID_VALUE, // input 214 (0xD6)
INVALID_VALUE, // input 215 (0xD7)
INVALID_VALUE, // input 216 (0xD8)
INVALID_VALUE, // input 217 (0xD9)
INVALID_VALUE, // input 218 (0xDA)
INVALID_VALUE, // input 219 (0xDB)
INVALID_VALUE, // input 220 (0xDC)
INVALID_VALUE, // input 221 (0xDD)
INVALID_VALUE, // input 222 (0xDE)
INVALID_VALUE, // input 223 (0xDF)
INVALID_VALUE, // input 224 (0xE0)
INVALID_VALUE, // input 225 (0xE1)
INVALID_VALUE, // input 226 (0xE2)
INVALID_VALUE, // input 227 (0xE3)
INVALID_VALUE, // input 228 (0xE4)
INVALID_VALUE, // input 229 (0xE5)
INVALID_VALUE, // input 230 (0xE6)
INVALID_VALUE, // input 231 (0xE7)
INVALID_VALUE, // input 232 (0xE8)
INVALID_VALUE, // input 233 (0xE9)
INVALID_VALUE, // input 234 (0xEA)
INVALID_VALUE, // input 235 (0xEB)
INVALID_VALUE, // input 236 (0xEC)
INVALID_VALUE, // input 237 (0xED)
INVALID_VALUE, // input 238 (0xEE)
INVALID_VALUE, // input 239 (0xEF)
INVALID_VALUE, // input 240 (0xF0)
INVALID_VALUE, // input 241 (0xF1)
INVALID_VALUE, // input 242 (0xF2)
INVALID_VALUE, // input 243 (0xF3)
INVALID_VALUE, // input 244 (0xF4)
INVALID_VALUE, // input 245 (0xF5)
INVALID_VALUE, // input 246 (0xF6)
INVALID_VALUE, // input 247 (0xF7)
INVALID_VALUE, // input 248 (0xF8)
INVALID_VALUE, // input 249 (0xF9)
INVALID_VALUE, // input 250 (0xFA)
INVALID_VALUE, // input 251 (0xFB)
INVALID_VALUE, // input 252 (0xFC)
INVALID_VALUE, // input 253 (0xFD)
INVALID_VALUE, // input 254 (0xFE)
INVALID_VALUE, // input 255 (0xFF)
];
/// Encode table for the crypt(3) alphabet: maps each 6-bit value (0-63) to
/// its output byte. The alphabet is, in order: `.`, `/`, `0`-`9`, `A`-`Z`,
/// `a`-`z` — strictly ascending ASCII, so decode tables can binary-partition it.
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const CRYPT_ENCODE: &'static [u8; 64] = &[
    // 0-1 => '.' and '/'
    b'.', b'/',
    // 2-11 => '0'-'9'
    b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9',
    // 12-37 => 'A'-'Z'
    b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M',
    b'N', b'O', b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', b'Z',
    // 38-63 => 'a'-'z'
    b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j', b'k', b'l', b'm',
    b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', b'x', b'y', b'z',
];
/// Decode table for the crypt(3) alphabet (`.`, `/`, `0`-`9`, `A`-`Z`, `a`-`z`):
/// maps each input byte to its 6-bit value, or `INVALID_VALUE` for any byte
/// outside the alphabet. Exact inverse of `CRYPT_ENCODE`. The `[u8; 256]`
/// annotation makes the compiler verify the element count.
#[cfg_attr(rustfmt, rustfmt_skip)]
pub const CRYPT_DECODE: &'static [u8; 256] = &[
    // 0x00-0x2D (46 bytes): everything below '.', invalid.
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x00-0x07
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x08-0x0F
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x10-0x17
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x18-0x1F
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x20-0x27
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x28-0x2D
    // 0x2E '.' => 0, 0x2F '/' => 1, 0x30-0x39 '0'-'9' => 2-11.
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
    // 0x3A-0x40 (7 bytes): ':' through '@', invalid.
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE,
    // 0x41-0x5A 'A'-'Z' => 12-37.
    12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
    25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
    // 0x5B-0x60 (6 bytes): '[' through '`', invalid.
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE,
    // 0x61-0x7A 'a'-'z' => 38-63.
    38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
    51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
    // 0x7B-0xFF (133 bytes): everything above 'z', invalid.
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x7B-0x82
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x83-0x8A
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x8B-0x92
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x93-0x9A
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0x9B-0xA2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xA3-0xAA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xAB-0xB2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xB3-0xBA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xBB-0xC2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xC3-0xCA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xCB-0xD2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xD3-0xDA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xDB-0xE2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xE3-0xEA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xEB-0xF2
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xF3-0xFA
    INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, INVALID_VALUE, // 0xFB-0xFF
];

114
third_party/rust/base64-0.9.3/src/tests.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,114 @@
extern crate rand;
use encode::encoded_size;
use line_wrap::line_wrap_parameters;
use *;
use std::str;
use self::rand::distributions::{IndependentSample, Range};
use self::rand::Rng;
#[test]
fn roundtrip_random_config_short() {
    // Short buffers take the slower scalar encode/decode paths, so hammer
    // them with a large iteration count.
    let input_lens = Range::new(0, 50);
    let line_lens = Range::new(0, 50);
    roundtrip_random_config(input_lens, line_lens, 10_000);
}
#[test]
fn roundtrip_random_config_long() {
    // Longer buffers also reach the fast/unrolled encode and decode loops.
    let input_lens = Range::new(0, 1000);
    let line_lens = Range::new(0, 1000);
    roundtrip_random_config(input_lens, line_lens, 10_000);
}
/// Structural sanity checks on encoded output: total length, padding-byte
/// count, and line-ending count must all match what `config` predicts for an
/// input of `input_len` bytes.
pub fn assert_encode_sanity(encoded: &str, config: &Config, input_len: usize) {
    let input_rem = input_len % 3;
    // A partial trailing input chunk encodes to 4 chars when padded, or
    // `input_rem + 1` chars when unpadded; a complete final chunk adds nothing
    // beyond the (input_len / 3) * 4 base length.
    let (expected_padding_len, last_encoded_chunk_len) = if input_rem > 0 {
        if config.pad {
            (3 - input_rem, 4)
        } else {
            (0, input_rem + 1)
        }
    } else {
        (0, 0)
    };
    let b64_only_len = (input_len / 3) * 4 + last_encoded_chunk_len;
    // Line endings are a function of the base64 payload length alone.
    let expected_line_ending_len = match config.line_wrap {
        LineWrap::NoWrap => 0,
        LineWrap::Wrap(line_len, line_ending) => {
            line_wrap_parameters(b64_only_len, line_len, line_ending).total_line_endings_len
        }
    };
    let expected_encoded_len = encoded_size(input_len, &config).unwrap();
    assert_eq!(expected_encoded_len, encoded.len());
    let line_endings_len = encoded.chars().filter(|&c| c == '\r' || c == '\n').count();
    let padding_len = encoded.chars().filter(|&c| c == '=').count();
    assert_eq!(expected_padding_len, padding_len);
    assert_eq!(expected_line_ending_len, line_endings_len);
    // Encoded output must also be valid UTF-8.
    let _ = str::from_utf8(encoded.as_bytes()).expect("Base64 should be valid utf8");
}
/// Encode/decode roundtrip with a freshly randomized `Config` per iteration.
/// Input lengths and wrap line lengths are drawn from the supplied ranges.
fn roundtrip_random_config(
    input_len_range: Range<usize>,
    line_len_range: Range<usize>,
    iterations: u32,
) {
    // Buffers are reused across iterations to avoid per-iteration allocation.
    let mut input_buf: Vec<u8> = Vec::new();
    let mut encoded_buf = String::new();
    let mut rng = rand::weak_rng();
    for _ in 0..iterations {
        input_buf.clear();
        encoded_buf.clear();
        let input_len = input_len_range.ind_sample(&mut rng);
        let config = random_config(&mut rng, &line_len_range);
        // Fill the input with `input_len` random bytes.
        for _ in 0..input_len {
            input_buf.push(rng.gen());
        }
        encode_config_buf(&input_buf, config, &mut encoded_buf);
        // Check structure of the encoded form, then that it decodes back
        // to the original bytes exactly.
        assert_encode_sanity(&encoded_buf, &config, input_len);
        assert_eq!(input_buf, decode_config(&encoded_buf, config).unwrap());
    }
}
/// Build a random `Config`: random character set, random padding flag, and
/// (half the time) random line wrapping. Whitespace stripping is enabled
/// exactly when line wrapping is, since wrapped output must have its line
/// endings stripped to decode successfully.
pub fn random_config<R: Rng>(rng: &mut R, line_len_range: &Range<usize>) -> Config {
    let line_wrap = if rng.gen() {
        LineWrap::NoWrap
    } else {
        let line_len = line_len_range.ind_sample(rng);
        let line_ending = if rng.gen() {
            LineEnding::LF
        } else {
            LineEnding::CRLF
        };
        LineWrap::Wrap(line_len, line_ending)
    };
    const CHARSETS: &[CharacterSet] = &[
        CharacterSet::UrlSafe,
        CharacterSet::Standard,
        CharacterSet::Crypt,
    ];
    let charset = *rng.choose(CHARSETS).unwrap();
    // Only strip whitespace when wrapping introduced some.
    let strip_whitespace = match line_wrap {
        LineWrap::NoWrap => false,
        _ => true,
    };
    Config::new(charset, rng.gen(), strip_whitespace, line_wrap)
}

351
third_party/rust/base64-0.9.3/tests/decode.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,351 @@
extern crate base64;
use base64::*;
mod helpers;
use helpers::*;
/// Decode `target` with the MIME config and assert the UTF-8 result equals `expected`.
fn compare_decode_mime(expected: &str, target: &str) {
    let decoded = decode_config(target, MIME).unwrap();
    let decoded_text = String::from_utf8(decoded).unwrap();
    assert_eq!(expected, decoded_text);
}
// RFC 4648 test vectors (section 10), decoded with canonical, partial, and
// absent padding — this crate accepts all three.
#[test]
fn decode_rfc4648_0() {
    compare_decode("", "");
}
#[test]
fn decode_rfc4648_1() {
    compare_decode("f", "Zg==");
}
#[test]
fn decode_rfc4648_1_just_a_bit_of_padding() {
    // allows less padding than required
    compare_decode("f", "Zg=");
}
#[test]
fn decode_rfc4648_1_no_padding() {
    compare_decode("f", "Zg");
}
#[test]
fn decode_rfc4648_2() {
    compare_decode("fo", "Zm8=");
}
#[test]
fn decode_rfc4648_2_no_padding() {
    compare_decode("fo", "Zm8");
}
#[test]
fn decode_rfc4648_3() {
    compare_decode("foo", "Zm9v");
}
#[test]
fn decode_rfc4648_4() {
    compare_decode("foob", "Zm9vYg==");
}
#[test]
fn decode_rfc4648_4_no_padding() {
    compare_decode("foob", "Zm9vYg");
}
#[test]
fn decode_rfc4648_5() {
    compare_decode("fooba", "Zm9vYmE=");
}
#[test]
fn decode_rfc4648_5_no_padding() {
    compare_decode("fooba", "Zm9vYmE");
}
#[test]
fn decode_rfc4648_6() {
    compare_decode("foobar", "Zm9vYmFy");
}
// The MIME config tolerates interspersed whitespace of every kind...
#[test]
fn decode_mime_allow_space() {
    assert!(decode_config("YWx pY2U=", MIME).is_ok());
}
#[test]
fn decode_mime_allow_tab() {
    assert!(decode_config("YWx\tpY2U=", MIME).is_ok());
}
#[test]
fn decode_mime_allow_ff() {
    assert!(decode_config("YWx\x0cpY2U=", MIME).is_ok());
}
#[test]
fn decode_mime_allow_vtab() {
    assert!(decode_config("YWx\x0bpY2U=", MIME).is_ok());
}
#[test]
fn decode_mime_allow_nl() {
    assert!(decode_config("YWx\npY2U=", MIME).is_ok());
}
#[test]
fn decode_mime_allow_crnl() {
    assert!(decode_config("YWx\r\npY2U=", MIME).is_ok());
}
// ...but NUL is not whitespace and must be rejected, reported at its offset.
#[test]
fn decode_mime_reject_null() {
    assert_eq!(
        DecodeError::InvalidByte(3, 0x0),
        decode_config("YWx\0pY2U==", MIME).unwrap_err()
    );
}
#[test]
fn decode_mime_absurd_whitespace() {
    compare_decode_mime(
        "how could you let this happen",
        "\n aG93I\n\nG\x0bNvd\r\nWxkI HlvdSB \tsZXQgdGh\rpcyBo\x0cYXBwZW4 = ",
    );
}
// Padding-placement cases. Each test prepends 0..25 complete quads ("ABCD")
// so the suffix lands at varying offsets and therefore in different decode
// stages; expected `InvalidByte` positions are relative to the whole input.
#[test]
fn decode_single_pad_byte_after_2_chars_in_trailing_quad_ok() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("Zg=");
        let input_len = num_quads * 3 + 1;
        // Since there are 3 bytes in the trailing quad, want to be sure this allows for the fact
        // that it could be bad padding rather than assuming that it will decode to 2 bytes and
        // therefore allow 1 extra round of fast decode logic (stage 1 / 2).
        let mut decoded = Vec::new();
        decoded.resize(input_len, 0);
        assert_eq!(
            input_len,
            decode_config_slice(&s, STANDARD, &mut decoded).unwrap()
        );
    }
}
//this is a MAY in the rfc: https://tools.ietf.org/html/rfc4648#section-3.3
#[test]
fn decode_1_pad_byte_in_fast_loop_then_extra_padding_chunk_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("YWxpY2U=====");
        // since the first 8 bytes are handled in stage 1 or 2, the padding is detected as a
        // generic invalid byte, not specifcally a padding issue.
        // Could argue that the *next* padding byte (in the next quad) is technically the first
        // erroneous one, but reporting that accurately is more complex and probably nobody cares
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 7, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_2_pad_bytes_in_leftovers_then_extra_padding_chunk_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("YWxpY2UABB====");
        // 6 bytes (4 padding) after last 8-byte chunk, so it's decoded by stage 4.
        // First padding byte is invalid.
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 10, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_valid_bytes_after_padding_in_leftovers_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("YWxpY2UABB=B");
        // 4 bytes after last 8-byte chunk, so it's decoded by stage 4.
        // First (and only) padding byte is invalid.
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 10, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_absurd_pad_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("==Y=Wx===pY=2U=====");
        // Plenty of remaining bytes, so handled by stage 1 or 2.
        // first padding byte
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_extra_padding_after_1_pad_bytes_in_trailing_quad_returns_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("EEE===");
        // handled by stage 1, 2, or 4 depending on length
        // first padding byte -- which would be legal if it was the only padding
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 3, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_extra_padding_after_2_pad_bytes_in_trailing_quad_2_returns_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("EE====");
        // handled by stage 1, 2, or 4 depending on length
        // first padding byte -- which would be legal if it was by itself
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 2, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_start_quad_with_padding_returns_error() {
    for num_quads in 0..25 {
        // add enough padding to ensure that we'll hit all 4 stages at the different lengths
        for pad_bytes in 1..32 {
            let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
            let padding: String = std::iter::repeat("=").take(pad_bytes).collect();
            s.push_str(&padding);
            if pad_bytes % 4 == 1 {
                // detected in early length check
                assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
            } else {
                // padding lengths 2 - 8 are handled by stage 4
                // padding length >= 8 will hit at least one chunk at stages 1, 2, 3 at different
                // prefix lengths
                assert_eq!(
                    DecodeError::InvalidByte(num_quads * 4, b'='),
                    decode(&s).unwrap_err()
                );
            }
        }
    }
}
#[test]
fn decode_padding_followed_by_non_padding_returns_error() {
    for num_quads in 0..25 {
        for pad_bytes in 0..31 {
            let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
            let padding: String = std::iter::repeat("=").take(pad_bytes).collect();
            s.push_str(&padding);
            s.push_str("E");
            if pad_bytes % 4 == 0 {
                assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
            } else {
                // pad len 1 - 8 will be handled by stage 4
                // pad len 9 (suffix len 10) will have 8 bytes of padding handled by stage 3
                // first padding byte
                assert_eq!(
                    DecodeError::InvalidByte(num_quads * 4, b'='),
                    decode(&s).unwrap_err()
                );
            }
        }
    }
}
#[test]
fn decode_one_char_in_quad_with_padding_error() {
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push_str("E=");
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
            decode(&s).unwrap_err()
        );
        // more padding doesn't change the error
        s.push_str("=");
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
            decode(&s).unwrap_err()
        );
        s.push_str("=");
        assert_eq!(
            DecodeError::InvalidByte(num_quads * 4 + 1, b'='),
            decode(&s).unwrap_err()
        );
    }
}
#[test]
fn decode_one_char_in_quad_without_padding_error() {
    // a lone trailing symbol can never encode a whole byte, so it's a length error
    for num_quads in 0..25 {
        let mut s: String = std::iter::repeat("ABCD").take(num_quads).collect();
        s.push('E');
        assert_eq!(DecodeError::InvalidLength, decode(&s).unwrap_err());
    }
}
#[test]
fn decode_reject_invalid_bytes_with_correct_error() {
    // exhaustively place each invalid byte at every position of every length
    for length in 1..100 {
        for index in 0_usize..length {
            for invalid_byte in " \t\n\r\x0C\x0B\x00%*.".bytes() {
                let prefix: String = std::iter::repeat("A").take(index).collect();
                let suffix: String = std::iter::repeat("B").take(length - index - 1).collect();
                let input = prefix + &String::from_utf8(vec![invalid_byte]).unwrap() + &suffix;
                assert_eq!(
                    length,
                    input.len(),
                    "length {} error position {}",
                    length,
                    index
                );
                if length % 4 == 1 {
                    assert_eq!(DecodeError::InvalidLength, decode(&input).unwrap_err());
                } else {
                    assert_eq!(
                        DecodeError::InvalidByte(index, invalid_byte),
                        decode(&input).unwrap_err()
                    );
                }
            }
        }
    }
}

149
third_party/rust/base64-0.9.3/tests/encode.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,149 @@
extern crate base64;
use base64::*;
/// Encode `target` with the default config and assert it equals `expected`.
fn compare_encode(expected: &str, target: &[u8]) {
    let actual = encode(target);
    assert_eq!(expected, actual);
}
// RFC 4648 test vectors (section 10), encode direction.
#[test]
fn encode_rfc4648_0() {
    compare_encode("", b"");
}
#[test]
fn encode_rfc4648_1() {
    compare_encode("Zg==", b"f");
}
#[test]
fn encode_rfc4648_2() {
    compare_encode("Zm8=", b"fo");
}
#[test]
fn encode_rfc4648_3() {
    compare_encode("Zm9v", b"foo");
}
#[test]
fn encode_rfc4648_4() {
    compare_encode("Zm9vYg==", b"foob");
}
#[test]
fn encode_rfc4648_5() {
    compare_encode("Zm9vYmE=", b"fooba");
}
#[test]
fn encode_rfc4648_6() {
    compare_encode("Zm9vYmFy", b"foobar");
}
// Full-range inputs: every ASCII byte, then every possible byte value,
// against golden outputs for the standard and URL-safe alphabets.
#[test]
fn encode_all_ascii() {
    let mut ascii = Vec::<u8>::with_capacity(128);
    for i in 0..128 {
        ascii.push(i);
    }
    compare_encode(
        "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7P\
         D0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn8\
         =",
        &ascii,
    );
}
#[test]
fn encode_all_bytes() {
    let mut bytes = Vec::<u8>::with_capacity(256);
    // 0..256 won't fit a u8 loop variable here, so push 255 separately.
    for i in 0..255 {
        bytes.push(i);
    }
    bytes.push(255); //bug with "overflowing" ranges?
    compare_encode(
        "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7P\
         D0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn\
         +AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6\
         /wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==",
        &bytes,
    );
}
#[test]
fn encode_all_bytes_url() {
    let mut bytes = Vec::<u8>::with_capacity(256);
    for i in 0..255 {
        bytes.push(i);
    }
    bytes.push(255); //bug with "overflowing" ranges?
    assert_eq!(
        "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0\
         -P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn\
         -AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq\
         -wsbKztLW2t7i5uru8vb6_wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t_g4eLj5OXm5-jp6uvs7e7v8PHy\
         8_T19vf4-fr7_P3-_w==",
        encode_config(&bytes, URL_SAFE)
    );
}
// Line-wrapping cases: the final line may be partial or exactly full, and no
// trailing line ending is ever emitted after the last line.
#[test]
fn encode_line_ending_lf_partial_last_line() {
    let config = Config::new(
        CharacterSet::Standard,
        true,
        false,
        LineWrap::Wrap(3, LineEnding::LF),
    );
    assert_eq!("Zm9\nvYm\nFy", encode_config(b"foobar", config));
}
#[test]
fn encode_line_ending_crlf_partial_last_line() {
    let config = Config::new(
        CharacterSet::Standard,
        true,
        false,
        LineWrap::Wrap(3, LineEnding::CRLF),
    );
    assert_eq!("Zm9\r\nvYm\r\nFy", encode_config(b"foobar", config));
}
#[test]
fn encode_line_ending_lf_full_last_line() {
    let config = Config::new(
        CharacterSet::Standard,
        true,
        false,
        LineWrap::Wrap(4, LineEnding::LF),
    );
    assert_eq!("Zm9v\nYmFy", encode_config(b"foobar", config));
}
#[test]
fn encode_line_ending_crlf_full_last_line() {
    let config = Config::new(
        CharacterSet::Standard,
        true,
        false,
        LineWrap::Wrap(4, LineEnding::CRLF),
    );
    assert_eq!("Zm9v\r\nYmFy", encode_config(b"foobar", config));
}
#[test]
fn encode_url_safe_without_padding() {
    let encoded = encode_config(b"alice", URL_SAFE_NO_PAD);
    assert_eq!(&encoded, "YWxpY2U");
    // the unpadded form still decodes with the default (padding-tolerant) config
    assert_eq!(
        String::from_utf8(decode(&encoded).unwrap()).unwrap(),
        "alice"
    );
}

14
third_party/rust/base64-0.9.3/tests/helpers.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,14 @@
extern crate base64;
use base64::*;
/// Assert that `target` decodes to `expected`, exercising both the `&str`
/// and the `&[u8]` forms of `decode`'s generic input.
pub fn compare_decode(expected: &str, target: &str) {
    let via_str = decode(target).unwrap();
    assert_eq!(expected, String::from_utf8(via_str).unwrap());
    let via_bytes = decode(target.as_bytes()).unwrap();
    assert_eq!(expected, String::from_utf8(via_bytes).unwrap());
}

191
third_party/rust/base64-0.9.3/tests/tests.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,191 @@
extern crate base64;
extern crate rand;
use rand::Rng;
use base64::*;
mod helpers;
use helpers::*;
// generate random contents of the specified length and test encode/decode roundtrip
//
// `byte_buf` and `str_buf` are caller-owned so their allocations are reused
// across the many lengths a single test iterates over.
fn roundtrip_random(
    byte_buf: &mut Vec<u8>,
    str_buf: &mut String,
    config: Config,
    byte_len: usize,
    approx_values_per_byte: u8,
    max_rounds: u64,
) {
    // let the short ones be short but don't let it get too crazy large
    let num_rounds = calculate_number_of_rounds(byte_len, approx_values_per_byte, max_rounds);
    let mut r = rand::weak_rng();
    let mut decode_buf = Vec::new();
    for _ in 0..num_rounds {
        byte_buf.clear();
        str_buf.clear();
        decode_buf.clear();
        // fill the input with byte_len random bytes
        while byte_buf.len() < byte_len {
            byte_buf.push(r.gen::<u8>());
        }
        encode_config_buf(&byte_buf, config, str_buf);
        decode_config_buf(&str_buf, config, &mut decode_buf).unwrap();
        // decoding must reproduce the input exactly
        assert_eq!(byte_buf, &decode_buf);
    }
}
/// Pick an iteration count for a given input length: start from
/// `approx_values_per_byte` and square it once per input byte (so longer
/// inputs get many more rounds), returning `max` as soon as the running
/// product exceeds it. `saturating_mul` keeps the arithmetic from
/// overflowing. Note the cap is checked *before* each squaring, so the
/// value actually returned can be one squaring past `max`.
fn calculate_number_of_rounds(byte_len: usize, approx_values_per_byte: u8, max: u64) -> u64 {
    let seed = u64::from(approx_values_per_byte);
    (0..byte_len)
        .try_fold(seed, |rounds, _| {
            if rounds > max {
                None
            } else {
                Some(rounds.saturating_mul(rounds))
            }
        })
        .unwrap_or(max)
}
// Standard charset with padding, whitespace stripping, and line wrapping all disabled.
fn no_pad_config() -> Config {
    Config::new(CharacterSet::Standard, false, false, LineWrap::NoWrap)
}
// Roundtrip sweeps over input lengths: 0..40 hits the short scalar paths,
// 40..100 reaches the fast/unrolled loops; each with and without padding.
#[test]
fn roundtrip_random_short_standard() {
    let mut byte_buf: Vec<u8> = Vec::new();
    let mut str_buf = String::new();
    for input_len in 0..40 {
        roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 10000);
    }
}
#[test]
fn roundtrip_random_with_fast_loop_standard() {
    let mut byte_buf: Vec<u8> = Vec::new();
    let mut str_buf = String::new();
    for input_len in 40..100 {
        roundtrip_random(&mut byte_buf, &mut str_buf, STANDARD, input_len, 4, 1000);
    }
}
#[test]
fn roundtrip_random_short_no_padding() {
    let mut byte_buf: Vec<u8> = Vec::new();
    let mut str_buf = String::new();
    for input_len in 0..40 {
        roundtrip_random(
            &mut byte_buf,
            &mut str_buf,
            no_pad_config(),
            input_len,
            4,
            10000,
        );
    }
}
#[test]
fn roundtrip_random_no_padding() {
    let mut byte_buf: Vec<u8> = Vec::new();
    let mut str_buf = String::new();
    for input_len in 40..100 {
        roundtrip_random(
            &mut byte_buf,
            &mut str_buf,
            no_pad_config(),
            input_len,
            4,
            1000,
        );
    }
}
#[test]
fn roundtrip_decode_trailing_10_bytes() {
    // Decoding consumes 8-byte input blocks (ideally unrolled to 32) in
    // stages 1 and 2, and each block writes a full u64 (8 bytes) to the
    // output — i.e. 2 garbage bytes that the NEXT block must overwrite. A
    // trailing block of only 2 input bytes decodes to 1 byte, too short to
    // cover those garbage bytes, which is what stage 3 exists to handle.
    // "EFGHIJKLZg" is 10 chars -> 7 decoded bytes, exercising that path.
    for quads in 0..25 {
        let mut encoded = "ABCD".repeat(quads);
        encoded.push_str("EFGHIJKLZg");
        let decoded = decode(&encoded).unwrap();
        assert_eq!(quads * 3 + 7, decoded.len());
        assert_eq!(encoded, encode_config(&decoded, STANDARD_NO_PAD));
    }
}
#[test]
fn display_wrapper_matches_normal_encode() {
    // Cover every possible byte value, 0 through 255, in order.
    let bytes: Vec<u8> = (0u16..256).map(|b| b as u8).collect();
    assert_eq!(
        encode(&bytes),
        format!("{}", base64::display::Base64Display::standard(&bytes))
    );
}
#[test]
fn because_we_can() {
    // Sanity checks layered on top of each other, for fun.
    let plain = b"alice";
    compare_decode("alice", "YWxpY2U=");
    compare_decode("alice", &encode(plain));
    let re_encoded = encode(&decode(&encode(plain)).unwrap());
    compare_decode("alice", &re_encoded);
}
#[test]
fn encode_config_slice_can_use_inline_buffer() {
    // 16 input bytes encode to 22 base64 chars without padding, 24 with.
    let mut buf: [u8; 22] = [0; 22];
    let mut larger_buf: [u8; 24] = [0; 24];
    let mut input: [u8; 16] = [0; 16];
    let mut rng = rand::weak_rng();
    for elt in &mut input {
        *elt = rng.gen();
    }
    assert_eq!(22, encode_config_slice(&input, STANDARD_NO_PAD, &mut buf));
    let decoded = decode_config(&buf, STANDARD_NO_PAD).unwrap();
    assert_eq!(decoded, input);
    // let's try it again with padding
    assert_eq!(24, encode_config_slice(&input, STANDARD, &mut larger_buf));
    // Bug fix: previously this decoded `buf` (the unpadded output from the
    // first half of the test), so the padded encode into `larger_buf` was
    // never actually round-tripped.
    let decoded = decode_config(&larger_buf, STANDARD).unwrap();
    assert_eq!(decoded, input);
}
#[test]
#[should_panic(expected = "index 24 out of range for slice of length 22")]
fn encode_config_slice_panics_when_buffer_too_small() {
    // A 16 byte input needs 24 output bytes with padding; 22 is too few.
    let mut buf: [u8; 22] = [0; 22];
    let mut input: [u8; 16] = [0; 16];
    let mut rng = rand::weak_rng();
    for b in input.iter_mut() {
        *b = rng.gen();
    }
    encode_config_slice(&input, STANDARD, &mut buf);
}

1
third_party/rust/plist/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{"Cargo.toml":"27365823f4b2c0004e347f184809c53acbc35e40a9308b9769cbd536daa05c80","LICENCE":"5b0ae40d1a35f7ae6591a28e44771240e6a88cb03a66c9189a45b9681639b466","README.md":"dee6eca91153cd02d93ac632e0fb49ec483b53d0c1547c33d746deae5dbce0f5","src/date.rs":"14640d162b3b03e7e1a176e245cb9658ef78e2ac538ed20ac620410d1ef4fb65","src/de.rs":"19c315a5f77f943d41f643f9714b726dfbbe0e8a9a414f1b3bd608c73f7d7df6","src/lib.rs":"231493cc5bc0ad6e2c5c83a2a8bcf52d2213e71c0c81f5f130d496e7bf8609cd","src/ser.rs":"b363bb65aca97ee952c73660864f510d6c3c2f254f097db87b591e2066929ae5","src/stream/binary_reader.rs":"f4b6eac1ed6539959c8844db4d01d220e0f7442170d33f8779b2e79883fddc95","src/stream/mod.rs":"0464cbc0c0ae1004e7f501cabd478a6a5b67bc5aac118a0b698102c75d5fd195","src/stream/xml_reader.rs":"cfd511a1de09d1ad348c6f010ee82ccefe0c2b2dacfc205967bd7c7ddd743055","src/stream/xml_writer.rs":"18ff60f7ed9d75f9b2085c1a00f44c40e3733128d33038195a1ba2a2f7c0aadf","src/value.rs":"da39ce4da6380d46ba925d500eac5f8ffbd25853648de298a09c6ecf865ac889","tests/data/binary.plist":"0317d32d466ddb926ba5ab037fd6cf5165d6607c3e15f945378938e63d8d4543","tests/data/binary_circular_array.plist":"825aed6ce83a8abdbe15d4686ce35157f4eb861bd792f0ce790115fb4ec48805","tests/data/binary_zero_offset_size.plist":"020953c8211989d01b5edf42e025560f46ece3b604ceda03708819bd2587a1a1","tests/data/utf16_bplist.plist":"c0b7d33001021df98d8631c604c8471e74e4b4599bac51a8bed149ca82adbcd5","tests/data/xml.plist":"f084b950c8037f798e6946769f48ed6d49e9956fe34c37ed4c4803bab9aa4830","tests/data/xml_error.plist":"3718f7dd2c716a4a6c36d1f7055b78d86c982c812c19964f85a6f62eff1589ea","tests/fuzzer.rs":"5acd1e2a9ea794bbcba98bfb52a90242a5b45b84c0a2a9f2dfd111e3e345ac75","tests/serde_tests/mod.rs":"d994823d65a8808e1e8c93f3cd87df33c5c7c9dc766b14719bb572e65bf5ad02","tests/tests.rs":"872144648147d8e649e68682e196e73bf8a28d2de1847ac595262560cda39a60"},"package":"95bef0807b4fe77618f8d24f0c4ec37a4ad1dad9348c3b27d8b624c824d8cf48"}

42
third_party/rust/plist/Cargo.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,42 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "plist"
version = "0.4.0"
authors = ["Ed Barnard <eabarnard@gmail.com>"]
description = "A rusty plist parser. Supports Serde serialization."
documentation = "https://docs.rs/plist/0.4.0/plist/"
keywords = ["plist", "parser"]
categories = ["config", "encoding", "parser-implementations"]
license = "MIT"
repository = "https://github.com/ebarnard/rust-plist/"
[dependencies.base64]
version = "0.9.0"
[dependencies.byteorder]
version = "1.1.0"
[dependencies.humantime]
version = "1.1.1"
[dependencies.serde]
version = "1.0.2"
optional = true
[dependencies.xml-rs]
version = "0.8.0"
[dev-dependencies.serde_derive]
version = "1.0.2"
[features]
default = ["serde"]

19
third_party/rust/plist/LICENCE поставляемый Normal file
Просмотреть файл

@ -0,0 +1,19 @@
Copyright (c) 2015 Edward Barnard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

7
third_party/rust/plist/README.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,7 @@
# Plist
A rusty plist parser.
[![Build Status](https://travis-ci.org/ebarnard/rust-plist.svg?branch=master)](https://travis-ci.org/ebarnard/rust-plist)
[Documentation](https://docs.rs/plist/)

149
third_party/rust/plist/src/date.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,149 @@
use humantime;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::result::Result as StdResult;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
/// A UTC timestamp. Used for serialization to and from the plist date type.
#[derive(Clone, Copy, PartialEq)]
pub struct Date {
    // The moment in time, stored as a std `SystemTime`.
    inner: SystemTime,
}
impl Date {
    /// Parses an RFC 3339 date string (e.g. `2001-01-01T00:00:00Z`).
    ///
    /// The underlying humantime parse error is discarded and flattened to `()`.
    pub(crate) fn from_rfc3339(date: &str) -> Result<Self, ()> {
        Ok(Date {
            inner: humantime::parse_rfc3339(date).map_err(|_| ())?,
        })
    }

    /// Formats the date as an RFC 3339 string.
    pub(crate) fn to_rfc3339(&self) -> String {
        format!("{}", humantime::format_rfc3339(self.inner))
    }

    /// Converts a (possibly fractional, possibly negative) number of seconds
    /// relative to the plist epoch into a `Date`.
    ///
    /// Returns `Err(())` for non-finite input (NaN or infinity).
    pub(crate) fn from_seconds_since_plist_epoch(timestamp: f64) -> Result<Date, ()> {
        // `timestamp` is the number of seconds since the plist epoch of 1/1/2001 00:00:00.
        // `PLIST_EPOCH_UNIX_TIMESTAMP` is the unix timestamp of the plist epoch.
        const PLIST_EPOCH_UNIX_TIMESTAMP: u64 = 978_307_200;
        let plist_epoch = UNIX_EPOCH + Duration::from_secs(PLIST_EPOCH_UNIX_TIMESTAMP);
        if !timestamp.is_finite() {
            return Err(());
        }
        // Split the magnitude into whole seconds plus nanoseconds; the sign
        // decides whether the offset is added to or subtracted from the epoch.
        let is_negative = timestamp < 0.0;
        let timestamp = timestamp.abs();
        let seconds = timestamp.floor() as u64;
        let subsec_nanos = (timestamp.fract() * 1e9) as u32;
        let dur_since_plist_epoch = Duration::new(seconds, subsec_nanos);
        // NOTE(review): `SystemTime` +/- `Duration` panics on overflow, so an
        // offset beyond the representable time range aborts instead of
        // returning Err — confirm this is acceptable for untrusted plists.
        let inner = if is_negative {
            plist_epoch - dur_since_plist_epoch
        } else {
            plist_epoch + dur_since_plist_epoch
        };
        Ok(Date { inner })
    }
}
// Debug output is the RFC 3339 form of the timestamp, delegated to
// humantime's Display implementation.
impl fmt::Debug for Date {
    fn fmt(&self, f: &mut fmt::Formatter) -> StdResult<(), fmt::Error> {
        fmt::Display::fmt(&humantime::format_rfc3339(self.inner), f)
    }
}
// TODO: Remove manual impl once minimum Rust version reaches 1.24.0.
impl Hash for Date {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash a direction flag plus the absolute offset from the unix epoch,
        // so times before and after the epoch cannot collide.
        match self.inner.duration_since(UNIX_EPOCH) {
            Ok(since_epoch) => {
                false.hash(state);
                since_epoch.hash(state);
            }
            Err(err) => {
                true.hash(state);
                err.duration().hash(state);
            }
        }
    }
}
// Allows any `SystemTime` to be wrapped as a plist `Date`.
impl From<SystemTime> for Date {
    fn from(time: SystemTime) -> Self {
        Date { inner: time }
    }
}
impl Into<SystemTime> for Date {
fn into(self) -> SystemTime {
self.inner
}
}
#[cfg(feature = "serde")]
pub mod serde_impls {
    //! Serde support for `Date`.
    //!
    //! A date is serialized as a newtype struct named
    //! `DATE_NEWTYPE_STRUCT_NAME` wrapping its RFC 3339 string form. The
    //! plist serializer/deserializer recognise this magic name and map the
    //! value to the native plist date type; any other serde backend simply
    //! sees a tagged string.
    use serde::de::{Deserialize, Deserializer, Error, Unexpected, Visitor};
    use serde::ser::{Serialize, Serializer};
    use std::fmt;

    use Date;

    /// Marker name used to route dates through the serde data model.
    pub const DATE_NEWTYPE_STRUCT_NAME: &str = "PLIST-DATE";

    impl Serialize for Date {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            let date_str = self.to_rfc3339();
            serializer.serialize_newtype_struct(DATE_NEWTYPE_STRUCT_NAME, &date_str)
        }
    }

    // Unwraps the newtype layer, then delegates to `DateStrVisitor`.
    struct DateNewtypeVisitor;

    impl<'de> Visitor<'de> for DateNewtypeVisitor {
        type Value = Date;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a plist date newtype")
        }

        fn visit_newtype_struct<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
        where
            D: Deserializer<'de>,
        {
            deserializer.deserialize_str(DateStrVisitor)
        }
    }

    // Parses the inner RFC 3339 string into a `Date`.
    struct DateStrVisitor;

    impl<'de> Visitor<'de> for DateStrVisitor {
        type Value = Date;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a plist date string")
        }

        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
        where
            E: Error,
        {
            Date::from_rfc3339(v).map_err(|()| E::invalid_value(Unexpected::Str(v), &self))
        }
    }

    impl<'de> Deserialize<'de> for Date {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            deserializer.deserialize_newtype_struct(DATE_NEWTYPE_STRUCT_NAME, DateNewtypeVisitor)
        }
    }
}

410
third_party/rust/plist/src/de.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,410 @@
use serde::de;
use std::fmt::Display;
use std::fs::File;
use std::io::{BufReader, Read, Seek};
use std::iter::Peekable;
use std::path::Path;
use stream::{self, Event};
use {u64_to_usize, Error};
/// Matches the next event from the stream against a pattern.
///
/// Two forms: `expect!(next, Pat)` returns the matched event itself, while
/// `expect!(next, Pat => expr)` evaluates `expr` with the pattern's bindings.
/// A `None` iterator item becomes `Error::UnexpectedEof`; any other outcome
/// (wrong event, or a reader error) becomes `event_mismatch_error()`.
/// Expands to `return`, so it may only be used inside `Result`-returning fns.
macro_rules! expect {
    ($next:expr, $pat:pat) => {
        match $next {
            Some(Ok(v @ $pat)) => v,
            None => return Err(Error::UnexpectedEof),
            _ => return Err(event_mismatch_error()),
        }
    };
    ($next:expr, $pat:pat => $save:expr) => {
        match $next {
            Some(Ok($pat)) => $save,
            None => return Err(Error::UnexpectedEof),
            _ => return Err(event_mismatch_error()),
        }
    };
}
/// Pulls the next event from the stream, mapping `None` to
/// `Error::UnexpectedEof` and a reader error to `event_mismatch_error()`.
/// Expands to `return`, so it may only be used inside `Result`-returning fns.
macro_rules! try_next {
    ($next:expr) => {
        match $next {
            Some(Ok(v)) => v,
            Some(Err(_)) => return Err(event_mismatch_error()),
            None => return Err(Error::UnexpectedEof),
        }
    };
}
/// The generic error used whenever the event stream does not have the shape
/// the deserializer expects.
fn event_mismatch_error() -> Error {
    Error::InvalidData
}
// Lets serde report custom messages (stored as `Error::Serde`) during
// deserialization.
impl de::Error for Error {
    fn custom<T: Display>(msg: T) -> Self {
        Error::Serde(msg.to_string())
    }
}
/// A structure that deserializes plist event streams into Rust values.
pub struct Deserializer<I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    // Peekable so container accessors can look ahead for the closing
    // EndArray/EndDictionary event without consuming it.
    events: Peekable<<I as IntoIterator>::IntoIter>,
}
impl<I> Deserializer<I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    /// Creates a deserializer reading from the given event iterator.
    pub fn new(iter: I) -> Deserializer<I> {
        let events = iter.into_iter().peekable();
        Deserializer { events }
    }
}
// The core serde deserializer, driven by the next event in the stream.
// Encodings mirror ser.rs exactly:
//   * unit / unit struct -> empty string
//   * None               -> {"None": ""}, Some(v) -> {"Some": v}
//   * enum variants      -> {variant_name: payload}
impl<'de, 'a, I> de::Deserializer<'de> for &'a mut Deserializer<I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;

    fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        match try_next!(self.events.next()) {
            Event::StartArray(len) => {
                // A declared length that doesn't fit in usize becomes None
                // (no size hint) rather than an error.
                let len = len.and_then(u64_to_usize);
                let ret = visitor.visit_seq(MapAndSeqAccess::new(self, false, len))?;
                // The access wrapper stops *before* the closing event; consume
                // it here and fail if it isn't the expected one.
                expect!(self.events.next(), Event::EndArray);
                Ok(ret)
            }
            // A closing event with no matching opener is malformed input.
            Event::EndArray => Err(event_mismatch_error()),
            Event::StartDictionary(len) => {
                let len = len.and_then(u64_to_usize);
                let ret = visitor.visit_map(MapAndSeqAccess::new(self, false, len))?;
                expect!(self.events.next(), Event::EndDictionary);
                Ok(ret)
            }
            Event::EndDictionary => Err(event_mismatch_error()),
            Event::BooleanValue(v) => visitor.visit_bool(v),
            Event::DataValue(v) => visitor.visit_byte_buf(v),
            // Dates surface as their RFC 3339 string form here.
            Event::DateValue(v) => visitor.visit_string(v.to_rfc3339()),
            // Positive integers are offered as u64, everything else as i64.
            Event::IntegerValue(v) if v.is_positive() => visitor.visit_u64(v as u64),
            Event::IntegerValue(v) => visitor.visit_i64(v as i64),
            Event::RealValue(v) => visitor.visit_f64(v),
            Event::StringValue(v) => visitor.visit_string(v),
        }
    }

    forward_to_deserialize_any! {
        bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string
        seq bytes byte_buf map unit_struct
        tuple_struct tuple ignored_any identifier
    }

    fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // Units are stored as a string event (the serializer writes "").
        expect!(self.events.next(), Event::StringValue(_));
        visitor.visit_unit()
    }

    fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // Options are stored as a single-key dictionary keyed "None" or "Some".
        expect!(self.events.next(), Event::StartDictionary(_));
        let ret = match try_next!(self.events.next()) {
            Event::StringValue(ref s) if &s[..] == "None" => {
                // The "None" key maps to the unit encoding (an empty string).
                expect!(self.events.next(), Event::StringValue(_));
                visitor.visit_none::<Self::Error>()?
            }
            Event::StringValue(ref s) if &s[..] == "Some" => visitor.visit_some(&mut *self)?,
            _ => return Err(event_mismatch_error()),
        };
        expect!(self.events.next(), Event::EndDictionary);
        Ok(ret)
    }

    fn deserialize_newtype_struct<V>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // Newtypes are transparent; the wrapped value follows immediately.
        visitor.visit_newtype_struct(self)
    }

    fn deserialize_struct<V>(
        self,
        _name: &'static str,
        _fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // Structs are dictionaries; `is_struct = true` routes field values
        // through StructValueDeserializer (Option-aware, see below).
        expect!(self.events.next(), Event::StartDictionary(_));
        let ret = visitor.visit_map(MapAndSeqAccess::new(self, true, None))?;
        expect!(self.events.next(), Event::EndDictionary);
        Ok(ret)
    }

    fn deserialize_enum<V>(
        self,
        _enum: &'static str,
        _variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // Enums are {variant_name: payload} dictionaries; the EnumAccess
        // impl below reads the key and payload.
        expect!(self.events.next(), Event::StartDictionary(_));
        let ret = visitor.visit_enum(&mut *self)?;
        expect!(self.events.next(), Event::EndDictionary);
        Ok(ret)
    }
}
// Reads the variant-name half of a {variant_name: payload} enum dictionary;
// the surrounding dictionary events were consumed by `deserialize_enum`.
impl<'de, 'a, I> de::EnumAccess<'de> for &'a mut Deserializer<I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;
    type Variant = Self;

    fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self), Self::Error>
    where
        V: de::DeserializeSeed<'de>,
    {
        // The variant name is the dictionary key (the next string event).
        Ok((seed.deserialize(&mut *self)?, self))
    }
}
// Reads the payload half of a {variant_name: payload} enum dictionary.
impl<'de, 'a, I> de::VariantAccess<'de> for &'a mut Deserializer<I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;

    fn unit_variant(self) -> Result<(), Self::Error> {
        // Unit payloads use the unit encoding (an empty string event), which
        // `()`'s Deserialize impl consumes via `deserialize_unit`.
        <() as de::Deserialize>::deserialize(self)
    }

    fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
    where
        T: de::DeserializeSeed<'de>,
    {
        seed.deserialize(self)
    }

    fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        <Self as de::Deserializer>::deserialize_tuple(self, len, visitor)
    }

    fn struct_variant<V>(
        self,
        fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // `deserialize_struct` ignores the name, so an empty one suffices.
        let name = "";
        <Self as de::Deserializer>::deserialize_struct(self, name, fields, visitor)
    }
}
// Deserializes a single struct field value. It differs from the plain
// `Deserializer` only in how `Option` is handled: a field that is present in
// the dictionary at all must be `Some` (see `deserialize_option` below).
pub struct StructValueDeserializer<'a, I: 'a>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    // The parent deserializer the value is actually read from.
    de: &'a mut Deserializer<I>,
}
// Forwards every call to the parent `Deserializer` except
// `deserialize_option`: the serializer omits `None` struct fields entirely,
// so a field that made it into the plist is always `Some`.
impl<'de, 'a, I> de::Deserializer<'de> for StructValueDeserializer<'a, I>
where
    I: IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;

    fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        self.de.deserialize_any(visitor)
    }

    forward_to_deserialize_any! {
        bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string
        seq bytes byte_buf map unit_struct
        tuple_struct tuple ignored_any identifier
    }

    fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        self.de.deserialize_unit(visitor)
    }

    fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        // None struct values are ignored so if we're here the value must be Some.
        visitor.visit_some(self.de)
    }

    fn deserialize_newtype_struct<V>(
        self,
        name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        self.de.deserialize_newtype_struct(name, visitor)
    }

    fn deserialize_struct<V>(
        self,
        name: &'static str,
        fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        self.de.deserialize_struct(name, fields, visitor)
    }

    fn deserialize_enum<V>(
        self,
        enum_: &'static str,
        variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: de::Visitor<'de>,
    {
        self.de.deserialize_enum(enum_, variants, visitor)
    }
}
// Shared element-access helper for both sequences (plist arrays) and maps
// (plist dictionaries).
struct MapAndSeqAccess<'a, I>
where
    I: 'a + IntoIterator<Item = Result<Event, Error>>,
{
    // Parent deserializer the elements are read from.
    de: &'a mut Deserializer<I>,
    // True when deserializing a struct: values then go through the
    // Option-aware `StructValueDeserializer`.
    is_struct: bool,
    // Remaining element count from the container header, if it carried one;
    // used only as serde's size hint.
    remaining: Option<usize>,
}
impl<'a, I> MapAndSeqAccess<'a, I>
where
    I: 'a + IntoIterator<Item = Result<Event, Error>>,
{
    /// Creates an access wrapper over `de` with an optional element count.
    fn new(
        de: &'a mut Deserializer<I>,
        is_struct: bool,
        len: Option<usize>,
    ) -> MapAndSeqAccess<'a, I> {
        MapAndSeqAccess {
            remaining: len,
            is_struct,
            de,
        }
    }
}
impl<'de, 'a, I> de::SeqAccess<'de> for MapAndSeqAccess<'a, I>
where
    I: 'a + IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;

    fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
    where
        T: de::DeserializeSeed<'de>,
    {
        // A peeked EndArray means the sequence is done; the event itself is
        // left in the stream for `deserialize_any` to consume.
        if let Some(&Ok(Event::EndArray)) = self.de.events.peek() {
            return Ok(None);
        }
        // Keep the size hint in step; saturating in case the declared length
        // undercounted.
        self.remaining = self.remaining.map(|r| r.saturating_sub(1));
        seed.deserialize(&mut *self.de).map(Some)
    }

    fn size_hint(&self) -> Option<usize> {
        self.remaining
    }
}
impl<'de, 'a, I> de::MapAccess<'de> for MapAndSeqAccess<'a, I>
where
    I: 'a + IntoIterator<Item = Result<Event, Error>>,
{
    type Error = Error;

    fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
    where
        K: de::DeserializeSeed<'de>,
    {
        // A peeked EndDictionary means the map is done; the event itself is
        // left in the stream for the caller to consume.
        if let Some(&Ok(Event::EndDictionary)) = self.de.events.peek() {
            return Ok(None);
        }
        self.remaining = self.remaining.map(|r| r.saturating_sub(1));
        seed.deserialize(&mut *self.de).map(Some)
    }

    fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
    where
        V: de::DeserializeSeed<'de>,
    {
        if self.is_struct {
            // Struct fields get the Option-aware wrapper so present fields
            // deserialize as Some without a {"Some": ...} dictionary.
            seed.deserialize(StructValueDeserializer { de: &mut *self.de })
        } else {
            seed.deserialize(&mut *self.de)
        }
    }

    fn size_hint(&self) -> Option<usize> {
        self.remaining
    }
}
/// Deserializes an instance of type `T` from a plist file of any encoding.
pub fn from_file<P: AsRef<Path>, T: de::DeserializeOwned>(path: P) -> Result<T, Error> {
    // Buffer the file and delegate to the generic reader entry point.
    from_reader(BufReader::new(File::open(path)?))
}
/// Deserializes an instance of type `T` from a seekable byte stream containing a plist file of any encoding.
pub fn from_reader<R: Read + Seek, T: de::DeserializeOwned>(reader: R) -> Result<T, Error> {
    // NOTE(review): `stream::Reader` appears to select the concrete (binary
    // vs XML) parser from the stream contents, which is presumably why `Seek`
    // is required here — confirm in the stream module.
    let reader = stream::Reader::new(reader);
    let mut de = Deserializer::new(reader);
    de::Deserialize::deserialize(&mut de)
}
/// Deserializes an instance of type `T` from a byte stream containing an XML encoded plist file.
pub fn from_reader_xml<R: Read, T: de::DeserializeOwned>(reader: R) -> Result<T, Error> {
    // XML-only path: no format detection, so plain `Read` suffices.
    let reader = stream::XmlReader::new(reader);
    let mut de = Deserializer::new(reader);
    de::Deserialize::deserialize(&mut de)
}

131
third_party/rust/plist/src/lib.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,131 @@
//! # Plist
//!
//! A rusty plist parser.
//!
//! ## Usage
//!
//! Put this in your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! plist = "0.4"
//! ```
//!
//! And put this in your crate root:
//!
//! ```rust
//! extern crate plist;
//! ```
//!
//! ## Examples
//!
//! ```rust
//! use plist::Value;
//!
//! let value = Value::from_file("tests/data/xml.plist").unwrap();
//!
//! match value {
//! Value::Array(_array) => (),
//! _ => ()
//! }
//! ```
//!
//! ```rust
//! extern crate plist;
//! # #[cfg(feature = "serde")]
//! #[macro_use]
//! extern crate serde_derive;
//!
//! # #[cfg(feature = "serde")]
//! # fn main() {
//! #[derive(Deserialize)]
//! #[serde(rename_all = "PascalCase")]
//! struct Info {
//! author: String,
//! height: f32,
//! }
//!
//! let info: Info = plist::from_file("tests/data/xml.plist").unwrap();
//! # }
//! #
//! # #[cfg(not(feature = "serde"))]
//! # fn main() {}
//! ```
extern crate base64;
extern crate byteorder;
extern crate humantime;
extern crate xml as xml_rs;
pub mod stream;
mod date;
mod value;
pub use date::Date;
pub use value::Value;
// Optional serde module
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde;
#[cfg(feature = "serde")]
mod de;
#[cfg(feature = "serde")]
mod ser;
#[cfg(feature = "serde")]
pub use self::de::{from_file, from_reader, from_reader_xml, Deserializer};
#[cfg(feature = "serde")]
pub use self::ser::{to_writer_xml, Serializer};
use std::fmt;
use std::io;
/// Errors that can occur while reading or writing a plist.
#[derive(Debug)]
pub enum Error {
    /// The input was structurally or syntactically malformed.
    InvalidData,
    /// The input ended before a complete value was read.
    UnexpectedEof,
    /// An underlying I/O error.
    Io(io::Error),
    /// A message produced by serde serialization or deserialization.
    Serde(String),
}
impl ::std::error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::InvalidData => "invalid data",
            Error::UnexpectedEof => "unexpected eof",
            // Delegate to the wrapped error's own description.
            Error::Io(ref err) => err.description(),
            Error::Serde(ref err) => &err,
        }
    }

    fn cause(&self) -> Option<&::std::error::Error> {
        // Only the I/O variant wraps an underlying error.
        match *self {
            Error::Io(ref err) => Some(err),
            _ => None,
        }
    }
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Io(ref err) => err.fmt(fmt),
_ => <Self as ::std::error::Error>::description(self).fmt(fmt),
}
}
}
// Allows `?` on I/O operations inside plist code.
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::Io(err)
    }
}
/// Converts a `u64` length to `usize`, returning `None` when the value does
/// not fit on the current platform (e.g. very large lengths on 32-bit).
fn u64_to_usize(len_u64: u64) -> Option<usize> {
    let len = len_u64 as usize;
    // The cast truncates on narrow platforms; converting back detects loss.
    if len as u64 == len_u64 {
        Some(len)
    } else {
        None
    }
}

714
third_party/rust/plist/src/ser.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,714 @@
use serde::ser;
use std::fmt::Display;
use std::io::Write;
use date::serde_impls::DATE_NEWTYPE_STRUCT_NAME;
use stream::{self, Event, Writer};
use {Date, Error};
// Lets serde report custom messages (stored as `Error::Serde`) during
// serialization.
impl ser::Error for Error {
    fn custom<T: Display>(msg: T) -> Self {
        Error::Serde(msg.to_string())
    }
}
/// A structure that serializes Rust values into plist event streams.
pub struct Serializer<W: Writer> {
    // Sink that receives the emitted plist events.
    writer: W,
}
impl<W: Writer> Serializer<W> {
    /// Wraps an event `Writer` in a serializer.
    pub fn new(writer: W) -> Serializer<W> {
        Serializer { writer }
    }

    /// Forwards a single event to the underlying writer.
    fn emit(&mut self, event: Event) -> Result<(), Error> {
        self.writer.write(&event)?;
        Ok(())
    }

    /// Consumes the serializer, returning the underlying writer.
    pub fn into_inner(self) -> W {
        self.writer
    }

    // Emit {key: value}: starts a one-entry dictionary and writes its key;
    // the caller then serializes the value and calls `single_key_dict_end`.
    fn single_key_dict(&mut self, key: String) -> Result<(), Error> {
        self.emit(Event::StartDictionary(Some(1)))?;
        self.emit(Event::StringValue(key))?;
        Ok(())
    }

    /// Closes a dictionary opened by `single_key_dict`.
    fn single_key_dict_end(&mut self) -> Result<(), Error> {
        self.emit(Event::EndDictionary)?;
        Ok(())
    }
}
// The core serde serializer. Encodings mirror de.rs exactly:
//   * unit / unit struct -> empty string
//   * None               -> {"None": ""}, Some(v) -> {"Some": v}
//   * enum variants      -> {variant_name: payload}
//   * every integer      -> the single i64-backed IntegerValue event
impl<'a, W: Writer> ser::Serializer for &'a mut Serializer<W> {
    type Ok = ();
    type Error = Error;
    type SerializeSeq = Compound<'a, W>;
    type SerializeTuple = Compound<'a, W>;
    type SerializeTupleStruct = Compound<'a, W>;
    type SerializeTupleVariant = Compound<'a, W>;
    type SerializeMap = Compound<'a, W>;
    type SerializeStruct = Compound<'a, W>;
    type SerializeStructVariant = Compound<'a, W>;

    fn serialize_bool(self, v: bool) -> Result<(), Self::Error> {
        self.emit(Event::BooleanValue(v))
    }

    // Signed widths widen losslessly into i64.
    fn serialize_i8(self, v: i8) -> Result<(), Self::Error> {
        self.serialize_i64(v.into())
    }

    fn serialize_i16(self, v: i16) -> Result<(), Self::Error> {
        self.serialize_i64(v.into())
    }

    fn serialize_i32(self, v: i32) -> Result<(), Self::Error> {
        self.serialize_i64(v.into())
    }

    fn serialize_i64(self, v: i64) -> Result<(), Self::Error> {
        self.emit(Event::IntegerValue(v))
    }

    // Unsigned widths widen into u64 first, then funnel through i64 below.
    fn serialize_u8(self, v: u8) -> Result<(), Self::Error> {
        self.serialize_u64(v.into())
    }

    fn serialize_u16(self, v: u16) -> Result<(), Self::Error> {
        self.serialize_u64(v.into())
    }

    fn serialize_u32(self, v: u32) -> Result<(), Self::Error> {
        self.serialize_u64(v.into())
    }

    fn serialize_u64(self, v: u64) -> Result<(), Self::Error> {
        // NOTE(review): this cast wraps for values above i64::MAX, which then
        // round-trip as negative integers — confirm this is intended.
        self.emit(Event::IntegerValue(v as i64))
    }

    fn serialize_f32(self, v: f32) -> Result<(), Self::Error> {
        self.serialize_f64(v.into())
    }

    fn serialize_f64(self, v: f64) -> Result<(), Self::Error> {
        self.emit(Event::RealValue(v))
    }

    fn serialize_char(self, v: char) -> Result<(), Self::Error> {
        self.emit(Event::StringValue(v.to_string()))
    }

    fn serialize_str(self, v: &str) -> Result<(), Self::Error> {
        self.emit(Event::StringValue(v.to_owned()))
    }

    fn serialize_bytes(self, v: &[u8]) -> Result<(), Self::Error> {
        self.emit(Event::DataValue(v.to_owned()))
    }

    // Options become single-key dictionaries so they survive a round trip.
    fn serialize_none(self) -> Result<(), Self::Error> {
        self.single_key_dict("None".to_owned())?;
        self.serialize_unit()?;
        self.single_key_dict_end()
    }

    fn serialize_some<T: ?Sized + ser::Serialize>(self, value: &T) -> Result<(), Self::Error> {
        self.single_key_dict("Some".to_owned())?;
        value.serialize(&mut *self)?;
        self.single_key_dict_end()
    }

    fn serialize_unit(self) -> Result<(), Self::Error> {
        // Emit empty string
        self.emit(Event::StringValue(String::new()))
    }

    fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> {
        self.serialize_unit()
    }

    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
    ) -> Result<(), Self::Error> {
        self.single_key_dict(variant.to_owned())?;
        self.serialize_unit()?;
        self.single_key_dict_end()?;
        Ok(())
    }

    fn serialize_newtype_struct<T: ?Sized + ser::Serialize>(
        self,
        name: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        // The magic newtype name emitted by `Date`'s Serialize impl routes
        // the wrapped RFC 3339 string to a native plist date event.
        if name == DATE_NEWTYPE_STRUCT_NAME {
            value.serialize(DateSerializer { ser: &mut *self })
        } else {
            value.serialize(self)
        }
    }

    fn serialize_newtype_variant<T: ?Sized + ser::Serialize>(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        self.single_key_dict(variant.to_owned())?;
        value.serialize(&mut *self)?;
        self.single_key_dict_end()
    }

    fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        let len = len.map(|len| len as u64);
        self.emit(Event::StartArray(len))?;
        Ok(Compound { ser: self })
    }

    fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        // Tuples are plain arrays with a known length.
        self.serialize_seq(Some(len))
    }

    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        self.serialize_tuple(len)
    }

    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        self.single_key_dict(variant.to_owned())?;
        self.serialize_tuple(len)
    }

    fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        let len = len.map(|len| len as u64);
        self.emit(Event::StartDictionary(len))?;
        Ok(Compound { ser: self })
    }

    fn serialize_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        // The number of struct fields is not known as fields with None values are ignored.
        self.serialize_map(None)
    }

    fn serialize_struct_variant(
        self,
        name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        self.single_key_dict(variant.to_owned())?;
        self.serialize_struct(name, len)
    }
}
// Serializes one struct field. The field-name key is written lazily (see
// `use_ser`) — only once the value is known to produce output — which is how
// `None` fields are omitted from the dictionary entirely.
struct StructFieldSerializer<'a, W: 'a + Writer> {
    // The serializer that receives the key and then the value.
    ser: &'a mut Serializer<W>,
    // The dictionary key to emit for this field.
    field_name: &'static str,
}
impl<'a, W: Writer> StructFieldSerializer<'a, W> {
    /// Commits to serializing this field: writes the field name as the
    /// dictionary key and hands back the underlying serializer for the value.
    fn use_ser(self) -> Result<&'a mut Serializer<W>, Error> {
        // We are going to serialize something so write the struct field name.
        self.ser
            .emit(Event::StringValue(self.field_name.to_owned()))?;
        Ok(self.ser)
    }
}
// A serde serializer that prefixes the value with its struct-field key. Every
// method first calls `use_ser` (which writes the key) and then delegates to
// the main serializer — except `serialize_none`, which writes nothing at all,
// dropping `None` fields from the output dictionary.
impl<'a, W: Writer> ser::Serializer for StructFieldSerializer<'a, W> {
    type Ok = ();
    type Error = Error;
    type SerializeSeq = Compound<'a, W>;
    type SerializeTuple = Compound<'a, W>;
    type SerializeTupleStruct = Compound<'a, W>;
    type SerializeTupleVariant = Compound<'a, W>;
    type SerializeMap = Compound<'a, W>;
    type SerializeStruct = Compound<'a, W>;
    type SerializeStructVariant = Compound<'a, W>;

    fn serialize_bool(self, v: bool) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_bool(v)
    }

    fn serialize_i8(self, v: i8) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_i8(v)
    }

    fn serialize_i16(self, v: i16) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_i16(v)
    }

    fn serialize_i32(self, v: i32) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_i32(v)
    }

    fn serialize_i64(self, v: i64) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_i64(v)
    }

    fn serialize_u8(self, v: u8) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_u8(v)
    }

    fn serialize_u16(self, v: u16) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_u16(v)
    }

    fn serialize_u32(self, v: u32) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_u32(v)
    }

    fn serialize_u64(self, v: u64) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_u64(v)
    }

    fn serialize_f32(self, v: f32) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_f32(v)
    }

    fn serialize_f64(self, v: f64) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_f64(v)
    }

    fn serialize_char(self, v: char) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_char(v)
    }

    fn serialize_str(self, v: &str) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_str(v)
    }

    fn serialize_bytes(self, v: &[u8]) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_bytes(v)
    }

    fn serialize_none(self) -> Result<(), Self::Error> {
        // Don't write a dict for None if the Option is in a struct.
        // `use_ser` is never called, so the field's key is never emitted.
        Ok(())
    }

    fn serialize_some<T: ?Sized + ser::Serialize>(self, value: &T) -> Result<(), Self::Error> {
        // A present field is written as its bare value (no {"Some": ...}
        // wrapper); the deserializer mirrors this in StructValueDeserializer.
        let ser = self.use_ser()?;
        value.serialize(ser)
    }

    fn serialize_unit(self) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_unit()
    }

    fn serialize_unit_struct(self, name: &'static str) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_unit_struct(name)
    }

    fn serialize_unit_variant(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
    ) -> Result<(), Self::Error> {
        self.use_ser()?
            .serialize_unit_variant(name, variant_index, variant)
    }

    fn serialize_newtype_struct<T: ?Sized + ser::Serialize>(
        self,
        name: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        self.use_ser()?.serialize_newtype_struct(name, value)
    }

    fn serialize_newtype_variant<T: ?Sized + ser::Serialize>(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        self.use_ser()?
            .serialize_newtype_variant(name, variant_index, variant, value)
    }

    fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        self.use_ser()?.serialize_seq(len)
    }

    fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        self.use_ser()?.serialize_tuple(len)
    }

    fn serialize_tuple_struct(
        self,
        name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        self.use_ser()?.serialize_tuple_struct(name, len)
    }

    fn serialize_tuple_variant(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        self.use_ser()?
            .serialize_tuple_variant(name, variant_index, variant, len)
    }

    fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        self.use_ser()?.serialize_map(len)
    }

    fn serialize_struct(
        self,
        name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        self.use_ser()?.serialize_struct(name, len)
    }

    fn serialize_struct_variant(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        self.use_ser()?
            .serialize_struct_variant(name, variant_index, variant, len)
    }
}
// Accepts only a string (the RFC 3339 form produced by `Date`'s serde impl)
// and turns it into a native plist date event; every other input type errors.
struct DateSerializer<'a, W: 'a + Writer> {
    // Serializer that receives the resulting DateValue event.
    ser: &'a mut Serializer<W>,
}
impl<'a, W: Writer> DateSerializer<'a, W> {
    /// The error returned for every input that is not a date string.
    fn expecting_date_error(&self) -> Error {
        ser::Error::custom("plist date string expected")
    }
}
// Only `serialize_str` can succeed: a plist date is produced exclusively from
// an RFC 3339 string, so every other serializer entry point rejects its input
// with `expecting_date_error`.
impl<'a, W: Writer> ser::Serializer for DateSerializer<'a, W> {
    type Ok = ();
    type Error = Error;
    type SerializeSeq = ser::Impossible<(), Error>;
    type SerializeTuple = ser::Impossible<(), Error>;
    type SerializeTupleStruct = ser::Impossible<(), Error>;
    type SerializeTupleVariant = ser::Impossible<(), Error>;
    type SerializeMap = ser::Impossible<(), Error>;
    type SerializeStruct = ser::Impossible<(), Error>;
    type SerializeStructVariant = ser::Impossible<(), Error>;
    fn serialize_bool(self, _: bool) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_i8(self, _: i8) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_i16(self, _: i16) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_i32(self, _: i32) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_i64(self, _: i64) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_u8(self, _: u8) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_u16(self, _: u16) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_u32(self, _: u32) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_u64(self, _: u64) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_f32(self, _: f32) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_f64(self, _: f64) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_char(self, _: char) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    /// Parses the string as an RFC 3339 date and emits it as a `DateValue`.
    fn serialize_str(self, v: &str) -> Result<(), Self::Error> {
        let date = Date::from_rfc3339(v).map_err(|()| self.expecting_date_error())?;
        self.ser.emit(Event::DateValue(date))
    }
    fn serialize_bytes(self, _: &[u8]) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_none(self) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_some<T: ?Sized + ser::Serialize>(self, _: &T) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_unit(self) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_unit_struct(self, _: &'static str) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_unit_variant(
        self,
        _: &'static str,
        _: u32,
        _: &'static str,
    ) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_newtype_struct<T: ?Sized + ser::Serialize>(
        self,
        _: &'static str,
        _: &T,
    ) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_newtype_variant<T: ?Sized + ser::Serialize>(
        self,
        _: &'static str,
        _: u32,
        _: &'static str,
        _: &T,
    ) -> Result<(), Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_seq(self, _: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_tuple(self, _: usize) -> Result<Self::SerializeTuple, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_tuple_struct(
        self,
        _: &'static str,
        _: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_tuple_variant(
        self,
        _: &'static str,
        _: u32,
        _: &'static str,
        _: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_map(self, _: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_struct(
        self,
        _: &'static str,
        _: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        Err(self.expecting_date_error())
    }
    fn serialize_struct_variant(
        self,
        _: &'static str,
        _: u32,
        _: &'static str,
        _: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        Err(self.expecting_date_error())
    }
}
#[doc(hidden)]
/// Shared compound serializer used for arrays, tuples, maps and structs.
/// Elements are written as a flat event stream on the wrapped `Serializer`.
pub struct Compound<'a, W: 'a + Writer> {
    ser: &'a mut Serializer<W>,
}
impl<'a, W: Writer> ser::SerializeSeq for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    /// Serializes one array element into the event stream.
    fn serialize_element<T: ?Sized + ser::Serialize>(
        &mut self,
        value: &T,
    ) -> Result<(), Self::Error> {
        value.serialize(&mut *self.ser)
    }
    /// Closes the array with an `EndArray` event.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        self.ser.emit(Event::EndArray)
    }
}
// Tuples are represented identically to sequences, so forward to the
// `SerializeSeq` implementation.
impl<'a, W: Writer> ser::SerializeTuple for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    fn serialize_element<T: ?Sized + ser::Serialize>(
        &mut self,
        value: &T,
    ) -> Result<(), Self::Error> {
        <Self as ser::SerializeSeq>::serialize_element(self, value)
    }
    fn end(self) -> Result<Self::Ok, Self::Error> {
        <Self as ser::SerializeSeq>::end(self)
    }
}
// Tuple structs are also represented as plain sequences.
impl<'a, W: Writer> ser::SerializeTupleStruct for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    fn serialize_field<T: ?Sized + ser::Serialize>(
        &mut self,
        value: &T,
    ) -> Result<(), Self::Error> {
        <Self as ser::SerializeSeq>::serialize_element(self, value)
    }
    fn end(self) -> Result<Self::Ok, Self::Error> {
        <Self as ser::SerializeSeq>::end(self)
    }
}
impl<'a, W: Writer> ser::SerializeTupleVariant for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    /// Serializes one variant field, like a sequence element.
    fn serialize_field<T: ?Sized + ser::Serialize>(
        &mut self,
        value: &T,
    ) -> Result<(), Self::Error> {
        <Self as ser::SerializeSeq>::serialize_element(self, value)
    }
    /// Closes the array and then the single-key wrapper dictionary that
    /// holds the variant name.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        self.ser.emit(Event::EndArray)?;
        self.ser.single_key_dict_end()
    }
}
// Maps are written as alternating key and value events between
// `StartDictionary` and `EndDictionary`.
impl<'a, W: Writer> ser::SerializeMap for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    fn serialize_key<T: ?Sized + ser::Serialize>(&mut self, key: &T) -> Result<(), Self::Error> {
        key.serialize(&mut *self.ser)
    }
    fn serialize_value<T: ?Sized + ser::Serialize>(
        &mut self,
        value: &T,
    ) -> Result<(), Self::Error> {
        value.serialize(&mut *self.ser)
    }
    fn end(self) -> Result<Self::Ok, Self::Error> {
        self.ser.emit(Event::EndDictionary)
    }
}
impl<'a, W: Writer> ser::SerializeStruct for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    /// Serializes one named struct field via `StructFieldSerializer`, which
    /// receives the field name alongside the value.
    fn serialize_field<T: ?Sized + ser::Serialize>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        // We don't want to serialize None if the Option is a struct field as this is how null
        // fields are represented in plists.
        value.serialize(StructFieldSerializer {
            field_name: key,
            ser: &mut *self.ser,
        })
    }
    /// Structs end the same way as maps: with an `EndDictionary` event.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        <Self as ser::SerializeMap>::end(self)
    }
}
impl<'a, W: Writer> ser::SerializeStructVariant for Compound<'a, W> {
    type Ok = ();
    type Error = Error;
    /// Fields are serialized exactly like plain struct fields.
    fn serialize_field<T: ?Sized + ser::Serialize>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error> {
        <Self as ser::SerializeStruct>::serialize_field(self, key, value)
    }
    /// Closes the struct dictionary and then the single-key wrapper
    /// dictionary that holds the variant name.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        self.ser.emit(Event::EndDictionary)?;
        self.ser.single_key_dict_end()
    }
}
/// Serializes the given data structure as an XML encoded plist file.
pub fn to_writer_xml<W: Write, T: ser::Serialize>(writer: W, value: &T) -> Result<(), Error> {
    // Wrap the byte sink in an XML event writer and drive it from serde.
    let writer = stream::XmlWriter::new(writer);
    let mut ser = Serializer::new(writer);
    value.serialize(&mut ser)
}

376
third_party/rust/plist/src/stream/binary_reader.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,376 @@
use byteorder::{BigEndian, ReadBytesExt};
use std::io::{Read, Seek, SeekFrom};
use std::mem::size_of;
use std::string::{FromUtf16Error, FromUtf8Error};
use stream::Event;
use {u64_to_usize, Date, Error};
// UTF-8 decoding failures in string objects are reported as malformed data.
impl From<FromUtf8Error> for Error {
    fn from(_: FromUtf8Error) -> Error {
        Error::InvalidData
    }
}
// UTF-16 decoding failures in string objects are reported as malformed data.
impl From<FromUtf16Error> for Error {
    fn from(_: FromUtf16Error) -> Error {
        Error::InvalidData
    }
}
/// One partially-read container (array or dict) on the parser stack.
struct StackItem {
    // Offset-table index of this container; used to detect reference cycles.
    object_ref: u64,
    // Remaining child references, stored reversed so they can be popped in order.
    child_object_refs: Vec<u64>,
    ty: StackType,
}
/// Kind of container currently being read, deciding the closing event.
enum StackType {
    Array,
    Dict,
}
// https://opensource.apple.com/source/CF/CF-550/CFBinaryPList.c
// https://hg.python.org/cpython/file/3.4/Lib/plistlib.py
/// Streaming event reader for Apple's binary plist ("bplist00") format.
pub struct BinaryReader<R> {
    // Containers currently being read, innermost last.
    stack: Vec<StackItem>,
    // Byte offset of each object, indexed by object reference.
    object_offsets: Vec<u64>,
    // Per-object flag used to reject cyclic (self-referencing) plists.
    object_on_stack: Vec<bool>,
    reader: R,
    // Width in bytes of object references; 0 until the trailer has been read.
    ref_size: u8,
    root_object: u64,
    // The largest single allocation allowed for this plist.
    // Equal to the number of bytes in the plist minus the magic number and trailer.
    max_allocation_bytes: usize,
}
impl<R: Read + Seek> BinaryReader<R> {
    /// Creates a reader; the trailer is parsed lazily on the first event.
    pub fn new(reader: R) -> BinaryReader<R> {
        BinaryReader {
            stack: Vec::new(),
            object_offsets: Vec::new(),
            object_on_stack: Vec::new(),
            reader,
            ref_size: 0,
            root_object: 0,
            max_allocation_bytes: 0,
        }
    }
    /// Returns whether `len` elements of `size` bytes fit within the
    /// allocation budget. Guards against hostile length fields causing
    /// huge allocations.
    fn can_allocate(&self, len: u64, size: usize) -> bool {
        let byte_len = len.saturating_mul(size as u64);
        byte_len <= self.max_allocation_bytes as u64
    }
    /// Allocates a `Vec` with capacity for `len` elements, or fails with
    /// `InvalidData` if that would exceed the allocation budget.
    fn allocate_vec<T>(&self, len: u64, size: usize) -> Result<Vec<T>, Error> {
        if self.can_allocate(len, size) {
            Ok(Vec::with_capacity(len as usize))
        } else {
            Err(Error::InvalidData)
        }
    }
    /// Validates the magic number and reads the trailer: offset/ref sizes,
    /// object count, root object and the object offset table.
    fn read_trailer(&mut self) -> Result<(), Error> {
        self.reader.seek(SeekFrom::Start(0))?;
        let mut magic = [0; 8];
        self.reader.read_exact(&mut magic)?;
        if &magic != b"bplist00" {
            return Err(Error::InvalidData);
        }
        // Trailer starts with 6 bytes of padding
        let trailer_start = self.reader.seek(SeekFrom::End(-32 + 6))?;
        let offset_size = self.reader.read_u8()?;
        match offset_size {
            1 | 2 | 4 | 8 => (),
            _ => return Err(Error::InvalidData),
        }
        self.ref_size = self.reader.read_u8()?;
        match self.ref_size {
            1 | 2 | 4 | 8 => (),
            _ => return Err(Error::InvalidData),
        }
        let num_objects = self.reader.read_u64::<BigEndian>()?;
        self.root_object = self.reader.read_u64::<BigEndian>()?;
        let offset_table_offset = self.reader.read_u64::<BigEndian>()?;
        // File size minus trailer and header
        // Truncated to max(usize)
        self.max_allocation_bytes = trailer_start.saturating_sub(8) as usize;
        // Read offset table
        self.reader.seek(SeekFrom::Start(offset_table_offset))?;
        self.object_offsets = self.read_ints(num_objects, offset_size)?;
        self.object_on_stack = vec![false; self.object_offsets.len()];
        Ok(())
    }
    /// Reads `len` big-endian unsigned integers, each `size` bytes wide.
    fn read_ints(&mut self, len: u64, size: u8) -> Result<Vec<u64>, Error> {
        let mut ints = self.allocate_vec(len, size as usize)?;
        for _ in 0..len {
            match size {
                1 => ints.push(self.reader.read_u8()?.into()),
                2 => ints.push(self.reader.read_u16::<BigEndian>()?.into()),
                4 => ints.push(self.reader.read_u32::<BigEndian>()?.into()),
                8 => ints.push(self.reader.read_u64::<BigEndian>()?),
                _ => return Err(Error::InvalidData),
            }
        }
        Ok(ints)
    }
    /// Reads `len` object references at the ref width from the trailer.
    fn read_refs(&mut self, len: u64) -> Result<Vec<u64>, Error> {
        let ref_size = self.ref_size;
        self.read_ints(len, ref_size)
    }
    /// Decodes an object length. Small lengths are packed in the low nibble
    /// of the marker byte; 0x0f means an integer of 2^n bytes follows.
    fn read_object_len(&mut self, len: u8) -> Result<u64, Error> {
        if (len & 0x0f) == 0x0f {
            let len_power_of_two = self.reader.read_u8()? & 0x03;
            Ok(match len_power_of_two {
                0 => self.reader.read_u8()?.into(),
                1 => self.reader.read_u16::<BigEndian>()?.into(),
                2 => self.reader.read_u32::<BigEndian>()?.into(),
                3 => self.reader.read_u64::<BigEndian>()?,
                _ => return Err(Error::InvalidData),
            })
        } else {
            Ok(len.into())
        }
    }
    /// Reads `len` raw bytes, subject to the allocation budget.
    fn read_data(&mut self, len: u64) -> Result<Vec<u8>, Error> {
        let mut data = self.allocate_vec(len, size_of::<u8>())?;
        data.resize(len as usize, 0);
        self.reader.read_exact(&mut data)?;
        Ok(data)
    }
    /// Seeks to the object with the given reference using the offset table.
    fn seek_to_object(&mut self, object_ref: u64) -> Result<u64, Error> {
        let object_ref = u64_to_usize(object_ref).ok_or(Error::InvalidData)?;
        let offset = *self
            .object_offsets
            .get(object_ref)
            .ok_or(Error::InvalidData)?;
        Ok(self.reader.seek(SeekFrom::Start(offset))?)
    }
    /// Pushes a container onto the stack, rejecting the plist if the same
    /// object is already being read (i.e. the references form a cycle).
    fn push_stack_item_and_check_for_recursion(&mut self, item: StackItem) -> Result<(), Error> {
        let object_ref = u64_to_usize(item.object_ref).expect("internal consistency error");
        let is_on_stack = &mut self.object_on_stack[object_ref];
        if *is_on_stack {
            return Err(Error::InvalidData);
        }
        *is_on_stack = true;
        self.stack.push(item);
        Ok(())
    }
    /// Pops the innermost container and clears its on-stack marker.
    fn pop_stack_item(&mut self) -> StackItem {
        let item = self.stack.pop().expect("internal consistency error");
        let object_ref = u64_to_usize(item.object_ref).expect("internal consistency error");
        self.object_on_stack[object_ref] = false;
        item
    }
    /// Produces the next event, or `Ok(None)` when the plist is complete.
    /// Object markers have the type in the high nibble and a size (or size
    /// marker) in the low nibble.
    fn read_next(&mut self) -> Result<Option<Event>, Error> {
        let object_ref = if self.ref_size == 0 {
            // Initialise here rather than in new
            self.read_trailer()?;
            self.root_object
        } else {
            let maybe_object_ref = if let Some(stack_item) = self.stack.last_mut() {
                stack_item.child_object_refs.pop()
            } else {
                // Finished reading the plist
                return Ok(None);
            };
            if let Some(object_ref) = maybe_object_ref {
                object_ref
            } else {
                // We're at the end of an array or dict. Pop the top stack item and return.
                let stack_item = self.pop_stack_item();
                match stack_item.ty {
                    StackType::Array => return Ok(Some(Event::EndArray)),
                    StackType::Dict => return Ok(Some(Event::EndDictionary)),
                }
            }
        };
        self.seek_to_object(object_ref)?;
        let token = self.reader.read_u8()?;
        let ty = (token & 0xf0) >> 4;
        let size = token & 0x0f;
        let result = match (ty, size) {
            (0x0, 0x00) => return Err(Error::InvalidData), // null
            (0x0, 0x08) => Some(Event::BooleanValue(false)),
            (0x0, 0x09) => Some(Event::BooleanValue(true)),
            (0x0, 0x0f) => return Err(Error::InvalidData), // fill
            // Integers: size nibble is log2 of the byte width.
            (0x1, 0) => Some(Event::IntegerValue(self.reader.read_u8()?.into())),
            (0x1, 1) => Some(Event::IntegerValue(
                self.reader.read_u16::<BigEndian>()?.into(),
            )),
            (0x1, 2) => Some(Event::IntegerValue(
                self.reader.read_u32::<BigEndian>()?.into(),
            )),
            (0x1, 3) => Some(Event::IntegerValue(self.reader.read_i64::<BigEndian>()?)),
            (0x1, 4) => return Err(Error::InvalidData), // 128 bit int
            (0x1, _) => return Err(Error::InvalidData), // variable length int
            (0x2, 2) => Some(Event::RealValue(
                self.reader.read_f32::<BigEndian>()?.into(),
            )),
            (0x2, 3) => Some(Event::RealValue(self.reader.read_f64::<BigEndian>()?)),
            (0x2, _) => return Err(Error::InvalidData), // odd length float
            (0x3, 3) => {
                // Date. Seconds since 1/1/2001 00:00:00.
                let secs = self.reader.read_f64::<BigEndian>()?;
                Some(Event::DateValue(
                    Date::from_seconds_since_plist_epoch(secs).map_err(|()| Error::InvalidData)?,
                ))
            }
            (0x4, n) => {
                // Data
                let len = self.read_object_len(n)?;
                Some(Event::DataValue(self.read_data(len)?))
            }
            (0x5, n) => {
                // ASCII string
                let len = self.read_object_len(n)?;
                let raw = self.read_data(len)?;
                let string = String::from_utf8(raw)?;
                Some(Event::StringValue(string))
            }
            (0x6, n) => {
                // UTF-16 string
                let len_utf16_codepoints = self.read_object_len(n)?;
                let mut raw_utf16 = self.allocate_vec(len_utf16_codepoints, size_of::<u16>())?;
                for _ in 0..len_utf16_codepoints {
                    raw_utf16.push(self.reader.read_u16::<BigEndian>()?);
                }
                let string = String::from_utf16(&raw_utf16)?;
                Some(Event::StringValue(string))
            }
            (0xa, n) => {
                // Array
                let len = self.read_object_len(n)?;
                let mut child_object_refs = self.read_refs(len)?;
                // Reverse so we can pop off the end of the stack in order
                child_object_refs.reverse();
                self.push_stack_item_and_check_for_recursion(StackItem {
                    object_ref,
                    ty: StackType::Array,
                    child_object_refs,
                })?;
                Some(Event::StartArray(Some(len)))
            }
            (0xd, n) => {
                // Dict. Key refs are stored together, followed by value refs;
                // interleave them so children pop out as key, value, key, value…
                let len = self.read_object_len(n)?;
                let key_refs = self.read_refs(len)?;
                let value_refs = self.read_refs(len)?;
                let mut child_object_refs = self.allocate_vec(len * 2, self.ref_size as usize)?;
                let len = key_refs.len();
                for i in 1..len + 1 {
                    // Reverse so we can pop off the end of the stack in order
                    child_object_refs.push(value_refs[len - i]);
                    child_object_refs.push(key_refs[len - i]);
                }
                self.push_stack_item_and_check_for_recursion(StackItem {
                    object_ref,
                    ty: StackType::Dict,
                    child_object_refs,
                })?;
                Some(Event::StartDictionary(Some(len as u64)))
            }
            (_, _) => return Err(Error::InvalidData),
        };
        Ok(result)
    }
}
impl<R: Read + Seek> Iterator for BinaryReader<R> {
    type Item = Result<Event, Error>;

    /// Yields the next event, converting the internal
    /// `Result<Option<Event>, _>` into iterator form.
    fn next(&mut self) -> Option<Result<Event, Error>> {
        match self.read_next() {
            Ok(maybe_event) => maybe_event.map(Ok),
            Err(err) => {
                // Mark the plist as finished
                self.stack.clear();
                Some(Err(err))
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use humantime::parse_rfc3339_weak;
    use std::fs::File;
    use std::path::Path;
    use super::*;
    use stream::Event;
    use stream::Event::*;
    /// Parses a known binary plist fixture and checks the exact event stream.
    #[test]
    fn streaming_parser() {
        let reader = File::open(&Path::new("./tests/data/binary.plist")).unwrap();
        let streaming_parser = BinaryReader::new(reader);
        let events: Vec<Event> = streaming_parser.map(|e| e.unwrap()).collect();
        let comparison = &[
            StartDictionary(Some(6)),
            StringValue("Lines".to_owned()),
            StartArray(Some(2)),
            StringValue("It is a tale told by an idiot,".to_owned()),
            StringValue("Full of sound and fury, signifying nothing.".to_owned()),
            EndArray,
            StringValue("Death".to_owned()),
            IntegerValue(1564),
            StringValue("Height".to_owned()),
            RealValue(1.60),
            StringValue("Birthdate".to_owned()),
            DateValue(parse_rfc3339_weak("1981-05-16 11:32:06").unwrap().into()),
            StringValue("Author".to_owned()),
            StringValue("William Shakespeare".to_owned()),
            StringValue("Data".to_owned()),
            DataValue(vec![0, 0, 0, 190, 0, 0, 0, 3, 0, 0, 0, 30, 0, 0, 0]),
            EndDictionary,
        ];
        assert_eq!(events, comparison);
    }
    /// Checks that UTF-16 string objects are decoded correctly.
    #[test]
    fn utf16_plist() {
        let reader = File::open(&Path::new("./tests/data/utf16_bplist.plist")).unwrap();
        let streaming_parser = BinaryReader::new(reader);
        let mut events: Vec<Event> = streaming_parser.map(|e| e.unwrap()).collect();
        assert_eq!(events[2], StringValue("\u{2605} or better".to_owned()));
        let poem = if let StringValue(ref mut poem) = events[4] {
            poem
        } else {
            panic!("not a string")
        };
        assert_eq!(poem.len(), 643);
        assert_eq!(poem.pop().unwrap(), '\u{2605}');
    }
}

98
third_party/rust/plist/src/stream/mod.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,98 @@
//! An abstraction of a plist file as a stream of events. Used to support multiple encodings.
mod binary_reader;
pub use self::binary_reader::BinaryReader;
mod xml_reader;
pub use self::xml_reader::XmlReader;
mod xml_writer;
pub use self::xml_writer::XmlWriter;
use std::io::{Read, Seek, SeekFrom};
use {Date, Error};
/// An encoding of a plist as a flat structure.
///
/// Output by the event readers.
///
/// Dictionary keys and values are represented as pairs of values e.g.:
///
/// ```ignore rust
/// StartDictionary
/// StringValue("Height") // Key
/// RealValue(181.2)      // Value
/// StringValue("Age")    // Key
/// IntegerValue(28)      // Value
/// EndDictionary
/// ```
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    // While the length of an array or dict cannot be feasably greater than max(usize) this better
    // conveys the concept of an effectively unbounded event stream.
    StartArray(Option<u64>),
    EndArray,
    StartDictionary(Option<u64>),
    EndDictionary,
    BooleanValue(bool),
    DataValue(Vec<u8>),
    DateValue(Date),
    IntegerValue(i64),
    RealValue(f64),
    StringValue(String),
}
/// Event reader that detects whether the stream is a binary or XML plist on
/// the first read and then delegates to the matching parser.
pub struct Reader<R: Read + Seek>(ReaderInner<R>);
/// Internal state: the raw stream before detection, or the chosen parser.
enum ReaderInner<R: Read + Seek> {
    Uninitialized(Option<R>),
    Xml(XmlReader<R>),
    Binary(BinaryReader<R>),
}
impl<R: Read + Seek> Reader<R> {
    /// Creates a format-detecting reader; the encoding is sniffed lazily on
    /// the first call to `next`.
    pub fn new(reader: R) -> Reader<R> {
        Reader(ReaderInner::Uninitialized(Some(reader)))
    }
    /// Peeks at the first eight bytes for the binary plist magic number,
    /// rewinding the stream to the start afterwards.
    fn is_binary(reader: &mut R) -> Result<bool, Error> {
        reader.seek(SeekFrom::Start(0))?;
        let mut magic = [0; 8];
        reader.read_exact(&mut magic)?;
        reader.seek(SeekFrom::Start(0))?;
        Ok(&magic == b"bplist00")
    }
}
impl<R: Read + Seek> Iterator for Reader<R> {
    type Item = Result<Event, Error>;

    /// On the first call, sniffs the magic number to choose the binary or
    /// XML parser; afterwards simply forwards to the chosen parser.
    fn next(&mut self) -> Option<Result<Event, Error>> {
        let mut reader = match self.0 {
            ReaderInner::Xml(ref mut parser) => return parser.next(),
            ReaderInner::Binary(ref mut parser) => return parser.next(),
            ReaderInner::Uninitialized(ref mut reader) => reader.take().unwrap(),
        };
        let event_reader = match Reader::is_binary(&mut reader) {
            Ok(true) => ReaderInner::Binary(BinaryReader::new(reader)),
            Ok(false) => ReaderInner::Xml(XmlReader::new(reader)),
            Err(err) => {
                // Put the stream back so detection can be retried later.
                // Plain assignment replaces `mem::replace` whose return value
                // was being discarded.
                self.0 = ReaderInner::Uninitialized(Some(reader));
                return Some(Err(err));
            }
        };
        self.0 = event_reader;
        self.next()
    }
}
/// Supports writing event streams in different plist encodings.
pub trait Writer {
    /// Writes a single event to the underlying output.
    fn write(&mut self, event: &Event) -> Result<(), Error>;
}

199
third_party/rust/plist/src/stream/xml_reader.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,199 @@
use base64;
use std::io::Read;
use std::str::FromStr;
use xml_rs::reader::{EventReader, ParserConfig, XmlEvent};
use stream::Event;
use {Date, Error};
/// Streaming event reader for XML encoded plists.
pub struct XmlReader<R: Read> {
    xml_reader: EventReader<R>,
    // An XML event pushed back by `read_content` when an element is empty.
    queued_event: Option<XmlEvent>,
    // Names of currently open elements, used to validate close tags.
    element_stack: Vec<String>,
    // Once true the iterator yields `None` forever.
    finished: bool,
}
impl<R: Read> XmlReader<R> {
    /// Creates a reader with a parser configuration that folds whitespace,
    /// CDATA and character runs into single `Characters` events.
    pub fn new(reader: R) -> XmlReader<R> {
        let config = ParserConfig::new()
            .trim_whitespace(false)
            .whitespace_to_characters(true)
            .cdata_to_characters(true)
            .ignore_comments(true)
            .coalesce_characters(true);
        XmlReader {
            xml_reader: EventReader::new_with_config(reader, config),
            queued_event: None,
            element_stack: Vec::new(),
            finished: false,
        }
    }
    /// Reads the text content of the current element and maps it through `f`.
    /// An immediate `EndElement` means the element was empty; the end event
    /// is queued for the next call and `f` receives the empty string.
    fn read_content<F>(&mut self, f: F) -> Result<Event, Error>
    where
        F: FnOnce(String) -> Result<Event, Error>,
    {
        match self.xml_reader.next() {
            Ok(XmlEvent::Characters(s)) => f(s),
            Ok(event @ XmlEvent::EndElement { .. }) => {
                self.queued_event = Some(event);
                f("".to_owned())
            }
            _ => Err(Error::InvalidData),
        }
    }
    /// Returns the queued event if one was pushed back, otherwise pulls the
    /// next event from the XML parser.
    fn next_event(&mut self) -> ::std::result::Result<XmlEvent, ()> {
        if let Some(event) = self.queued_event.take() {
            Ok(event)
        } else {
            self.xml_reader.next().map_err(|_| ())
        }
    }
    /// Translates XML events into plist events, skipping over the enclosing
    /// `<plist>` element. Returns `None` at the end of the document.
    fn read_next(&mut self) -> Option<Result<Event, Error>> {
        loop {
            match self.next_event() {
                Ok(XmlEvent::StartElement { name, .. }) => {
                    // Add the current element to the element stack
                    self.element_stack.push(name.local_name.clone());
                    match &name.local_name[..] {
                        "plist" => (),
                        "array" => return Some(Ok(Event::StartArray(None))),
                        "dict" => return Some(Ok(Event::StartDictionary(None))),
                        "key" => return Some(self.read_content(|s| Ok(Event::StringValue(s)))),
                        "true" => return Some(Ok(Event::BooleanValue(true))),
                        "false" => return Some(Ok(Event::BooleanValue(false))),
                        "data" => {
                            // Data is base64 encoded, possibly with MIME line breaks.
                            return Some(self.read_content(|s| {
                                let data = base64::decode_config(&s, base64::MIME)
                                    .map_err(|_| Error::InvalidData)?;
                                Ok(Event::DataValue(data))
                            }))
                        }
                        "date" => {
                            return Some(self.read_content(|s| {
                                Ok(Event::DateValue(
                                    Date::from_rfc3339(&s).map_err(|()| Error::InvalidData)?,
                                ))
                            }))
                        }
                        "integer" => {
                            return Some(self.read_content(|s| match FromStr::from_str(&s) {
                                Ok(i) => Ok(Event::IntegerValue(i)),
                                Err(_) => Err(Error::InvalidData),
                            }))
                        }
                        "real" => {
                            return Some(self.read_content(|s| match FromStr::from_str(&s) {
                                Ok(f) => Ok(Event::RealValue(f)),
                                Err(_) => Err(Error::InvalidData),
                            }))
                        }
                        "string" => return Some(self.read_content(|s| Ok(Event::StringValue(s)))),
                        _ => return Some(Err(Error::InvalidData)),
                    }
                }
                Ok(XmlEvent::EndElement { name, .. }) => {
                    // Check the correct element is being closed
                    match self.element_stack.pop() {
                        Some(ref open_name) if &name.local_name == open_name => (),
                        Some(ref _open_name) => return Some(Err(Error::InvalidData)),
                        None => return Some(Err(Error::InvalidData)),
                    }
                    match &name.local_name[..] {
                        "array" => return Some(Ok(Event::EndArray)),
                        "dict" => return Some(Ok(Event::EndDictionary)),
                        "plist" => (),
                        _ => (),
                    }
                }
                Ok(XmlEvent::EndDocument) => {
                    // A clean end of document is only valid with no open elements.
                    if self.element_stack.is_empty() {
                        return None;
                    } else {
                        return Some(Err(Error::UnexpectedEof));
                    }
                }
                Err(_) => return Some(Err(Error::InvalidData)),
                _ => (),
            }
        }
    }
}
impl<R: Read> Iterator for XmlReader<R> {
    type Item = Result<Event, Error>;

    fn next(&mut self) -> Option<Result<Event, Error>> {
        // Once an error has been returned or the document has ended, the
        // reader stays finished and yields `None` forever.
        if self.finished {
            return None;
        }
        let item = self.read_next();
        match item {
            None | Some(Err(_)) => self.finished = true,
            Some(Ok(_)) => (),
        }
        item
    }
}
#[cfg(test)]
mod tests {
    use humantime::parse_rfc3339_weak;
    use std::fs::File;
    use std::path::Path;
    use super::*;
    use stream::Event;
    use stream::Event::*;
    /// Parses a known XML plist fixture and checks the exact event stream.
    #[test]
    fn streaming_parser() {
        let reader = File::open(&Path::new("./tests/data/xml.plist")).unwrap();
        let streaming_parser = XmlReader::new(reader);
        let events: Vec<Event> = streaming_parser.map(|e| e.unwrap()).collect();
        let comparison = &[
            StartDictionary(None),
            StringValue("Author".to_owned()),
            StringValue("William Shakespeare".to_owned()),
            StringValue("Lines".to_owned()),
            StartArray(None),
            StringValue("It is a tale told by an idiot,".to_owned()),
            StringValue("Full of sound and fury, signifying nothing.".to_owned()),
            EndArray,
            StringValue("Death".to_owned()),
            IntegerValue(1564),
            StringValue("Height".to_owned()),
            RealValue(1.60),
            StringValue("Data".to_owned()),
            DataValue(vec![0, 0, 0, 190, 0, 0, 0, 3, 0, 0, 0, 30, 0, 0, 0]),
            StringValue("Birthdate".to_owned()),
            DateValue(parse_rfc3339_weak("1981-05-16 11:32:06").unwrap().into()),
            StringValue("Blank".to_owned()),
            StringValue("".to_owned()),
            EndDictionary,
        ];
        assert_eq!(events, comparison);
    }
    /// A malformed fixture must end the event stream with an error.
    #[test]
    fn bad_data() {
        let reader = File::open(&Path::new("./tests/data/xml_error.plist")).unwrap();
        let streaming_parser = XmlReader::new(reader);
        let events: Vec<_> = streaming_parser.collect();
        assert!(events.last().unwrap().is_err());
    }
}

248
third_party/rust/plist/src/stream/xml_writer.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,248 @@
use base64;
use std::borrow::Cow;
use std::io::Write;
use xml_rs::name::Name;
use xml_rs::namespace::Namespace;
use xml_rs::writer::{EmitterConfig, Error as XmlWriterError, EventWriter, XmlEvent};
use stream::{Event, Writer};
use Error;
// Hand-written preamble (XML declaration, plist DOCTYPE and opening <plist>
// tag) emitted before the first event; the xml writer is bypassed for this.
static XML_PROLOGUE: &str = r#"<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
"#;
// I/O failures are preserved; any other xml writer error is reported as
// malformed data.
impl From<XmlWriterError> for Error {
    fn from(err: XmlWriterError) -> Error {
        match err {
            XmlWriterError::Io(err) => Error::Io(err),
            _ => Error::InvalidData,
        }
    }
}
/// Kind of currently open container element, tracked to validate nesting.
#[derive(PartialEq)]
enum Element {
    Dictionary,
    Array,
}
/// Writes a plist event stream as an XML plist document.
pub struct XmlWriter<W: Write> {
    xml_writer: EventWriter<W>,
    // Currently open container elements, innermost last.
    stack: Vec<Element>,
    // True when the next event must be a dictionary key (or EndDictionary).
    expecting_key: bool,
    // True once the XML prologue has been written to the output.
    written_prologue: bool,
    // Not very nice: an empty namespace kept around so StartElement events
    // can borrow one.
    empty_namespace: Namespace,
}
impl<W: Write> XmlWriter<W> {
    /// Creates a writer that pretty-prints with newlines and tab indentation.
    pub fn new(writer: W) -> XmlWriter<W> {
        let config = EmitterConfig::new()
            .line_separator("\n")
            .indent_string("\t")
            .perform_indent(true)
            .write_document_declaration(false)
            .normalize_empty_elements(true)
            .cdata_to_characters(true)
            .keep_element_names_stack(false)
            .autopad_comments(true);
        XmlWriter {
            xml_writer: EventWriter::new_with_config(writer, config),
            stack: Vec::new(),
            expecting_key: false,
            written_prologue: false,
            empty_namespace: Namespace::empty(),
        }
    }
    /// Writes `<name>value</name>`.
    fn write_element_and_value(&mut self, name: &str, value: &str) -> Result<(), Error> {
        self.start_element(name)?;
        self.write_value(value)?;
        self.end_element(name)?;
        Ok(())
    }
    /// Opens an element with no attributes or namespace.
    fn start_element(&mut self, name: &str) -> Result<(), Error> {
        self.xml_writer.write(XmlEvent::StartElement {
            name: Name::local(name),
            attributes: Cow::Borrowed(&[]),
            namespace: Cow::Borrowed(&self.empty_namespace),
        })?;
        Ok(())
    }
    /// Closes the named element.
    fn end_element(&mut self, name: &str) -> Result<(), Error> {
        self.xml_writer.write(XmlEvent::EndElement {
            name: Some(Name::local(name)),
        })?;
        Ok(())
    }
    /// Writes element text content; the xml writer escapes it as needed.
    fn write_value(&mut self, value: &str) -> Result<(), Error> {
        self.xml_writer.write(XmlEvent::Characters(value))?;
        Ok(())
    }
    /// Writes a single plist event (inherent convenience wrapper around the
    /// `Writer` trait implementation).
    pub fn write(&mut self, event: &Event) -> Result<(), Error> {
        <Self as Writer>::write(self, event)
    }
    /// Unwraps the underlying byte sink.
    pub fn into_inner(self) -> W {
        self.xml_writer.into_inner()
    }
}
impl<W: Write> Writer for XmlWriter<W> {
fn write(&mut self, event: &Event) -> Result<(), Error> {
if !self.written_prologue {
self.xml_writer
.inner_mut()
.write_all(XML_PROLOGUE.as_bytes())?;
self.written_prologue = true;
}
if self.expecting_key {
match *event {
Event::EndDictionary => match self.stack.pop() {
Some(Element::Dictionary) => {
self.end_element("dict")?;
self.expecting_key = self.stack.last() == Some(&Element::Dictionary);
}
_ => return Err(Error::InvalidData),
},
Event::StringValue(ref value) => {
self.write_element_and_value("key", &*value)?;
self.expecting_key = false;
}
_ => return Err(Error::InvalidData),
}
} else {
match *event {
Event::StartArray(_) => {
self.start_element("array")?;
self.stack.push(Element::Array);
}
Event::EndArray => match self.stack.pop() {
Some(Element::Array) => self.end_element("array")?,
_ => return Err(Error::InvalidData),
},
Event::StartDictionary(_) => {
self.start_element("dict")?;
self.stack.push(Element::Dictionary);
}
Event::EndDictionary => return Err(Error::InvalidData),
Event::BooleanValue(true) => {
self.start_element("true")?;
self.end_element("true")?;
}
Event::BooleanValue(false) => {
self.start_element("false")?;
self.end_element("false")?;
}
Event::DataValue(ref value) => {
let base64_data = base64::encode_config(&value, base64::MIME);
self.write_element_and_value("data", &base64_data)?;
}
Event::DateValue(ref value) => {
self.write_element_and_value("date", &value.to_rfc3339())?
}
Event::IntegerValue(ref value) => {
self.write_element_and_value("integer", &value.to_string())?
}
Event::RealValue(ref value) => {
self.write_element_and_value("real", &value.to_string())?
}
Event::StringValue(ref value) => self.write_element_and_value("string", &*value)?,
};
self.expecting_key = self.stack.last() == Some(&Element::Dictionary);
}
// If there are no more open tags then write the </plist> element
if self.stack.len() == 0 {
// We didn't tell the xml_writer about the <plist> tag so we'll skip telling it
// about the </plist> tag as well.
self.xml_writer.inner_mut().write_all(b"\n</plist>")?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
    use humantime::parse_rfc3339_weak;
    use std::io::Cursor;
    use super::*;
    use stream::Event::*;
    /// Writes a full event stream and compares the generated XML verbatim.
    #[test]
    fn streaming_parser() {
        let plist = &[
            StartDictionary(None),
            StringValue("Author".to_owned()),
            StringValue("William Shakespeare".to_owned()),
            StringValue("Lines".to_owned()),
            StartArray(None),
            StringValue("It is a tale told by an idiot,".to_owned()),
            StringValue("Full of sound and fury, signifying nothing.".to_owned()),
            EndArray,
            StringValue("Death".to_owned()),
            IntegerValue(1564),
            StringValue("Height".to_owned()),
            RealValue(1.60),
            StringValue("Data".to_owned()),
            DataValue(vec![0, 0, 0, 190, 0, 0, 0, 3, 0, 0, 0, 30, 0, 0, 0]),
            StringValue("Birthdate".to_owned()),
            DateValue(parse_rfc3339_weak("1981-05-16 11:32:06").unwrap().into()),
            StringValue("Comment".to_owned()),
            StringValue("2 < 3".to_owned()), // make sure characters are escaped
            EndDictionary,
        ];
        let mut cursor = Cursor::new(Vec::new());
        {
            let mut plist_w = XmlWriter::new(&mut cursor);
            for item in plist {
                plist_w.write(item).unwrap();
            }
        }
        let comparison = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">
<plist version=\"1.0\">
<dict>
\t<key>Author</key>
\t<string>William Shakespeare</string>
\t<key>Lines</key>
\t<array>
\t\t<string>It is a tale told by an idiot,</string>
\t\t<string>Full of sound and fury, signifying nothing.</string>
\t</array>
\t<key>Death</key>
\t<integer>1564</integer>
\t<key>Height</key>
\t<real>1.6</real>
\t<key>Data</key>
\t<data>AAAAvgAAAAMAAAAeAAAA</data>
\t<key>Birthdate</key>
\t<date>1981-05-16T11:32:06Z</date>
\t<key>Comment</key>
\t<string>2 &lt; 3</string>
</dict>
</plist>";
        let s = String::from_utf8(cursor.into_inner()).unwrap();
        assert_eq!(s, comparison);
    }
}

553
third_party/rust/plist/src/value.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,553 @@
use std::collections::BTreeMap;
use std::fs::File;
use std::io::Write;
use std::io::{BufReader, Read, Seek};
use std::path::Path;
use stream::{Event, Reader, Writer, XmlReader, XmlWriter};
use {u64_to_usize, Date, Error};
/// Represents any plist value.
/// Represents any plist value.
#[derive(Clone, Debug, PartialEq)]
pub enum Value {
    Array(Vec<Value>),
    Dictionary(BTreeMap<String, Value>),
    Boolean(bool),
    Data(Vec<u8>),
    Date(Date),
    Real(f64),
    Integer(i64),
    String(String),
}
impl Value {
    /// Reads a `Value` from a plist file of any encoding.
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Value, Error> {
        // Buffer the file: format detection and binary parsing seek freely.
        let file = File::open(path)?;
        Value::from_reader(BufReader::new(file))
    }
    /// Reads a `Value` from a seekable byte stream containing a plist file of any encoding.
    pub fn from_reader<R: Read + Seek>(reader: R) -> Result<Value, Error> {
        // `Reader` auto-detects binary vs XML encoding.
        let reader = Reader::new(reader);
        Value::from_events(reader)
    }
    /// Reads a `Value` from a seekable byte stream containing an XML encoded plist file.
    pub fn from_reader_xml<R: Read>(reader: R) -> Result<Value, Error> {
        let reader = XmlReader::new(reader);
        Value::from_events(reader)
    }
    /// Serializes the given data structure as an XML encoded plist file.
    pub fn to_writer_xml<W: Write>(&self, writer: W) -> Result<(), Error> {
        let mut writer = XmlWriter::new(writer);
        self.to_writer_xml_inner(&mut writer)
    }
    /// Streams this value's events into the given plist event writer.
    fn to_writer_xml_inner(&self, writer: &mut Writer) -> Result<(), Error> {
        // Convert to a flat event stream and replay it into the writer.
        let events = self.clone().into_events();
        for event in events {
            writer.write(&event)?;
        }
        Ok(())
    }
/// Creates a `Value` from an event source.
pub fn from_events<T>(events: T) -> Result<Value, Error>
where
T: IntoIterator<Item = Result<Event, Error>>,
{
Builder::new(events.into_iter()).build()
}
/// Converts a `Value` into an `Event` stream.
pub fn into_events(self) -> Vec<Event> {
let mut events = Vec::new();
self.into_events_inner(&mut events);
events
}
fn into_events_inner(self, events: &mut Vec<Event>) {
match self {
Value::Array(array) => {
events.push(Event::StartArray(Some(array.len() as u64)));
for value in array {
value.into_events_inner(events);
}
events.push(Event::EndArray);
}
Value::Dictionary(dict) => {
events.push(Event::StartDictionary(Some(dict.len() as u64)));
for (key, value) in dict {
events.push(Event::StringValue(key));
value.into_events_inner(events);
}
events.push(Event::EndDictionary);
}
Value::Boolean(value) => events.push(Event::BooleanValue(value)),
Value::Data(value) => events.push(Event::DataValue(value)),
Value::Date(value) => events.push(Event::DateValue(value)),
Value::Real(value) => events.push(Event::RealValue(value)),
Value::Integer(value) => events.push(Event::IntegerValue(value)),
Value::String(value) => events.push(Event::StringValue(value)),
}
}
/// If the `Value` is an Array, returns the associated `Vec`.
///
/// Returns `None` otherwise.
pub fn as_array(&self) -> Option<&Vec<Value>> {
match *self {
Value::Array(ref array) => Some(array),
_ => None,
}
}
/// If the `Value` is an Array, returns the associated mutable `Vec`.
///
/// Returns `None` otherwise.
pub fn as_array_mut(&mut self) -> Option<&mut Vec<Value>> {
match *self {
Value::Array(ref mut array) => Some(array),
_ => None,
}
}
/// If the `Value` is a Dictionary, returns the associated `BTreeMap`.
///
/// Returns `None` otherwise.
pub fn as_dictionary(&self) -> Option<&BTreeMap<String, Value>> {
match *self {
Value::Dictionary(ref map) => Some(map),
_ => None,
}
}
/// If the `Value` is a Dictionary, returns the associated mutable `BTreeMap`.
///
/// Returns `None` otherwise.
pub fn as_dictionary_mut(&mut self) -> Option<&mut BTreeMap<String, Value>> {
match *self {
Value::Dictionary(ref mut map) => Some(map),
_ => None,
}
}
/// If the `Value` is a Boolean, returns the associated `bool`.
///
/// Returns `None` otherwise.
pub fn as_boolean(&self) -> Option<bool> {
match *self {
Value::Boolean(v) => Some(v),
_ => None,
}
}
/// If the `Value` is a Data, returns the underlying `Vec`.
///
/// Returns `None` otherwise.
///
/// This method consumes the `Value`. If this is not desired, please use
/// `as_data` method.
pub fn into_data(self) -> Option<Vec<u8>> {
match self {
Value::Data(data) => Some(data),
_ => None,
}
}
/// If the `Value` is a Data, returns the associated `Vec`.
///
/// Returns `None` otherwise.
pub fn as_data(&self) -> Option<&[u8]> {
match *self {
Value::Data(ref data) => Some(data),
_ => None,
}
}
/// If the `Value` is a Date, returns the associated `Date`.
///
/// Returns `None` otherwise.
pub fn as_date(&self) -> Option<Date> {
match *self {
Value::Date(date) => Some(date),
_ => None,
}
}
/// If the `Value` is a Real, returns the associated `f64`.
///
/// Returns `None` otherwise.
pub fn as_real(&self) -> Option<f64> {
match *self {
Value::Real(v) => Some(v),
_ => None,
}
}
/// If the `Value` is an Integer, returns the associated `i64`.
///
/// Returns `None` otherwise.
pub fn as_integer(&self) -> Option<i64> {
match *self {
Value::Integer(v) => Some(v),
_ => None,
}
}
/// If the `Value` is a String, returns the underlying `String`.
///
/// Returns `None` otherwise.
///
/// This method consumes the `Value`. If this is not desired, please use
/// `as_string` method.
pub fn into_string(self) -> Option<String> {
match self {
Value::String(v) => Some(v),
_ => None,
}
}
/// If the `Value` is a String, returns the associated `str`.
///
/// Returns `None` otherwise.
pub fn as_string(&self) -> Option<&str> {
match *self {
Value::String(ref v) => Some(v),
_ => None,
}
}
}
impl From<Vec<Value>> for Value {
fn from(from: Vec<Value>) -> Value {
Value::Array(from)
}
}
impl From<BTreeMap<String, Value>> for Value {
fn from(from: BTreeMap<String, Value>) -> Value {
Value::Dictionary(from)
}
}
impl From<bool> for Value {
fn from(from: bool) -> Value {
Value::Boolean(from)
}
}
impl<'a> From<&'a bool> for Value {
fn from(from: &'a bool) -> Value {
Value::Boolean(*from)
}
}
impl From<Date> for Value {
fn from(from: Date) -> Value {
Value::Date(from)
}
}
impl<'a> From<&'a Date> for Value {
fn from(from: &'a Date) -> Value {
Value::Date(*from)
}
}
impl From<f64> for Value {
fn from(from: f64) -> Value {
Value::Real(from)
}
}
impl From<f32> for Value {
fn from(from: f32) -> Value {
Value::Real(from.into())
}
}
impl From<i64> for Value {
fn from(from: i64) -> Value {
Value::Integer(from)
}
}
impl From<i32> for Value {
fn from(from: i32) -> Value {
Value::Integer(from.into())
}
}
impl From<i16> for Value {
fn from(from: i16) -> Value {
Value::Integer(from.into())
}
}
impl From<i8> for Value {
fn from(from: i8) -> Value {
Value::Integer(from.into())
}
}
impl From<u32> for Value {
fn from(from: u32) -> Value {
Value::Integer(from.into())
}
}
impl From<u16> for Value {
fn from(from: u16) -> Value {
Value::Integer(from.into())
}
}
impl From<u8> for Value {
fn from(from: u8) -> Value {
Value::Integer(from.into())
}
}
impl<'a> From<&'a f64> for Value {
fn from(from: &'a f64) -> Value {
Value::Real(*from)
}
}
impl<'a> From<&'a f32> for Value {
fn from(from: &'a f32) -> Value {
Value::Real((*from).into())
}
}
impl<'a> From<&'a i64> for Value {
fn from(from: &'a i64) -> Value {
Value::Integer(*from)
}
}
impl<'a> From<&'a i32> for Value {
fn from(from: &'a i32) -> Value {
Value::Integer((*from).into())
}
}
impl<'a> From<&'a i16> for Value {
fn from(from: &'a i16) -> Value {
Value::Integer((*from).into())
}
}
impl<'a> From<&'a i8> for Value {
fn from(from: &'a i8) -> Value {
Value::Integer((*from).into())
}
}
impl<'a> From<&'a u32> for Value {
fn from(from: &'a u32) -> Value {
Value::Integer((*from).into())
}
}
impl<'a> From<&'a u16> for Value {
fn from(from: &'a u16) -> Value {
Value::Integer((*from).into())
}
}
impl<'a> From<&'a u8> for Value {
fn from(from: &'a u8) -> Value {
Value::Integer((*from).into())
}
}
impl From<String> for Value {
fn from(from: String) -> Value {
Value::String(from)
}
}
impl<'a> From<&'a str> for Value {
fn from(from: &'a str) -> Value {
Value::String(from.into())
}
}
// Builds a `Value` tree by pulling events one at a time from an event stream.
struct Builder<T> {
    // The remaining source of `Result<Event, Error>` items.
    stream: T,
    // One-event lookahead: the most recently read event, or `None` once the
    // stream is exhausted or after `build_value` has consumed it.
    token: Option<Event>,
}
impl<T: Iterator<Item = Result<Event, Error>>> Builder<T> {
fn new(stream: T) -> Builder<T> {
Builder {
stream,
token: None,
}
}
fn build(mut self) -> Result<Value, Error> {
self.bump()?;
let plist = self.build_value()?;
// Ensure the stream has been fully consumed
self.bump()?;
match self.token {
None => Ok(plist),
_ => Err(Error::InvalidData),
}
}
fn bump(&mut self) -> Result<(), Error> {
self.token = match self.stream.next() {
Some(Ok(token)) => Some(token),
Some(Err(err)) => return Err(err),
None => None,
};
Ok(())
}
fn build_value(&mut self) -> Result<Value, Error> {
match self.token.take() {
Some(Event::StartArray(len)) => Ok(Value::Array(self.build_array(len)?)),
Some(Event::StartDictionary(len)) => Ok(Value::Dictionary(self.build_dict(len)?)),
Some(Event::BooleanValue(b)) => Ok(Value::Boolean(b)),
Some(Event::DataValue(d)) => Ok(Value::Data(d)),
Some(Event::DateValue(d)) => Ok(Value::Date(d)),
Some(Event::IntegerValue(i)) => Ok(Value::Integer(i)),
Some(Event::RealValue(f)) => Ok(Value::Real(f)),
Some(Event::StringValue(s)) => Ok(Value::String(s)),
Some(Event::EndArray) => Err(Error::InvalidData),
Some(Event::EndDictionary) => Err(Error::InvalidData),
// The stream should not have ended here
None => Err(Error::InvalidData),
}
}
fn build_array(&mut self, len: Option<u64>) -> Result<Vec<Value>, Error> {
let mut values = match len.and_then(u64_to_usize) {
Some(len) => Vec::with_capacity(len),
None => Vec::new(),
};
loop {
self.bump()?;
if let Some(Event::EndArray) = self.token {
self.token.take();
return Ok(values);
}
values.push(self.build_value()?);
}
}
fn build_dict(&mut self, _len: Option<u64>) -> Result<BTreeMap<String, Value>, Error> {
let mut values = BTreeMap::new();
loop {
self.bump()?;
match self.token.take() {
Some(Event::EndDictionary) => return Ok(values),
Some(Event::StringValue(s)) => {
self.bump()?;
values.insert(s, self.build_value()?);
}
_ => {
// Only string keys are supported in plists
return Err(Error::InvalidData);
}
}
}
}
}
#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;
    use std::time::SystemTime;

    use super::*;
    use stream::Event::*;
    use {Date, Value};

    /// Checks that every `as_*`/`into_*` accessor returns `Some` for the
    /// matching variant. (Cleaned up: the original compared against
    /// `Some(&vec.clone())` etc. — the clones were redundant.)
    #[test]
    fn value_accessors() {
        let mut vec = vec![Value::Real(0.0)];
        let mut array = Value::Array(vec.clone());
        assert_eq!(array.as_array(), Some(&vec));
        assert_eq!(array.as_array_mut(), Some(&mut vec));

        let mut map = BTreeMap::new();
        map.insert("key1".to_owned(), Value::String("value1".to_owned()));
        let mut dict = Value::Dictionary(map.clone());
        assert_eq!(dict.as_dictionary(), Some(&map));
        assert_eq!(dict.as_dictionary_mut(), Some(&mut map));

        assert_eq!(Value::Boolean(true).as_boolean(), Some(true));

        let slice: &[u8] = &[1, 2, 3];
        assert_eq!(Value::Data(slice.to_vec()).as_data(), Some(slice));
        assert_eq!(
            Value::Data(slice.to_vec()).into_data(),
            Some(slice.to_vec())
        );

        // `Date` is `Copy` (see `impl From<&Date> for Value`), so no clone
        // is needed here.
        let date: Date = SystemTime::now().into();
        assert_eq!(Value::Date(date).as_date(), Some(date));

        assert_eq!(Value::Real(0.0).as_real(), Some(0.0));
        assert_eq!(Value::Integer(1).as_integer(), Some(1));
        assert_eq!(Value::String("2".to_owned()).as_string(), Some("2"));
        assert_eq!(
            Value::String("t".to_owned()).into_string(),
            Some("t".to_owned())
        );
    }

    /// Feeds a hand-written event stream through `Builder` and checks the
    /// resulting `Value` tree.
    #[test]
    fn builder() {
        // Input
        let events = vec![
            StartDictionary(None),
            StringValue("Author".to_owned()),
            StringValue("William Shakespeare".to_owned()),
            StringValue("Lines".to_owned()),
            StartArray(None),
            StringValue("It is a tale told by an idiot,".to_owned()),
            StringValue("Full of sound and fury, signifying nothing.".to_owned()),
            EndArray,
            StringValue("Birthdate".to_owned()),
            IntegerValue(1564),
            StringValue("Height".to_owned()),
            RealValue(1.60),
            EndDictionary,
        ];

        // `.map(Ok)` instead of the redundant closure `.map(|e| Ok(e))`.
        let builder = Builder::new(events.into_iter().map(Ok));
        let plist = builder.build();

        // Expected output
        let mut lines = Vec::new();
        lines.push(Value::String("It is a tale told by an idiot,".to_owned()));
        lines.push(Value::String(
            "Full of sound and fury, signifying nothing.".to_owned(),
        ));

        let mut dict = BTreeMap::new();
        dict.insert(
            "Author".to_owned(),
            Value::String("William Shakespeare".to_owned()),
        );
        dict.insert("Lines".to_owned(), Value::Array(lines));
        dict.insert("Birthdate".to_owned(), Value::Integer(1564));
        dict.insert("Height".to_owned(), Value::Real(1.60));

        assert_eq!(plist.unwrap(), Value::Dictionary(dict));
    }
}

Двоичные данные
third_party/rust/plist/tests/data/binary.plist поставляемый Normal file

Двоичный файл не отображается.

Двоичные данные
third_party/rust/plist/tests/data/binary_circular_array.plist поставляемый Normal file

Двоичный файл не отображается.

Двоичные данные
third_party/rust/plist/tests/data/binary_zero_offset_size.plist поставляемый Normal file

Двоичный файл не отображается.

Двоичные данные
third_party/rust/plist/tests/data/utf16_bplist.plist поставляемый Normal file

Двоичный файл не отображается.

26
third_party/rust/plist/tests/data/xml.plist поставляемый Normal file
Просмотреть файл

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Author</key>
<string>William Shakespeare</string>
<key>Lines</key>
<array>
<string>It is a tale told by an idiot,</string>
<string>Full of sound and fury, signifying nothing.</string>
</array>
<key>Death</key>
<integer>1564</integer>
<key>Height</key>
<real>1.6</real>
<key>Data</key>
<data>
AAAAvgAAAA
MAAAAeAAAA
</data>
<key>Birthdate</key>
<date>1981-05-16T11:32:06Z</date>
<key>Blank</key>
<string></string>
</dict>
</plist>

17
third_party/rust/plist/tests/data/xml_error.plist поставляемый Normal file
Просмотреть файл

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Author</key>
<string>William Shakespeare</string>
<key>Lines</key>
<array>
<string>It is a tale told by an idiot,</string>
<string>Full of sound and fury, signifying nothing.</string>
</array>
<key>Death</key>
<integer>1564</integer>
<key>Height</key>
<real>1.6</real>
<key>Data</ke
BADNESS

73
third_party/rust/plist/tests/fuzzer.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,73 @@
extern crate plist;
use plist::{Error, Value};
use std::io::Cursor;
// Regression tests for inputs found by fuzzing the binary plist parser.
// Unless a test calls `test_fuzzer_data_ok`, its input is malformed and
// parsing must return an `Err` (rather than panic or over-allocate).

// Header claims a huge collection size; parsing must reject it.
#[test]
fn too_large_allocation() {
    let data = b"bplist00\"&L^^^^^^^^-^^^^^^^^^^^";
    test_fuzzer_data_err(data);
}

// A second over-large allocation case with different object markers.
#[test]
fn too_large_allocation_2() {
    let data = b"bplist00;<)\x9fX\x0a<h\x0a:hhhhG:hh\x0amhhhhhhx#hhT)\x0a*";
    test_fuzzer_data_err(data);
}

// Trailer describes an empty offset table; must be rejected.
#[test]
fn empty_offset_table() {
    let data = b"bplist00;\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00<)\x9fXTX(";
    test_fuzzer_data_err(data);
}

// Objects reference each other in a cycle; must error, not loop forever.
#[test]
fn binary_circular_reference() {
    let data = b"bplist00\xd6\x01\x02\x03\x04\x05\x06\x07\x0a\x0b\x0c\x0d\x0eULinesUDeathVHeightYBirthdateVAutbplist00\xd6\x01\x02\x03\x04\x05\x06\x07\x0a\x0b\x0c\x0d\x0eULinesUDeathVHeightYBirthdateVAuthorTData\xa2\x08\x09_\x10\x1eIt is nifying nothing.\x11\x06\x1c#?\xf9\x99\x99\x99\x99\x99\x9a3\xc1\xc2v\x00e\x00\x00\x00_\x10\x13William ShakespeareO\x10\x0f\x00\x00\x00\xbe\x00\x00\x00\x03\x00\x00\x00\x1e\x00\x00\x00\x08\x15\x1b!(58>Ab\x90\x93\x9c\xa5\xbb\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcd";
    test_fuzzer_data_err(data);
}

// Trailer declares a zero-byte offset size; must be rejected.
#[test]
fn binary_zero_offset_size() {
    let data = include_bytes!("data/binary_zero_offset_size.plist");
    test_fuzzer_data_err(data);
}

// Contains a date whose raw bits are all-ones (NaN); must be rejected.
#[test]
fn binary_nan_date() {
    let data = b"bplist00\xd6\x01\x02\x01\x04\x05\x06\x07\x0a\x0b\x0c\x0d\x0eULinesUDeathVHeightYBthridateVAuthorTData\xa2\x08\x09_\x10\x1eIt is a tale told by an idiot,_\x10+Full of sound and fury, signifying nothing.\x11\x06\x1c#?\xf9\x99\x99\x99\x99\x99\x9a3\xff\xff\xff\xffe\x00\x00\x00_\x13\x10William ShakespeareO\x10\xe5\x00\x00\x00\xbe\x00\x00\x00\x03\x00\x00\x00\x1e\x00\x00\x00\x08\x15\x1b!(14>Ab\x90\x93\x9c\xa5\xbb\xd4\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcd";
    test_fuzzer_data_err(data);
}

// An array that contains itself; must error, not recurse forever.
#[test]
fn binary_circular_array() {
    let data = include_bytes!("data/binary_circular_array.plist");
    test_fuzzer_data_err(data);
}

// Issue 20 - not found by fuzzing but this is a convenient place to put the test.
// Valid plist with extra data in the trailer; must still parse.
#[test]
fn issue_20_binary_with_data_in_trailer() {
    let data =
        b"bplist00\xd0\x08\0\0\0\0\0\0\x01\x01\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\t";
    test_fuzzer_data_ok(data);
}

// Valid plist using a one-byte object-reference size; must parse.
#[test]
fn issue_22_binary_with_byte_ref_size() {
    let data = b"bplist00\xd1\x01\x02TTestQ1\x08\x0b\x10\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12";
    test_fuzzer_data_ok(data);
}

// Attempts to parse `data` as a plist of any supported encoding.
fn test_fuzzer_data(data: &[u8]) -> Result<Value, Error> {
    let cursor = Cursor::new(data);
    Value::from_reader(cursor)
}

// Asserts that `data` parses successfully.
fn test_fuzzer_data_ok(data: &[u8]) {
    assert!(test_fuzzer_data(data).is_ok());
}

// Asserts that parsing `data` fails cleanly with an error.
fn test_fuzzer_data_err(data: &[u8]) {
    assert!(test_fuzzer_data(data).is_err());
}

280
third_party/rust/plist/tests/serde_tests/mod.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,280 @@
use plist::stream::Event;
use plist::stream::Event::*;
use plist::stream::Writer;
use plist::{Date, Deserializer, Error, Serializer};
use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use std::fmt::Debug;
use std::time::SystemTime;
// An event sink that records every written `Event` in memory, so tests can
// compare a serializer's output against an expected event sequence.
struct VecWriter {
    events: Vec<Event>,
}

impl VecWriter {
    pub fn new() -> VecWriter {
        VecWriter { events: Vec::new() }
    }

    // Consumes the writer, yielding the recorded events.
    pub fn into_inner(self) -> Vec<Event> {
        self.events
    }
}

impl Writer for VecWriter {
    // Records a clone of `event`; never fails.
    fn write(&mut self, event: &Event) -> Result<(), Error> {
        self.events.push(event.clone());
        Ok(())
    }
}
fn new_serializer() -> Serializer<VecWriter> {
Serializer::new(VecWriter::new())
}
fn new_deserializer(events: Vec<Event>) -> Deserializer<Vec<Result<Event, Error>>> {
let result_events = events.into_iter().map(Ok).collect();
Deserializer::new(result_events)
}
fn assert_roundtrip<T>(obj: T, comparison: Option<&[Event]>)
where
T: Debug + DeserializeOwned + PartialEq + Serialize,
{
let mut se = new_serializer();
obj.serialize(&mut se).unwrap();
let events = se.into_inner().into_inner();
if let Some(comparison) = comparison {
assert_eq!(&events[..], comparison);
}
let mut de = new_deserializer(events);
let new_obj = T::deserialize(&mut de).unwrap();
assert_eq!(new_obj, obj);
}
// Test enum covering serde's variant flavors: unit (`Cow`), newtype (`Dog`),
// tuple (`Frog`) and struct (`Cat`) variants.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
enum Animal {
    Cow,
    Dog(DogOuter),
    Frog(Result<String, bool>, Vec<f64>),
    Cat {
        age: usize,
        name: String,
        firmware: Option<Vec<u8>>,
    },
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct DogOuter {
    inner: Vec<DogInner>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct DogInner {
    a: (),
    b: usize,
    c: Vec<String>,
}

// A unit variant round-trips as a one-entry dictionary whose value is an
// empty string.
#[test]
fn cow() {
    let cow = Animal::Cow;

    let comparison = &[
        StartDictionary(Some(1)),
        StringValue("Cow".to_owned()),
        StringValue("".to_owned()),
        EndDictionary,
    ];

    assert_roundtrip(cow, Some(comparison));
}

// A newtype variant wrapping nested structs; the unit field `a` serializes
// as an empty string.
#[test]
fn dog() {
    let dog = Animal::Dog(DogOuter {
        inner: vec![DogInner {
            a: (),
            b: 12,
            c: vec!["a".to_string(), "b".to_string()],
        }],
    });

    let comparison = &[
        StartDictionary(Some(1)),
        StringValue("Dog".to_owned()),
        StartDictionary(None),
        StringValue("inner".to_owned()),
        StartArray(Some(1)),
        StartDictionary(None),
        StringValue("a".to_owned()),
        StringValue("".to_owned()),
        StringValue("b".to_owned()),
        IntegerValue(12),
        StringValue("c".to_owned()),
        StartArray(Some(2)),
        StringValue("a".to_owned()),
        StringValue("b".to_owned()),
        EndArray,
        EndDictionary,
        EndArray,
        EndDictionary,
        EndDictionary,
    ];

    assert_roundtrip(dog, Some(comparison));
}

// A tuple variant serializes as an array; the `Ok` result becomes a
// one-entry dictionary and the floats become real values.
#[test]
fn frog() {
    let frog = Animal::Frog(
        Ok("hello".to_owned()),
        vec![1.0, 2.0, 3.14159, 0.000000001, 1.27e31],
    );

    let comparison = &[
        StartDictionary(Some(1)),
        StringValue("Frog".to_owned()),
        StartArray(Some(2)),
        StartDictionary(Some(1)),
        StringValue("Ok".to_owned()),
        StringValue("hello".to_owned()),
        EndDictionary,
        StartArray(Some(5)),
        RealValue(1.0),
        RealValue(2.0),
        RealValue(3.14159),
        RealValue(0.000000001),
        RealValue(1.27e31),
        EndArray,
        EndArray,
        EndDictionary,
    ];

    assert_roundtrip(frog, Some(comparison));
}

// A struct variant; the `Some` byte vector serializes as an integer array.
#[test]
fn cat() {
    let cat = Animal::Cat {
        age: 12,
        name: "Paws".to_owned(),
        firmware: Some(vec![0, 1, 2, 3, 4, 5, 6, 7, 8]),
    };

    let comparison = &[
        StartDictionary(Some(1)),
        StringValue("Cat".to_owned()),
        StartDictionary(None),
        StringValue("age".to_owned()),
        IntegerValue(12),
        StringValue("name".to_owned()),
        StringValue("Paws".to_owned()),
        StringValue("firmware".to_owned()),
        StartArray(Some(9)),
        IntegerValue(0),
        IntegerValue(1),
        IntegerValue(2),
        IntegerValue(3),
        IntegerValue(4),
        IntegerValue(5),
        IntegerValue(6),
        IntegerValue(7),
        IntegerValue(8),
        EndArray,
        EndDictionary,
        EndDictionary,
    ];

    assert_roundtrip(cat, Some(comparison));
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct NewtypeStruct(NewtypeInner);

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct NewtypeInner(u8, u8, u8);

// A newtype struct serializes transparently as its inner value; the inner
// tuple struct becomes a three-element array.
#[test]
fn newtype_struct() {
    let newtype = NewtypeStruct(NewtypeInner(34, 32, 13));

    let comparison = &[
        StartArray(Some(3)),
        IntegerValue(34),
        IntegerValue(32),
        IntegerValue(13),
        EndArray,
    ];

    assert_roundtrip(newtype, Some(comparison));
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct TypeWithOptions {
    a: Option<String>,
    b: Option<u32>,
    c: Option<Box<TypeWithOptions>>,
}

// `None` fields are omitted from the emitted dictionary entirely, as the
// expected event streams below show (no "b" key in the outer dictionary).
#[test]
fn type_with_options() {
    let inner = TypeWithOptions {
        a: None,
        b: Some(12),
        c: None,
    };

    let obj = TypeWithOptions {
        a: Some("hello".to_owned()),
        b: None,
        c: Some(Box::new(inner)),
    };

    let comparison = &[
        StartDictionary(None),
        StringValue("a".to_owned()),
        StringValue("hello".to_owned()),
        StringValue("c".to_owned()),
        StartDictionary(None),
        StringValue("b".to_owned()),
        IntegerValue(12),
        EndDictionary,
        EndDictionary,
    ];

    assert_roundtrip(obj, Some(comparison));
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct TypeWithDate {
    a: Option<i32>,
    b: Option<Date>,
}

// A `Date` field serializes as a dedicated `DateValue` event.
#[test]
fn type_with_date() {
    let date: Date = SystemTime::now().into();

    let obj = TypeWithDate {
        a: Some(28),
        b: Some(date.clone()),
    };

    let comparison = &[
        StartDictionary(None),
        StringValue("a".to_owned()),
        IntegerValue(28),
        StringValue("b".to_owned()),
        DateValue(date),
        EndDictionary,
    ];

    assert_roundtrip(obj, Some(comparison));
}

10
third_party/rust/plist/tests/tests.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,10 @@
// Integration-test crate root. The serde round-trip tests are compiled only
// when the optional `serde` cargo feature is enabled.
extern crate plist;

#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde_derive;

#[cfg(feature = "serde")]
mod serde_tests;

1
third_party/rust/safemem/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{"Cargo.toml":"d8e1045d4c55c48a86b7751f5fb2113163b8f608c1a1f80abb87c2e405b1f5ac","LICENSE-APACHE":"7cfd738c53d61c79f07e348f622bf7707c9084237054d37fbe07788a75f5881c","LICENSE-MIT":"64e1c5f067ee2d8c0ee8abd751e57275d4e97e4f7c05699bc23f5005a9c53043","README.md":"d2f7807fe39099b7adf65e33d083ea4b85f266583f0df00bb6dac8f723a9cc0f","src/lib.rs":"eef79dfff399b6205ddb47aebe59b8c42d3982c1505b196687fc597741485d95"},"package":"8dca453248a96cb0749e36ccdfe2b0b4e54a61bfef89fb97ec621eb8e0a93dd9"}

28
third_party/rust/safemem/Cargo.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,28 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "safemem"
version = "0.3.0"
authors = ["Austin Bonander <austin.bonander@gmail.com>"]
description = "Safe wrappers for memory-accessing functions, like `std::ptr::copy()`."
documentation = "https://docs.rs/safemem"
keywords = ["memset", "memmove", "copy"]
categories = ["no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/abonander/safemem"
[dependencies]
[features]
default = ["std"]
std = []

201
third_party/rust/safemem/LICENSE-APACHE поставляемый Normal file
Просмотреть файл

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/safemem/LICENSE-MIT поставляемый Normal file
Просмотреть файл

@ -0,0 +1,25 @@
Copyright (c) 2016 The `multipart` Crate Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

25
third_party/rust/safemem/README.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,25 @@
# safemem
Safe wrappers for `memmove`, `memset`, etc. in Rust
`no_std` Support
----------------
This crate has support for `no_std` which is controlled via default feature `std`. To use the crate
in a `no_std` environment simply turn off default features.
License
-------
Licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
additional terms or conditions.

117
third_party/rust/safemem/src/lib.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,117 @@
//! Safe wrappers for memory-accessing functions like `std::ptr::copy()`.
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
extern crate core as std;
use std::ptr;
/// Assert that `$idx` is a valid index into `$slice`.
///
/// On failure, panics with a message that embeds the caller's index
/// expression (via `stringify!`), its value, and the slice length.
macro_rules! idx_check (
    ($slice:expr, $idx:expr) => {
        assert!($idx < $slice.len(),
                // `concat!`/`stringify!` bake the argument's source text into the
                // panic message so the failing call site is identifiable.
                concat!("`", stringify!($idx), "` ({}) out of bounds. Length: {}"),
                $idx, $slice.len());
    }
);
/// Assert that the half-open range `[$start, $start + $len)` fits inside
/// `$slice`.
///
/// Panics if `$start + $len` overflows `usize`, or if the range end exceeds
/// the slice length.
macro_rules! len_check (
    ($slice:expr, $start:ident, $len:ident) => {
        assert!(
            // `checked_add` turns index-arithmetic overflow into an explicit
            // panic instead of a wrapped (silently wrong) bounds comparison.
            $start.checked_add($len)
                .expect(concat!("Overflow evaluating ", stringify!($start + $len)))
                <= $slice.len(),
            "Length {} starting at {} is out of bounds (slice len {}).", $len, $start, $slice.len()
        )
    }
);
/// Copy `len` elements from `src_idx` to `dest_idx`. Ranges may overlap.
///
/// Safe wrapper for `memmove()`/`std::ptr::copy()`.
///
/// ###Panics
/// * If either `src_idx` or `dest_idx` are out of bounds, or if either of these plus `len` is out of
/// bounds.
/// * If `src_idx + len` or `dest_idx + len` overflows.
pub fn copy_over<T: Copy>(slice: &mut [T], src_idx: usize, dest_idx: usize, len: usize) {
    let total = slice.len();
    // An empty slice is a no-op regardless of the requested indices.
    if total == 0 {
        return;
    }
    // Bounds checks; the panic messages mirror the crate's check macros.
    assert!(src_idx < total,
            "`src_idx` ({}) out of bounds. Length: {}", src_idx, total);
    assert!(dest_idx < total,
            "`dest_idx` ({}) out of bounds. Length: {}", dest_idx, total);
    let src_end = src_idx.checked_add(len).expect("Overflow evaluating src_idx + len");
    assert!(src_end <= total,
            "Length {} starting at {} is out of bounds (slice len {}).", len, src_idx, total);
    let dest_end = dest_idx.checked_add(len).expect("Overflow evaluating dest_idx + len");
    assert!(dest_end <= total,
            "Length {} starting at {} is out of bounds (slice len {}).", len, dest_idx, total);
    // SAFETY: both `[src_idx, src_idx + len)` and `[dest_idx, dest_idx + len)`
    // were just verified to lie within `slice`, and `ptr::copy` (memmove)
    // explicitly permits overlapping ranges.
    unsafe {
        let base = slice.as_mut_ptr();
        ptr::copy(base.offset(src_idx as isize) as *const T,
                  base.offset(dest_idx as isize),
                  len);
    }
}
/// Safe wrapper for `std::ptr::write_bytes()`/`memset()`.
///
/// Overwrites every byte of `slice` with `byte`.
pub fn write_bytes(slice: &mut [u8], byte: u8) {
    let count = slice.len();
    let dst = slice.as_mut_ptr();
    // SAFETY: `dst` was derived from a live `&mut [u8]` of exactly `count`
    // bytes, so it is valid for `count` byte writes.
    unsafe {
        ptr::write_bytes(dst, byte, count);
    }
}
/// Prepend `elems` to `vec`, resizing if necessary.
///
/// ###Panics
/// If `vec.len() + elems.len()` overflows.
#[cfg(feature = "std")]
pub fn prepend<T: Copy>(elems: &[T], vec: &mut Vec<T>) {
    // Our overflow check occurs here, no need to do it ourselves.
    vec.reserve(elems.len());
    let old_len = vec.len();
    let base = vec.as_mut_ptr();
    // NOTE: the previous implementation called `vec.set_len(new_len)` *before*
    // initializing the front elements and then formed a `&mut [T]` over that
    // partially-uninitialized buffer via `copy_over` — a violation of
    // `Vec::set_len`'s safety contract. We now work on raw pointers and only
    // extend the length once every element is initialized.
    unsafe {
        // Shift the existing elements up to make room at the front.
        // SAFETY: `reserve` guarantees capacity >= old_len + elems.len(), the
        // source range [0, old_len) is initialized, and `ptr::copy` (memmove)
        // handles the overlapping ranges.
        ptr::copy(base as *const T, base.offset(elems.len() as isize), old_len);
        // Copy the new elements into the vacated front.
        // SAFETY: `elems` cannot alias `vec`'s buffer (we hold `&mut Vec<T>`),
        // so a non-overlapping copy is sound.
        ptr::copy_nonoverlapping(elems.as_ptr(), base, elems.len());
        // SAFETY: every element in [0, old_len + elems.len()) is now
        // initialized; the sum cannot overflow because `reserve` succeeded.
        vec.set_len(old_len + elems.len());
    }
}
// Unit tests for the crate's public API.
#[cfg(test)]
mod tests {
    use super::*;

    // `len` of 7 starting at index 2 overruns the 6-element array, so the
    // length check in `copy_over` must panic.
    #[test]
    #[should_panic]
    fn bounds_check() {
        let mut arr = [0i32, 1, 2, 3, 4, 5];
        copy_over(&mut arr, 2, 1, 7);
    }

    // An empty slice is accepted and treated as a no-op (early return).
    #[test]
    fn copy_empty() {
        let mut arr: [i32; 0] = [];
        copy_over(&mut arr, 0, 0, 0);
    }

    // Prepending into an empty Vec must not panic or touch nonexistent
    // old elements.
    #[test]
    #[cfg(feature = "std")]
    fn prepend_empty() {
        let mut vec: Vec<i32> = vec![];
        prepend(&[1, 2, 3], &mut vec);
    }

    // Prepended elements land in front of the existing ones, in order.
    #[test]
    #[cfg(feature = "std")]
    fn prepend_i32() {
        let mut vec = vec![3, 4, 5];
        prepend(&[1, 2], &mut vec);
        assert_eq!(vec, &[1, 2, 3, 4, 5]);
    }
}