Bug 1520001 - mach rust vendor;r=arai,smaug

Depends on D34130

Differential Revision: https://phabricator.services.mozilla.com/D34131

--HG--
rename : third_party/rust/ascii-canvas/LICENSE-APACHE => third_party/rust/itertools-0.7.6/LICENSE-APACHE
rename : third_party/rust/petgraph/LICENSE-MIT => third_party/rust/itertools-0.7.6/LICENSE-MIT
rename : third_party/rust/itertools/Makefile => third_party/rust/itertools-0.7.6/Makefile
rename : third_party/rust/itertools/custom.css => third_party/rust/itertools-0.7.6/custom.css
extra : moz-landing-system : lando
This commit is contained in:
David Teller 2019-06-10 14:09:06 +00:00
Родитель cf49aea86d
Коммит 260e07c0c7
439 изменённых файлов: 19417 добавлений и 106406 удалений

184
Cargo.lock сгенерированный
Просмотреть файл

@ -61,14 +61,6 @@ dependencies = [
"nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ascii-canvas"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "atomic_refcell"
version = "0.1.0"
@ -230,12 +222,11 @@ name = "binast"
version = "0.1.1"
dependencies = [
"Inflector 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"binjs_meta 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -270,21 +261,13 @@ dependencies = [
[[package]]
name = "binjs_meta"
version = "0.4.3"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"Inflector 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bit-set"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"weedle 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -909,11 +892,6 @@ dependencies = [
"nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "diff"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "digest"
version = "0.8.0"
@ -932,18 +910,6 @@ dependencies = [
"winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)",
]
[[package]]
name = "docopt"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "dogear"
version = "0.2.6"
@ -990,14 +956,6 @@ name = "either"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ena"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "encoding_c"
version = "0.9.0"
@ -1103,11 +1061,6 @@ dependencies = [
"winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)",
]
[[package]]
name = "fixedbitset"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "flate2"
version = "1.0.1"
@ -1475,6 +1428,14 @@ dependencies = [
"either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "itertools"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "itoa"
version = "0.4.1"
@ -1544,35 +1505,6 @@ dependencies = [
"xpcom 0.1.0",
]
[[package]]
name = "lalrpop"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lalrpop-util"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazy_static"
version = "1.2.0"
@ -2097,11 +2029,6 @@ dependencies = [
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ordermap"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "owning_ref"
version = "0.4.0"
@ -2153,15 +2080,6 @@ name = "percent-encoding"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "petgraph"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "phf"
version = "0.7.21"
@ -2850,37 +2768,6 @@ name = "string"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "string_cache"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "string_cache_codegen"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "string_cache_shared"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "strsim"
version = "0.7.0"
@ -3038,15 +2925,6 @@ dependencies = [
"winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)",
]
[[package]]
name = "term"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "term_size"
version = "0.3.0"
@ -3458,15 +3336,6 @@ dependencies = [
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "webidl"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lalrpop 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "webrender"
version = "0.60.0"
@ -3557,6 +3426,14 @@ dependencies = [
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "weedle"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "which"
version = "1.0.3"
@ -3728,7 +3605,6 @@ dependencies = [
"checksum argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3f67b0b6a86dae6e67ff4ca2b6201396074996379fba2b92ff649126f37cb392"
"checksum arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd1479b7c29641adbd35ff3b5c293922d696a92f25c8c975da3e0acbc87258f"
"checksum arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0ef4a9820019a0c91d918918c93dc71d469f581a49b47ddc1d285d4270bbe2"
"checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
"checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
"checksum authenticator 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ec149e5d5d4caa2c9ead53a8ce1ea9c4204c388c65bf3b96c2d1dc0fcf4aeb66"
@ -3740,8 +3616,7 @@ dependencies = [
"checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
"checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
"checksum bindgen 0.49.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6bd7710ac8399ae1ebe1e3aac7c9047c4f39f2c94b33c997f482f49e96991f7c"
"checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"
"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
"checksum binjs_meta 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6c9a0da2208ceb785c1626fa8b7d250d2e5546ae230294b4a998e4f818c1768e"
"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
"checksum bit_reverse 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5e97e02db5a2899c0377f3d6031d5da8296ca2b47abef6ed699de51b9e40a28c"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
@ -3801,16 +3676,13 @@ dependencies = [
"checksum deflate 0.7.19 (registry+https://github.com/rust-lang/crates.io-index)" = "8a6abb26e16e8d419b5c78662aa9f82857c2386a073da266840e474d5055ec86"
"checksum derive_more 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f57d78cf3bd45270dad4e70c21ec77a960b36c7a841ff9db76aaa775a8fb871"
"checksum devd-rs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0d009f166c0d9e9f9909dc751630b3a6411ab7f85a153d32d01deb364ffe52a7"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05f47366984d3ad862010e22c7ce81a7dbcaebbdfb37241a620f8b6596ee135c"
"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
"checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
"checksum dogear 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c01a457f8d6689260111be60774bfb68e558b41bc89b866ebc3bbed60ba255cb"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d"
"checksum dwrote 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0bd1369e02db5e9b842a9b67bce8a2fcc043beafb2ae8a799dd482d46ea1ff0d"
"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
"checksum ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"
"checksum encoding_c 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "769ecb8b33323998e482b218c0d13cd64c267609023b4b7ec3ee740714c318ee"
"checksum encoding_rs 0.8.17 (registry+https://github.com/rust-lang/crates.io-index)" = "4155785c79f2f6701f185eb2e6b4caf0555ec03477cb4c70db67b465311620ed"
"checksum env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0561146661ae44c579e993456bc76d11ce1e0c7d745e57b2fa7146b6e49fa2ad"
@ -3821,7 +3693,6 @@ dependencies = [
"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
"checksum filetime_win 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8c37abd4a58e0cb794bcae4a7dc4f02fff376949d8d1066d4c729e97bfb38ec"
"checksum fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478"
"checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum foreign-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ebc04f19019fff1f2d627b5581574ead502f80c48c88900575a46e0840fe5d0"
@ -3851,11 +3722,10 @@ dependencies = [
"checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff"
"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
"checksum itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b07332223953b5051bceb67e8c4700aa65291535568e1f12408c43c4a42c0394"
"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358"
"checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum khronos_api 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2db585e1d738fc771bf08a151420d3ed193d9d895a36df7f6f8a9456b911ddc"
"checksum lalrpop 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02888049e197dff0c5c9fd503bd2458ea373c5e845c2f5460db1f9e43050d55e"
"checksum lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)" = "488da0d45c65af229321623c62660627d02b0e7fbc768a4c3fcd121815404ef1"
"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
@ -3902,14 +3772,12 @@ dependencies = [
"checksum object 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cca6ad89d0801138cb4ef606908ae12d83edc4c790ef5178fc7b4c72d959e90"
"checksum opaque-debug 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "51ecbcb821e1bd256d456fe858aaa7f380b63863eab2eb86eee1bd9f33dd6682"
"checksum ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0015e9e8e28ee20c581cfbfe47c650cedeb9ed0721090e0b7ebb10b9cdbcc2"
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
"checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13"
"checksum packed_simd 0.3.3 (git+https://github.com/hsivonen/packed_simd?branch=rust_1_32)" = "<none>"
"checksum parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fa7767817701cce701d5585b9c4db3cdd02086398322c1d7e8bf5094a96a2ce7"
"checksum parking_lot_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cb88cb1cb3790baa6776844f968fea3be44956cf184fa1be5a03341f5491278c"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@ -3983,16 +3851,12 @@ dependencies = [
"checksum smallvec 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "622df2d454c29a4d89b30dc3b27b42d7d90d6b9e587dbf8f67652eb7514da484"
"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
"checksum string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00caf261d6f90f588f8450b8e1230fa0d5be49ee6140fdfbcb55335aff350970"
"checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
"checksum string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eea1eee654ef80933142157fdad9dd8bc43cf7c74e999e369263496f04ff4da"
"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum svg_fmt 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c666f0fed8e1e20e057af770af9077d72f3d5a33157b8537c1475dd8ffd6d32b"
"checksum syn 0.15.30 (registry+https://github.com/rust-lang/crates.io-index)" = "66c8865bf5a7cbb662d8b011950060b3c8743dca141b054bf7195b20d314d8e2"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum target-lexicon 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1b0ab4982b8945c35cc1c46a83a9094c414f6828a099ce5dcaa8ee2b04642dcb"
"checksum tempfile 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "47776f63b85777d984a50ce49d6b9e58826b6a3766a449fc95bc66cd5663c15b"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
"checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
@ -4036,7 +3900,7 @@ dependencies = [
"checksum walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "63636bd0eb3d00ccb8b9036381b526efac53caf112b7783b730ab3f8e44da369"
"checksum want 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "797464475f30ddb8830cc529aaaae648d581f99e2036a928877dfde027ddf6b3"
"checksum wasmparser 0.29.2 (registry+https://github.com/rust-lang/crates.io-index)" = "981a8797cf89762e0233ec45fae731cb79a4dfaee12d9f0fe6cee01e4ac58d00"
"checksum webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0f807f7488d680893f7188aa09d7672a3a0a8461975a098a2edf0a52e3fee29"
"checksum weedle 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "26a4c67f132386d965390b8a734d5d10adbcd30eb5cc74bd9229af8b83f10044"
"checksum which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4be6cfa54dab45266e98b5d7be2f8ce959ddd49abd141a05d52dce4b07f803bb"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)" = "<none>"

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"dbb4e9109fa74df727ab69e2fec04655817f29f13f3014e84da8052039764b73","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"73cb5f952615bc21d11487cd7e0cf091967cc71d2e7d6cb93f96b36d47b250ed","README.md":"936b7dcfa140c626ec81541090208d000c8e6c05f2ef4bd5ba9dc1befe06d80f","src/lib.rs":"85a4f14672537e5dcf1a2e070436fa91c364ecd39581616a48cddb9735b64133","src/row.rs":"6ce3e1a99cb6ccdb1d525b03ce12e6265979d12a8dcc4aa9860215ca8e3d5cb4","src/style.rs":"3ae1b7e308532362c3a93d6f8113882f36cfd9c773486a6e99651514a6cb44d9","src/test.rs":"154421667bec09882e7d59c504ffd35309b92be650baa694dc79eb54d4c68c78"},"package":"b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"}

10
third_party/rust/ascii-canvas/Cargo.toml поставляемый
Просмотреть файл

@ -1,10 +0,0 @@
[package]
name = "ascii-canvas"
version = "1.0.0"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "simple canvas for drawing lines and styled text and emitting to the terminal"
repository = "https://github.com/nikomatsakis/ascii-canvas"
license = "Apache-2.0/MIT"
[dependencies]
term = "0.4.5"

25
third_party/rust/ascii-canvas/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2015 The LALRPOP Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

4
third_party/rust/ascii-canvas/README.md поставляемый
Просмотреть файл

@ -1,4 +0,0 @@
ASCII canvas is a simple library that allows you to draw lines and
colored text and then write them to the terminal. It uses the `term`
library to handle the ANSI nonsense and hence it works on Windows,
Mac, and Unix.

372
third_party/rust/ascii-canvas/src/lib.rs поставляемый
Просмотреть файл

@ -1,372 +0,0 @@
//! An "ASCII Canvas" allows us to draw lines and write text into a
//! fixed-sized canvas and then convert that canvas into ASCII
//! characters. ANSI styling is supported.
extern crate term;
use std::cmp;
use std::ops::Range;
use std::iter::ExactSizeIterator;
use style::Style;
use term::Terminal;
mod row;
#[cfg(test)] mod test;
pub mod style;
pub use self::row::Row;
///////////////////////////////////////////////////////////////////////////
/// AsciiView is a view onto an `AsciiCanvas` which potentially
/// applies transformations along the way (e.g., shifting, adding
/// styling information). Most of the main drawing methods for
/// `AsciiCanvas` are defined as inherent methods on an `AsciiView`
/// trait object.
pub trait AsciiView {
fn columns(&self) -> usize;
fn read_char(&mut self, row: usize, column: usize) -> char;
fn write_char(&mut self, row: usize, column: usize, ch: char, style: Style);
}
impl<'a> AsciiView+'a {
fn add_box_dirs(&mut self,
row: usize,
column: usize,
dirs: u8)
{
let old_ch = self.read_char(row, column);
let new_ch = add_dirs(old_ch, dirs);
self.write_char(row, column, new_ch, Style::new());
}
/// Draws a line for the given range of rows at the given column.
pub fn draw_vertical_line(&mut self,
rows: Range<usize>,
column: usize)
{
let len = rows.len();
for (index, r) in rows.enumerate() {
let new_dirs = if index == 0 {
DOWN
} else if index == len - 1 {
UP
} else {
UP | DOWN
};
self.add_box_dirs(r, column, new_dirs);
}
}
/// Draws a horizontal line along a given row for the given range
/// of columns.
pub fn draw_horizontal_line(&mut self,
row: usize,
columns: Range<usize>)
{
let len = columns.len();
for (index, c) in columns.enumerate() {
let new_dirs = if index == 0 {
RIGHT
} else if index == len - 1 {
LEFT
} else {
LEFT | RIGHT
};
self.add_box_dirs(row, c, new_dirs);
}
}
/// Writes characters in the given style at the given position.
pub fn write_chars<I>(&mut self,
row: usize,
column: usize,
chars: I,
style: Style)
where I: Iterator<Item=char>
{
for (i, ch) in chars.enumerate() {
self.write_char(row, column + i, ch, style);
}
}
/// Creates a new view onto the same canvas, but writing at an offset.
pub fn shift<'c>(&'c mut self, row: usize, column: usize) -> ShiftedView<'c> {
ShiftedView::new(self, row, column)
}
/// Creates a new view onto the same canvas, but applying a style
/// to all the characters written.
pub fn styled<'c>(&'c mut self, style: Style) -> StyleView<'c> {
StyleView::new(self, style)
}
}
pub struct AsciiCanvas {
columns: usize,
rows: usize,
characters: Vec<char>,
styles: Vec<Style>,
}
/// To use an `AsciiCanvas`, first create the canvas, then draw any
/// lines, then write text labels. It is required to draw the lines
/// first so that we can detect intersecting lines properly (we could
/// track which characters belong to lines, I suppose).
impl AsciiCanvas {
/// Create a canvas of the given size. We will automatically add
/// rows as needed, but the columns are fixed at creation.
pub fn new(rows: usize, columns: usize) -> Self {
AsciiCanvas {
rows: rows,
columns: columns,
characters: vec![' '; columns * rows],
styles: vec![Style::new(); columns * rows],
}
}
fn grow_rows_if_needed(&mut self, new_rows: usize) {
if new_rows >= self.rows {
let new_chars = (new_rows - self.rows) * self.columns;
self.characters.extend((0..new_chars).map(|_| ' '));
self.styles.extend((0..new_chars).map(|_| Style::new()));
self.rows = new_rows;
}
}
fn index(&mut self, r: usize, c: usize) -> usize {
self.grow_rows_if_needed(r + 1);
self.in_range_index(r, c)
}
fn in_range_index(&self, r: usize, c: usize) -> usize {
assert!(r < self.rows);
assert!(c <= self.columns);
r * self.columns + c
}
fn start_index(&self, r: usize) -> usize {
self.in_range_index(r, 0)
}
fn end_index(&self, r: usize) -> usize {
self.in_range_index(r, self.columns)
}
pub fn write_to<T:Terminal+?Sized>(&self, term: &mut T) -> term::Result<()> {
for row in self.to_strings() {
try!(row.write_to(term));
try!(writeln!(term, ""));
}
Ok(())
}
pub fn to_strings(&self) -> Vec<Row> {
(0..self.rows)
.map(|row| {
let start = self.start_index(row);
let end = self.end_index(row);
let chars = &self.characters[start..end];
let styles = &self.styles[start..end];
Row::new(chars, styles)
})
.collect()
}
}
impl AsciiView for AsciiCanvas {
fn columns(&self) -> usize {
self.columns
}
fn read_char(&mut self, row: usize, column: usize) -> char {
assert!(column < self.columns);
let index = self.index(row, column);
self.characters[index]
}
fn write_char(&mut self,
row: usize,
column: usize,
ch: char,
style: Style)
{
assert!(column < self.columns);
let index = self.index(row, column);
self.characters[index] = ch;
self.styles[index] = style;
}
}
#[derive(Copy, Clone)]
struct Point {
row: usize,
column: usize,
}
/// Gives a view onto an AsciiCanvas that has a fixed upper-left
/// point. You can get one of these by calling the `shift()` method on
/// any ASCII view.
///
/// Shifted views also track the extent of the characters which are
/// written through them; the `close()` method can be used to read
/// that out when you are finished.
pub struct ShiftedView<'canvas> {
// either the base canvas or another view
base: &'canvas mut AsciiView,
// fixed at creation: the content is always allowed to grow down,
// but cannot grow right more than `num_columns`
upper_left: Point,
// this is updated to track content that is emitted
lower_right: Point,
}
impl<'canvas> ShiftedView<'canvas> {
    /// Build a view whose origin is `(row, column)` in `base`'s
    /// coordinates; the tracked extent starts out equal to the origin.
    fn new(base: &'canvas mut AsciiView, row: usize, column: usize) -> Self {
        let origin = Point { row: row, column: column };
        ShiftedView {
            base: base,
            upper_left: origin,
            lower_right: origin,
        }
    }

    /// Finalize the view; returns the (maximal row, maximal column)
    /// that was written (in the coordinates of the parent view, not
    /// the shifted view). Note that these values are the actual last
    /// places that were written, so if you wrote to that precise
    /// location, you would overwrite some of the content that was
    /// written.
    pub fn close(self) -> (usize, usize) {
        (self.lower_right.row, self.lower_right.column)
    }

    /// Widen the tracked extent so it covers `(row, column)` (parent
    /// coordinates).
    fn track_max(&mut self, row: usize, column: usize) {
        self.lower_right.row = cmp::max(self.lower_right.row, row);
        self.lower_right.column = cmp::max(self.lower_right.column, column);
    }
}
impl<'canvas> AsciiView for ShiftedView<'canvas> {
    /// Columns remaining to the right of this view's origin.
    fn columns(&self) -> usize {
        self.base.columns() - self.upper_left.column
    }

    /// Read through to the base view, translating into parent
    /// coordinates first.
    fn read_char(&mut self, row: usize, column: usize) -> char {
        let abs_row = self.upper_left.row + row;
        let abs_column = self.upper_left.column + column;
        self.base.read_char(abs_row, abs_column)
    }

    /// Write through to the base view, translating into parent
    /// coordinates and widening the tracked extent.
    fn write_char(&mut self, row: usize, column: usize, ch: char, style: Style) {
        let abs_row = self.upper_left.row + row;
        let abs_column = self.upper_left.column + column;
        self.track_max(abs_row, abs_column);
        self.base.write_char(abs_row, abs_column, ch, style)
    }
}
/// Gives a view onto an AsciiCanvas that applies an additional style
/// to things that are written. You can get one of these by calling
/// the `styled()` method on any ASCII view.
pub struct StyleView<'canvas> {
    // the underlying view all reads and writes are forwarded to
    base: &'canvas mut AsciiView,
    // extra style OR'd into every character written through this view
    style: Style,
}
impl<'canvas> StyleView<'canvas> {
    /// Wrap `base` so that everything written through the new view
    /// also carries `style`.
    fn new(base: &'canvas mut AsciiView, style: Style) -> Self {
        StyleView { base: base, style: style }
    }
}
impl<'canvas> AsciiView for StyleView<'canvas> {
    /// Same width as the wrapped view.
    fn columns(&self) -> usize {
        self.base.columns()
    }

    /// Reads pass straight through to the wrapped view.
    fn read_char(&mut self, row: usize, column: usize) -> char {
        self.base.read_char(row, column)
    }

    /// Writes pass through with this view's style merged into the
    /// caller's style.
    fn write_char(&mut self, row: usize, column: usize, ch: char, style: Style) {
        let merged = style.with(self.style);
        self.base.write_char(row, column, ch, merged)
    }
}
///////////////////////////////////////////////////////////////////////////
// Unicode box-drawing characters
//
// Each direction bit records that a glyph has an arm extending that
// way from the center of its cell (e.g. '┤' = UP | DOWN | LEFT).
const UP: u8 = 0b0001;
const DOWN: u8 = 0b0010;
const LEFT: u8 = 0b0100;
const RIGHT: u8 = 0b1000;
/// Table mapping every combination of direction bits to the glyph
/// with exactly those arms. All 16 combinations are present, so
/// `box_char_for_dirs` cannot fail for any mask built from the four
/// direction bits.
const BOX_CHARS: &'static [(char, u8)] = &[
    ('╵', UP),
    ('│', UP | DOWN),
    ('┤', UP | DOWN | LEFT),
    ('├', UP | DOWN | RIGHT),
    ('┼', UP | DOWN | LEFT | RIGHT),
    ('┘', UP | LEFT),
    ('└', UP | RIGHT),
    ('┴', UP | LEFT | RIGHT),
    // No UP:
    ('╷', DOWN),
    ('┐', DOWN | LEFT),
    ('┌', DOWN | RIGHT),
    ('┬', DOWN | LEFT | RIGHT),
    // No UP|DOWN:
    // BUGFIX: U+2574 '╴' has its arm pointing LEFT and U+2576 '╶'
    // pointing RIGHT; the two glyphs were previously swapped.
    ('╴', LEFT),
    ('─', LEFT | RIGHT),
    // No LEFT:
    ('╶', RIGHT),
    // No RIGHT:
    (' ', 0),
];
/// Returns the glyph whose arms are exactly `dirs`.
///
/// Panics if `dirs` sets bits outside `UP|DOWN|LEFT|RIGHT`; every
/// valid combination has a table entry.
fn box_char_for_dirs(dirs: u8) -> char {
    for &(c, d) in BOX_CHARS {
        if dirs == d {
            return c;
        }
    }
    panic!("no box character for dirs: {:b}", dirs);
}
/// Inverse lookup: the direction bits of a box-drawing glyph, or
/// `None` if `ch` is not in the table.
fn dirs_for_box_char(ch: char) -> Option<u8> {
    for &(c, d) in BOX_CHARS {
        if c == ch {
            return Some(d);
        }
    }
    None
}
/// Merge `new_dirs` into the arms already drawn by `old_ch`. A
/// non-box character (e.g. plain text) is treated as having no arms,
/// so it is simply replaced by the new line segment.
fn add_dirs(old_ch: char, new_dirs: u8) -> char {
    let old_dirs = dirs_for_box_char(old_ch).unwrap_or(0);
    box_char_for_dirs(old_dirs | new_dirs)
}

46
third_party/rust/ascii-canvas/src/row.rs поставляемый
Просмотреть файл

@ -1,46 +0,0 @@
use std::fmt::{Debug, Display, Formatter, Error};
use style::{Style, StyleCursor};
use term::{self, Terminal};
/// One rendered canvas row: its text plus a parallel vector giving
/// the style of each character.
pub struct Row {
    text: String,       // characters of the row, in column order
    styles: Vec<Style>  // styles[i] applies to the i-th char of `text`
}
impl Row {
    /// Build a row from parallel slices of characters and styles;
    /// panics if their lengths differ.
    pub fn new(chars: &[char], styles: &[Style]) -> Row {
        assert_eq!(chars.len(), styles.len());
        let text: String = chars.iter().cloned().collect();
        Row {
            text: text,
            styles: styles.to_vec()
        }
    }

    /// Emit the row to `term`, switching terminal styles only when
    /// they change. Trailing whitespace is not written: the styles
    /// are zipped against the right-trimmed text, so iteration stops
    /// at its end.
    pub fn write_to<T: Terminal + ?Sized>(&self, term: &mut T) -> term::Result<()> {
        let mut cursor = try!(StyleCursor::new(term));
        let visible = self.text.trim_right();
        for (character, &style) in visible.chars().zip(&self.styles) {
            try!(cursor.set_style(style));
            try!(write!(cursor.term(), "{}", character));
        }
        Ok(())
    }
}
// Using display/debug just skips the styling.
impl Display for Row {
    /// Plain-text rendering: the row's characters with trailing
    /// whitespace removed; style information is dropped.
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
        let visible = self.text.trim_right();
        Display::fmt(visible, fmt)
    }
}
impl Debug for Row {
fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
// NB: use Display, not Debug, just throw some quotes around it
try!(write!(fmt, "\""));
try!(Display::fmt(self.text.trim_right(), fmt));
write!(fmt, "\"")
}
}

203
third_party/rust/ascii-canvas/src/style.rs поставляемый
Просмотреть файл

@ -1,203 +0,0 @@
//! The `Style` type is a simplified view of the various
//! attributes offered by the `term` library. These are
//! enumerated as bits so they can be easily or'd together
//! etc.
use std::default::Default;
use term::{self, Terminal};
/// A compact set of terminal attributes: each bit of `bits`
/// corresponds to one style declared via `declare_styles!`.
#[derive(Copy, Clone, Default, PartialEq, Eq)]
pub struct Style {
    bits: u64 // OR of `1 << StyleBit::*`; 0 is the default style
}
/// Declares one public `Style` constant per identifier, each
/// occupying a distinct bit so styles can be OR'd together via
/// `Style::with`.
macro_rules! declare_styles {
    ($($style:ident,)*) => {
        // Internal enum whose discriminants supply a unique bit
        // position for each declared style.
        #[derive(Copy, Clone)]
        #[allow(non_camel_case_types)]
        enum StyleBit {
            $($style,)*
        }
        $(
            pub const $style: Style = Style { bits: 1 << (StyleBit::$style as u64) };
        )*
    }
}
/// The empty style: no colors or attributes set.
pub const DEFAULT: Style = Style { bits: 0 };
declare_styles! {
    // Foreground colors:
    FG_BLACK,
    FG_BLUE,
    FG_BRIGHT_BLACK,
    FG_BRIGHT_BLUE,
    FG_BRIGHT_CYAN,
    FG_BRIGHT_GREEN,
    FG_BRIGHT_MAGENTA,
    FG_BRIGHT_RED,
    FG_BRIGHT_WHITE,
    FG_BRIGHT_YELLOW,
    FG_CYAN,
    FG_GREEN,
    FG_MAGENTA,
    FG_RED,
    FG_WHITE,
    FG_YELLOW,
    // Background colors:
    BG_BLACK,
    BG_BLUE,
    BG_BRIGHT_BLACK,
    BG_BRIGHT_BLUE,
    BG_BRIGHT_CYAN,
    BG_BRIGHT_GREEN,
    BG_BRIGHT_MAGENTA,
    BG_BRIGHT_RED,
    BG_BRIGHT_WHITE,
    BG_BRIGHT_YELLOW,
    BG_CYAN,
    BG_GREEN,
    BG_MAGENTA,
    BG_RED,
    BG_WHITE,
    BG_YELLOW,
    // Other (non-color) attributes:
    BOLD,
    DIM,
    ITALIC,
    UNDERLINE,
    BLINK,
    STANDOUT,
    REVERSE,
    SECURE,
}
impl Style {
    /// The default (empty) style; equivalent to `DEFAULT`.
    pub fn new() -> Style {
        Style::default()
    }
    /// Union of the two styles' attribute bits.
    pub fn with(self, other_style: Style) -> Style {
        Style { bits: self.bits | other_style.bits }
    }
    /// True if every bit of `other_style` is also set in `self`.
    pub fn contains(self, other_style: Style) -> bool {
        self.with(other_style) == self
    }
    /// Attempts to apply the given style to the given terminal. If
    /// the style is not supported, either there is no effect or else
    /// a similar, substitute style may be applied.
    pub fn apply<T: Terminal + ?Sized>(self, term: &mut T) -> term::Result<()> {
        // Start from a clean slate so previously-set attributes do
        // not leak into this style.
        try!(term.reset());
        // Apply one foreground color bit, if the terminal supports
        // color at all.
        macro_rules! fg_color {
            ($color:expr, $term_color:ident) => {
                if self.contains($color) {
                    if term.supports_color() {
                        try!(term.fg(term::color::$term_color));
                    }
                }
            }
        }
        fg_color!(FG_BLACK, BLACK);
        fg_color!(FG_BLUE, BLUE);
        fg_color!(FG_BRIGHT_BLACK, BRIGHT_BLACK);
        fg_color!(FG_BRIGHT_BLUE, BRIGHT_BLUE);
        fg_color!(FG_BRIGHT_CYAN, BRIGHT_CYAN);
        fg_color!(FG_BRIGHT_GREEN, BRIGHT_GREEN);
        fg_color!(FG_BRIGHT_MAGENTA, BRIGHT_MAGENTA);
        fg_color!(FG_BRIGHT_RED, BRIGHT_RED);
        fg_color!(FG_BRIGHT_WHITE, BRIGHT_WHITE);
        fg_color!(FG_BRIGHT_YELLOW, BRIGHT_YELLOW);
        fg_color!(FG_CYAN, CYAN);
        fg_color!(FG_GREEN, GREEN);
        fg_color!(FG_MAGENTA, MAGENTA);
        fg_color!(FG_RED, RED);
        fg_color!(FG_WHITE, WHITE);
        fg_color!(FG_YELLOW, YELLOW);
        // Apply one background color bit, if supported.
        macro_rules! bg_color {
            ($color:expr, $term_color:ident) => {
                if self.contains($color) {
                    if term.supports_color() {
                        try!(term.bg(term::color::$term_color));
                    }
                }
            }
        }
        bg_color!(BG_BLACK, BLACK);
        bg_color!(BG_BLUE, BLUE);
        bg_color!(BG_BRIGHT_BLACK, BRIGHT_BLACK);
        bg_color!(BG_BRIGHT_BLUE, BRIGHT_BLUE);
        bg_color!(BG_BRIGHT_CYAN, BRIGHT_CYAN);
        bg_color!(BG_BRIGHT_GREEN, BRIGHT_GREEN);
        bg_color!(BG_BRIGHT_MAGENTA, BRIGHT_MAGENTA);
        bg_color!(BG_BRIGHT_RED, BRIGHT_RED);
        bg_color!(BG_BRIGHT_WHITE, BRIGHT_WHITE);
        bg_color!(BG_BRIGHT_YELLOW, BRIGHT_YELLOW);
        bg_color!(BG_CYAN, CYAN);
        bg_color!(BG_GREEN, GREEN);
        bg_color!(BG_MAGENTA, MAGENTA);
        bg_color!(BG_RED, RED);
        bg_color!(BG_WHITE, WHITE);
        bg_color!(BG_YELLOW, YELLOW);
        // Apply one non-color attribute, if this terminal supports
        // that particular attribute.
        macro_rules! attr {
            ($attr:expr, $term_attr:expr) => {
                if self.contains($attr) {
                    let attr = $term_attr;
                    if term.supports_attr(attr) {
                        try!(term.attr(attr));
                    }
                }
            }
        }
        attr!(BOLD, term::Attr::Bold);
        attr!(DIM, term::Attr::Dim);
        attr!(ITALIC, term::Attr::Italic(true));
        attr!(UNDERLINE, term::Attr::Underline(true));
        attr!(BLINK, term::Attr::Blink);
        attr!(STANDOUT, term::Attr::Standout(true));
        attr!(REVERSE, term::Attr::Reverse);
        attr!(SECURE, term::Attr::Secure);
        Ok(())
    }
}
///////////////////////////////////////////////////////////////////////////
/// Tracks the style currently in effect on a terminal so redundant
/// style switches can be skipped.
pub struct StyleCursor<'term, T: ?Sized + Terminal + 'term> {
    current_style: Style, // style most recently applied to `term`
    term: &'term mut T,
}
impl<'term, T: ?Sized + Terminal> StyleCursor<'term, T> {
    /// Take control of `term`, resetting it to the default style so
    /// the cursor's notion of the current style is accurate.
    pub fn new(term: &'term mut T) -> term::Result<StyleCursor<'term, T>> {
        let initial = Style::default();
        initial.apply(term)?;
        Ok(StyleCursor {
            current_style: initial,
            term: term
        })
    }

    /// Borrow the underlying terminal.
    pub fn term(&mut self) -> &mut T {
        self.term
    }

    /// Switch the terminal to `style`; a no-op if `style` is already
    /// in effect.
    pub fn set_style(&mut self, style: Style) -> term::Result<()> {
        if style == self.current_style {
            return Ok(());
        }
        style.apply(self.term)?;
        self.current_style = style;
        Ok(())
    }
}

77
third_party/rust/ascii-canvas/src/test.rs поставляемый
Просмотреть файл

@ -1,77 +0,0 @@
use style::Style;
use test_util::expect_debug;
use super::{AsciiCanvas, AsciiView};
/// Two vertical and two horizontal lines drawn on a 5x10 canvas
/// close into a box: corner glyphs are chosen automatically where
/// the lines meet.
#[test]
fn draw_box() {
    let mut canvas = AsciiCanvas::new(5, 10);
    {
        let view: &mut AsciiView = &mut canvas;
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
    }
    expect_debug(
        &canvas.to_strings(),
        r#"
[
"",
"",
" ┌────┐",
" │ │",
" └────┘"
]
"#.trim());
}
/// Same drawing as `draw_box`, but starting from a canvas with zero
/// rows: writes past the bottom must grow the canvas downward.
#[test]
fn grow_box() {
    let mut canvas = AsciiCanvas::new(0, 10);
    {
        let view: &mut AsciiView = &mut canvas;
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
    }
    expect_debug(
        &canvas.to_strings(),
        r#"
[
"",
"",
" ┌────┐",
" │ │",
" └────┘"
]
"#.trim());
}
/// Drawing through a `shift(1, 2)` view translates all coordinates
/// by (1 row, 2 columns) relative to the underlying canvas; text
/// written with `write_chars` lands inside the shifted box.
#[test]
fn shift() {
    let mut canvas = AsciiCanvas::new(0, 10);
    {
        let canvas: &mut AsciiView = &mut canvas;
        let view: &mut AsciiView = &mut canvas.shift(1, 2);
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
        view.write_chars(3, 3, "Hi!".chars(), Style::new());
    }
    expect_debug(
        &canvas.to_strings(),
        r#"
[
"",
"",
"",
" ┌────┐",
" │Hi! │",
" └────┘"
]
"#.trim());
}

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"eee9b8c9f05e442ed41ee986b07b443cb89465346dce4aae4f73f13fa7243492","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"a831abf8d7a1ab73c5d70a9e8517b8af1df492589a2f180698145ac5d46d7102","src/export.rs":"56910e257a000cac963b9ac377558767d05076b677e83a7e75e570ecbd4b35f6","src/import.rs":"366bada1b19c608ffe7dc4761f1db1a1dae616f4ed99685e4260a00d5c0125d0","src/lib.rs":"d4ea18ec850054a817c6b91ed52412a2f2f39639628e5918dee688d829d3ed4b","src/spec.rs":"7cfb4705d9cfa72ba0a34c5d5beab7e23ac54d8e9fa125317364535d5aa7496a","src/util.rs":"1d934eec75d9dee44289f9a9a9e67c96dd6205367430b9bcf9fc66e730bf6eb0"},"package":"430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"}
{"files":{"Cargo.toml":"23870cd2ada8f913245771d63e99aea00df6917ca461644b5d51324f370b00ef","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"913a34e84f45bd8bbe305629ca5abb2818370333a769074a757ce1bb4bb54778","src/export.rs":"920c045da0597fd330737c707a4d2ec2af6174b809b204b693f57ab45edbdc68","src/import.rs":"5d4c5ac03404ec04af21963f951626209c944ed5c55a51b09b05da596e5174d0","src/lib.rs":"546b6b13669d659d35a130dd29cfe3eac547a41d210adb194db1b214194295d7","src/spec.rs":"9adb6ff0168e3ec29735a5a9198756076385b6f6063883e47190846f46fa7247","src/util.rs":"47fcb109cd3d20eed52f7d1d99a12da86f40204fbf29f990ff60bb35bfe86fa1"},"package":"6c9a0da2208ceb785c1626fa8b7d250d2e5546ae230294b4a998e4f818c1768e"}

12
third_party/rust/binjs_meta/Cargo.toml поставляемый
Просмотреть файл

@ -3,7 +3,7 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
@ -12,7 +12,7 @@
[package]
name = "binjs_meta"
version = "0.4.3"
version = "0.5.2"
authors = ["David Teller <D.O.Teller@gmail.com>"]
description = "Part of binjs-ref. Tools for manipulating grammars. You probably do not want to use this crate directly unless you're writing an encoder, decoder or parser generator for binjs."
homepage = "https://binast.github.io/ecmascript-binary-ast/"
@ -25,18 +25,18 @@ repository = "https://github.com/binast/binjs-ref"
version = "^0.11"
[dependencies.itertools]
version = "^0.7"
version = "^0.8"
[dependencies.log]
version = "^0.4"
[dependencies.webidl]
[dependencies.weedle]
version = "^0.8"
[dev-dependencies.clap]
version = "^2"
version = "^2.0"
[dev-dependencies.env_logger]
version = "^0.5"
version = "^0.6"
[dev-dependencies.yaml-rust]
version = "^0.4"

Разница между файлами не показана из-за своего большого размера Загрузить разницу

319
third_party/rust/binjs_meta/src/export.rs поставляемый
Просмотреть файл

@ -1,7 +1,7 @@
use spec::*;
use util::*;
use std::collections::{ HashMap, HashSet };
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
@ -87,7 +87,9 @@ impl TypeDeanonymizer {
for (_, interface) in spec.interfaces_by_name() {
for field in interface.contents().fields() {
if field.is_lazy() {
let skip_name = result.builder.field_name(format!("{}_skip", field.name().to_str()).to_str());
let skip_name = result
.builder
.field_name(format!("{}_skip", field.name().to_str()).to_str());
skip_name_map.insert(field.name(), skip_name);
}
}
@ -108,17 +110,25 @@ impl TypeDeanonymizer {
}
// Copy the declaration.
let mut declaration = result.builder.add_interface(name)
.unwrap();
let mut declaration = result.builder.add_interface(name).unwrap();
for field in fields.drain(..) {
// Create *_skip field just before the lazy field.
// See also tagged_tuple in write.rs.
if field.is_lazy() {
declaration.with_field(skip_name_map.get(field.name()).unwrap(),
Type::offset().required());
declaration.with_field(
skip_name_map.get(field.name()).unwrap(),
Type::offset().required(),
);
}
declaration.with_field_laziness(field.name(), field.type_().clone(),
field.laziness());
declaration.with_field_laziness(
field.name(),
field.type_().clone(),
field.laziness(),
);
}
if let Some(ref field_name) = interface.scoped_dictionary() {
declaration.with_scoped_dictionary(field_name);
}
}
// Copy and deanonymize typedefs
@ -126,18 +136,15 @@ impl TypeDeanonymizer {
result.builder.import_node_name(name);
if result.builder.get_typedef(name).is_some() {
// Already imported by following links.
continue
continue;
}
result.import_type(spec, &definition, Some(name.clone()));
}
// Copy and deanonymize string enums
for (name, definition) in spec.string_enums_by_name() {
result.builder.import_node_name(name);
let mut strings: Vec<_> = definition.strings()
.iter()
.collect();
let mut declaration = result.builder.add_string_enum(name)
.unwrap();
let mut strings: Vec<_> = definition.strings().iter().collect();
let mut declaration = result.builder.add_string_enum(name).unwrap();
for string in strings.drain(..) {
declaration.with_string(&string);
}
@ -162,17 +169,20 @@ impl TypeDeanonymizer {
}
/// Returns `(sum, name)` where `sum` is `Some(names)` iff this type can be resolved to a sum of interfaces.
fn import_type(&mut self, spec: &Spec, type_: &Type, public_name: Option<NodeName>) -> (Option<HashSet<NodeName>>, NodeName) {
fn import_type(
&mut self,
spec: &Spec,
type_: &Type,
public_name: Option<NodeName>,
) -> (Option<HashSet<NodeName>>, NodeName) {
debug!(target: "export_utils", "import_type {:?} => {:?}", public_name, type_);
if type_.is_optional() {
let (_, spec_name) = self.import_typespec(spec, &type_.spec, None);
let my_name =
match public_name {
None => self.builder.node_name(&format!("Optional{}", spec_name)),
Some(ref name) => name.clone()
};
let deanonymized = Type::named(&spec_name).optional()
.unwrap(); // Named types can always be made optional.
let my_name = match public_name {
None => self.builder.node_name(&format!("Optional{}", spec_name)),
Some(ref name) => name.clone(),
};
let deanonymized = Type::named(&spec_name).optional().unwrap(); // Named types can always be made optional.
if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
debug!(target: "export_utils", "import_type introduced {:?}", my_name);
typedef.with_type(deanonymized.clone());
@ -184,17 +194,22 @@ impl TypeDeanonymizer {
self.import_typespec(spec, &type_.spec, public_name)
}
}
fn import_typespec(&mut self, spec: &Spec, type_spec: &TypeSpec, public_name: Option<NodeName>) -> (Option<HashSet<NodeName>>, NodeName) {
fn import_typespec(
&mut self,
spec: &Spec,
type_spec: &TypeSpec,
public_name: Option<NodeName>,
) -> (Option<HashSet<NodeName>>, NodeName) {
debug!(target: "export_utils", "import_typespec {:?} => {:?}", public_name, type_spec);
match *type_spec {
TypeSpec::Boolean |
TypeSpec::Number |
TypeSpec::UnsignedLong |
TypeSpec::PropertyKey |
TypeSpec::IdentifierName |
TypeSpec::String |
TypeSpec::Offset |
TypeSpec::Void => {
TypeSpec::Boolean
| TypeSpec::Number
| TypeSpec::UnsignedLong
| TypeSpec::PropertyKey
| TypeSpec::IdentifierName
| TypeSpec::String
| TypeSpec::Offset
| TypeSpec::Void => {
if let Some(ref my_name) = public_name {
if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
debug!(target: "export_utils", "import_typespec: Defining {name} (primitive)", name = my_name.to_str());
@ -230,9 +245,7 @@ impl TypeDeanonymizer {
NamedType::Interface(_) => {
// - May use in a sum.
// - If a rewriting takes place, it didn't change the names.
let sum = [link.clone()].iter()
.cloned()
.collect();
let sum = [link.clone()].iter().cloned().collect();
(Some(sum), None, None)
}
};
@ -242,17 +255,46 @@ impl TypeDeanonymizer {
// If we have a public name, alias it to `content`
if let Some(content) = rewrite {
let deanonymized = match primitive {
None |
Some(IsNullable { is_nullable: true, .. }) |
Some(IsNullable { content: Primitive::Interface(_), .. }) => Type::named(&content).required(),
Some(IsNullable { content: Primitive::String, .. }) => Type::string().required(),
Some(IsNullable { content: Primitive::IdentifierName, .. }) => Type::identifier_name().required(),
Some(IsNullable { content: Primitive::PropertyKey, .. }) => Type::property_key().required(),
Some(IsNullable { content: Primitive::Number, .. }) => Type::number().required(),
Some(IsNullable { content: Primitive::UnsignedLong, .. }) => Type::unsigned_long().required(),
Some(IsNullable { content: Primitive::Boolean, .. }) => Type::bool().required(),
Some(IsNullable { content: Primitive::Offset, .. }) => Type::offset().required(),
Some(IsNullable { content: Primitive::Void, .. }) => Type::void().required(),
None
| Some(IsNullable {
is_nullable: true, ..
})
| Some(IsNullable {
content: Primitive::Interface(_),
..
}) => Type::named(&content).required(),
Some(IsNullable {
content: Primitive::String,
..
}) => Type::string().required(),
Some(IsNullable {
content: Primitive::IdentifierName,
..
}) => Type::identifier_name().required(),
Some(IsNullable {
content: Primitive::PropertyKey,
..
}) => Type::property_key().required(),
Some(IsNullable {
content: Primitive::Number,
..
}) => Type::number().required(),
Some(IsNullable {
content: Primitive::UnsignedLong,
..
}) => Type::unsigned_long().required(),
Some(IsNullable {
content: Primitive::Boolean,
..
}) => Type::bool().required(),
Some(IsNullable {
content: Primitive::Offset,
..
}) => Type::offset().required(),
Some(IsNullable {
content: Primitive::Void,
..
}) => Type::void().required(),
};
debug!(target: "export_utils", "import_typespec aliasing {:?} => {:?}",
my_name, deanonymized);
@ -276,27 +318,22 @@ impl TypeDeanonymizer {
}
TypeSpec::Array {
ref contents,
ref supports_empty
ref supports_empty,
} => {
let (_, contents_name) = self.import_type(spec, contents, None);
let my_name =
match public_name {
None => self.builder.node_name(&format!("{non_empty}ListOf{content}",
non_empty =
if *supports_empty {
""
} else {
"NonEmpty"
},
content = contents_name.to_str())),
Some(ref name) => name.clone()
};
let deanonymized =
if *supports_empty {
Type::named(&contents_name).array()
} else {
Type::named(&contents_name).non_empty_array()
};
let my_name = match public_name {
None => self.builder.node_name(&format!(
"{non_empty}ListOf{content}",
non_empty = if *supports_empty { "" } else { "NonEmpty" },
content = contents_name.to_str()
)),
Some(ref name) => name.clone(),
};
let deanonymized = if *supports_empty {
Type::named(&contents_name).array()
} else {
Type::named(&contents_name).non_empty_array()
};
if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
debug!(target: "export_utils", "import_typespec: Defining {name} (name to list)",
name = my_name.to_str());
@ -324,22 +361,22 @@ impl TypeDeanonymizer {
full_sum.insert(item);
}
}
let my_name =
match public_name {
None => self.builder.node_name(&format!("{}",
names.drain(..)
.format("Or"))),
Some(ref name) => name.clone()
};
let my_name = match public_name {
None => self.builder.node_name(&format!(
"{}",
names.into_iter().sorted().into_iter().format("Or")
)),
Some(ref name) => name.clone(),
};
for subsum_name in subsums {
// So, `my_name` is a superset of `subsum_name`.
let mut supersum_entry = self.supersums_of.entry(subsum_name.clone())
let mut supersum_entry = self
.supersums_of
.entry(subsum_name.clone())
.or_insert_with(|| HashSet::new());
supersum_entry.insert(my_name.clone());
}
let sum : Vec<_> = full_sum.iter()
.map(Type::named)
.collect();
let sum: Vec<_> = full_sum.iter().map(Type::named).collect();
let deanonymized = Type::sum(&sum).required();
if let Some(ref mut typedef) = self.builder.add_typedef(&my_name) {
debug!(target: "export_utils", "import_typespec: Defining {name} (name to sum)", name = my_name.to_str());
@ -367,34 +404,32 @@ impl TypeName {
pub fn type_spec(spec: &TypeSpec) -> String {
match *spec {
TypeSpec::Array { ref contents, supports_empty: false } =>
format!("NonEmptyListOf{}", Self::type_(contents)),
TypeSpec::Array { ref contents, supports_empty: true } =>
format!("ListOf{}", Self::type_(contents)),
TypeSpec::NamedType(ref name) =>
name.to_string().clone(),
TypeSpec::Offset =>
"_Offset".to_string(),
TypeSpec::Boolean =>
"_Bool".to_string(),
TypeSpec::Number =>
"_Number".to_string(),
TypeSpec::UnsignedLong =>
"_UnsignedLong".to_string(),
TypeSpec::String =>
"_String".to_string(),
TypeSpec::Void =>
"_Void".to_string(),
TypeSpec::IdentifierName =>
"IdentifierName".to_string(),
TypeSpec::PropertyKey =>
"PropertyKey".to_string(),
TypeSpec::TypeSum(ref sum) => {
format!("{}", sum.types()
TypeSpec::Array {
ref contents,
supports_empty: false,
} => format!("NonEmptyListOf{}", Self::type_(contents)),
TypeSpec::Array {
ref contents,
supports_empty: true,
} => format!("ListOf{}", Self::type_(contents)),
TypeSpec::NamedType(ref name) => name.to_string().clone(),
TypeSpec::Offset => "_Offset".to_string(),
TypeSpec::Boolean => "_Bool".to_string(),
TypeSpec::Number => "_Number".to_string(),
TypeSpec::UnsignedLong => "_UnsignedLong".to_string(),
TypeSpec::String => "_String".to_string(),
TypeSpec::Void => "_Void".to_string(),
TypeSpec::IdentifierName => "IdentifierName".to_string(),
TypeSpec::PropertyKey => "PropertyKey".to_string(),
TypeSpec::TypeSum(ref sum) => format!(
"{}",
sum.types()
.iter()
.map(Self::type_spec)
.format("Or"))
}
.sorted()
.into_iter()
.format("Or")
),
}
}
}
@ -410,35 +445,34 @@ impl ToWebidl {
TypeSpec::Offset => {
return None;
}
TypeSpec::Array { ref contents, ref supports_empty } => {
match Self::type_(&*contents, prefix, indent) {
None => { return None; }
Some(description) => format!("{emptiness}FrozenArray<{}>",
description,
emptiness = if *supports_empty { "" } else {"[NonEmpty] "} ),
TypeSpec::Array {
ref contents,
ref supports_empty,
} => match Self::type_(&*contents, prefix, indent) {
None => {
return None;
}
}
TypeSpec::Boolean =>
"bool".to_string(),
TypeSpec::String =>
"string".to_string(),
TypeSpec::PropertyKey =>
"[PropertyKey] string".to_string(),
TypeSpec::IdentifierName =>
"[IdentifierName] string".to_string(),
TypeSpec::Number =>
"number".to_string(),
TypeSpec::UnsignedLong =>
"unsigned long".to_string(),
TypeSpec::NamedType(ref name) =>
name.to_str().to_string(),
TypeSpec::TypeSum(ref sum) => {
format!("({})", sum.types()
Some(description) => format!(
"{emptiness}FrozenArray<{}>",
description,
emptiness = if *supports_empty { "" } else { "[NonEmpty] " }
),
},
TypeSpec::Boolean => "bool".to_string(),
TypeSpec::String => "string".to_string(),
TypeSpec::PropertyKey => "[PropertyKey] string".to_string(),
TypeSpec::IdentifierName => "[IdentifierName] string".to_string(),
TypeSpec::Number => "number".to_string(),
TypeSpec::UnsignedLong => "unsigned long".to_string(),
TypeSpec::NamedType(ref name) => name.to_str().to_string(),
TypeSpec::TypeSum(ref sum) => format!(
"({})",
sum.types()
.iter()
.filter_map(|x| Self::spec(x, "", indent))
.format(" or "))
}
TypeSpec::Void => "void".to_string()
.format(" or ")
),
TypeSpec::Void => "void".to_string(),
};
Some(result)
}
@ -448,27 +482,38 @@ impl ToWebidl {
let pretty_type = Self::spec(type_.spec(), prefix, indent);
match pretty_type {
None => None,
Some(pretty_type) => Some(format!("{}{}",
Some(pretty_type) => Some(format!(
"{}{}",
pretty_type,
if type_.is_optional() { "?" } else { "" }))
if type_.is_optional() { "?" } else { "" }
)),
}
}
/// Export an Interface
pub fn interface(interface: &Interface, prefix: &str, indent: &str) -> String {
let mut result = format!("{prefix} interface {name} : Node {{\n", prefix=prefix, name=interface.name().to_str());
let mut result = format!(
"{prefix} interface {name} : Node {{\n",
prefix = prefix,
name = interface.name().to_str()
);
{
let prefix = format!("{prefix}{indent}",
prefix=prefix,
indent=indent);
let prefix = format!("{prefix}{indent}", prefix = prefix, indent = indent);
for field in interface.contents().fields() {
match Self::type_(field.type_(), &prefix, indent) {
None => /* generated field, ignore */ {},
None =>
/* generated field, ignore */
{}
Some(description) => {
if let Some(ref doc) = field.doc() {
result.push_str(&format!("{prefix}// {doc}\n", prefix = prefix, doc = doc));
result.push_str(&format!(
"{prefix}// {doc}\n",
prefix = prefix,
doc = doc
));
}
result.push_str(&format!("{prefix}{description} {name};\n",
result.push_str(&format!(
"{prefix}{description} {name};\n",
prefix = prefix,
name = field.name().to_str(),
description = description
@ -480,7 +525,7 @@ impl ToWebidl {
}
}
}
result.push_str(&format!("{prefix} }}\n", prefix=prefix));
result.push_str(&format!("{prefix} }}\n", prefix = prefix));
result
}
}

353
third_party/rust/binjs_meta/src/import.rs поставляемый
Просмотреть файл

@ -1,22 +1,35 @@
use spec::{ self, Laziness, SpecBuilder, TypeSum };
use spec::{self, Laziness, SpecBuilder, TypeSpec, TypeSum};
use weedle::common::Identifier;
use weedle::types::*;
use weedle::*;
use webidl::ast::*;
fn nullable<T: std::fmt::Debug>(src: &MayBeNull<T>, dst: TypeSpec) -> spec::Type {
if src.q_mark.is_some() {
dst.optional()
.unwrap_or_else(|| panic!("This type could not be made optional {:?}", src.type_))
} else {
dst.required()
}
}
pub struct Importer {
builder: SpecBuilder,
/// The interfaces we have traversed so far.
path: Vec<String>,
}
impl Importer {
/// Import an AST into a SpecBuilder.
/// Import a WebIDL spec into a SpecBuilder.
///
/// A WebIDL spec may consist in several files. Files are parsed in the order
/// of `sources`. An extension file (e.g. `es6-extended.webidl`) MUST appear
/// after the files it extends.
///
/// ```
/// extern crate binjs_meta;
/// extern crate webidl;
/// use webidl;
/// use binjs_meta::spec::SpecOptions;
///
/// let ast = webidl::parse_string("
/// let mut builder = binjs_meta::import::Importer::import(vec!["
/// interface FooContents {
/// attribute boolean value;
/// };
@ -26,9 +39,7 @@ impl Importer {
/// interface EagerFoo {
/// attribute FooContents contents;
/// };
/// ").expect("Could not parse");
///
/// let mut builder = binjs_meta::import::Importer::import(&ast);
/// "].into_iter()).expect("Could not parse");
///
/// let fake_root = builder.node_name("@@ROOT@@"); // Unused
/// let null = builder.node_name(""); // Used
@ -62,173 +73,277 @@ impl Importer {
/// assert_eq!(contents_field.is_lazy(), true);
/// }
/// ```
pub fn import(ast: &AST) -> SpecBuilder {
pub fn import<'a>(
sources: impl IntoIterator<Item = &'a str>,
) -> Result<SpecBuilder, weedle::Err<CompleteStr<'a>>> {
let mut importer = Importer {
path: Vec::with_capacity(256),
builder: SpecBuilder::new()
builder: SpecBuilder::new(),
};
importer.import_ast(ast);
importer.builder
for source in sources {
let ast = weedle::parse(source)?;
importer.import_all_definitions(&ast);
}
Ok(importer.builder)
}
fn import_ast(&mut self, ast: &AST) {
fn import_all_definitions(&mut self, ast: &Definitions) {
for definition in ast {
self.import_definition(&definition)
}
}
fn import_definition(&mut self, def: &Definition) {
match *def {
Definition::Enum(ref enum_) => self.import_enum(enum_),
Definition::Typedef(ref typedef) => self.import_typedef(typedef),
Definition::Interface(ref interface) => self.import_interface(interface),
_ => panic!("Not implemented: importing {:?}", def)
_ => panic!("Not implemented: importing {:?}", def),
}
}
fn import_enum(&mut self, enum_: &Enum) {
let name = self.builder.node_name(&enum_.name);
let mut node = self.builder.add_string_enum(&name)
fn import_enum(&mut self, enum_: &EnumDefinition) {
let name = self.builder.node_name(enum_.identifier.0);
let mut node = self
.builder
.add_string_enum(&name)
.expect("Name already present");
for variant in &enum_.variants {
node.with_string(variant);
for variant in &enum_.values.body.list {
node.with_string(&variant.0);
}
}
fn import_typedef(&mut self, typedef: &Typedef) {
let name = self.builder.node_name(&typedef.name);
fn import_typedef(&mut self, typedef: &TypedefDefinition) {
let name = self.builder.node_name(typedef.identifier.0);
// The following are, unfortunately, not true typedefs.
// Ignore their definition.
let type_ = match typedef.name.as_ref() {
"Identifier" => spec::TypeSpec::IdentifierName
.required(),
"IdentifierName" => spec::TypeSpec::IdentifierName
.required(),
"PropertyKey" => spec::TypeSpec::PropertyKey
.required(),
_ => self.convert_type(&*typedef.type_)
let type_ = match typedef.identifier.0 {
"Identifier" => TypeSpec::IdentifierName.required(),
"IdentifierName" => TypeSpec::IdentifierName.required(),
"PropertyKey" => TypeSpec::PropertyKey.required(),
_ => self.convert_type(&typedef.type_.type_),
};
debug!(target: "meta::import", "Importing typedef {type_:?} {name:?}",
type_ = type_,
name = name);
let mut node = self.builder.add_typedef(&name)
.unwrap_or_else(|| panic!("Error: Name {} is defined more than once in the spec.", name));
let mut node = self.builder.add_typedef(&name).unwrap_or_else(|| {
panic!(
"Error: Name {} is defined more than once in the spec.",
name
)
});
assert!(!type_.is_optional());
node.with_spec(type_.spec);
}
fn import_interface(&mut self, interface: &Interface) {
let interface = if let &Interface::NonPartial(ref interface) = interface {
interface
} else {
panic!("Expected a non-partial interface, got {:?}", interface);
};
fn import_interface(&mut self, interface: &InterfaceDefinition) {
// Handle special, hardcoded, interfaces.
match interface.name.as_ref() {
match interface.identifier.0 {
"Node" => {
// We're not interested in the root interface.
return;
}
"IdentifierName" => {
unimplemented!()
}
_ => {
}
"IdentifierName" => unimplemented!(),
_ => {}
}
if let Some(ref parent) = interface.inherits {
assert_eq!(parent, "Node");
if let Some(ref parent) = interface.inheritance {
assert_eq!(parent.identifier.0, "Node");
}
self.path.push(interface.name.clone());
self.path.push(interface.identifier.0.to_owned());
// Now handle regular stuff.
let mut fields = Vec::new();
for member in &interface.members {
if let InterfaceMember::Attribute(Attribute::Regular(ref attribute)) = *member {
use webidl::ast::ExtendedAttribute::NoArguments;
use webidl::ast::Other::Identifier;
for member in &interface.members.body {
if let interface::InterfaceMember::Attribute(interface::AttributeInterfaceMember {
modifier: None,
attributes,
identifier,
type_,
..
}) = member
{
let name = self.builder.field_name(identifier.0);
let type_ = self.convert_type(&type_.type_);
let name = self.builder.field_name(&attribute.name);
let type_ = self.convert_type(&*attribute.type_);
let is_lazy = attribute.extended_attributes.iter()
.find(|attribute| {
if let &NoArguments(Identifier(ref id)) = attribute.as_ref() {
if &*id == "Lazy" {
return true;
}
}
false
let is_lazy = attributes
.iter()
.flat_map(|attribute| &attribute.body.list)
.find(|attribute| match attribute {
attribute::ExtendedAttribute::NoArgs(
attribute::ExtendedAttributeNoArgs(Identifier("Lazy")),
) => true,
_ => false,
})
.is_some();
fields.push((name, type_, if is_lazy { Laziness::Lazy } else { Laziness:: Eager }));
fields.push((
name,
type_,
if is_lazy {
Laziness::Lazy
} else {
Laziness::Eager
},
));
} else {
panic!("Expected an attribute, got {:?}", member);
}
}
let name = self.builder.node_name(&interface.name);
let mut node = self.builder.add_interface(&name)
.expect("Name already present");
for (field_name, field_type, laziness) in fields.drain(..) {
node.with_field_laziness(&field_name, field_type, laziness);
let name = self.builder.node_name(interface.identifier.0);
// Set to `Some("Foo")` if this interface has attribute
// `[ExtendsTypeSum=Foo]`.
let mut extends_type_sum = None;
let mut scoped_dictionary = None;
{
let mut node = self
.builder
.add_interface(&name)
.expect("Name already present");
for (field_name, field_type, laziness) in fields.drain(..) {
node.with_field_laziness(&field_name, field_type, laziness);
}
for attribute in interface
.attributes
.iter()
.flat_map(|attribute| &attribute.body.list)
{
use weedle::attribute::ExtendedAttribute::*;
use weedle::attribute::*;
match *attribute {
NoArgs(ExtendedAttributeNoArgs(Identifier("Skippable"))) => {
panic!("Encountered deprecated attribute [Skippable]");
}
NoArgs(ExtendedAttributeNoArgs(Identifier("Scope"))) => {
node.with_scope(true);
}
Ident(ExtendedAttributeIdent {
lhs_identifier: Identifier("ExtendsTypeSum"),
assign: _,
rhs: IdentifierOrString::Identifier(ref rhs),
}) => {
assert!(extends_type_sum.is_none());
extends_type_sum = Some(rhs.0);
}
Ident(ExtendedAttributeIdent {
lhs_identifier: Identifier("ScopedDictionary"),
assign: _,
rhs: IdentifierOrString::Identifier(ref rhs),
}) => {
assert!(scoped_dictionary.is_none());
scoped_dictionary = Some(rhs.0);
}
_ => panic!("Unknown attribute {:?}", attribute),
}
}
// If the node contains an attribute `[ScopedDictionary=field]`,
// mark the node as inserting a scoped dictionary with this field.
if let Some(ref field_name) = scoped_dictionary {
node.with_scoped_dictionary_str(field_name);
}
}
for extended_attribute in &interface.extended_attributes {
use webidl::ast::ExtendedAttribute::NoArguments;
use webidl::ast::Other::Identifier;
if let &NoArguments(Identifier(ref id)) = extended_attribute.as_ref() {
if &*id == "Skippable" {
panic!("Encountered deprecated attribute [Skippable]");
}
if &*id == "Scope" {
node.with_scope(true);
}
}
// If the node contains an attribute `[ExtendsTypeSum=Foobar]`,
// extend `typedef (... or ... or ...) Foobar` into
// `typedef (... or ... or ... or CurrentNode) Foobar`.
if let Some(ref extended) = extends_type_sum {
let node_name = self
.builder
.get_node_name(extended)
.unwrap_or_else(|| panic!("Could not find node name {}", extended));
let mut typedef = self
.builder
.get_typedef_mut(&node_name)
.unwrap_or_else(|| panic!("Could not find typedef {}", extended));
let mut typespec = typedef.spec_mut();
let typesum = if let TypeSpec::TypeSum(ref mut typesum) = *typespec {
typesum
} else {
panic!(
"Attempting to extend a node that is not a type sum {}",
extended
);
};
typesum.with_type_case(TypeSpec::NamedType(name));
}
self.path.pop();
}
fn convert_type(&mut self, t: &Type) -> spec::Type {
let spec = match t.kind {
TypeKind::Boolean => spec::TypeSpec::Boolean,
TypeKind::Identifier(ref id) => {
let name = self.builder.node_name(id);
fn convert_single_type(&mut self, t: &NonAnyType) -> spec::Type {
match t {
NonAnyType::Boolean(ref b) => nullable(b, TypeSpec::Boolean),
NonAnyType::Identifier(ref id) => nullable(id, {
let name = self.builder.node_name(id.type_.0);
// Sadly, some identifiers are not truly `typedef`s.
match name.to_str() {
"IdentifierName" if self.is_at_interface("StaticMemberAssignmentTarget") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("StaticMemberExpression") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ImportSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ExportSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ExportLocalSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" => spec::TypeSpec::IdentifierName,
"Identifier" => spec::TypeSpec::IdentifierName,
_ => spec::TypeSpec::NamedType(name.clone())
"IdentifierName" if self.is_at_interface("StaticMemberAssignmentTarget") => {
TypeSpec::PropertyKey
}
"IdentifierName" if self.is_at_interface("StaticMemberExpression") => {
TypeSpec::PropertyKey
}
"IdentifierName" if self.is_at_interface("ImportSpecifier") => {
TypeSpec::PropertyKey
}
"IdentifierName" if self.is_at_interface("ExportSpecifier") => {
TypeSpec::PropertyKey
}
"IdentifierName" if self.is_at_interface("ExportLocalSpecifier") => {
TypeSpec::PropertyKey
}
"IdentifierName" => TypeSpec::IdentifierName,
"Identifier" => TypeSpec::IdentifierName,
_ => TypeSpec::NamedType(name.clone()),
}
}
TypeKind::DOMString if self.is_at_interface("LiteralPropertyName") => spec::TypeSpec::PropertyKey,
TypeKind::DOMString => spec::TypeSpec::String,
TypeKind::Union(ref types) => {
let mut dest = Vec::with_capacity(types.len());
for typ in types {
dest.push(self.convert_type(&*typ).spec)
}
spec::TypeSpec::TypeSum(TypeSum::new(dest))
}
TypeKind::FrozenArray(ref type_) => {
spec::TypeSpec::Array {
contents: Box::new(self.convert_type(&*type_)),
supports_empty: true
}
}
TypeKind::RestrictedDouble =>
spec::TypeSpec::Number,
TypeKind::UnsignedLong =>
spec::TypeSpec::UnsignedLong,
}),
NonAnyType::DOMString(ref s) => nullable(
s,
if self.is_at_interface("LiteralPropertyName") {
TypeSpec::PropertyKey
} else {
TypeSpec::String
},
),
NonAnyType::FrozenArrayType(ref t) => nullable(
t,
TypeSpec::Array {
contents: Box::new(self.convert_type(&t.type_.generics.body)),
supports_empty: true,
},
),
NonAnyType::FloatingPoint(ref t) => nullable(t, TypeSpec::Number),
NonAnyType::Integer(ref t) => nullable(t, TypeSpec::UnsignedLong),
_ => {
panic!("I don't know how to import {:?} yet", t);
}
};
if t.nullable {
spec.optional()
.unwrap_or_else(|| panic!("This type could not be made optional {:?}", t))
} else {
spec.required()
}
}
fn convert_union_type(&mut self, types: &MayBeNull<UnionType>) -> spec::Type {
let converted_types: Vec<_> = types
.type_
.body
.list
.iter()
.map(|t| match t {
UnionMemberType::Single(t) => self.convert_single_type(t),
UnionMemberType::Union(t) => self.convert_union_type(t),
})
.map(|t| t.spec)
.collect();
nullable(types, TypeSpec::TypeSum(TypeSum::new(converted_types)))
}
fn convert_type(&mut self, t: &Type) -> spec::Type {
match t {
Type::Single(SingleType::NonAny(t)) => self.convert_single_type(t),
Type::Union(types) => self.convert_union_type(types),
_ => panic!("I don't know how to import {:?} yet", t),
}
}

3
third_party/rust/binjs_meta/src/lib.rs поставляемый
Просмотреть файл

@ -6,8 +6,7 @@ extern crate itertools;
#[macro_use]
extern crate log;
extern crate webidl;
extern crate weedle;
/// Generic tools for generating implementations of the Syntax.
pub mod export;

355
third_party/rust/binjs_meta/src/spec.rs поставляемый
Просмотреть файл

@ -6,8 +6,8 @@ use itertools::Itertools;
use std;
use std::cell::*;
use std::collections::{ HashMap, HashSet };
use std::fmt::{ Debug, Display };
use std::collections::{HashMap, HashSet};
use std::fmt::{Debug, Display};
use std::hash::*;
use std::rc::*;
@ -17,7 +17,7 @@ pub enum Laziness {
/// An eager attribute is designed to be parsed immediately.
Eager,
/// A lazy attribute is designed for deferred parsing.
Lazy
Lazy,
}
/// The name of an interface or enum.
@ -50,7 +50,6 @@ impl ToStr for NodeName {
}
}
/// The name of a field in an interface.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct FieldName(Rc<String>);
@ -95,7 +94,7 @@ impl TypeSum {
pub fn new(types: Vec<TypeSpec>) -> Self {
TypeSum {
types,
interfaces: HashSet::new()
interfaces: HashSet::new(),
}
}
pub fn types(&self) -> &[TypeSpec] {
@ -112,11 +111,18 @@ impl TypeSum {
for item in &self.types {
let result = item.get_interface(spec, name);
if result.is_some() {
return result
return result;
}
}
None
}
/// Add a new type case to this sum.
pub fn with_type_case(&mut self, spec: TypeSpec) -> &mut Self {
debug_assert_eq!(self.interfaces.len(), 0);
self.types.push(spec);
self
}
}
/// Representation of a field in an interface.
@ -131,10 +137,13 @@ pub struct Field {
/// Documentation for the field. Ignored for the time being.
documentation: Option<String>,
laziness: Laziness
laziness: Laziness,
}
impl Hash for Field {
fn hash<H>(&self, state: &mut H) where H: Hasher {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
self.name.hash(state)
}
}
@ -166,7 +175,7 @@ impl Field {
pub fn doc(&self) -> Option<&str> {
match self.documentation {
None => None,
Some(ref s) => Some(&*s)
Some(ref s) => Some(&*s),
}
}
pub fn with_doc(mut self, doc: Option<String>) -> Self {
@ -242,8 +251,7 @@ impl NamedType {
NamedType::Interface(ref result) => Some(result.clone()),
NamedType::Typedef(ref type_) => {
if let TypeSpec::NamedType(ref named) = *type_.spec() {
let named = spec.get_type_by_name(named)
.expect("Type not found");
let named = spec.get_type_by_name(named).expect("Type not found");
named.as_interface(spec)
} else {
None
@ -259,10 +267,11 @@ impl TypeSpec {
TypeSpec::Array {
contents: Box::new(Type {
spec: self,
or_null: false
or_null: false,
}),
supports_empty: true,
}.required()
}
.required()
}
pub fn non_empty_array(self) -> Type {
@ -272,16 +281,17 @@ impl TypeSpec {
or_null: false,
}),
supports_empty: false,
}.required()
}
.required()
}
pub fn optional(self) -> Option<Type> {
if let TypeSpec::Offset = self {
None
} else {
} else {
Some(Type {
spec: self,
or_null: true
or_null: true,
})
}
}
@ -289,7 +299,7 @@ impl TypeSpec {
pub fn required(self) -> Type {
Type {
spec: self,
or_null: false
or_null: false,
}
}
@ -333,16 +343,14 @@ impl TypeSpec {
TypeSpec::Offset => Some(IsNullable::non_nullable(Primitive::Offset)),
TypeSpec::IdentifierName => Some(IsNullable::non_nullable(Primitive::IdentifierName)),
TypeSpec::PropertyKey => Some(IsNullable::non_nullable(Primitive::PropertyKey)),
TypeSpec::NamedType(ref name) => {
match spec.get_type_by_name(name).unwrap() {
NamedType::Interface(ref interface) =>
Some(IsNullable::non_nullable(Primitive::Interface(interface.clone()))),
NamedType::Typedef(ref type_) =>
type_.get_primitive(spec),
NamedType::StringEnum(_) => None
}
}
_ => None
TypeSpec::NamedType(ref name) => match spec.get_type_by_name(name).unwrap() {
NamedType::Interface(ref interface) => Some(IsNullable::non_nullable(
Primitive::Interface(interface.clone()),
)),
NamedType::Typedef(ref type_) => type_.get_primitive(spec),
NamedType::StringEnum(_) => None,
},
_ => None,
}
}
}
@ -356,7 +364,7 @@ impl<T> IsNullable<T> {
fn non_nullable(value: T) -> Self {
IsNullable {
is_nullable: false,
content: value
content: value,
}
}
}
@ -413,9 +421,7 @@ impl Type {
TypeSpec::NamedType(name.clone())
}
pub fn sum(types: &[TypeSpec]) -> TypeSpec {
let specs = types.iter()
.cloned()
.collect();
let specs = types.iter().cloned().collect();
TypeSpec::TypeSum(TypeSum::new(specs))
}
pub fn string() -> TypeSpec {
@ -481,8 +487,8 @@ pub struct Obj {
impl PartialEq for Obj {
fn eq(&self, other: &Self) -> bool {
// Normalize order before comparing.
let me : HashSet<_> = self.fields.iter().collect();
let other : HashSet<_> = other.fields.iter().collect();
let me: HashSet<_> = self.fields.iter().collect();
let other: HashSet<_> = other.fields.iter().collect();
me == other
}
}
@ -491,9 +497,7 @@ impl Eq for Obj {}
impl Obj {
/// Create a new empty structure
pub fn new() -> Self {
Obj {
fields: Vec::new()
}
Obj { fields: Vec::new() }
}
/// A list of the fields in the structure.
pub fn fields<'a>(&'a self) -> &'a [Field] {
@ -507,26 +511,30 @@ impl Obj {
pub fn with_full_field(&mut self, field: Field) -> &mut Self {
if self.field(field.name()).is_some() {
warn!("Field: attempting to overwrite {:?}", field.name());
return self
return self;
}
self.fields.push(field);
self
}
fn with_field_aux(self, name: &FieldName, type_: Type, laziness: Laziness,
doc: Option<&str>) -> Self {
fn with_field_aux(
self,
name: &FieldName,
type_: Type,
laziness: Laziness,
doc: Option<&str>,
) -> Self {
if self.field(name).is_some() {
warn!("Field: attempting to overwrite {:?}", name);
return self
return self;
}
let mut fields = self.fields;
fields.push(Field::new(name.clone(), type_)
.with_doc(doc.map(str::to_string))
.with_laziness(laziness));
Obj {
fields
}
fields.push(
Field::new(name.clone(), type_)
.with_doc(doc.map(str::to_string))
.with_laziness(laziness),
);
Obj { fields }
}
/// Extend a structure with a field.
@ -578,9 +586,23 @@ pub struct InterfaceDeclaration {
contents: Obj,
is_scope: bool,
/// If Some(name), this interface introduces a scoped dictionary, i.e. a new
/// dictionary that is designed to be used when encoding/decoding its child
/// nodes.
scoped_dictionary: Option<FieldName>,
}
impl InterfaceDeclaration {
/// A list of the fields in the interface.
pub fn fields<'a>(&'a self) -> &'a [Field] {
self.contents.fields()
}
/// Fetch a specific field in the structure
pub fn field<'a>(&'a self, name: &FieldName) -> Option<&'a Field> {
self.contents.field(name)
}
pub fn with_full_field(&mut self, contents: Field) -> &mut Self {
let _ = self.contents.with_full_field(contents);
self
@ -591,13 +613,24 @@ impl InterfaceDeclaration {
pub fn with_field_lazy(&mut self, name: &FieldName, type_: Type) -> &mut Self {
self.with_field_aux(name, type_, None, Laziness::Eager)
}
pub fn with_field_laziness(&mut self, name: &FieldName, type_: Type, laziness: Laziness) -> &mut Self {
pub fn with_field_laziness(
&mut self,
name: &FieldName,
type_: Type,
laziness: Laziness,
) -> &mut Self {
self.with_field_aux(name, type_, None, laziness)
}
pub fn with_field_doc(&mut self, name: &FieldName, type_: Type, doc: &str) -> &mut Self {
self.with_field_aux(name, type_, Some(doc), Laziness::Eager)
}
fn with_field_aux(&mut self, name: &FieldName, type_: Type, doc: Option<&str>, laziness: Laziness) -> &mut Self {
fn with_field_aux(
&mut self,
name: &FieldName,
type_: Type,
doc: Option<&str>,
laziness: Laziness,
) -> &mut Self {
let mut contents = Obj::new();
std::mem::swap(&mut self.contents, &mut contents);
self.contents = contents.with_field_aux(name, type_, laziness, doc);
@ -607,6 +640,21 @@ impl InterfaceDeclaration {
self.is_scope = value;
self
}
pub fn with_scoped_dictionary(&mut self, field_name: &FieldName) -> &mut Self {
self.with_scoped_dictionary_str(field_name.to_str())
}
pub fn with_scoped_dictionary_str(&mut self, field_name: &str) -> &mut Self {
let field_name = {
self.fields()
.into_iter()
.map(|field| field.name())
.find(|candidate| candidate.to_str() == field_name)
.expect("Attempting to set a scoped dictionary with a non-existent field name")
.clone()
};
self.scoped_dictionary = Some(field_name);
self
}
}
/// A data structure used to progressively construct the `Spec`.
@ -628,7 +676,7 @@ impl SpecBuilder {
interfaces_by_name: HashMap::new(),
string_enums_by_name: HashMap::new(),
typedefs_by_name: HashMap::new(),
names: HashMap::new()
names: HashMap::new(),
}
}
@ -641,7 +689,7 @@ impl SpecBuilder {
/// equality.
pub fn node_name(&mut self, name: &str) -> NodeName {
if let Some(result) = self.names.get(name) {
return NodeName(result.clone())
return NodeName(result.clone());
}
let shared = Rc::new(name.to_string());
let result = NodeName(shared.clone());
@ -649,11 +697,11 @@ impl SpecBuilder {
result
}
pub fn get_node_name(&self, name: &str) -> Option<NodeName> {
self.names.get(name)
.map(|hit| NodeName(hit.clone()))
self.names.get(name).map(|hit| NodeName(hit.clone()))
}
pub fn import_node_name(&mut self, node_name: &NodeName) {
self.names.insert(node_name.to_string().clone(), node_name.0.clone());
self.names
.insert(node_name.to_string().clone(), node_name.0.clone());
}
pub fn field_name(&mut self, name: &str) -> FieldName {
@ -666,7 +714,8 @@ impl SpecBuilder {
result
}
pub fn import_field_name(&mut self, field_name: &FieldName) {
self.names.insert(field_name.to_string().clone(), field_name.0.clone());
self.names
.insert(field_name.to_string().clone(), field_name.0.clone());
}
pub fn add_interface(&mut self, name: &NodeName) -> Option<RefMut<InterfaceDeclaration>> {
@ -677,14 +726,13 @@ impl SpecBuilder {
name: name.clone(),
contents: Obj::new(),
is_scope: false,
scoped_dictionary: None,
});
self.interfaces_by_name.insert(name.clone(), result);
self.interfaces_by_name.get(name)
.map(RefCell::borrow_mut)
self.interfaces_by_name.get(name).map(RefCell::borrow_mut)
}
pub fn get_interface(&mut self, name: &NodeName) -> Option<RefMut<InterfaceDeclaration>> {
self.interfaces_by_name.get(name)
.map(RefCell::borrow_mut)
self.interfaces_by_name.get(name).map(RefCell::borrow_mut)
}
/// Add a named enumeration.
@ -694,7 +742,7 @@ impl SpecBuilder {
}
let e = RefCell::new(StringEnum {
name: name.clone(),
values: vec![]
values: vec![],
});
self.string_enums_by_name.insert(name.clone(), e);
self.string_enums_by_name.get(name).map(RefCell::borrow_mut)
@ -709,38 +757,47 @@ impl SpecBuilder {
self.typedefs_by_name.get(name).map(RefCell::borrow_mut)
}
/// Access an already added typedef.
pub fn get_typedef(&self, name: &NodeName) -> Option<Ref<Type>> {
self.typedefs_by_name.get(name).
map(RefCell::borrow)
self.typedefs_by_name.get(name).map(RefCell::borrow)
}
/// Access an already added typedef, mutably.
pub fn get_typedef_mut(&mut self, name: &NodeName) -> Option<RefMut<Type>> {
self.typedefs_by_name.get(name).map(RefCell::borrow_mut)
}
/// Generate the graph.
pub fn into_spec<'a>(self, options: SpecOptions<'a>) -> Spec {
// 1. Collect node names.
let mut interfaces_by_name = self.interfaces_by_name;
let interfaces_by_name : HashMap<_, _> = interfaces_by_name.drain()
.map(|(k, v)| (k, Rc::new(Interface {
declaration: RefCell::into_inner(v)
})))
let interfaces_by_name: HashMap<_, _> = interfaces_by_name
.drain()
.map(|(k, v)| {
(
k,
Rc::new(Interface {
declaration: RefCell::into_inner(v),
}),
)
})
.collect();
let mut string_enums_by_name = self.string_enums_by_name;
let string_enums_by_name : HashMap<_, _> = string_enums_by_name.drain()
let string_enums_by_name: HashMap<_, _> = string_enums_by_name
.drain()
.map(|(k, v)| (k, Rc::new(RefCell::into_inner(v))))
.collect();
let mut typedefs_by_name = self.typedefs_by_name;
let typedefs_by_name : HashMap<_, _> = typedefs_by_name.drain()
let typedefs_by_name: HashMap<_, _> = typedefs_by_name
.drain()
.map(|(k, v)| (k, Rc::new(RefCell::into_inner(v))))
.collect();
let node_names: HashMap<_, _> = interfaces_by_name
.keys()
.chain(string_enums_by_name
.keys())
.chain(typedefs_by_name
.keys())
.map(|name| {
(name.to_string().clone(), name.clone())
})
.chain(string_enums_by_name.keys())
.chain(typedefs_by_name.keys())
.map(|name| (name.to_string().clone(), name.clone()))
.collect();
debug!(target: "spec", "Established list of node names: {:?} ({})",
node_names.keys()
@ -755,7 +812,7 @@ impl SpecBuilder {
}
}
let mut resolved_type_sums_by_name : HashMap<NodeName, HashSet<NodeName>> = HashMap::new();
let mut resolved_type_sums_by_name: HashMap<NodeName, HashSet<NodeName>> = HashMap::new();
{
// 3. Check that node names are used but not duplicated.
for name in node_names.values() {
@ -789,7 +846,10 @@ impl SpecBuilder {
}
for name in &used_typenames {
// Built-in types
if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
if name.to_str() == "IdentifierName"
|| name.to_str() == "Identifier"
|| name.to_str() == "PropertyKey"
{
continue;
}
if typedefs_by_name.contains_key(name) {
@ -822,29 +882,39 @@ impl SpecBuilder {
// => `None` if we are currently classifying (used to detect cycles),
// => `Some(SumOfInterfaces(set))` if the name describes a sum of interfaces
// => `Some(BadForSumOfInterfaces)` if the name describes something that can't be summed with an interface
let mut classification : HashMap<NodeName, Option<TypeClassification>> = HashMap::new();
fn classify_type(typedefs_by_name: &HashMap<NodeName, Rc<Type>>,
let mut classification: HashMap<NodeName, Option<TypeClassification>> = HashMap::new();
fn classify_type(
typedefs_by_name: &HashMap<NodeName, Rc<Type>>,
string_enums_by_name: &HashMap<NodeName, Rc<StringEnum>>,
interfaces_by_name: &HashMap<NodeName, Rc<Interface>>,
cache: &mut HashMap<NodeName, Option<TypeClassification>>, type_: &TypeSpec, name: &NodeName) -> TypeClassification
{
cache: &mut HashMap<NodeName, Option<TypeClassification>>,
type_: &TypeSpec,
name: &NodeName,
) -> TypeClassification {
debug!(target: "spec", "classify_type for {:?}: walking {:?}", name, type_);
match *type_ {
TypeSpec::Array { ref contents, .. } => {
// Check that the contents are correct.
let _ = classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, contents.spec(), name);
let _ = classify_type(
typedefs_by_name,
string_enums_by_name,
interfaces_by_name,
cache,
contents.spec(),
name,
);
// Regardless, the result is bad for a sum of interfaces.
debug!(target: "spec", "classify_type => don't put me in an interface");
TypeClassification::Array
},
}
TypeSpec::Boolean
| TypeSpec::Number
| TypeSpec::String
| TypeSpec::Void
| TypeSpec::Offset
| TypeSpec::UnsignedLong
| TypeSpec::IdentifierName
| TypeSpec::PropertyKey => {
| TypeSpec::Number
| TypeSpec::String
| TypeSpec::Void
| TypeSpec::Offset
| TypeSpec::UnsignedLong
| TypeSpec::IdentifierName
| TypeSpec::PropertyKey => {
debug!(target: "spec", "classify_type => don't put me in an interface");
TypeClassification::Primitive
}
@ -859,20 +929,30 @@ impl SpecBuilder {
}
// Start lookup for this name.
cache.insert(name.clone(), None);
let result =
if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
TypeClassification::Primitive
} else if interfaces_by_name.contains_key(name) {
let mut names = HashSet::new();
names.insert(name.clone());
TypeClassification::SumOfInterfaces(names)
} else if string_enums_by_name.contains_key(name) {
TypeClassification::StringEnum
} else {
let type_ = typedefs_by_name.get(name)
.unwrap_or_else(|| panic!("Type {} not found", name)); // Completeness checked abover in this method.
classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_.spec(), name)
};
let result = if name.to_str() == "IdentifierName"
|| name.to_str() == "Identifier"
|| name.to_str() == "PropertyKey"
{
TypeClassification::Primitive
} else if interfaces_by_name.contains_key(name) {
let mut names = HashSet::new();
names.insert(name.clone());
TypeClassification::SumOfInterfaces(names)
} else if string_enums_by_name.contains_key(name) {
TypeClassification::StringEnum
} else {
let type_ = typedefs_by_name
.get(name)
.unwrap_or_else(|| panic!("Type {} not found", name)); // Completeness checked abover in this method.
classify_type(
typedefs_by_name,
string_enums_by_name,
interfaces_by_name,
cache,
type_.spec(),
name,
)
};
debug!(target: "spec", "classify_type {:?} => (inserting in cache) {:?}", name, result);
cache.insert(name.clone(), Some(result.clone()));
result
@ -899,7 +979,14 @@ impl SpecBuilder {
}
for (name, type_) in &typedefs_by_name {
classification.insert(name.clone(), None);
let class = classify_type(&typedefs_by_name, &string_enums_by_name, &interfaces_by_name, &mut classification, type_.spec(), name);
let class = classify_type(
&typedefs_by_name,
&string_enums_by_name,
&interfaces_by_name,
&mut classification,
type_.spec(),
name,
);
if !type_.is_optional() {
classification.insert(name.clone(), Some(class));
} else {
@ -912,7 +999,14 @@ impl SpecBuilder {
// poorly items of both kinds.
for (name, interface) in &interfaces_by_name {
for field in interface.declaration.contents.fields() {
classify_type(&typedefs_by_name, &string_enums_by_name, &interfaces_by_name, &mut classification, field.type_().spec(), name);
classify_type(
&typedefs_by_name,
&string_enums_by_name,
&interfaces_by_name,
&mut classification,
field.type_().spec(),
name,
);
}
}
@ -981,15 +1075,23 @@ impl Interface {
pub fn get_field_by_name(&self, name: &FieldName) -> Option<&Field> {
for field in self.contents().fields() {
if name == field.name() {
return Some(field)
return Some(field);
}
}
None
}
pub fn is_scope(&self) -> bool {
pub fn is_scope(&self) -> bool {
self.declaration.is_scope
}
/// If this interface introduces a scoped dictionary change,
/// return Some(field_name) where field_name is
/// the field of this interface containing the name of
/// the dictionary to use in the sub-ast.
pub fn scoped_dictionary(&self) -> Option<FieldName> {
self.declaration.scoped_dictionary.clone()
}
}
/// Immutable representation of the spec.
@ -1008,7 +1110,8 @@ pub struct Spec {
impl Spec {
pub fn get_interface_by_name(&self, name: &NodeName) -> Option<&Interface> {
self.interfaces_by_name.get(name)
self.interfaces_by_name
.get(name)
.map(std::borrow::Borrow::borrow)
}
pub fn interfaces_by_name(&self) -> &HashMap<NodeName, Rc<Interface>> {
@ -1025,27 +1128,22 @@ impl Spec {
}
pub fn get_type_by_name(&self, name: &NodeName) -> Option<NamedType> {
if let Some(interface) = self.interfaces_by_name
.get(name) {
return Some(NamedType::Interface(interface.clone()))
if let Some(interface) = self.interfaces_by_name.get(name) {
return Some(NamedType::Interface(interface.clone()));
}
if let Some(strings_enum) = self.string_enums_by_name
.get(name) {
return Some(NamedType::StringEnum(strings_enum.clone()))
if let Some(strings_enum) = self.string_enums_by_name.get(name) {
return Some(NamedType::StringEnum(strings_enum.clone()));
}
if let Some(type_) = self.typedefs_by_name
.get(name) {
return Some(NamedType::Typedef(type_.clone()))
if let Some(type_) = self.typedefs_by_name.get(name) {
return Some(NamedType::Typedef(type_.clone()));
}
None
}
pub fn get_field_name(&self, name: &str) -> Option<&FieldName> {
self.fields
.get(name)
self.fields.get(name)
}
pub fn get_node_name(&self, name: &str) -> Option<&NodeName> {
self.node_names
.get(name)
self.node_names.get(name)
}
pub fn node_names(&self) -> &HashMap<String, NodeName> {
&self.node_names
@ -1062,8 +1160,7 @@ impl Spec {
/// The starting point for parsing.
pub fn get_root(&self) -> NamedType {
self.get_type_by_name(&self.root)
.unwrap()
self.get_type_by_name(&self.root).unwrap()
}
}
@ -1084,8 +1181,7 @@ impl HasInterfaces for NamedType {
match *self {
NamedType::Interface(_) => None,
NamedType::StringEnum(_) => None,
NamedType::Typedef(ref type_) =>
type_.spec().get_interface(spec, name)
NamedType::Typedef(ref type_) => type_.spec().get_interface(spec, name),
}
}
}
@ -1105,10 +1201,9 @@ impl HasInterfaces for TypeSpec {
} else {
None
}
},
TypeSpec::TypeSum(ref sum) =>
sum.get_interface(spec, name),
_ => None
}
TypeSpec::TypeSum(ref sum) => sum.get_interface(spec, name),
_ => None,
}
}
}

149
third_party/rust/binjs_meta/src/util.rs поставляемый
Просмотреть файл

@ -1,7 +1,5 @@
//! Miscellaneous utilities.
extern crate inflector;
pub trait ToStr {
/// Return the value as a `str`.
fn to_str(&self) -> &str;
@ -96,13 +94,20 @@ pub trait ToCases: ToStr {
}
}
impl<T> ToCases for T where T: ToStr {
impl<T> ToCases for T
where
T: ToStr,
{
fn to_class_cases(&self) -> String {
match self.to_str() {
"" => "Null".to_string(),
other => {
let result = inflector::cases::pascalcase::to_pascal_case(other);
assert!(result.to_str().len() != 0, "Could not convert '{}' to class case", other );
assert!(
result.to_str().len() != 0,
"Could not convert '{}' to class case",
other
);
result
}
}
@ -151,7 +156,11 @@ impl<T> ToCases for T where T: ToStr {
"" => "_Null".to_string(),
_ => {
let class_cased = self.to_class_cases();
assert!(&class_cased != "", "FIXME: `to_class_cases` does not handle {} yet", self.to_str());
assert!(
&class_cased != "",
"FIXME: `to_class_cases` does not handle {} yet",
self.to_str()
);
class_cased
}
}
@ -169,8 +178,11 @@ impl<T> ToCases for T where T: ToStr {
"result" => "result_".to_string(),
"kind" => "kind_".to_string(),
// Special cases
"" => unimplemented!("FIXME: `to_cpp_field_case` does not handle {} yet", self.to_str()),
_ => snake
"" => unimplemented!(
"FIXME: `to_cpp_field_case` does not handle {} yet",
self.to_str()
),
_ => snake,
}
}
fn to_rust_identifier_case(&self) -> String {
@ -180,8 +192,11 @@ impl<T> ToCases for T where T: ToStr {
"super" => "super_".to_string(),
"type" => "type_".to_string(),
"" if self.to_str() == "" => "null".to_string(),
"" => unimplemented!("FIXME: `to_rust_identifier_case` does not handle {} yet", self.to_str()),
_ => snake
"" => unimplemented!(
"FIXME: `to_rust_identifier_case` does not handle {} yet",
self.to_str()
),
_ => snake,
}
}
}
@ -227,7 +242,10 @@ pub trait Reindentable {
fn fit(&self, prefix: &str, width: usize) -> String;
}
impl<T> Reindentable for T where T: ToStr {
impl<T> Reindentable for T
where
T: ToStr,
{
fn reindent(&self, prefix: &str) -> String {
use itertools::Itertools;
@ -236,19 +254,24 @@ impl<T> Reindentable for T where T: ToStr {
// Determine the number of whitespace chars on the first line.
// Trim that many whitespace chars on the following lines.
if let Some(first_line) = str.lines().next() {
let indent_len = first_line.chars()
let indent_len = first_line
.chars()
.take_while(|c| char::is_whitespace(*c))
.count();
format!("{}", str.lines()
.map(|line|
if line.len() > indent_len {
format!("{prefix}{text}",
format!(
"{}",
str.lines()
.map(|line| if line.len() > indent_len {
format!(
"{prefix}{text}",
prefix = prefix,
text = line[indent_len..].to_string())
text = line[indent_len..].to_string()
)
} else {
"".to_string()
})
.format("\n"))
.format("\n")
)
} else {
"".to_string()
}
@ -261,7 +284,8 @@ impl<T> Reindentable for T where T: ToStr {
// Determine the number of whitespace chars on the first line.
// Trim that many whitespace chars on the following lines.
if let Some(first_line) = str.lines().next() {
let indent_len = first_line.chars()
let indent_len = first_line
.chars()
.take_while(|c| char::is_whitespace(*c))
.count();
let mut lines = vec![];
@ -274,7 +298,8 @@ impl<T> Reindentable for T where T: ToStr {
eprintln!("Line still contains {} ({})", rest, gobbled);
if rest.len() + prefix.len() > columns {
// Try and find the largest prefix of `text` that fits within `columns`.
let mut iterator = rest.chars()
let mut iterator = rest
.chars()
.enumerate()
.filter(|&(_, c)| char::is_whitespace(c));
let mut last_whitespace_before_break = None;
@ -292,40 +317,33 @@ impl<T> Reindentable for T where T: ToStr {
(None, None) => {
eprintln!("Ok, string didn't contain any whitespace: '{}'", rest);
// Oh, `rest` does not contain any whitespace. Well, use everything.
lines.push(format!("{prefix}{rest}",
prefix = prefix,
rest = rest));
continue 'per_line
lines.push(format!("{prefix}{rest}", prefix = prefix, rest = rest));
continue 'per_line;
}
(Some(pos), _) | (None, Some(pos)) if pos != 0 => {
eprintln!("Best whitespace found at {}", pos);
// Use `rest[0..pos]`, trimmed right.
gobbled += pos + 1;
let line = format!("{prefix}{rest}",
let line = format!(
"{prefix}{rest}",
prefix = prefix,
rest = rest[0..pos].trim_right());
rest = rest[0..pos].trim_end()
);
lines.push(line)
}
_else => {
panic!("{:?}", _else)
}
_else => panic!("{:?}", _else),
}
} else {
let line = format!("{prefix}{rest}",
prefix = prefix,
rest = rest);
let line = format!("{prefix}{rest}", prefix = prefix, rest = rest);
lines.push(line);
continue 'per_line
continue 'per_line;
}
}
}
format!("{lines}",
lines = lines.iter()
.format("\n"))
format!("{lines}", lines = lines.iter().format("\n"))
} else {
"".to_string()
}
}
}
@ -333,13 +351,13 @@ impl Reindentable for Option<String> {
fn reindent(&self, prefix: &str) -> String {
match *self {
None => "".to_string(),
Some(ref string) => string.reindent(prefix)
Some(ref string) => string.reindent(prefix),
}
}
fn fit(&self, prefix: &str, columns: usize) -> String {
match *self {
None => "".to_string(),
Some(ref string) => string.fit(prefix, columns)
Some(ref string) => string.fit(prefix, columns),
}
}
}
@ -364,15 +382,15 @@ pub mod name_sorter {
/// Return the number of items in the sorter.
pub fn len(&self) -> usize {
debug_assert!( {
debug_assert!({
// Let's check that the length is always the sum of sublengths.
let len = self.per_length.values()
.map(|v| {
match v {
&Node::Leaf(Some(_)) => 1,
&Node::Leaf(_) => panic!("Invariant error: empty leaf!"),
&Node::Internal { ref len, .. } => *len
}
let len = self
.per_length
.values()
.map(|v| match v {
&Node::Leaf(Some(_)) => 1,
&Node::Leaf(_) => panic!("Invariant error: empty leaf!"),
&Node::Internal { ref len, .. } => *len,
})
.fold(0, |x, y| (x + y));
len == self.len
@ -431,12 +449,12 @@ pub mod name_sorter {
}
pub fn iter(&self) -> impl Iterator<Item = (usize, &Node<T>)> {
self.per_length.iter()
.map(|(&len, node)| (len, node))
self.per_length.iter().map(|(&len, node)| (len, node))
}
pub fn get(&self, key: &str) -> Option<&T> {
self.per_length.get(&key.len())
self.per_length
.get(&key.len())
.and_then(|node| node.get(key))
}
}
@ -451,18 +469,17 @@ pub mod name_sorter {
/// Number of leaves in this subtree.
len: usize,
}
},
}
impl<T> Node<T> {
fn get(&self, key: &str) -> Option<&T> {
match (self, key.chars().next()) {
(&Node::Leaf(Some(ref result)), None) => Some(result),
(&Node::Internal { ref children, ..}, Some(c)) => {
(&Node::Internal { ref children, .. }, Some(c)) => {
debug_assert!(children.len() != 0);
children.get(&c)
.and_then(|node| node.get(&key[1..]))
children.get(&c).and_then(|node| node.get(&key[1..]))
}
_ => panic!("Invariant error: length")
_ => panic!("Invariant error: length"),
}
}
@ -474,14 +491,21 @@ pub mod name_sorter {
std::mem::swap(&mut data, old);
data
}
(&mut Node::Internal { ref mut children, ref mut len }, Some(c)) => {
(
&mut Node::Internal {
ref mut children,
ref mut len,
},
Some(c),
) => {
let result = {
let entry = if key.len() == 1 {
children.entry(c)
.or_insert_with(|| Node::Leaf(None))
children.entry(c).or_insert_with(|| Node::Leaf(None))
} else {
children.entry(c)
.or_insert_with(|| Node::Internal { children: HashMap::new(), len: 0})
children.entry(c).or_insert_with(|| Node::Internal {
children: HashMap::new(),
len: 0,
})
};
entry.insert(&key[1..], value)
};
@ -492,14 +516,17 @@ pub mod name_sorter {
debug_assert!(children.len() != 0);
result
}
_ => panic!("Invariant error: length")
_ => panic!("Invariant error: length"),
}
}
fn new(key: &str, value: T) -> Self {
if key.len() == 0 {
Node::Leaf(Some(value))
} else {
let mut node = Node::Internal { children: HashMap::new(), len : 0};
let mut node = Node::Internal {
children: HashMap::new(),
len: 0,
};
assert!(node.insert(key, value).is_none());
node
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"3342b785a96c022128627c03d66f701ff8f5fa3b1088f1a6282bbd7fab94d99d","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"49741b792be0800387a30bf6300d5ad4d306e15b63510301e377670489620f40","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/lib.rs":"51809e3f8799d712a740f5bd37b658fbda44a5c7e62bf33a69c255866afa61b1"},"package":"6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"}

33
third_party/rust/bit-set/Cargo.toml поставляемый
Просмотреть файл

@ -1,33 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "bit-set"
version = "0.5.0"
authors = ["Alexis Beingessner <a.beingessner@gmail.com>"]
description = "A set of bits"
homepage = "https://github.com/contain-rs/bit-set"
documentation = "https://contain-rs.github.io/bit-set/bit_set"
readme = "README.md"
keywords = ["data-structures", "bitset"]
license = "MIT/Apache-2.0"
repository = "https://github.com/contain-rs/bit-set"
[dependencies.bit-vec]
version = "0.5.0"
default-features = false
[dev-dependencies.rand]
version = "0.3"
[features]
default = ["std"]
nightly = ["bit-vec/nightly"]
std = ["bit-vec/std"]

201
third_party/rust/bit-set/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/bit-set/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

17
third_party/rust/bit-set/README.md поставляемый
Просмотреть файл

@ -1,17 +0,0 @@
**WARNING: THIS PROJECT IS IN MAINTENANCE MODE, DUE TO INSUFFICIENT MAINTAINER RESOURCES**
It works fine, but will generally no longer be improved.
We are currently only accepting changes which:
* keep this compiling with the latest versions of Rust or its dependencies.
* have minimal review requirements, such as documentation changes (so not totally new APIs).
------
A Set of bits.
Documentation is available at https://contain-rs.github.io/bit-set/bit_set.
[![Build Status](https://travis-ci.org/contain-rs/bit-set.svg?branch=master)](https://travis-ci.org/contain-rs/bit-set)
[![crates.io](http://meritbadge.herokuapp.com/bit-set)](https://crates.io/crates/bit-set)

20
third_party/rust/bit-set/deploy-docs.sh поставляемый
Просмотреть файл

@ -1,20 +0,0 @@
#!/bin/bash
set -o errexit -o nounset
rev=$(git rev-parse --short HEAD)
cd target/doc
git init
git config user.email 'FlashCat@users.noreply.github.com'
git config user.name 'FlashCat'
git remote add upstream "https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git"
git fetch upstream gh-pages
git reset upstream/gh-pages
touch .
git add -A .
git commit -m "rebuild pages at ${rev}"
git push -q upstream HEAD:gh-pages

1443
third_party/rust/bit-set/src/lib.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

1
third_party/rust/diff/.cargo-checksum.json поставляемый
Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"71838a0a4987a0a3e4c7b1eb667c42a763eddacbead07adde45f81407e16dd7c","LICENSE-APACHE":"b40930bbcf80744c86c46a12bc9da056641d722716c378f5659b9e555ef833e1","LICENSE-MIT":"c5de694d2f7a6084593c7280222149b683f084103f9627be3a22ed5eee4ce86c","README.md":"3bad625ba36a7860645995daa412b0ca65f961c7ab3f75d319e31e200a30067d","benches/benches.rs":"acb562551891b0290f21f12a40bf9c1b5074109488b563e3b014000d095ec427","examples/simple.rs":"b56bfd5a90d9ece110a0e2ac7eece6cec7e8ca92d2af8394d40f33176577e48f","src/lib.rs":"4462b5a8e59e59bbd2322716c34b9761f0a51f55190ff38ee8f936641a64d8fc","tests/tests.rs":"a93a403807d5c6735e04c90a35254a82e00ba1bc72daff778b5cbf6bd37dde1a"},"package":"3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"}

27
third_party/rust/diff/Cargo.toml поставляемый
Просмотреть файл

@ -1,27 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "diff"
version = "0.1.11"
authors = ["Utkarsh Kukreti <utkarshkukreti@gmail.com>"]
description = "An LCS based slice and string diffing implementation."
homepage = "https://github.com/utkarshkukreti/diff.rs"
documentation = "https://docs.rs/diff"
license = "MIT OR Apache-2.0"
repository = "https://github.com/utkarshkukreti/diff.rs"
[dev-dependencies.quickcheck]
version = "0.4.1"
default-features = false
[dev-dependencies.speculate]
version = "0.0.25"

201
third_party/rust/diff/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
third_party/rust/diff/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2015 Utkarsh Kukreti
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

39
third_party/rust/diff/README.md поставляемый
Просмотреть файл

@ -1,39 +0,0 @@
# diff.rs
> An LCS based slice and string diffing implementation.
## Example
```rust
extern crate diff;
fn main() {
let left = "foo\nbar\nbaz\nquux";
let right = "foo\nbaz\nbar\nquux";
for diff in diff::lines(left, right) {
match diff {
diff::Result::Left(l) => println!("-{}", l),
diff::Result::Both(l, _) => println!(" {}", l),
diff::Result::Right(r) => println!("+{}", r)
}
}
}
```
prints
```
foo
-bar
baz
+bar
quux
```
## License
`diff` is primarily distributed under the terms of both the MIT license and the
Apache License (Version 2.0).
See LICENSE-APACHE, and LICENSE-MIT for details.

44
third_party/rust/diff/benches/benches.rs поставляемый
Просмотреть файл

@ -1,44 +0,0 @@
#![feature(plugin, test)]
#![plugin(speculate)]
extern crate diff;
extern crate test;
speculate! {
describe "slice" {
bench "empty" |b| {
let slice = [0u8; 0];
b.iter(|| ::diff::slice(&slice, &slice));
}
bench "10 equal items" |b| {
let slice = [0u8; 10];
b.iter(|| ::diff::slice(&slice, &slice));
}
bench "10 non-equal items" |b| {
let (left, right) = ([0u8; 10], [1u8; 10]);
b.iter(|| ::diff::slice(&left, &right));
}
bench "100 equal items" |b| {
let slice = [0u8; 100];
b.iter(|| ::diff::slice(&slice, &slice));
}
bench "100 non-equal items" |b| {
let (left, right) = ([0u8; 100], [1u8; 100]);
b.iter(|| ::diff::slice(&left, &right));
}
bench "1000 equal items" |b| {
let slice = [0u8; 1000];
b.iter(|| ::diff::slice(&slice, &slice));
}
bench "1000 non-equal items" |b| {
let (left, right) = ([0u8; 1000], [1u8; 1000]);
b.iter(|| ::diff::slice(&left, &right));
}
}
}

14
third_party/rust/diff/examples/simple.rs поставляемый
Просмотреть файл

@ -1,14 +0,0 @@
extern crate diff;
fn main() {
let left = "foo\nbar\nbaz\nquux";
let right = "foo\nbaz\nbar\nquux";
for diff in diff::lines(left, right) {
match diff {
diff::Result::Left(l) => println!("-{}", l),
diff::Result::Both(l, _) => println!(" {}", l),
diff::Result::Right(r) => println!("+{}", r),
}
}
}

126
third_party/rust/diff/src/lib.rs поставляемый
Просмотреть файл

@ -1,126 +0,0 @@
use std::cmp;
/// A fragment of a computed diff.
#[derive(Clone, Debug, PartialEq)]
pub enum Result<T> {
Left(T),
Both(T, T),
Right(T),
}
/// Computes the diff between two slices.
pub fn slice<'a, T: PartialEq>(left: &'a [T], right: &'a [T]) -> Vec<Result<&'a T>> {
iter(left.iter(), right.iter())
}
/// Computes the diff between the lines of two strings.
pub fn lines<'a>(left: &'a str, right: &'a str) -> Vec<Result<&'a str>> {
let mut diff = iter(left.lines(), right.lines());
// str::lines() does not yield an empty str at the end if the str ends with
// '\n'. We handle this special case by inserting one last diff item,
// depending on whether the left string ends with '\n', or the right one,
// or both.
match (
left.as_bytes().last().cloned(),
right.as_bytes().last().cloned(),
) {
(Some(b'\n'), Some(b'\n')) => {
diff.push(Result::Both(&left[left.len()..], &right[right.len()..]))
}
(Some(b'\n'), _) => diff.push(Result::Left(&left[left.len()..])),
(_, Some(b'\n')) => diff.push(Result::Right(&right[right.len()..])),
_ => {}
}
diff
}
/// Computes the diff between the chars of two strings.
pub fn chars<'a>(left: &'a str, right: &'a str) -> Vec<Result<char>> {
iter(left.chars(), right.chars())
}
fn iter<I, T>(left: I, right: I) -> Vec<Result<T>>
where
I: Clone + Iterator<Item = T> + DoubleEndedIterator,
T: PartialEq,
{
let left_count = left.clone().count();
let right_count = right.clone().count();
let min_count = cmp::min(left_count, right_count);
let leading_equals = left.clone()
.zip(right.clone())
.take_while(|p| p.0 == p.1)
.count();
let trailing_equals = left.clone()
.rev()
.zip(right.clone().rev())
.take(min_count - leading_equals)
.take_while(|p| p.0 == p.1)
.count();
let left_diff_size = left_count - leading_equals - trailing_equals;
let right_diff_size = right_count - leading_equals - trailing_equals;
let table: Vec<Vec<u32>> = {
let mut table = vec![vec![0; right_diff_size + 1]; left_diff_size + 1];
let left_skip = left.clone().skip(leading_equals).take(left_diff_size);
let right_skip = right.clone().skip(leading_equals).take(right_diff_size);
for (i, l) in left_skip.clone().enumerate() {
for (j, r) in right_skip.clone().enumerate() {
table[i + 1][j + 1] = if l == r {
table[i][j] + 1
} else {
std::cmp::max(table[i][j + 1], table[i + 1][j])
};
}
}
table
};
let diff = {
let mut diff = Vec::with_capacity(left_diff_size + right_diff_size);
let mut i = left_diff_size;
let mut j = right_diff_size;
let mut li = left.clone().rev().skip(trailing_equals);
let mut ri = right.clone().rev().skip(trailing_equals);
loop {
if j > 0 && (i == 0 || table[i][j] == table[i][j - 1]) {
j -= 1;
diff.push(Result::Right(ri.next().unwrap()));
} else if i > 0 && (j == 0 || table[i][j] == table[i - 1][j]) {
i -= 1;
diff.push(Result::Left(li.next().unwrap()));
} else if i > 0 && j > 0 {
i -= 1;
j -= 1;
diff.push(Result::Both(li.next().unwrap(), ri.next().unwrap()));
} else {
break;
}
}
diff
};
let diff_size = leading_equals + diff.len() + trailing_equals;
let mut total_diff = Vec::with_capacity(diff_size);
total_diff.extend(
left.clone()
.zip(right.clone())
.take(leading_equals)
.map(|(l, r)| Result::Both(l, r)),
);
total_diff.extend(diff.into_iter().rev());
total_diff.extend(
left.skip(leading_equals + left_diff_size)
.zip(right.skip(leading_equals + right_diff_size))
.map(|(l, r)| Result::Both(l, r)),
);
total_diff
}

229
third_party/rust/diff/tests/tests.rs поставляемый
Просмотреть файл

@ -1,229 +0,0 @@
#![feature(plugin)]
#![plugin(speculate)]
extern crate diff;
extern crate quickcheck;
use diff::Result::*;
pub fn undiff<T: Clone>(diff: &[::diff::Result<&T>]) -> (Vec<T>, Vec<T>) {
let (mut left, mut right) = (vec![], vec![]);
for d in diff {
match *d {
Left(l) => left.push(l.clone()),
Both(l, r) => {
left.push(l.clone());
right.push(r.clone());
}
Right(r) => right.push(r.clone()),
}
}
(left, right)
}
pub fn undiff_str<'a>(diff: &[::diff::Result<&'a str>]) -> (Vec<&'a str>, Vec<&'a str>) {
let (mut left, mut right) = (vec![], vec![]);
for d in diff {
match *d {
Left(l) => left.push(l),
Both(l, r) => {
left.push(l);
right.push(r);
}
Right(r) => right.push(r),
}
}
(left, right)
}
pub fn undiff_chars(diff: &[::diff::Result<char>]) -> (String, String) {
let (mut left, mut right) = (vec![], vec![]);
for d in diff {
match *d {
Left(l) => left.push(l),
Both(l, r) => {
left.push(l);
right.push(r);
}
Right(r) => right.push(r),
}
}
(
left.iter().cloned().collect(),
right.iter().cloned().collect(),
)
}
speculate! {
describe "slice" {
fn go<T>(left: &[T], right: &[T], len: usize) where
T: Clone + ::std::fmt::Debug + PartialEq
{
let diff = ::diff::slice(&left, &right);
assert_eq!(diff.len(), len);
let (left_, right_) = undiff(&diff);
assert_eq!(left, &left_[..]);
assert_eq!(right, &right_[..]);
}
test "empty slices" {
let slice: &[()] = &[];
go(&slice, &slice, 0);
}
test "equal + non-empty slices" {
let slice = [1, 2, 3];
go(&slice, &slice, 3);
}
test "left empty, right non-empty" {
let slice = [1, 2, 3];
go(&slice, &[], 3);
}
test "left non-empty, right empty" {
let slice = [1, 2, 3];
go(&[], &slice, 3);
}
test "misc 1" {
let left = [1, 2, 3, 4, 1, 3];
let right = [1, 4, 1, 1];
go(&left, &right, 7);
}
test "misc 2" {
let left = [1, 2, 1, 2, 3, 2, 2, 3, 1, 3];
let right = [3, 3, 1, 2, 3, 1, 2, 3, 4, 1];
go(&left, &right, 14);
}
test "misc 3" {
let left = [1, 3, 4];
let right = [2, 3, 4];
go(&left, &right, 4);
}
test "quickcheck" {
fn prop(left: Vec<i32>, right: Vec<i32>) -> bool {
let diff = ::diff::slice(&left, &right);
let (left_, right_) = undiff(&diff);
left == &left_[..] && right == &right_[..]
}
::quickcheck::quickcheck(prop as fn(Vec<i32>, Vec<i32>) -> bool);
}
}
describe "lines" {
fn go(left: &str, right: &str, len: usize) {
let diff = ::diff::lines(&left, &right);
assert_eq!(diff.len(), len);
let (left_, right_) = undiff_str(&diff);
assert_eq!(left, left_.join("\n"));
assert_eq!(right, right_.join("\n"));
}
test "both empty" {
go("", "", 0);
}
test "one empty" {
go("foo", "", 1);
go("", "foo", 1);
}
test "both equal and non-empty" {
go("foo\nbar", "foo\nbar", 2);
}
test "misc 1" {
go("foo\nbar\nbaz", "foo\nbaz\nquux", 4);
}
test "#10" {
go("a\nb\nc", "a\nb\nc\n", 4);
go("a\nb\nc\n", "a\nb\nc", 4);
let left = "a\nb\n\nc\n\n\n";
let right = "a\n\n\nc\n\n";
go(left, right, 8);
go(right, left, 8);
}
}
describe "chars" {
fn go(left: &str, right: &str, len: usize) {
let diff = ::diff::chars(&left, &right);
assert_eq!(diff.len(), len);
let (left_, right_) = undiff_chars(&diff);
assert_eq!(left, left_);
assert_eq!(right, right_);
}
test "both empty" {
go("", "", 0);
}
test "one empty" {
go("foo", "", 3);
go("", "foo", 3);
}
test "both equal and non-empty" {
go("foo bar", "foo bar", 7);
}
test "misc 1" {
go("foo bar baz", "foo baz quux", 16);
}
}
describe "issues" {
test "#4" {
assert_eq!(::diff::slice(&[1], &[2]), vec![Left(&1),
Right(&2)]);
assert_eq!(::diff::lines("a", "b"), vec![Left("a"),
Right("b")]);
}
test "#6" {
// This produced an overflow in the lines computation because it
// was not accounting for the fact that the "right" length was
// less than the "left" length.
let expected = r#"
BacktraceNode {
parents: [
BacktraceNode {
parents: []
},
BacktraceNode {
parents: [
BacktraceNode {
parents: []
}
]
}
]
}"#;
let actual = r#"
BacktraceNode {
parents: [
BacktraceNode {
parents: []
},
BacktraceNode {
parents: [
BacktraceNode {
parents: []
},
BacktraceNode {
parents: []
}
]
}
]
}"#;
::diff::lines(actual, expected);
}
}
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"COPYING":"01c266bced4a434da0051174d6bee16a4c82cf634e2679b6155d40d75012390f","Cargo.toml":"9b11b3f077cb37e9314fd44a9c385662bebd96f6858e0886e28b00ab1beee421","LICENSE-MIT":"0f96a83840e146e43c0ec96a22ec1f392e0680e6c1226e6f3ba87e0740af850f","README.md":"9a9d39001433160095de7a297b51052c91c9ef7f25a94d6f67ebe50343977926","UNLICENSE":"7e12e5df4bae12cb21581ba157ced20e1986a0508dd10d0e8a4ab9a4cf94e85c","completions/docopt-wordlist.bash":"213bf1baea244eeb32af3a24a9ad895212cb538e3cdaee3bfed842b11a2a64d8","examples/cargo.rs":"6a5012a3359e574a61607eca0c15add23ea9e312e8f20fb90d6438740483fefd","examples/cp.rs":"35e705c59968c22a965b7ba9afc4b7a3af5d411e929432b2fb6bd2ed08a7c9ce","examples/decode.rs":"85f5033cf6450a771d6be2af819718d316b92fb98b201e247cdbe0eb39039487","examples/hashmap.rs":"9066a7b7192e15b3b667702519645d31926a371bc54ab8d70b211d98458d5a8d","examples/optional_command.rs":"44d8dda079e237ac140b1d81d34d065cb2427a6edb4e60eadaa2c8ceaff0831c","examples/verbose_multiple.rs":"3279c76c7f3bde135deca90085b9f9d5a86ea3bd619e57ddfed35f4200bb5f4a","src/dopt.rs":"df0132f0e4ddc4f0bc6fa5789cf24b5fe01d1a91338dc1431bf93c5a1d6ffc11","src/lib.rs":"e7089315c3ebd4d2774bad8b5a6b2899db6348a44f88dc4253c840bbb389f147","src/parse.rs":"e67d4a5ee95a9fcc1aa5c84e78605f32a1c2bbc5e772de9109ae1ce5fac6f16a","src/synonym.rs":"152b89b6f755222f81ebb63fd3d372d7407aa8046522fc1dcc2e40f417cfc65b","src/test/mod.rs":"1f3eb58d5740f8789dea7bdb2815b1313e948c6f5de9ea6d79cad5bbed484114","src/test/suggestions.rs":"51e044db856a424ef12d2bc2eb541ae922b93d81ac5548767c9c638ccd87d388","src/test/testcases.docopt":"13fcd2948a5625b76f93b98ac7b6cb53ef70c119fc2c5f85d2cb67e56bd4e9c3","src/test/testcases.rs":"cbecfab0c82249a7d8ad193ad5e9e10f45a7a41b37e69cfc025a9cdc6c213f04","src/wordlist.rs":"45ccc3441d1abf072c2079f15b7f5a7af68bd2989c99a8acd5554133fa8db7fa"},"package":"db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"}

3
third_party/rust/docopt/COPYING поставляемый
Просмотреть файл

@ -1,3 +0,0 @@
This project is dual-licensed under the Unlicense and MIT licenses.
You may use this code under the terms of either license.

48
third_party/rust/docopt/Cargo.toml поставляемый
Просмотреть файл

@ -1,48 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "docopt"
version = "1.0.2"
authors = ["Andrew Gallant <jamslam@gmail.com>"]
exclude = ["/.travis.yml", "/Makefile", "/ctags.rust", "/scripts/*", "/session.vim"]
description = "Command line argument parsing."
homepage = "https://github.com/docopt/docopt.rs"
documentation = "http://burntsushi.net/rustdoc/docopt/"
readme = "README.md"
keywords = ["docopt", "argument", "command", "argv"]
categories = ["command-line-interface"]
license = "Unlicense/MIT"
repository = "https://github.com/docopt/docopt.rs"
[lib]
name = "docopt"
[[bin]]
name = "docopt-wordlist"
path = "src/wordlist.rs"
test = false
doc = false
[dependencies.lazy_static]
version = "1"
[dependencies.regex]
version = "1.0.3"
[dependencies.serde]
version = "1.0"
[dependencies.serde_derive]
version = "1.0"
[dependencies.strsim]
version = "0.7"

21
third_party/rust/docopt/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Andrew Gallant
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

191
third_party/rust/docopt/README.md поставляемый
Просмотреть файл

@ -1,191 +0,0 @@
Docopt for Rust with automatic type based decoding (i.e., data validation).
This implementation conforms to the
[official description of Docopt](http://docopt.org/) and
[passes its test suite](https://github.com/docopt/docopt/pull/201).
[![Build status](https://api.travis-ci.org/docopt/docopt.rs.svg)](https://travis-ci.org/docopt/docopt.rs)
[![](http://meritbadge.herokuapp.com/docopt)](https://crates.io/crates/docopt)
Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).
### Current status
Fully functional but the design of the API is up for debate. **I am seeking
feedback**.
### Documentation
<https://docs.rs/docopt>
### Installation
This crate is fully compatible with Cargo. Just add it to your `Cargo.toml`:
```toml
[dependencies]
docopt = "1"
serde = "1.0" # if you're using `derive(Deserialize)`
serde_derive = "1.0" # if you're using `derive(Deserialize)`
```
### Quick example
Here is a full working example. Notice that you can specify the types of each
of the named values in the Docopt usage string. Values will be automatically
converted to those types (or an error will be reported).
```rust
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
const USAGE: &'static str = "
Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
";
#[derive(Debug, Deserialize)]
struct Args {
flag_speed: isize,
flag_drifting: bool,
arg_name: Vec<String>,
arg_x: Option<i32>,
arg_y: Option<i32>,
cmd_ship: bool,
cmd_mine: bool,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}
```
### Struct field name mapping
The field names of the struct map like this:
```
-g => flag_g
--group => flag_group
--group <arg> => flag_group
FILE => arg_FILE
<file> => arg_file
build => cmd_build
```
### Traditional Docopt API
The reference implementation of Docopt returns a Python dictionary with names
like `<arg>` or `--flag`. If you prefer this access pattern, then you can use
`docopt::ArgvMap`. The disadvantage is that you have to do all of your type
conversion manually. Here's the canonical Docopt example with a hash table:
```rust
extern crate docopt;
use docopt::Docopt;
const USAGE: &'static str = "
Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
";
fn main() {
let args = Docopt::new(USAGE)
.and_then(|dopt| dopt.parse())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
// You can conveniently access values with `get_{bool,count,str,vec}`
// functions. If the key doesn't exist (or if, e.g., you use `get_str` on
// a switch), then a sensible default value is returned.
println!("\nSome values:");
println!(" Speed: {}", args.get_str("--speed"));
println!(" Drifting? {}", args.get_bool("--drifting"));
println!(" Names: {:?}", args.get_vec("<name>"));
}
```
### Tab completion support
This particular implementation bundles a command called `docopt-wordlist` that
can be used to automate tab completion. This repository also collects some
basic completion support for various shells (currently only bash) in the
`completions` directory.
You can use them to setup tab completion on your system. It should work with
any program that uses Docopt (or rather, any program that outputs usage
messages that look like Docopt). For example, to get tab completion support for
Cargo, you'll have to install `docopt-wordlist` and add some voodoo to your
`$HOME/.bash_completion` file (this may vary for other shells).
Here it is step by step:
```bash
# Download and build `docopt-wordlist` (as part of the Docopt package)
$ git clone git://github.com/docopt/docopt.rs
$ cd docopt.rs
$ cargo build --release
# Now setup tab completion (for bash)
$ echo "DOCOPT_WORDLIST_BIN=\"$(pwd)/target/release/docopt-wordlist\"" >> $HOME/.bash_completion
$ echo "source \"$(pwd)/completions/docopt-wordlist.bash\"" >> $HOME/.bash_completion
$ echo "complete -F _docopt_wordlist_commands cargo" >> $HOME/.bash_completion
```
My [CSV toolkit](https://github.com/BurntSushi/xsv) is supported too:
```bash
# shameless plug...
$ echo "complete -F _docopt_wordlist_commands xsv" >> $HOME/.bash_completion
```
Note that this is emphatically a first pass. There are several improvements
that I'd like to make:
1. Take context into account when completing. For example, it should be
possible to only show completions that can lead to a valid Docopt match.
This may be hard. (i.e., It may require restructuring Docopt's internals.)
2. Support more shells. (I'll happily accept pull requests on this one. I doubt
I'll venture outside of bash any time soon.)
3. Make tab completion support more seamless. The way it works right now is
pretty hacky by intermingling file/directory completion.

24
third_party/rust/docopt/UNLICENSE поставляемый
Просмотреть файл

@ -1,24 +0,0 @@
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>

Просмотреть файл

@ -1,79 +0,0 @@
# This is your basic tab completion that will work well with commands that
# have only one usage (i.e., no distinct sub-commands).
#
# Completion works by simply taking the command name and running `$cmd --help`
# to get the usage (which is then parsed for possible completions).
function _docopt_wordlist {
if [ -z "$DOCOPT_WORDLIST_BIN" ]; then
DOCOPT_WORDLIST_BIN=/usr/local/bin/docopt-wordlist
fi
cword=$(_get_cword)
cmd="${COMP_WORDS[0]}"
wordlist=$("$cmd" --help 2>&1 | "$DOCOPT_WORDLIST_BIN")
gen "$cword" "$wordlist"
}
# This is a fancier version of the above that supports commands that have
# multiple sub-commands (i.e., distinct usages like Cargo).
#
# This supports sub-command completion only if `$cmd --list` shows a list of
# available sub-commands.
#
# Otherwise, the usage for the command `a b c d` is taken from the first
# command that exits successfully:
#
# a b c d --help
# a b c --help
# a b --help
# a --help
#
# So for example, if you've typed `cargo test --jo`, then the following
# happens:
#
# cargo test --jo --help # error
# cargo test --help # gives 'test' sub-command usage!
#
# As a special case, if only the initial command has been typed, then the
# sub-commands (taken from `$cmd --list`) are added to the wordlist.
function _docopt_wordlist_commands {
if [ -z "$DOCOPT_WORDLIST_BIN" ]; then
DOCOPT_WORDLIST_BIN=/usr/local/bin/docopt-wordlist
fi
cword=$(_get_cword)
if [ "$COMP_CWORD" = 1 ]; then
cmd="${COMP_WORDS[0]}"
wordlist=$("$cmd" --help 2>&1 | "$DOCOPT_WORDLIST_BIN")
wordlist+=" $("$cmd" --list | egrep '^ +\w' | awk '{print $1}')"
gen "$cword" "$wordlist"
else
for ((i="$COMP_CWORD"; i >= 1; i++)); do
cmd="${COMP_WORDS[@]::$i}"
wordlist=$($cmd --help 2>&1 | "$DOCOPT_WORDLIST_BIN")
if [ $? = 0 ]; then
gen "$cword" "$wordlist"
break
fi
done
fi
}
# A helper function for running `compgen`, which is responsible for taking
# a prefix and presenting possible completions.
#
# If the current prefix starts with a `.` or a `/`, then file/directory
# completion is done. Otherwise, Docopt completion is done. If Docopt
# completion is empty, then it falls back to file/directory completion.
function gen {
cword="$1"
wordlist="$2"
if [[ "$cword" = .* || "$cword" = /* ]]; then
COMPREPLY=($(compgen -A file -- "$cword"))
else
COMPREPLY=($(compgen -W "$wordlist" -- "$cword"))
if [ -z "$COMPREPLY" ]; then
COMPREPLY=($(compgen -A file -- "$cword"))
fi
fi
}

59
third_party/rust/docopt/examples/cargo.rs поставляемый
Просмотреть файл

@ -1,59 +0,0 @@
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
// Write the Docopt usage string.
const USAGE: &'static str = "
Rust's package manager
Usage:
cargo <command> [<args>...]
cargo [options]
Options:
-h, --help Display this message
-V, --version Print version info and exit
--list List installed commands
-v, --verbose Use verbose output
Some common cargo commands are:
build Compile the current project
clean Remove the target directory
doc Build this project's and its dependencies' documentation
new Create a new cargo project
run Build and execute src/main.rs
test Run the tests
bench Run the benchmarks
update Update dependencies listed in Cargo.lock
See 'cargo help <command>' for more information on a specific command.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_command: Option<Command>,
arg_args: Vec<String>,
flag_list: bool,
flag_verbose: bool,
}
#[derive(Debug, Deserialize)]
enum Command {
Build,
Clean,
Doc,
New,
Run,
Test,
Bench,
Update,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.options_first(true).deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}

29
third_party/rust/docopt/examples/cp.rs поставляемый
Просмотреть файл

@ -1,29 +0,0 @@
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
// Write the Docopt usage string.
const USAGE: &'static str = "
Usage: cp [-a] <source> <dest>
cp [-a] <source>... <dir>
Options:
-a, --archive Copy everything.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_source: Vec<String>,
arg_dest: String,
arg_dir: String,
flag_archive: bool,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}

48
third_party/rust/docopt/examples/decode.rs поставляемый
Просмотреть файл

@ -1,48 +0,0 @@
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
const USAGE: &'static str = "
Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
";
#[derive(Debug, Deserialize)]
struct Args {
flag_speed: isize,
flag_drifting: bool,
arg_name: Vec<String>,
arg_x: Option<isize>,
arg_y: Option<isize>,
cmd_ship: bool,
cmd_mine: bool,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
println!("\nSome values:");
println!(" Speed: {}", args.flag_speed);
println!(" Drifting? {}", args.flag_drifting);
println!(" Names: {:?}", args.arg_name);
println!(" Command 'ship' invoked? {:?}", args.cmd_ship);
}

39
third_party/rust/docopt/examples/hashmap.rs поставляемый
Просмотреть файл

@ -1,39 +0,0 @@
extern crate docopt;
use docopt::Docopt;
const USAGE: &'static str = "
Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
";
fn main() {
let version = "1.2.3".to_owned();
let args = Docopt::new(USAGE)
.and_then(|dopt| dopt.version(Some(version)).parse())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
// You can conveniently access values with `get_{bool,count,str,vec}`
// functions. If the key doesn't exist (or if, e.g., you use `get_str` on
// a switch), then a sensible default value is returned.
println!("\nSome values:");
println!(" Speed: {}", args.get_str("--speed"));
println!(" Drifting? {}", args.get_bool("--drifting"));
println!(" Names: {:?}", args.get_vec("<name>"));
println!(" Command 'ship' invoked? {:?}", args.get_bool("ship"));
}

Просмотреть файл

@ -1,76 +0,0 @@
// This example shows how to implement a command with a "catch all."
//
// This requires writing your own impl for `Decodable` because docopt's
// decoder uses `Option<T>` to mean "T may not be present" rather than
// "T may be present but incorrect."
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate docopt;
use docopt::Docopt;
use serde::de::{Deserialize, Deserializer, Error, Visitor};
use std::fmt;
// Write the Docopt usage string.
const USAGE: &'static str = "
Rust's package manager
Usage:
mycli [<command>]
Options:
-h, --help Display this message
";
#[derive(Debug, Deserialize)]
struct Args {
arg_command: Command,
}
struct CommandVisitor;
impl<'de> Visitor<'de> for CommandVisitor {
type Value = Command;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string A, B or C")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where E: Error
{
Ok(match s {
"" => Command::None,
"A" => Command::A,
"B" => Command::B,
"C" => Command::C,
s => Command::Unknown(s.to_string()),
})
}
}
impl<'de> Deserialize<'de> for Command {
fn deserialize<D>(d: D) -> Result<Command, D::Error>
where D: Deserializer<'de>
{
d.deserialize_str(CommandVisitor)
}
}
#[derive(Debug)]
enum Command {
A,
B,
C,
Unknown(String),
None,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}

Просмотреть файл

@ -1,37 +0,0 @@
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
// This shows how to implement multiple levels of verbosity.
//
// When you have multiple patterns, I think the only way to carry the
// repeated flag through all of them is to specify it for each pattern
// explicitly.
//
// This is unfortunate.
const USAGE: &'static str = "
Usage: cp [options] [-v | -vv | -vvv] <source> <dest>
cp [options] [-v | -vv | -vvv] <source>... <dir>
Options:
-a, --archive Copy everything.
-v, --verbose Show extra log output.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_source: Vec<String>,
arg_dest: String,
arg_dir: String,
flag_archive: bool,
flag_verbose: usize,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}

1005
third_party/rust/docopt/src/dopt.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

222
third_party/rust/docopt/src/lib.rs поставляемый
Просмотреть файл

@ -1,222 +0,0 @@
//! Docopt for Rust. This implementation conforms to the
//! [official description of Docopt](http://docopt.org/) and
//! [passes its test suite](https://github.com/docopt/docopt/pull/201).
//!
//! This library is [on GitHub](https://github.com/docopt/docopt.rs).
//!
//! Fundamentally, Docopt is a command line argument parser. The detail that
//! distinguishes it from most parsers is that the parser is derived from the
//! usage string. Here's a simple example:
//!
//! ```rust
//! use docopt::Docopt;
//!
//! // Write the Docopt usage string.
//! const USAGE: &'static str = "
//! Usage: cp [-a] <source> <dest>
//! cp [-a] <source>... <dir>
//!
//! Options:
//! -a, --archive Copy everything.
//! ";
//!
//! // The argv. Normally you'd just use `parse` which will automatically
//! // use `std::env::args()`.
//! let argv = || vec!["cp", "-a", "file1", "file2", "dest/"];
//!
//! // Parse argv and exit the program with an error message if it fails.
//! let args = Docopt::new(USAGE)
//! .and_then(|d| d.argv(argv().into_iter()).parse())
//! .unwrap_or_else(|e| e.exit());
//!
//! // Now access your argv values. Synonyms work just fine!
//! assert!(args.get_bool("-a") && args.get_bool("--archive"));
//! assert_eq!(args.get_vec("<source>"), vec!["file1", "file2"]);
//! assert_eq!(args.get_str("<dir>"), "dest/");
//! assert_eq!(args.get_str("<dest>"), "");
//! ```
//!
//! # Type based decoding
//!
//! Often, command line values aren't just strings or booleans---sometimes
//! they are integers, or enums, or something more elaborate. Using the
//! standard Docopt interface can be inconvenient for this purpose, because
//! you'll need to convert all of the values explicitly. Instead, this crate
//! provides a `Decoder` that converts an `ArgvMap` to your custom struct.
//! Here is the same example as above using type based decoding:
//!
//! ```rust
//! # extern crate docopt;
//! #[macro_use]
//! extern crate serde_derive;
//! # fn main() {
//! use docopt::Docopt;
//!
//! // Write the Docopt usage string.
//! const USAGE: &'static str = "
//! Usage: cp [-a] <source> <dest>
//! cp [-a] <source>... <dir>
//!
//! Options:
//! -a, --archive Copy everything.
//! ";
//!
//! #[derive(Deserialize)]
//! struct Args {
//! arg_source: Vec<String>,
//! arg_dest: String,
//! arg_dir: String,
//! flag_archive: bool,
//! }
//!
//! let argv = || vec!["cp", "-a", "file1", "file2", "dest/"];
//! let args: Args = Docopt::new(USAGE)
//! .and_then(|d| d.argv(argv().into_iter()).deserialize())
//! .unwrap_or_else(|e| e.exit());
//!
//! // Now access your argv values.
//! fn s(x: &str) -> String { x.to_string() }
//! assert!(args.flag_archive);
//! assert_eq!(args.arg_source, vec![s("file1"), s("file2")]);
//! assert_eq!(args.arg_dir, s("dest/"));
//! assert_eq!(args.arg_dest, s(""));
//! # }
//! ```
//!
//! # Command line arguments for `rustc`
//!
//! Here's an example with a subset of `rustc`'s command line arguments that
//! shows more of Docopt and some of the benefits of type based decoding.
//!
//! ```rust
//! # extern crate docopt;
//! #[macro_use]
//! extern crate serde_derive;
//! extern crate serde;
//! # fn main() {
//! # #![allow(non_snake_case)]
//! use docopt::Docopt;
//! use std::fmt;
//!
//! // Write the Docopt usage string.
//! const USAGE: &'static str = "
//! Usage: rustc [options] [--cfg SPEC... -L PATH...] INPUT
//! rustc (--help | --version)
//!
//! Options:
//! -h, --help Show this message.
//! --version Show the version of rustc.
//! --cfg SPEC Configure the compilation environment.
//! -L PATH Add a directory to the library search path.
//! --emit TYPE Configure the output that rustc will produce.
//! Valid values: asm, ir, bc, obj, link.
//! --opt-level LEVEL Optimize with possible levels 0-3.
//! ";
//!
//! #[derive(Deserialize)]
//! struct Args {
//! arg_INPUT: String,
//! flag_emit: Option<Emit>,
//! flag_opt_level: Option<OptLevel>,
//! flag_cfg: Vec<String>,
//! flag_L: Vec<String>,
//! flag_help: bool,
//! flag_version: bool,
//! }
//!
//! // This is easy. The decoder will automatically restrict values to
//! // strings that match one of the enum variants.
//! #[derive(Deserialize)]
//! # #[derive(Debug, PartialEq)]
//! enum Emit { Asm, Ir, Bc, Obj, Link }
//!
//! // This one is harder because we want the user to specify an integer,
//! // but restrict it to a specific range. So we implement `Deserialize`
//! // ourselves.
//! # #[derive(Debug, PartialEq)]
//! enum OptLevel { Zero, One, Two, Three }
//! struct OptLevelVisitor;
//!
//! impl<'de> serde::de::Visitor<'de> for OptLevelVisitor {
//! type Value = OptLevel;
//!
//! fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
//! formatter.write_str("a number from range 0..3")
//! }
//!
//! fn visit_u8<E>(self, n: u8) -> Result<Self::Value, E>
//! where E: serde::de::Error
//! {
//! Ok(match n {
//! 0 => OptLevel::Zero, 1 => OptLevel::One,
//! 2 => OptLevel::Two, 3 => OptLevel::Three,
//! n => {
//! let err = format!(
//! "Could not deserialize '{}' as opt-level.", n);
//! return Err(E::custom(err));
//! }
//! })
//! }
//! }
//!
//! impl<'de> serde::de::Deserialize<'de> for OptLevel {
//! fn deserialize<D>(d: D) -> Result<OptLevel, D::Error>
//! where D: serde::de::Deserializer<'de>
//! {
//! d.deserialize_u8(OptLevelVisitor)
//! }
//! }
//!
//! let argv = || vec!["rustc", "-L", ".", "-L", "..", "--cfg", "a",
//! "--opt-level", "2", "--emit=ir", "docopt.rs"];
//! let args: Args = Docopt::new(USAGE)
//! .and_then(|d| d.argv(argv().into_iter()).deserialize())
//! .unwrap_or_else(|e| e.exit());
//!
//! // Now access your argv values.
//! fn s(x: &str) -> String { x.to_string() }
//! assert_eq!(args.arg_INPUT, "docopt.rs".to_string());
//! assert_eq!(args.flag_L, vec![s("."), s("..")]);
//! assert_eq!(args.flag_cfg, vec![s("a")]);
//! assert_eq!(args.flag_opt_level, Some(OptLevel::Two));
//! assert_eq!(args.flag_emit, Some(Emit::Ir));
//! # }
//! ```
#![crate_name = "docopt"]
#![doc(html_root_url = "http://burntsushi.net/rustdoc/docopt")]
#![deny(missing_docs)]
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate strsim;
#[allow(unused_imports)]
#[macro_use]
extern crate serde_derive;
extern crate serde;
pub use dopt::{ArgvMap, Deserializer, Docopt, Error, Value};
// Writes a formatted message to stderr, panicking if the write fails.
macro_rules! werr(
    ($($arg:tt)*) => ({
        use std::io::{Write, stderr};
        write!(&mut stderr(), $($arg)*).unwrap();
    })
);
// Compiles a regex pattern, panicking if it is invalid; intended only
// for patterns that are known-good at compile time.
macro_rules! regex(
    ($s:expr) => (::regex::Regex::new($s).unwrap());
);
/// Returns the text matched by capture group `name`, or the empty
/// string when that group did not participate in the match.
fn cap_or_empty<'t>(caps: &regex::Captures<'t>, name: &str) -> &'t str {
    match caps.name(name) {
        Some(m) => m.as_str(),
        None => "",
    }
}
mod dopt;
#[doc(hidden)]
pub mod parse;
mod synonym;
#[cfg(test)]
mod test;

1488
third_party/rust/docopt/src/parse.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

107
third_party/rust/docopt/src/synonym.rs поставляемый
Просмотреть файл

@ -1,107 +0,0 @@
use std::collections::HashMap;
use std::collections::hash_map::{Iter, Keys};
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::{FromIterator, IntoIterator};
use std::mem;
/// A map in which several keys may resolve to a single value: lookups
/// first translate the key through a synonym table, then consult the
/// underlying value map.
#[derive(Clone)]
pub struct SynonymMap<K, V> {
    // Canonical key -> value storage.
    vals: HashMap<K, V>,
    // Synonym -> canonical key translation table.
    syns: HashMap<K, K>,
}
impl<K: Eq + Hash, V> SynonymMap<K, V> {
    /// Creates an empty map with no synonyms registered.
    pub fn new() -> SynonymMap<K, V> {
        SynonymMap {
            vals: HashMap::new(),
            syns: HashMap::new(),
        }
    }

    /// Registers `from` as a synonym of the existing key `to`.
    ///
    /// Panics if `to` has no value in the map. Returns `true` when
    /// `from` was not previously registered as a synonym.
    pub fn insert_synonym(&mut self, from: K, to: K) -> bool {
        assert!(self.vals.contains_key(&to));
        self.syns.insert(from, to).is_none()
    }

    /// Iterates over the canonical keys only (synonyms excluded).
    pub fn keys(&self) -> Keys<K, V> {
        self.vals.keys()
    }

    /// Iterates over `(canonical key, value)` pairs.
    pub fn iter(&self) -> Iter<K, V> {
        self.vals.iter()
    }

    /// Iterates over `(synonym, canonical key)` pairs.
    pub fn synonyms(&self) -> Iter<K, K> {
        self.syns.iter()
    }

    /// Looks up a value by key, resolving synonyms first.
    pub fn find(&self, k: &K) -> Option<&V> {
        self.with_key(k, |key| self.vals.get(key))
    }

    /// Tests for membership, resolving synonyms first.
    pub fn contains_key(&self, k: &K) -> bool {
        self.with_key(k, |key| self.vals.contains_key(key))
    }

    /// Returns the number of canonical keys (synonyms not counted).
    pub fn len(&self) -> usize {
        self.vals.len()
    }

    /// Applies `with` to the canonical form of `k`: the synonym's
    /// target when one is registered, otherwise `k` itself.
    fn with_key<T, F>(&self, k: &K, with: F) -> T where F: FnOnce(&K) -> T {
        with(self.syns.get(k).unwrap_or(k))
    }
}
impl<K: Eq + Hash + Clone, V> SynonymMap<K, V> {
    /// Returns an owned copy of the canonical form of `k`.
    pub fn resolve(&self, k: &K) -> K {
        self.with_key(k, |key| key.clone())
    }

    /// Looks a value up by key, panicking when it is absent.
    pub fn get<'a>(&'a self, k: &K) -> &'a V {
        self.find(k).unwrap()
    }

    /// Mutable lookup, resolving synonyms first.
    pub fn find_mut<'a>(&'a mut self, k: &K) -> Option<&'a mut V> {
        match self.syns.get(k) {
            Some(canonical) => self.vals.get_mut(canonical),
            None => self.vals.get_mut(k),
        }
    }

    /// Stores `new` under `k`, returning the previous value if any.
    ///
    /// NOTE(review): when `k` is a registered synonym, this indexes
    /// `vals` with the synonym itself rather than its target and will
    /// panic if `vals` has no entry under `k` — presumably callers only
    /// pass canonical keys on that path; verify against call sites.
    pub fn swap(&mut self, k: K, mut new: V) -> Option<V> {
        if self.syns.contains_key(&k) {
            let slot = self.vals.get_mut(&k).unwrap();
            mem::swap(slot, &mut new);
            Some(new)
        } else {
            self.vals.insert(k, new)
        }
    }

    /// Inserts a value, returning `true` when `k` was not present.
    pub fn insert(&mut self, k: K, v: V) -> bool {
        self.swap(k, v).is_none()
    }
}
impl<K: Eq + Hash + Clone, V> FromIterator<(K, V)> for SynonymMap<K, V> {
    /// Builds a map from `(key, value)` pairs; later duplicates
    /// overwrite earlier ones, as with a plain `HashMap`.
    fn from_iter<T: IntoIterator<Item=(K, V)>>(iter: T) -> SynonymMap<K, V> {
        let mut map = SynonymMap::new();
        iter.into_iter().for_each(|(k, v)| { map.insert(k, v); });
        map
    }
}
impl<K: Eq + Hash + Debug, V: Debug> Debug for SynonymMap<K, V> {
    /// Formats the value map followed by the synonym table.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        self.vals.fmt(f)?;
        // Fixed: the label was previously misspelled "synomyns".
        write!(f, " (synonyms: {:?})", self.syns)
    }
}

152
third_party/rust/docopt/src/test/mod.rs поставляемый
Просмотреть файл

@ -1,152 +0,0 @@
use std::collections::HashMap;
use {Docopt, ArgvMap, Error};
use Value::{self, Switch, Plain};
/// Parses `argv` (with an implicit "cmd" program name prepended)
/// against the usage string `doc`, panicking on either an invalid
/// usage string or a parse failure.
fn get_args(doc: &str, argv: &[&'static str]) -> ArgvMap {
    let dopt = Docopt::new(doc)
        .unwrap_or_else(|err| panic!("Invalid usage: {}", err));
    dopt.argv(vec!["cmd"].iter().chain(argv.iter()))
        .parse()
        .unwrap_or_else(|err| panic!("{}", err))
}
/// Converts an association list of static keys into an owned
/// `HashMap<String, Value>`.
fn map_from_alist(alist: Vec<(&'static str, Value)>)
                 -> HashMap<String, Value> {
    let mut map = HashMap::new();
    for (k, v) in alist {
        map.insert(k.to_string(), v);
    }
    map
}
/// Asserts that `expected` and `got` contain exactly the same
/// argument/value pairs, panicking with a description of the first
/// discrepancy found in either direction.
fn same_args(expected: &HashMap<String, Value>, got: &ArgvMap) {
    // Every expected pair must appear in the parsed result.
    for (k, ve) in expected.iter() {
        match got.map.find(k) {
            None => panic!("EXPECTED has '{}' but GOT does not.", k),
            Some(vg) => {
                assert!(ve == vg,
                        "{}: EXPECTED = '{:?}' != '{:?}' = GOT", k, ve, vg)
            }
        }
    }
    // Every parsed pair must also have been expected. The original code
    // looked each key up in `got.map` again (comparing GOT with itself),
    // so a key present in GOT but absent from EXPECTED was never
    // reported; look it up in `expected`, as the panic message states.
    for (k, vg) in got.map.iter() {
        match expected.get(k) {
            None => panic!("GOT has '{}' but EXPECTED does not.", k),
            Some(ve) => {
                assert!(vg == ve,
                        "{}: GOT = '{:?}' != '{:?}' = EXPECTED", k, vg, ve)
            }
        }
    }
}
// Defines a test that parses `$doc` with `$args` and asserts that the
// resulting argument map equals the `$expected` association list.
macro_rules! test_expect(
    ($name:ident, $doc:expr, $args:expr, $expected:expr) => (
        #[test]
        fn $name() {
            let vals = get_args($doc, $args);
            let expected = map_from_alist($expected);
            same_args(&expected, &vals);
        }
    );
);
// Defines a test asserting that parsing `$doc` with `$args` fails,
// i.e. `get_args` panics with a user error.
macro_rules! test_user_error(
    ($name:ident, $doc:expr, $args:expr) => (
        #[test]
        #[should_panic]
        fn $name() { get_args($doc, $args); }
    );
);
// Regression tests for specific upstream issues, expressed with the
// macros above.
test_expect!(test_issue_13, "Usage: prog file <file>", &["file", "file"],
vec![("file", Switch(true)),
("<file>", Plain(Some("file".to_string())))]);
test_expect!(test_issue_129, "Usage: prog [options]
Options:
--foo ARG Foo foo.",
&["--foo=a b"],
vec![("--foo", Plain(Some("a b".into())))]);
// Issue 12: deserializing from a usage alternative that does not bind
// <timestamp> must fall back to the numeric default (0) rather than
// fail.
#[test]
fn regression_issue_12() {
    const USAGE: &'static str = "
Usage:
whisper info <file>
whisper update <file> <timestamp> <value>
whisper mark <file> <value>
";
    #[derive(Deserialize, Debug)]
    struct Args {
        arg_file: String,
        cmd_info: bool,
        cmd_update: bool,
        arg_timestamp: u64,
        arg_value: f64,
    }
    let dopt: Args = Docopt::new(USAGE)
        .unwrap()
        .argv(&["whisper", "mark", "./p/blah", "100"])
        .deserialize()
        .unwrap();
    // The `mark` pattern supplies no <timestamp>, so it must be 0.
    assert_eq!(dopt.arg_timestamp, 0);
}
// Issue 195 regression: parsing a long run of stacked short flags
// followed by `...` must succeed.
#[test]
fn regression_issue_195() {
    const USAGE: &'static str = "
Usage:
slow [-abcdefghijklmnopqrs...]
";
    Docopt::new(USAGE)
        .unwrap()
        .argv(&["slow", "-abcdefghijklmnopqrs"])
        .parse()
        .unwrap();
}
// Issue 219: a repeated group of positionals must collect each
// position into its own list, preserving order.
#[test]
fn regression_issue_219() {
    #[derive(Deserialize)]
    struct Args {
        arg_type: Vec<String>,
        arg_param: Vec<String>,
    }
    const USAGE: &'static str = "
Usage:
encode [-v <type> <param>]...
";
    let argv = &["encode", "-v", "bool", "true", "string", "foo"];
    let args: Args = Docopt::new(USAGE).unwrap().argv(argv).deserialize().unwrap();
    assert_eq!(args.arg_type, vec!["bool".to_owned(), "string".to_owned()]);
    assert_eq!(args.arg_param, vec!["true".to_owned(), "foo".to_owned()]);
}
// Deserializing into a unit struct (no captured fields) must succeed.
#[test]
fn test_unit_struct() {
    const USAGE: &'static str = "
Usage:
cargo version [options]
Options:
-h, --help Print this message
";
    #[derive(Deserialize)]
    struct Options;
    let argv = &["cargo", "version"];
    let result: Result<Options, Error> = Docopt::new(USAGE)
        .unwrap()
        .argv(argv)
        .deserialize();
    assert!(result.is_ok());
}
mod testcases;
mod suggestions;

Просмотреть файл

@ -1,72 +0,0 @@
use {Docopt, Error};
/// Parses `argv` (with a "prog" program name prepended) against `doc`
/// and returns the resulting error. Panics if the usage string is
/// invalid or if parsing unexpectedly succeeds.
fn get_suggestion(doc: &str, argv: &[&'static str]) -> Error {
    let dopt = Docopt::new(doc)
        .unwrap_or_else(|err| panic!("Invalid usage: {}", err));
    let mut full_argv: Vec<String> =
        argv.iter().map(|x| x.to_string()).collect();
    full_argv.insert(0, "prog".to_string());
    match dopt.argv(full_argv.into_iter()).parse() {
        Err(err) => err,
        Ok(_) => panic!("Should have been a user error"),
    }
}
// Defines a test asserting that parsing `$doc` with `$args` produces
// an argv error (wrapped in program usage) whose message is exactly
// `$expected` — used to pin down "Did you mean ...?" suggestions.
macro_rules! test_suggest(
    ($name:ident, $doc:expr, $args:expr, $expected:expr) => (
        #[test]
        fn $name() {
            let sg = get_suggestion($doc, $args);
            println!("{}", sg);
            match sg {
                Error::WithProgramUsage(e, _) => {
                    match *e {
                        Error::Argv(msg) => {
                            println!("{:?}",msg);
                            assert_eq!(msg, $expected);
                        }
                        err => panic!("Error other than argv: {:?}", err)
                    }
                },
                _ => panic!("Error without program usage")
            }
        }
    );
);
// Suggestion tests: each case pins the exact error text, including the
// "Did you mean ...?" hint (or its absence when no flag is similar
// enough).
test_suggest!(test_suggest_1, "Usage: prog [--release]", &["--releas"], "Unknown flag: '--releas'. Did you mean '--release'?");
test_suggest!(test_suggest_2,
"Usage: prog [-a] <source> <dest>
prog [-a] <source>... <dir>
prog [-e]
Options:
-a, --archive Copy everything.
",
&["-d"], "Unknown flag: '-d'");
test_suggest!(test_suggest_3,
"Usage: prog [-a] <source> <dest>
prog [-a] <source>... <dir>
prog [-e]
Options:
-a, --archive Copy everything.
-e, --export Export all the things.
",
&["--expotr"], "Unknown flag: '--expotr'. Did you mean '--export'?");
test_suggest!(test_suggest_4,
"Usage: prog [--import] [--complete]
",
&["--mport", "--complte"], "Unknown flag: '--mport'. Did you mean '--import'?");
test_suggest!(test_suggest_5,
"Usage: prog [--import] [--complete]
",
&["--import", "--complte"], "Unknown flag: '--complte'. Did you mean '--complete'?");

Разница между файлами не показана из-за своего большого размера Загрузить разницу

801
third_party/rust/docopt/src/test/testcases.rs поставляемый
Просмотреть файл

@ -1,801 +0,0 @@
// !!! ATTENTION !!!
// This file is automatically generated by `scripts/mk-testcases`.
// Please do not edit this file directly!
use Value::{Switch, Counted, Plain, List};
use test::{get_args, map_from_alist, same_args};
test_expect!(test_0_testcases, "Usage: prog", &[], vec!());
test_user_error!(test_1_testcases, "Usage: prog", &["--xxx"]);
test_expect!(test_2_testcases, "Usage: prog [options]
Options: -a All.", &[], vec!(("-a", Switch(false))));
test_expect!(test_3_testcases, "Usage: prog [options]
Options: -a All.", &["-a"], vec!(("-a", Switch(true))));
test_user_error!(test_4_testcases, "Usage: prog [options]
Options: -a All.", &["-x"]);
test_expect!(test_5_testcases, "Usage: prog [options]
Options: --all All.", &[], vec!(("--all", Switch(false))));
test_expect!(test_6_testcases, "Usage: prog [options]
Options: --all All.", &["--all"], vec!(("--all", Switch(true))));
test_user_error!(test_7_testcases, "Usage: prog [options]
Options: --all All.", &["--xxx"]);
test_expect!(test_8_testcases, "Usage: prog [options]
Options: -v, --verbose Verbose.", &["--verbose"], vec!(("--verbose", Switch(true))));
test_user_error!(test_9_testcases, "Usage: prog [options]
Options: -v, --verbose Verbose.", &["--ver"]);
test_expect!(test_10_testcases, "Usage: prog [options]
Options: -v, --verbose Verbose.", &["-v"], vec!(("--verbose", Switch(true))));
test_expect!(test_11_testcases, "Usage: prog [options]
Options: -p PATH", &["-p", "home/"], vec!(("-p", Plain(Some("home/".to_string())))));
test_expect!(test_12_testcases, "Usage: prog [options]
Options: -p PATH", &["-phome/"], vec!(("-p", Plain(Some("home/".to_string())))));
test_user_error!(test_13_testcases, "Usage: prog [options]
Options: -p PATH", &["-p"]);
test_expect!(test_14_testcases, "Usage: prog [options]
Options: --path <path>", &["--path", "home/"], vec!(("--path", Plain(Some("home/".to_string())))));
test_expect!(test_15_testcases, "Usage: prog [options]
Options: --path <path>", &["--path=home/"], vec!(("--path", Plain(Some("home/".to_string())))));
test_user_error!(test_16_testcases, "Usage: prog [options]
Options: --path <path>", &["--pa", "home/"]);
test_user_error!(test_17_testcases, "Usage: prog [options]
Options: --path <path>", &["--pa=home/"]);
test_user_error!(test_18_testcases, "Usage: prog [options]
Options: --path <path>", &["--path"]);
test_expect!(test_19_testcases, "Usage: prog [options]
Options: -p PATH, --path=<path> Path to files.", &["-proot"], vec!(("--path", Plain(Some("root".to_string())))));
test_expect!(test_20_testcases, "Usage: prog [options]
Options: -p --path PATH Path to files.", &["-p", "root"], vec!(("--path", Plain(Some("root".to_string())))));
test_expect!(test_21_testcases, "Usage: prog [options]
Options: -p --path PATH Path to files.", &["--path", "root"], vec!(("--path", Plain(Some("root".to_string())))));
test_expect!(test_22_testcases, "Usage: prog [options]
Options:
-p PATH Path to files [default: ./]", &[], vec!(("-p", Plain(Some("./".to_string())))));
test_expect!(test_23_testcases, "Usage: prog [options]
Options:
-p PATH Path to files [default: ./]", &["-phome"], vec!(("-p", Plain(Some("home".to_string())))));
test_expect!(test_24_testcases, "UsAgE: prog [options]
OpTiOnS: --path=<files> Path to files
[dEfAuLt: /root]", &[], vec!(("--path", Plain(Some("/root".to_string())))));
test_expect!(test_25_testcases, "UsAgE: prog [options]
OpTiOnS: --path=<files> Path to files
[dEfAuLt: /root]", &["--path=home"], vec!(("--path", Plain(Some("home".to_string())))));
test_expect!(test_26_testcases, "usage: prog [options]
options:
-a Add
-r Remote
-m <msg> Message", &["-a", "-r", "-m", "Hello"], vec!(("-m", Plain(Some("Hello".to_string()))), ("-a", Switch(true)), ("-r", Switch(true))));
test_expect!(test_27_testcases, "usage: prog [options]
options:
-a Add
-r Remote
-m <msg> Message", &["-armyourass"], vec!(("-m", Plain(Some("yourass".to_string()))), ("-a", Switch(true)), ("-r", Switch(true))));
test_expect!(test_28_testcases, "usage: prog [options]
options:
-a Add
-r Remote
-m <msg> Message", &["-a", "-r"], vec!(("-m", Plain(None)), ("-a", Switch(true)), ("-r", Switch(true))));
test_expect!(test_29_testcases, "Usage: prog [options]
Options: --version
--verbose", &["--version"], vec!(("--verbose", Switch(false)), ("--version", Switch(true))));
test_expect!(test_30_testcases, "Usage: prog [options]
Options: --version
--verbose", &["--verbose"], vec!(("--verbose", Switch(true)), ("--version", Switch(false))));
test_user_error!(test_31_testcases, "Usage: prog [options]
Options: --version
--verbose", &["--ver"]);
test_user_error!(test_32_testcases, "Usage: prog [options]
Options: --version
--verbose", &["--verb"]);
test_expect!(test_33_testcases, "usage: prog [-a -r -m <msg>]
options:
-a Add
-r Remote
-m <msg> Message", &["-armyourass"], vec!(("-m", Plain(Some("yourass".to_string()))), ("-a", Switch(true)), ("-r", Switch(true))));
test_expect!(test_34_testcases, "usage: prog [-armMSG]
options: -a Add
-r Remote
-m <msg> Message", &["-a", "-r", "-m", "Hello"], vec!(("-m", Plain(Some("Hello".to_string()))), ("-a", Switch(true)), ("-r", Switch(true))));
test_expect!(test_35_testcases, "usage: prog -a -b
options:
-a
-b", &["-a", "-b"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_expect!(test_36_testcases, "usage: prog -a -b
options:
-a
-b", &["-b", "-a"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_user_error!(test_37_testcases, "usage: prog -a -b
options:
-a
-b", &["-a"]);
test_user_error!(test_38_testcases, "usage: prog -a -b
options:
-a
-b", &[]);
test_expect!(test_39_testcases, "usage: prog (-a -b)
options: -a
-b", &["-a", "-b"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_expect!(test_40_testcases, "usage: prog (-a -b)
options: -a
-b", &["-b", "-a"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_user_error!(test_41_testcases, "usage: prog (-a -b)
options: -a
-b", &["-a"]);
test_user_error!(test_42_testcases, "usage: prog (-a -b)
options: -a
-b", &[]);
test_expect!(test_43_testcases, "usage: prog [-a] -b
options: -a
-b", &["-a", "-b"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_expect!(test_44_testcases, "usage: prog [-a] -b
options: -a
-b", &["-b", "-a"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_user_error!(test_45_testcases, "usage: prog [-a] -b
options: -a
-b", &["-a"]);
test_expect!(test_46_testcases, "usage: prog [-a] -b
options: -a
-b", &["-b"], vec!(("-a", Switch(false)), ("-b", Switch(true))));
test_user_error!(test_47_testcases, "usage: prog [-a] -b
options: -a
-b", &[]);
test_expect!(test_48_testcases, "usage: prog [(-a -b)]
options: -a
-b", &["-a", "-b"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_expect!(test_49_testcases, "usage: prog [(-a -b)]
options: -a
-b", &["-b", "-a"], vec!(("-a", Switch(true)), ("-b", Switch(true))));
test_user_error!(test_50_testcases, "usage: prog [(-a -b)]
options: -a
-b", &["-a"]);
test_user_error!(test_51_testcases, "usage: prog [(-a -b)]
options: -a
-b", &["-b"]);
test_expect!(test_52_testcases, "usage: prog [(-a -b)]
options: -a
-b", &[], vec!(("-a", Switch(false)), ("-b", Switch(false))));
test_user_error!(test_53_testcases, "usage: prog (-a|-b)
options: -a
-b", &["-a", "-b"]);
test_user_error!(test_54_testcases, "usage: prog (-a|-b)
options: -a
-b", &[]);
test_expect!(test_55_testcases, "usage: prog (-a|-b)
options: -a
-b", &["-a"], vec!(("-a", Switch(true)), ("-b", Switch(false))));
test_expect!(test_56_testcases, "usage: prog (-a|-b)
options: -a
-b", &["-b"], vec!(("-a", Switch(false)), ("-b", Switch(true))));
test_user_error!(test_57_testcases, "usage: prog [ -a | -b ]
options: -a
-b", &["-a", "-b"]);
test_expect!(test_58_testcases, "usage: prog [ -a | -b ]
options: -a
-b", &[], vec!(("-a", Switch(false)), ("-b", Switch(false))));
test_expect!(test_59_testcases, "usage: prog [ -a | -b ]
options: -a
-b", &["-a"], vec!(("-a", Switch(true)), ("-b", Switch(false))));
test_expect!(test_60_testcases, "usage: prog [ -a | -b ]
options: -a
-b", &["-b"], vec!(("-a", Switch(false)), ("-b", Switch(true))));
test_expect!(test_61_testcases, "usage: prog <arg>", &["10"], vec!(("<arg>", Plain(Some("10".to_string())))));
test_user_error!(test_62_testcases, "usage: prog <arg>", &["10", "20"]);
test_user_error!(test_63_testcases, "usage: prog <arg>", &[]);
test_expect!(test_64_testcases, "usage: prog [<arg>]", &["10"], vec!(("<arg>", Plain(Some("10".to_string())))));
test_user_error!(test_65_testcases, "usage: prog [<arg>]", &["10", "20"]);
test_expect!(test_66_testcases, "usage: prog [<arg>]", &[], vec!(("<arg>", Plain(None))));
test_expect!(test_67_testcases, "usage: prog <kind> <name> <type>", &["10", "20", "40"], vec!(("<type>", Plain(Some("40".to_string()))), ("<kind>", Plain(Some("10".to_string()))), ("<name>", Plain(Some("20".to_string())))));
test_user_error!(test_68_testcases, "usage: prog <kind> <name> <type>", &["10", "20"]);
test_user_error!(test_69_testcases, "usage: prog <kind> <name> <type>", &[]);
test_expect!(test_70_testcases, "usage: prog <kind> [<name> <type>]", &["10", "20", "40"], vec!(("<type>", Plain(Some("40".to_string()))), ("<kind>", Plain(Some("10".to_string()))), ("<name>", Plain(Some("20".to_string())))));
test_expect!(test_71_testcases, "usage: prog <kind> [<name> <type>]", &["10", "20"], vec!(("<type>", Plain(None)), ("<kind>", Plain(Some("10".to_string()))), ("<name>", Plain(Some("20".to_string())))));
test_user_error!(test_72_testcases, "usage: prog <kind> [<name> <type>]", &[]);
test_user_error!(test_73_testcases, "usage: prog [<kind> | <name> <type>]", &["10", "20", "40"]);
test_expect!(test_74_testcases, "usage: prog [<kind> | <name> <type>]", &["20", "40"], vec!(("<type>", Plain(Some("40".to_string()))), ("<kind>", Plain(None)), ("<name>", Plain(Some("20".to_string())))));
test_expect!(test_75_testcases, "usage: prog [<kind> | <name> <type>]", &[], vec!(("<type>", Plain(None)), ("<kind>", Plain(None)), ("<name>", Plain(None))));
test_expect!(test_76_testcases, "usage: prog (<kind> --all | <name>)
options:
--all", &["10", "--all"], vec!(("--all", Switch(true)), ("<kind>", Plain(Some("10".to_string()))), ("<name>", Plain(None))));
test_expect!(test_77_testcases, "usage: prog (<kind> --all | <name>)
options:
--all", &["10"], vec!(("--all", Switch(false)), ("<kind>", Plain(None)), ("<name>", Plain(Some("10".to_string())))));
test_user_error!(test_78_testcases, "usage: prog (<kind> --all | <name>)
options:
--all", &[]);
test_expect!(test_79_testcases, "usage: prog [<name> <name>]", &["10", "20"], vec!(("<name>", List(vec!("10".to_string(), "20".to_string())))));
test_expect!(test_80_testcases, "usage: prog [<name> <name>]", &["10"], vec!(("<name>", List(vec!("10".to_string())))));
test_expect!(test_81_testcases, "usage: prog [<name> <name>]", &[], vec!(("<name>", List(vec!()))));
test_expect!(test_82_testcases, "usage: prog [(<name> <name>)]", &["10", "20"], vec!(("<name>", List(vec!("10".to_string(), "20".to_string())))));
test_user_error!(test_83_testcases, "usage: prog [(<name> <name>)]", &["10"]);
test_expect!(test_84_testcases, "usage: prog [(<name> <name>)]", &[], vec!(("<name>", List(vec!()))));
test_expect!(test_85_testcases, "usage: prog NAME...", &["10", "20"], vec!(("NAME", List(vec!("10".to_string(), "20".to_string())))));
test_expect!(test_86_testcases, "usage: prog NAME...", &["10"], vec!(("NAME", List(vec!("10".to_string())))));
test_user_error!(test_87_testcases, "usage: prog NAME...", &[]);
test_expect!(test_88_testcases, "usage: prog [NAME]...", &["10", "20"], vec!(("NAME", List(vec!("10".to_string(), "20".to_string())))));
test_expect!(test_89_testcases, "usage: prog [NAME]...", &["10"], vec!(("NAME", List(vec!("10".to_string())))));
test_expect!(test_90_testcases, "usage: prog [NAME]...", &[], vec!(("NAME", List(vec!()))));
test_expect!(test_91_testcases, "usage: prog [NAME...]", &["10", "20"], vec!(("NAME", List(vec!("10".to_string(), "20".to_string())))));
test_expect!(test_92_testcases, "usage: prog [NAME...]", &["10"], vec!(("NAME", List(vec!("10".to_string())))));
test_expect!(test_93_testcases, "usage: prog [NAME...]", &[], vec!(("NAME", List(vec!()))));
test_expect!(test_94_testcases, "usage: prog [NAME [NAME ...]]", &["10", "20"], vec!(("NAME", List(vec!("10".to_string(), "20".to_string())))));
test_expect!(test_95_testcases, "usage: prog [NAME [NAME ...]]", &["10"], vec!(("NAME", List(vec!("10".to_string())))));
test_expect!(test_96_testcases, "usage: prog [NAME [NAME ...]]", &[], vec!(("NAME", List(vec!()))));
test_expect!(test_97_testcases, "usage: prog (NAME | --foo NAME)
options: --foo", &["10"], vec!(("NAME", Plain(Some("10".to_string()))), ("--foo", Switch(false))));
test_expect!(test_98_testcases, "usage: prog (NAME | --foo NAME)
options: --foo", &["--foo", "10"], vec!(("NAME", Plain(Some("10".to_string()))), ("--foo", Switch(true))));
test_user_error!(test_99_testcases, "usage: prog (NAME | --foo NAME)
options: --foo", &["--foo=10"]);
test_expect!(test_100_testcases, "usage: prog (NAME | --foo) [--bar | NAME]
options: --foo
options: --bar", &["10"], vec!(("--bar", Switch(false)), ("NAME", List(vec!("10".to_string()))), ("--foo", Switch(false))));
test_expect!(test_101_testcases, "usage: prog (NAME | --foo) [--bar | NAME]
options: --foo
options: --bar", &["10", "20"], vec!(("--bar", Switch(false)), ("NAME", List(vec!("10".to_string(), "20".to_string()))), ("--foo", Switch(false))));
test_expect!(test_102_testcases, "usage: prog (NAME | --foo) [--bar | NAME]
options: --foo
options: --bar", &["--foo", "--bar"], vec!(("--bar", Switch(true)), ("NAME", List(vec!())), ("--foo", Switch(true))));
test_expect!(test_103_testcases, "Naval Fate.
Usage:
prog ship new <name>...
prog ship [<name>] move <x> <y> [--speed=<kn>]
prog ship shoot <x> <y>
prog mine (set|remove) <x> <y> [--moored|--drifting]
prog -h | --help
prog --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Mored (anchored) mine.
--drifting Drifting mine.", &["ship", "Guardian", "move", "150", "300", "--speed=20"], vec!(("shoot", Switch(false)), ("--moored", Switch(false)), ("--drifting", Switch(false)), ("move", Switch(true)), ("--speed", Plain(Some("20".to_string()))), ("mine", Switch(false)), ("new", Switch(false)), ("--version", Switch(false)), ("set", Switch(false)), ("remove", Switch(false)), ("<name>", List(vec!("Guardian".to_string()))), ("ship", Switch(true)), ("<x>", Plain(Some("150".to_string()))), ("<y>", Plain(Some("300".to_string()))), ("--help", Switch(false))));
test_expect!(test_104_testcases, "usage: prog --hello", &["--hello"], vec!(("--hello", Switch(true))));
test_expect!(test_105_testcases, "usage: prog [--hello=<world>]", &[], vec!(("--hello", Plain(None))));
test_expect!(test_106_testcases, "usage: prog [--hello=<world>]", &["--hello", "wrld"], vec!(("--hello", Plain(Some("wrld".to_string())))));
test_expect!(test_107_testcases, "usage: prog [-o]", &[], vec!(("-o", Switch(false))));
test_expect!(test_108_testcases, "usage: prog [-o]", &["-o"], vec!(("-o", Switch(true))));
test_expect!(test_109_testcases, "usage: prog [-opr]", &["-op"], vec!(("-o", Switch(true)), ("-p", Switch(true)), ("-r", Switch(false))));
test_expect!(test_110_testcases, "usage: prog --aabb | --aa", &["--aa"], vec!(("--aa", Switch(true)), ("--aabb", Switch(false))));
test_user_error!(test_111_testcases, "usage: prog --aabb | --aa", &["--a"]);
test_expect!(test_112_testcases, "Usage: prog -v", &["-v"], vec!(("-v", Switch(true))));
test_expect!(test_113_testcases, "Usage: prog [-v -v]", &[], vec!(("-v", Counted(0))));
test_expect!(test_114_testcases, "Usage: prog [-v -v]", &["-v"], vec!(("-v", Counted(1))));
test_expect!(test_115_testcases, "Usage: prog [-v -v]", &["-vv"], vec!(("-v", Counted(2))));
test_user_error!(test_116_testcases, "Usage: prog -v ...", &[]);
test_expect!(test_117_testcases, "Usage: prog -v ...", &["-v"], vec!(("-v", Counted(1))));
test_expect!(test_118_testcases, "Usage: prog -v ...", &["-vv"], vec!(("-v", Counted(2))));
test_expect!(test_119_testcases, "Usage: prog -v ...", &["-vvvvvv"], vec!(("-v", Counted(6))));
test_expect!(test_120_testcases, "Usage: prog [-v | -vv | -vvv]
This one is probably most readable user-friednly variant.", &[], vec!(("-v", Counted(0))));
test_expect!(test_121_testcases, "Usage: prog [-v | -vv | -vvv]
This one is probably most readable user-friednly variant.", &["-v"], vec!(("-v", Counted(1))));
test_expect!(test_122_testcases, "Usage: prog [-v | -vv | -vvv]
This one is probably most readable user-friednly variant.", &["-vv"], vec!(("-v", Counted(2))));
test_user_error!(test_123_testcases, "Usage: prog [-v | -vv | -vvv]
This one is probably most readable user-friednly variant.", &["-vvvv"]);
test_expect!(test_124_testcases, "usage: prog [--ver --ver]", &["--ver", "--ver"], vec!(("--ver", Counted(2))));
test_expect!(test_125_testcases, "usage: prog [go]", &["go"], vec!(("go", Switch(true))));
test_expect!(test_126_testcases, "usage: prog [go go]", &[], vec!(("go", Counted(0))));
test_expect!(test_127_testcases, "usage: prog [go go]", &["go"], vec!(("go", Counted(1))));
test_expect!(test_128_testcases, "usage: prog [go go]", &["go", "go"], vec!(("go", Counted(2))));
test_user_error!(test_129_testcases, "usage: prog [go go]", &["go", "go", "go"]);
test_expect!(test_130_testcases, "usage: prog go...", &["go", "go", "go", "go", "go"], vec!(("go", Counted(5))));
test_expect!(test_131_testcases, "usage: prog [options] [-a]
options: -a
-b", &["-a"], vec!(("-a", Switch(true)), ("-b", Switch(false))));
test_user_error!(test_132_testcases, "usage: prog [options] [-a]
options: -a
-b", &["-aa"]);
test_expect!(test_133_testcases, "Usage: prog [options] A
Options:
-q Be quiet
-v Be verbose.", &["arg"], vec!(("A", Plain(Some("arg".to_string()))), ("-v", Switch(false)), ("-q", Switch(false))));
test_expect!(test_134_testcases, "Usage: prog [options] A
Options:
-q Be quiet
-v Be verbose.", &["-v", "arg"], vec!(("A", Plain(Some("arg".to_string()))), ("-v", Switch(true)), ("-q", Switch(false))));
test_expect!(test_135_testcases, "Usage: prog [options] A
Options:
-q Be quiet
-v Be verbose.", &["-q", "arg"], vec!(("A", Plain(Some("arg".to_string()))), ("-v", Switch(false)), ("-q", Switch(true))));
test_expect!(test_136_testcases, "usage: prog [-]", &["-"], vec!(("-", Switch(true))));
test_expect!(test_137_testcases, "usage: prog [-]", &[], vec!(("-", Switch(false))));
test_expect!(test_138_testcases, "usage: prog [NAME [NAME ...]]", &["a", "b"], vec!(("NAME", List(vec!("a".to_string(), "b".to_string())))));
test_expect!(test_139_testcases, "usage: prog [NAME [NAME ...]]", &[], vec!(("NAME", List(vec!()))));
test_expect!(test_140_testcases, "usage: prog [options]
options:
-a Add
-m <msg> Message", &["-a"], vec!(("-m", Plain(None)), ("-a", Switch(true))));
test_expect!(test_141_testcases, "usage: prog --hello", &["--hello"], vec!(("--hello", Switch(true))));
test_expect!(test_142_testcases, "usage: prog [--hello=<world>]", &[], vec!(("--hello", Plain(None))));
test_expect!(test_143_testcases, "usage: prog [--hello=<world>]", &["--hello", "wrld"], vec!(("--hello", Plain(Some("wrld".to_string())))));
test_expect!(test_144_testcases, "usage: prog [-o]", &[], vec!(("-o", Switch(false))));
test_expect!(test_145_testcases, "usage: prog [-o]", &["-o"], vec!(("-o", Switch(true))));
test_expect!(test_146_testcases, "usage: prog [-opr]", &["-op"], vec!(("-o", Switch(true)), ("-p", Switch(true)), ("-r", Switch(false))));
test_expect!(test_147_testcases, "usage: git [-v | --verbose]", &["-v"], vec!(("-v", Switch(true)), ("--verbose", Switch(false))));
test_expect!(test_148_testcases, "usage: git remote [-v | --verbose]", &["remote", "-v"], vec!(("-v", Switch(true)), ("remote", Switch(true)), ("--verbose", Switch(false))));
test_expect!(test_149_testcases, "usage: prog", &[], vec!());
test_expect!(test_150_testcases, "usage: prog
prog <a> <b>", &["1", "2"], vec!(("<a>", Plain(Some("1".to_string()))), ("<b>", Plain(Some("2".to_string())))));
test_expect!(test_151_testcases, "usage: prog
prog <a> <b>", &[], vec!(("<a>", Plain(None)), ("<b>", Plain(None))));
test_expect!(test_152_testcases, "usage: prog <a> <b>
prog", &[], vec!(("<a>", Plain(None)), ("<b>", Plain(None))));
test_expect!(test_153_testcases, "usage: prog [--file=<f>]", &[], vec!(("--file", Plain(None))));
test_expect!(test_154_testcases, "usage: prog [--file=<f>]
options: --file <a>", &[], vec!(("--file", Plain(None))));
test_expect!(test_155_testcases, "Usage: prog [-a <host:port>]
Options: -a, --address <host:port> TCP address [default: localhost:6283].", &[], vec!(("--address", Plain(Some("localhost:6283".to_string())))));
test_expect!(test_156_testcases, "usage: prog --long=<arg> ...", &["--long", "one"], vec!(("--long", List(vec!("one".to_string())))));
test_expect!(test_157_testcases, "usage: prog --long=<arg> ...", &["--long", "one", "--long", "two"], vec!(("--long", List(vec!("one".to_string(), "two".to_string())))));
test_expect!(test_158_testcases, "usage: prog (go <direction> --speed=<km/h>)...", &["go", "left", "--speed=5", "go", "right", "--speed=9"], vec!(("go", Counted(2)), ("<direction>", List(vec!("left".to_string(), "right".to_string()))), ("--speed", List(vec!("5".to_string(), "9".to_string())))));
test_expect!(test_159_testcases, "usage: prog [options] -a
options: -a", &["-a"], vec!(("-a", Switch(true))));
test_expect!(test_160_testcases, "usage: prog [-o <o>]...
options: -o <o> [default: x]", &["-o", "this", "-o", "that"], vec!(("-o", List(vec!("this".to_string(), "that".to_string())))));
test_expect!(test_161_testcases, "usage: prog [-o <o>]...
options: -o <o> [default: x]", &[], vec!(("-o", List(vec!("x".to_string())))));
test_expect!(test_162_testcases, "usage: prog [-o <o>]...
options: -o <o> [default: x y]", &["-o", "this"], vec!(("-o", List(vec!("this".to_string())))));
test_expect!(test_163_testcases, "usage: prog [-o <o>]...
options: -o <o> [default: x y]", &[], vec!(("-o", List(vec!("x".to_string(), "y".to_string())))));
test_expect!(test_164_testcases, "usage: prog -pPATH
options: -p PATH", &["-pHOME"], vec!(("-p", Plain(Some("HOME".to_string())))));
test_expect!(test_165_testcases, "Usage: foo (--xx=X|--yy=Y)...", &["--xx=1", "--yy=2"], vec!(("--yy", List(vec!("2".to_string()))), ("--xx", List(vec!("1".to_string())))));
test_expect!(test_166_testcases, "usage: prog [<input file>]", &["f.txt"], vec!(("<input file>", Plain(Some("f.txt".to_string())))));
test_expect!(test_167_testcases, "usage: prog [--input=<file name>]...", &["--input", "a.txt", "--input=b.txt"], vec!(("--input", List(vec!("a.txt".to_string(), "b.txt".to_string())))));
test_expect!(test_168_testcases, "usage: prog good [options]
prog fail [options]
options: --loglevel=N", &["fail", "--loglevel", "5"], vec!(("fail", Switch(true)), ("good", Switch(false)), ("--loglevel", Plain(Some("5".to_string())))));
test_expect!(test_169_testcases, "usage:prog --foo", &["--foo"], vec!(("--foo", Switch(true))));
test_expect!(test_170_testcases, "PROGRAM USAGE: prog --foo", &["--foo"], vec!(("--foo", Switch(true))));
test_expect!(test_171_testcases, "Usage: prog --foo
prog --bar
NOT PART OF SECTION", &["--foo"], vec!(("--bar", Switch(false)), ("--foo", Switch(true))));
test_expect!(test_172_testcases, "Usage:
prog --foo
prog --bar
NOT PART OF SECTION", &["--foo"], vec!(("--bar", Switch(false)), ("--foo", Switch(true))));
test_expect!(test_173_testcases, "Usage:
prog --foo
prog --bar
NOT PART OF SECTION", &["--foo"], vec!(("--bar", Switch(false)), ("--foo", Switch(true))));
test_expect!(test_174_testcases, "Usage: prog [options]
global options: --foo
local options: --baz
--bar
other options:
--egg
--spam
-not-an-option-", &["--bar", "--egg"], vec!(("--bar", Switch(true)), ("--egg", Switch(true)), ("--spam", Switch(false))));
test_expect!(test_175_testcases, "Usage: prog [-a] [--] [<arg>...]", &["-a"], vec!(("<arg>", List(vec!())), ("-a", Switch(true))));
test_expect!(test_176_testcases, "Usage: prog [-a] [--] [<arg>...]", &["--"], vec!(("<arg>", List(vec!())), ("-a", Switch(false))));
test_expect!(test_177_testcases, "Usage: prog [-a] [--] [<arg>...]", &["-a", "--", "-b"], vec!(("<arg>", List(vec!("-b".to_string()))), ("-a", Switch(true))));
test_expect!(test_178_testcases, "Usage: prog [-a] [--] [<arg>...]", &["-a", "--", "-a"], vec!(("<arg>", List(vec!("-a".to_string()))), ("-a", Switch(true))));
test_expect!(test_179_testcases, "Usage: prog [-a] [--] [<arg>...]", &["--", "-a"], vec!(("<arg>", List(vec!("-a".to_string()))), ("-a", Switch(false))));
test_expect!(test_180_testcases, "Usage: prog test [options] [--] [<args>...]", &["test", "a", "--", "-b"], vec!(("<args>", List(vec!("a".to_string(), "-b".to_string())))));
test_expect!(test_181_testcases, "Usage: prog test [options] [--] [<args>...]", &["test", "--", "-b"], vec!(("<args>", List(vec!("-b".to_string())))));
test_user_error!(test_182_testcases, "Usage: prog test [options] [--] [<args>...]", &["test", "a", "-b"]);
test_expect!(test_183_testcases, "Usage: prog test [options] [--] [<args>...]", &["test", "--", "-b", "--"], vec!(("<args>", List(vec!("-b".to_string(), "--".to_string())))));
test_expect!(test_184_testcases, "Usage: prog [options]
Options:
-a ... Foo", &[], vec!(("-a", Counted(0))));
test_expect!(test_185_testcases, "Usage: prog [options]
Options:
-a ... Foo", &["-a"], vec!(("-a", Counted(1))));
test_expect!(test_186_testcases, "Usage: prog [options]
Options:
-a ... Foo", &["-a", "-a"], vec!(("-a", Counted(2))));
test_expect!(test_187_testcases, "Usage: prog [options]
Options:
-a ... Foo", &["-aa"], vec!(("-a", Counted(2))));
test_expect!(test_188_testcases, "Usage: prog [options]
Options:
-a ... Foo", &["-a", "-a", "-a"], vec!(("-a", Counted(3))));
test_expect!(test_189_testcases, "Usage: prog [options]
Options:
-a ... Foo", &["-aaa"], vec!(("-a", Counted(3))));
test_expect!(test_190_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &[], vec!(("-a", Counted(0))));
test_expect!(test_191_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &["-a"], vec!(("-a", Counted(1))));
test_expect!(test_192_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &["-a", "--all"], vec!(("-a", Counted(2))));
test_expect!(test_193_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &["-aa", "--all"], vec!(("-a", Counted(3))));
test_expect!(test_194_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &["--all"], vec!(("-a", Counted(1))));
test_expect!(test_195_testcases, "Usage: prog [options]
Options:
-a, --all ... Foo", &["--all", "--all"], vec!(("-a", Counted(2))));
test_expect!(test_196_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &[], vec!(("-a", List(vec!()))));
test_expect!(test_197_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &["-a", "1"], vec!(("-a", List(vec!("1".to_string())))));
test_expect!(test_198_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &["-a", "2", "--all", "3"], vec!(("-a", List(vec!("2".to_string(), "3".to_string())))));
test_expect!(test_199_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &["-a4", "-a5", "--all", "6"], vec!(("-a", List(vec!("4".to_string(), "5".to_string(), "6".to_string())))));
test_expect!(test_200_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &["--all", "7"], vec!(("-a", List(vec!("7".to_string())))));
test_expect!(test_201_testcases, "Usage: prog [options]
Options:
-a, --all ARG ... Foo", &["--all", "8", "--all", "9"], vec!(("-a", List(vec!("8".to_string(), "9".to_string())))));
test_expect!(test_202_testcases, "Usage: prog [options]
Options:
--all ... Foo", &[], vec!(("--all", Counted(0))));
test_expect!(test_203_testcases, "Usage: prog [options]
Options:
--all ... Foo", &["--all"], vec!(("--all", Counted(1))));
test_expect!(test_204_testcases, "Usage: prog [options]
Options:
--all ... Foo", &["--all", "--all"], vec!(("--all", Counted(2))));
test_expect!(test_205_testcases, "Usage: prog [options]
Options:
--all=ARG ... Foo", &[], vec!(("--all", List(vec!()))));
test_expect!(test_206_testcases, "Usage: prog [options]
Options:
--all=ARG ... Foo", &["--all", "1"], vec!(("--all", List(vec!("1".to_string())))));
test_expect!(test_207_testcases, "Usage: prog [options]
Options:
--all=ARG ... Foo", &["--all", "2", "--all", "3"], vec!(("--all", List(vec!("2".to_string(), "3".to_string())))));
test_user_error!(test_208_testcases, "Usage: prog [options]
Options:
--all ... Foo", &["--all", "--all"]);
test_user_error!(test_209_testcases, "Usage: prog [options]
Options:
--all ARG ... Foo", &["--all", "foo", "--all", "bar"]);
test_expect!(test_210_testcases, "Usage: prog --speed=ARG", &["--speed", "20"], vec!(("--speed", Plain(Some("20".to_string())))));
test_expect!(test_211_testcases, "Usage: prog --speed=ARG", &["--speed=20"], vec!(("--speed", Plain(Some("20".to_string())))));
test_expect!(test_212_testcases, "Usage: prog --speed=ARG", &["--speed=-20"], vec!(("--speed", Plain(Some("-20".to_string())))));
test_expect!(test_213_testcases, "Usage: prog --speed=ARG", &["--speed", "-20"], vec!(("--speed", Plain(Some("-20".to_string())))));
test_expect!(test_214_testcases, "usage: prog [--datetime=<regex>]
options: --datetime=<regex> Regex for datetimes [default: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}]", &[], vec!(("--datetime", Plain(Some("^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}".to_string())))));
test_expect!(test_215_testcases, "Usage: prog [options]
Options:
-x ARG
-y", &["-x-y"], vec!(("-x", Plain(Some("-y".to_string())))));

114
third_party/rust/docopt/src/wordlist.rs поставляемый
Просмотреть файл

@ -1,114 +0,0 @@
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate regex;
extern crate serde;
extern crate strsim;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use dopt::Docopt;
use parse::{Atom, Parser};
// cheat until we get syntax extensions back :-(
// Compiles a regex literal, panicking on an invalid pattern (patterns here
// are compile-time constants, so a panic indicates a programmer error).
macro_rules! regex(
    ($s:expr) => (::regex::Regex::new($s).unwrap());
);
// `eprintln!`-style helper: formats its arguments and writes them to stderr,
// panicking if the write fails.
macro_rules! werr(
    ($($arg:tt)*) => ({
        use std::io::{Write, stderr};
        write!(&mut stderr(), $($arg)*).unwrap();
    })
);
/// Returns the text matched by named capture group `name`, or the empty
/// string when that group did not participate in the match.
fn cap_or_empty<'t>(caps: &regex::Captures<'t>, name: &str) -> &'t str {
    match caps.name(name) {
        Some(matched) => matched.as_str(),
        None => "",
    }
}
#[allow(dead_code)]
mod dopt;
#[allow(dead_code)]
mod parse;
#[allow(dead_code)]
mod synonym;
// Help/usage text consumed by docopt; it doubles as the CLI grammar, so its
// wording is runtime behavior and must not be edited casually.
const USAGE: &'static str = "
Usage: docopt-wordlist [(<name> <possibles>)] ...
docopt-wordlist prints a list of available flags and commands arguments for the
given usage (provided on stdin).
Example use:
your-command --help | docopt-wordlist
This command also supports completing positional arguments when given a list of
choices. The choices are included in the word list if and only if the argument
name appears in the usage string. For example:
your-command --help | docopt-wordlist 'arg' 'a b c'
Which will only include 'a', 'b' and 'c' in the wordlist if
'your-command --help' contains a positional argument named 'arg'.
";
/// CLI arguments deserialized by docopt from `USAGE`.
#[derive(Debug, Deserialize)]
struct Args {
    /// Names of positional arguments that have completion choices.
    arg_name: Vec<String>,
    /// Whitespace-separated completion choices, parallel to `arg_name`.
    arg_possibles: Vec<String>,
}
/// Entry point: parses this tool's own CLI arguments via docopt, then
/// delegates to `run`, printing any error to stderr and exiting with a
/// non-zero status on failure.
fn main() {
    let args: Args = Docopt::new(USAGE)
        .and_then(|d| d.deserialize())
        .unwrap_or_else(|e| e.exit());
    match run(args) {
        Ok(_) => {},
        Err(err) => {
            // Report the error ourselves rather than panicking so the exit
            // status is a clean 1 instead of a panic backtrace.
            write!(&mut io::stderr(), "{}", err).unwrap();
            ::std::process::exit(1)
        }
    }
}
/// Reads a docopt usage string from stdin, parses it, and prints to stdout a
/// space-separated word list: every flag/command in the usage, plus — for
/// positional arguments the user supplied choices for — those choices.
///
/// Returns `Err` with a human-readable message if stdin cannot be read or
/// the usage string fails to parse.
fn run(args: Args) -> Result<(), String> {
    let mut usage = String::new();
    io::stdin().read_to_string(&mut usage).map_err(|e| e.to_string())?;
    let parsed = Parser::new(&usage).map_err(|e| e.to_string())?;
    // Map each positional-argument name to its completion choices; the two
    // argument vectors are parallel (paired `<name> <possibles>` on the CLI),
    // and each `possibles` entry is split on runs of spaces/tabs.
    let arg_possibles: HashMap<String, Vec<String>> =
        args.arg_name.iter()
            .zip(args.arg_possibles.iter())
            .map(|(name, possibles)| {
                let choices =
                    regex!(r"[ \t]+").split(&**possibles)
                         .map(|s| s.to_string())
                         .collect::<Vec<String>>();
                (name.clone(), choices)
            })
            .collect();
    let mut words = vec![];
    for k in parsed.descs.keys() {
        if let Atom::Positional(ref arg_name) = *k {
            // Positionals are only emitted when the user supplied choices;
            // the bare placeholder name is useless for shell completion.
            if let Some(choices) = arg_possibles.get(arg_name) {
                words.extend(choices.iter().map(|s| s.clone()));
            }
            // If the user hasn't given choices for this positional argument,
            // then there's really nothing to complete here.
        } else {
            words.push(k.to_string());
        }
    }
    for (k, _) in parsed.descs.synonyms() {
        // We don't need to do anything special here since synonyms can
        // only be flags, which we always include in the wordlist.
        words.push(k.to_string());
    }
    println!("{}", words.join(" "));
    Ok(())
}

1
third_party/rust/ena/.cargo-checksum.json поставляемый
Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"479607f839ec311f5b48754953c3b33bd2d170d2bcb3008e904bef21ecad7a6d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"4b02d7ebfb188b1f2cbef20ade3082197046ccaa89e49d2bcdef6102d48919e3","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"294aabf6fb846dbe35bba837d70ea9115f20cd808995a318c0fccb05f91d096f","src/snapshot_vec.rs":"4935b5eb8292e3b62d662ca01d0baef3d6b341f5479811d837e872ebc3c8518f","src/unify/backing_vec.rs":"0bcc5cd9d7a8bf1fd17e87b6388eeb0f9e3c21ed280fa31ab5dcc4a1ee69fcca","src/unify/mod.rs":"1bed8bd5c8f804fb4c225ed309940ede74b05e58d64f6182ff1ea3895c18a930","src/unify/tests.rs":"b18974faeebdf2c03e82035fe7281bf4db3360ab10ce34b1d3441547836b19f2"},"package":"25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"}

37
third_party/rust/ena/Cargo.toml поставляемый
Просмотреть файл

@ -1,37 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "ena"
version = "0.10.1"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
homepage = "https://github.com/nikomatsakis/ena"
readme = "README.md"
keywords = ["unification", "union-find"]
license = "MIT/Apache-2.0"
repository = "https://github.com/nikomatsakis/ena"
[dependencies.dogged]
version = "0.2.0"
optional = true
[dependencies.log]
version = "0.4"
[dependencies.petgraph]
version = "0.4.5"
optional = true
[features]
bench = []
congruence-closure = ["petgraph"]
persistent = ["dogged"]

201
third_party/rust/ena/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/ena/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2010 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

23
third_party/rust/ena/README.md поставляемый
Просмотреть файл

@ -1,23 +0,0 @@
[![Build Status](https://travis-ci.org/nikomatsakis/ena.svg?branch=master)](https://travis-ci.org/nikomatsakis/ena)
An implementation of union-find in Rust; extracted from (and used by)
rustc.
### Name
The name "ena" comes from the Greek word for "one".
### Features
By default, you just get the union-find implementation. You can also
opt-in to the following experimental features:
- `bench`: use to run benchmarks (`cargo bench --features bench`)
### License
Like rustc itself, this code is dual-licensed under the MIT and Apache
licenses. Pull requests, comments, and other contributions are assumed
to imply consent to those terms. Moreover, it is understood that any
changes here may well be used in rustc itself under the same terms.

6
third_party/rust/ena/measurements.txt поставляемый
Просмотреть файл

@ -1,6 +0,0 @@
base
test unify::tests::big_array_bench ... bench: 740,192 ns/iter (+/- 35,823)
test unify::tests::big_array_bench ... bench: 745,031 ns/iter (+/- 240,463)
test unify::tests::big_array_bench ... bench: 762,031 ns/iter (+/- 240,463)
test unify::tests::big_array_bench ... bench: 756,234 ns/iter (+/- 264,710)

301
third_party/rust/ena/src/bitvec.rs поставляемый
Просмотреть файл

@ -1,301 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// A very simple BitVector type.
pub struct BitVector {
data: Vec<u64>,
}
impl BitVector {
pub fn new(num_bits: usize) -> BitVector {
let num_words = u64s(num_bits);
BitVector { data: vec![0; num_words] }
}
pub fn contains(&self, bit: usize) -> bool {
let (word, mask) = word_mask(bit);
(self.data[word] & mask) != 0
}
/// Returns true if the bit has changed.
pub fn insert(&mut self, bit: usize) -> bool {
let (word, mask) = word_mask(bit);
let data = &mut self.data[word];
let value = *data;
let new_value = value | mask;
*data = new_value;
new_value != value
}
pub fn insert_all(&mut self, all: &BitVector) -> bool {
assert!(self.data.len() == all.data.len());
let mut changed = false;
for (i, j) in self.data.iter_mut().zip(&all.data) {
let value = *i;
*i = value | *j;
if value != *i {
changed = true;
}
}
changed
}
pub fn grow(&mut self, num_bits: usize) {
let num_words = u64s(num_bits);
let extra_words = self.data.len() - num_words;
self.data.extend((0..extra_words).map(|_| 0));
}
/// Iterates over indexes of set bits in a sorted order
pub fn iter<'a>(&'a self) -> BitVectorIter<'a> {
BitVectorIter {
iter: self.data.iter(),
current: 0,
idx: 0,
}
}
}
pub struct BitVectorIter<'a> {
iter: ::std::slice::Iter<'a, u64>,
current: u64,
idx: usize,
}
impl<'a> Iterator for BitVectorIter<'a> {
type Item = usize;
fn next(&mut self) -> Option<usize> {
while self.current == 0 {
self.current = if let Some(&i) = self.iter.next() {
if i == 0 {
self.idx += 64;
continue;
} else {
self.idx = u64s(self.idx) * 64;
i
}
} else {
return None;
}
}
let offset = self.current.trailing_zeros() as usize;
self.current >>= offset;
self.current >>= 1; // shift otherwise overflows for 0b1000_0000_…_0000
self.idx += offset + 1;
return Some(self.idx - 1);
}
}
/// A "bit matrix" is basically a square matrix of booleans
/// represented as one gigantic bitvector. In other words, it is as if
/// you have N bitvectors, each of length N. Note that `elements` here is `N`/
#[derive(Clone)]
pub struct BitMatrix {
elements: usize,
vector: Vec<u64>,
}
impl BitMatrix {
// Create a new `elements x elements` matrix, initially empty.
pub fn new(elements: usize) -> BitMatrix {
// For every element, we need one bit for every other
// element. Round up to an even number of u64s.
let u64s_per_elem = u64s(elements);
BitMatrix {
elements: elements,
vector: vec![0; elements * u64s_per_elem],
}
}
/// The range of bits for a given element.
fn range(&self, element: usize) -> (usize, usize) {
let u64s_per_elem = u64s(self.elements);
let start = element * u64s_per_elem;
(start, start + u64s_per_elem)
}
pub fn add(&mut self, source: usize, target: usize) -> bool {
let (start, _) = self.range(source);
let (word, mask) = word_mask(target);
let mut vector = &mut self.vector[..];
let v1 = vector[start + word];
let v2 = v1 | mask;
vector[start + word] = v2;
v1 != v2
}
/// Do the bits from `source` contain `target`?
///
/// Put another way, if the matrix represents (transitive)
/// reachability, can `source` reach `target`?
pub fn contains(&self, source: usize, target: usize) -> bool {
let (start, _) = self.range(source);
let (word, mask) = word_mask(target);
(self.vector[start + word] & mask) != 0
}
/// Returns those indices that are reachable from both `a` and
/// `b`. This is an O(n) operation where `n` is the number of
/// elements (somewhat independent from the actual size of the
/// intersection, in particular).
pub fn intersection(&self, a: usize, b: usize) -> Vec<usize> {
let (a_start, a_end) = self.range(a);
let (b_start, b_end) = self.range(b);
let mut result = Vec::with_capacity(self.elements);
for (base, (i, j)) in (a_start..a_end).zip(b_start..b_end).enumerate() {
let mut v = self.vector[i] & self.vector[j];
for bit in 0..64 {
if v == 0 {
break;
}
if v & 0x1 != 0 {
result.push(base * 64 + bit);
}
v >>= 1;
}
}
result
}
/// ORs the bits of row `read` into row `write`; returns true if
/// anything changed.
///
/// This is used when computing transitive reachability: given an
/// edge `write -> read`, `write` can reach everything that `read`
/// can (and potentially more).
pub fn merge(&mut self, read: usize, write: usize) -> bool {
    let (read_start, read_end) = self.range(read);
    let (write_start, _) = self.range(write);
    let words = &mut self.vector[..];
    let mut changed = false;
    // Both rows have the same word count, so a single offset walks them
    // in lock-step.
    for offset in 0..(read_end - read_start) {
        let old = words[write_start + offset];
        let new = old | words[read_start + offset];
        words[write_start + offset] = new;
        changed |= old != new;
    }
    changed
}
}
/// Number of `u64` words needed to hold `elements` bits (i.e.
/// `elements / 64`, rounded up).
fn u64s(elements: usize) -> usize {
    // Written without `(elements + 63) / 64` so the addition cannot
    // overflow when `elements` is within 63 of `usize::MAX`.
    elements / 64 + if elements % 64 != 0 { 1 } else { 0 }
}
/// Splits a bit index into (word index, single-bit mask within that word).
fn word_mask(index: usize) -> (usize, u64) {
    (index / 64, 1u64 << (index & 63))
}
#[test]
fn bitvec_iter_works() {
    // Bits straddle the first/second word boundary (62..66).
    let mut bv = BitVector::new(100);
    for &bit in &[1, 10, 19, 62, 63, 64, 65, 66, 99] {
        bv.insert(bit);
    }
    assert_eq!(bv.iter().collect::<Vec<_>>(),
               [1, 10, 19, 62, 63, 64, 65, 66, 99]);
}
#[test]
fn bitvec_iter_works_2() {
    // Sparse bits across five words, including the last one.
    let mut bv = BitVector::new(300);
    for &bit in &[1, 10, 19, 62, 66, 99, 299] {
        bv.insert(bit);
    }
    assert_eq!(bv.iter().collect::<Vec<_>>(),
               [1, 10, 19, 62, 66, 99, 299]);
}
#[test]
fn bitvec_iter_works_3() {
    // One bit at (or near) the end of each 64-bit word.
    let mut bv = BitVector::new(319);
    for &bit in &[0, 127, 191, 255, 319] {
        bv.insert(bit);
    }
    assert_eq!(bv.iter().collect::<Vec<_>>(), [0, 127, 191, 255, 319]);
}
#[test]
fn union_two_vecs() {
    let mut a = BitVector::new(65);
    let mut b = BitVector::new(65);
    // `insert` reports whether the bit was newly set.
    assert!(a.insert(3));
    assert!(!a.insert(3));
    assert!(b.insert(5));
    assert!(b.insert(64));
    // `insert_all` reports whether the union changed anything.
    assert!(a.insert_all(&b));
    assert!(!a.insert_all(&b));
    for &present in &[3, 5, 64] {
        assert!(a.contains(present));
    }
    for &absent in &[4, 63] {
        assert!(!a.contains(absent));
    }
}
#[test]
fn grow() {
    let mut bv = BitVector::new(65);
    assert!(bv.insert(3));
    assert!(!bv.insert(3));
    assert!(bv.insert(5));
    assert!(bv.insert(64));
    // Growing must preserve existing bits and leave new bits clear.
    bv.grow(128);
    for &bit in &[3, 5, 64] {
        assert!(bv.contains(bit));
    }
    assert!(!bv.contains(126));
}
#[test]
fn matrix_intersection() {
    let mut matrix = BitMatrix::new(200);
    // Edges out of rows 2, 64 and 65; targets 10, 64 and 160 are the
    // only ones reachable from both 2 and 65.
    let edges = [
        (2, 3), (2, 6), (2, 10), (2, 64), (2, 65), (2, 130), (2, 160),
        (64, 133),
        (65, 2), (65, 8), (65, 10), (65, 64), (65, 68), (65, 133), (65, 160),
    ];
    for &(from, to) in &edges {
        matrix.add(from, to);
    }
    assert!(matrix.intersection(2, 64).is_empty());
    assert_eq!(matrix.intersection(2, 65), &[10, 64, 160]);
}

23
third_party/rust/ena/src/lib.rs поставляемый
Просмотреть файл

@ -1,23 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An implementation of union-find. See the `unify` module for more
//! details.
#![cfg_attr(feature = "bench", feature(test))]
#[macro_use]
extern crate log;
#[cfg(feature = "persistent")]
extern crate dogged;
pub mod snapshot_vec;
pub mod unify;

322
third_party/rust/ena/src/snapshot_vec.rs поставляемый
Просмотреть файл

@ -1,322 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A utility class for implementing "snapshottable" things; a snapshottable data structure permits
//! you to take a snapshot (via `start_snapshot`) and then, after making some changes, elect either
//! to rollback to the start of the snapshot or commit those changes.
//!
//! This vector is intended to be used as part of an abstraction, not serve as a complete
//! abstraction on its own. As such, while it will roll back most changes on its own, it also
//! supports a `get_mut` operation that gives you an arbitrary mutable pointer into the vector. To
//! ensure that any changes you make this with this pointer are rolled back, you must invoke
//! `record` to record any changes you make and also supplying a delegate capable of reversing
//! those changes.
use self::UndoLog::*;
use std::fmt;
use std::mem;
use std::ops;
/// One reversible action recorded by a `SnapshotVec` while a snapshot
/// is open; popped and undone (in reverse order) by `rollback_to`.
#[derive(Debug)]
pub enum UndoLog<D: SnapshotVecDelegate> {
    /// Indicates where a snapshot started.
    OpenSnapshot,
    /// Indicates a snapshot that has been committed.
    CommittedSnapshot,
    /// New variable with given index was created.
    NewElem(usize),
    /// Variable with given index was changed *from* the given value.
    SetElem(usize, D::Value),
    /// Extensible set of actions
    Other(D::Undo),
}
/// A vector that logs mutations while a snapshot is open so they can
/// later be rolled back; see the module docs for the protocol.
pub struct SnapshotVec<D: SnapshotVecDelegate> {
    // Current contents of each slot.
    values: Vec<D::Value>,
    // Reversible actions, newest last; empty when no snapshot is open.
    undo_log: Vec<UndoLog<D>>,
}
// Hand-written Debug: the bounds are placed on the delegate's
// associated types explicitly rather than relying on a derive.
impl<D> fmt::Debug for SnapshotVec<D>
where D: SnapshotVecDelegate,
      D: fmt::Debug,
      D::Undo: fmt::Debug,
      D::Value: fmt::Debug
{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("SnapshotVec")
            .field("values", &self.values)
            .field("undo_log", &self.undo_log)
            .finish()
    }
}
// Snapshots are tokens that should be created/consumed linearly.
/// Token returned by `start_snapshot`; must be passed back to exactly
/// one of `rollback_to` or `commit`.
pub struct Snapshot {
    // Length of the undo log at the time the snapshot was taken.
    length: usize,
}
/// Customization point for `SnapshotVec`: fixes the element type and
/// defines how to reverse user-recorded (`Other`) undo actions.
pub trait SnapshotVecDelegate {
    type Value;
    type Undo;
    /// Undo one custom `action` previously passed to `record`.
    fn reverse(values: &mut Vec<Self::Value>, action: Self::Undo);
}
// HACK(eddyb) manual impl avoids `Default` bound on `D`.
impl<D: SnapshotVecDelegate> Default for SnapshotVec<D> {
    // An empty vector with no snapshot open.
    fn default() -> Self {
        SnapshotVec {
            values: Vec::new(),
            undo_log: Vec::new(),
        }
    }
}
impl<D: SnapshotVecDelegate> SnapshotVec<D> {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn with_capacity(c: usize) -> SnapshotVec<D> {
        SnapshotVec {
            values: Vec::with_capacity(c),
            undo_log: Vec::new(),
        }
    }

    /// True while at least one snapshot is open; the undo log is only
    /// written to in that state.
    fn in_snapshot(&self) -> bool {
        !self.undo_log.is_empty()
    }

    /// Records a custom undo action. A no-op when no snapshot is open,
    /// since there is then nothing to roll back to.
    pub fn record(&mut self, action: D::Undo) {
        if self.in_snapshot() {
            self.undo_log.push(Other(action));
        }
    }

    pub fn len(&self) -> usize {
        self.values.len()
    }

    /// Appends `elem` and returns its index, logging the index so a
    /// rollback can pop it again.
    pub fn push(&mut self, elem: D::Value) -> usize {
        let len = self.values.len();
        self.values.push(elem);
        if self.in_snapshot() {
            self.undo_log.push(NewElem(len));
        }
        len
    }

    pub fn get(&self, index: usize) -> &D::Value {
        &self.values[index]
    }

    /// Reserve space for new values, just like an ordinary vec.
    pub fn reserve(&mut self, additional: usize) {
        // This is not affected by snapshots or anything.
        self.values.reserve(additional);
    }

    /// Returns a mutable pointer into the vec; whatever changes you make here cannot be undone
    /// automatically, so you should be sure to call `record()` with some sort of suitable undo
    /// action.
    pub fn get_mut(&mut self, index: usize) -> &mut D::Value {
        &mut self.values[index]
    }

    /// Updates the element at the given index. The old value will be saved (and perhaps restored)
    /// if a snapshot is active.
    pub fn set(&mut self, index: usize, new_elem: D::Value) {
        let old_elem = mem::replace(&mut self.values[index], new_elem);
        if self.in_snapshot() {
            self.undo_log.push(SetElem(index, old_elem));
        }
    }

    /// Updates all elements. Potentially more efficient -- but
    /// otherwise equivalent to -- invoking `set` for each element.
    pub fn set_all(&mut self, mut new_elems: impl FnMut(usize) -> D::Value) {
        if !self.in_snapshot() {
            // Fast path: no undo log to maintain, overwrite in place.
            for (slot, index) in self.values.iter_mut().zip(0..) {
                *slot = new_elems(index);
            }
        } else {
            // Slow path: `set` records each old value for rollback.
            for i in 0..self.values.len() {
                self.set(i, new_elems(i));
            }
        }
    }

    pub fn update<OP>(&mut self, index: usize, op: OP)
    where
        OP: FnOnce(&mut D::Value),
        D::Value: Clone,
    {
        if self.in_snapshot() {
            // Clone the old value *before* `op` mutates it so it can
            // be restored on rollback.
            let old_elem = self.values[index].clone();
            self.undo_log.push(SetElem(index, old_elem));
        }
        op(&mut self.values[index]);
    }

    pub fn start_snapshot(&mut self) -> Snapshot {
        let length = self.undo_log.len();
        self.undo_log.push(OpenSnapshot);
        Snapshot { length: length }
    }

    pub fn actions_since_snapshot(&self, snapshot: &Snapshot) -> &[UndoLog<D>] {
        &self.undo_log[snapshot.length..]
    }

    fn assert_open_snapshot(&self, snapshot: &Snapshot) {
        // Or else there was a failure to follow a stack discipline:
        assert!(self.undo_log.len() > snapshot.length);
        // Invariant established by start_snapshot():
        assert!(match self.undo_log[snapshot.length] {
            OpenSnapshot => true,
            _ => false,
        });
    }

    /// Undoes every action recorded since `snapshot` was taken, newest
    /// first, then removes the snapshot's own `OpenSnapshot` marker.
    pub fn rollback_to(&mut self, snapshot: Snapshot) {
        debug!("rollback_to({})", snapshot.length);
        self.assert_open_snapshot(&snapshot);
        while self.undo_log.len() > snapshot.length + 1 {
            match self.undo_log.pop().unwrap() {
                OpenSnapshot => {
                    // This indicates a failure to obey the stack discipline.
                    panic!("Cannot rollback an uncommitted snapshot");
                }
                CommittedSnapshot => {
                    // This occurs when there are nested snapshots and
                    // the inner is committed but outer is rolled back.
                }
                NewElem(i) => {
                    self.values.pop();
                    assert!(self.values.len() == i);
                }
                SetElem(i, v) => {
                    self.values[i] = v;
                }
                Other(u) => {
                    D::reverse(&mut self.values, u);
                }
            }
        }
        // The one remaining entry must be this snapshot's open marker.
        let v = self.undo_log.pop().unwrap();
        assert!(match v {
            OpenSnapshot => true,
            _ => false,
        });
        assert!(self.undo_log.len() == snapshot.length);
    }

    /// Commits all changes since the last snapshot. Of course, they
    /// can still be undone if there is a snapshot further out.
    pub fn commit(&mut self, snapshot: Snapshot) {
        debug!("commit({})", snapshot.length);
        self.assert_open_snapshot(&snapshot);
        if snapshot.length == 0 {
            // The root snapshot: no outer snapshot can undo us, so the
            // whole log can be dropped.
            self.undo_log.truncate(0);
        } else {
            self.undo_log[snapshot.length] = CommittedSnapshot;
        }
    }
}
// Lets a `SnapshotVec` be used anywhere a shared slice of values is expected.
impl<D: SnapshotVecDelegate> ops::Deref for SnapshotVec<D> {
    type Target = [D::Value];
    fn deref(&self) -> &[D::Value] {
        &*self.values
    }
}
// NOTE(review): mutations made through this slice bypass the undo log,
// like `get_mut`; callers should `record` a suitable undo action.
impl<D: SnapshotVecDelegate> ops::DerefMut for SnapshotVec<D> {
    fn deref_mut(&mut self) -> &mut [D::Value] {
        &mut *self.values
    }
}
// `vec[i]` sugar for `get(i)`.
impl<D: SnapshotVecDelegate> ops::Index<usize> for SnapshotVec<D> {
    type Output = D::Value;
    fn index(&self, index: usize) -> &D::Value {
        self.get(index)
    }
}
// `&mut vec[i]` sugar for `get_mut(i)`; the undo-log caveat of
// `get_mut` applies here too.
impl<D: SnapshotVecDelegate> ops::IndexMut<usize> for SnapshotVec<D> {
    fn index_mut(&mut self, index: usize) -> &mut D::Value {
        self.get_mut(index)
    }
}
impl<D: SnapshotVecDelegate> Extend<D::Value> for SnapshotVec<D> {
fn extend<T>(&mut self, iterable: T)
where
T: IntoIterator<Item = D::Value>,
{
let initial_len = self.values.len();
self.values.extend(iterable);
let final_len = self.values.len();
if self.in_snapshot() {
self.undo_log.extend((initial_len..final_len).map(|len| NewElem(len)));
}
}
}
// Manual impl: only the delegate's associated types need `Clone`, not
// the delegate `D` itself (a derive would demand `D: Clone`).
impl<D: SnapshotVecDelegate> Clone for SnapshotVec<D>
where
    D::Value: Clone,
    D::Undo: Clone,
{
    fn clone(&self) -> Self {
        SnapshotVec {
            values: self.values.clone(),
            undo_log: self.undo_log.clone(),
        }
    }
}
// Manual impl for the same reason as `Clone for SnapshotVec`: avoids a
// `D: Clone` bound that a derive would introduce.
impl<D: SnapshotVecDelegate> Clone for UndoLog<D>
where
    D::Value: Clone,
    D::Undo: Clone,
{
    fn clone(&self) -> Self {
        match *self {
            OpenSnapshot => OpenSnapshot,
            CommittedSnapshot => CommittedSnapshot,
            NewElem(i) => NewElem(i),
            SetElem(i, ref v) => SetElem(i, v.clone()),
            Other(ref u) => Other(u.clone()),
        }
    }
}

211
third_party/rust/ena/src/unify/backing_vec.rs поставляемый
Просмотреть файл

@ -1,211 +0,0 @@
#[cfg(feature = "persistent")]
use dogged::DVec;
use snapshot_vec as sv;
use std::ops;
use std::marker::PhantomData;
use super::{VarValue, UnifyKey, UnifyValue};
// Shorthand for a store's key type, used in the supertrait bound of
// `UnificationStore` below.
#[allow(dead_code)] // rustc BUG
#[allow(type_alias_bounds)]
type Key<S: UnificationStore> = <S as UnificationStore>::Key;
/// Largely internal trait implemented by the unification table
/// backing store types. The most common such type is `InPlace`,
/// which indicates a standard, mutable unification table.
pub trait UnificationStore:
    ops::Index<usize, Output = VarValue<Key<Self>>> + Clone + Default
{
    type Key: UnifyKey<Value = Self::Value>;
    type Value: UnifyValue;
    type Snapshot;
    /// Begins a snapshot; the token must be passed back to exactly one
    /// of `rollback_to` or `commit`.
    fn start_snapshot(&mut self) -> Self::Snapshot;
    fn rollback_to(&mut self, snapshot: Self::Snapshot);
    fn commit(&mut self, snapshot: Self::Snapshot);
    /// Overwrites every slot with `value(index)`.
    fn reset_unifications(
        &mut self,
        value: impl FnMut(u32) -> VarValue<Self::Key>,
    );
    fn len(&self) -> usize;
    fn push(&mut self, value: VarValue<Self::Key>);
    fn reserve(&mut self, num_new_values: usize);
    /// Applies `op` to the slot at `index`, with store-specific
    /// bookkeeping (e.g. undo logging for `InPlace`).
    fn update<F>(&mut self, index: usize, op: F)
        where F: FnOnce(&mut VarValue<Self::Key>);
    /// Short name for the key type, used in debug logging.
    fn tag() -> &'static str {
        Self::Key::tag()
    }
}
/// Backing store for an in-place unification table.
/// Not typically used directly.
#[derive(Clone, Debug)]
pub struct InPlace<K: UnifyKey> {
    // Snapshot/rollback support comes from the underlying SnapshotVec.
    values: sv::SnapshotVec<Delegate<K>>
}
// HACK(eddyb) manual impl avoids `Default` bound on `K`.
impl<K: UnifyKey> Default for InPlace<K> {
    fn default() -> Self {
        InPlace { values: sv::SnapshotVec::new() }
    }
}
// `InPlace` forwards every operation to the underlying `SnapshotVec`,
// which supplies the snapshot/rollback machinery.
impl<K: UnifyKey> UnificationStore for InPlace<K> {
    type Key = K;
    type Value = K::Value;
    type Snapshot = sv::Snapshot;
    #[inline]
    fn start_snapshot(&mut self) -> Self::Snapshot {
        self.values.start_snapshot()
    }
    #[inline]
    fn rollback_to(&mut self, snapshot: Self::Snapshot) {
        self.values.rollback_to(snapshot);
    }
    #[inline]
    fn commit(&mut self, snapshot: Self::Snapshot) {
        self.values.commit(snapshot);
    }
    #[inline]
    fn reset_unifications(
        &mut self,
        mut value: impl FnMut(u32) -> VarValue<Self::Key>,
    ) {
        // `set_all` takes a faster non-logging path when no snapshot
        // is active.
        self.values.set_all(|i| value(i as u32));
    }
    #[inline]
    fn len(&self) -> usize {
        self.values.len()
    }
    #[inline]
    fn push(&mut self, value: VarValue<Self::Key>) {
        self.values.push(value);
    }
    #[inline]
    fn reserve(&mut self, num_new_values: usize) {
        self.values.reserve(num_new_values);
    }
    #[inline]
    fn update<F>(&mut self, index: usize, op: F)
        where F: FnOnce(&mut VarValue<Self::Key>)
    {
        self.values.update(index, op)
    }
}
// Direct (read-only) access to the value record at a key's index.
impl<K> ops::Index<usize> for InPlace<K>
    where K: UnifyKey
{
    type Output = VarValue<K>;
    fn index(&self, index: usize) -> &VarValue<K> {
        &self.values[index]
    }
}
// Zero-sized delegate wiring `SnapshotVec` to unification values.
#[derive(Copy, Clone, Debug)]
struct Delegate<K>(PhantomData<K>);
impl<K: UnifyKey> sv::SnapshotVecDelegate for Delegate<K> {
    type Value = VarValue<K>;
    type Undo = ();
    // No custom undo actions are recorded, so there is nothing to reverse.
    fn reverse(_: &mut Vec<VarValue<K>>, _: ()) {}
}
/// Backing store for a persistent unification table (O(1) clone),
/// available with the `persistent` feature.
#[cfg(feature = "persistent")]
#[derive(Clone, Debug)]
pub struct Persistent<K: UnifyKey> {
    values: DVec<VarValue<K>>
}
// HACK(eddyb) manual impl avoids `Default` bound on `K`.
#[cfg(feature = "persistent")]
impl<K: UnifyKey> Default for Persistent<K> {
    fn default() -> Self {
        Persistent { values: DVec::new() }
    }
}
// Persistent store: snapshots are whole-table clones (cheap for a
// persistent vector), so rollback is assignment and commit is a no-op.
#[cfg(feature = "persistent")]
impl<K: UnifyKey> UnificationStore for Persistent<K> {
    type Key = K;
    type Value = K::Value;
    type Snapshot = Self;
    #[inline]
    fn start_snapshot(&mut self) -> Self::Snapshot {
        self.clone()
    }
    #[inline]
    fn rollback_to(&mut self, snapshot: Self::Snapshot) {
        *self = snapshot;
    }
    #[inline]
    fn commit(&mut self, _snapshot: Self::Snapshot) {
        // Changes are already in `self`; the snapshot clone is dropped.
    }
    #[inline]
    fn reset_unifications(
        &mut self,
        mut value: impl FnMut(u32) -> VarValue<Self::Key>,
    ) {
        // Without extending dogged, there isn't obviously a more
        // efficient way to do this. But it's pretty dumb. Maybe
        // dogged needs a `map`.
        for i in 0 .. self.values.len() {
            self.values[i] = value(i as u32);
        }
    }
    #[inline]
    fn len(&self) -> usize {
        self.values.len()
    }
    #[inline]
    fn push(&mut self, value: VarValue<Self::Key>) {
        self.values.push(value);
    }
    #[inline]
    fn reserve(&mut self, _num_new_values: usize) {
        // not obviously relevant to DVec.
    }
    #[inline]
    fn update<F>(&mut self, index: usize, op: F)
        where F: FnOnce(&mut VarValue<Self::Key>)
    {
        let p = &mut self.values[index];
        op(p);
    }
}
// Direct (read-only) access to the value record at a key's index.
#[cfg(feature = "persistent")]
impl<K> ops::Index<usize> for Persistent<K>
    where K: UnifyKey
{
    type Output = VarValue<K>;
    fn index(&self, index: usize) -> &VarValue<K> {
        &self.values[index]
    }
}

537
third_party/rust/ena/src/unify/mod.rs поставляемый
Просмотреть файл

@ -1,537 +0,0 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Union-find implementation. The main type is `UnificationTable`.
//!
//! You can define your own type for the *keys* in the table, but you
//! must implement `UnifyKey` for that type. The assumption is that
//! keys will be newtyped integers, hence we require that they
//! implement `Copy`.
//!
//! Keys can have values associated with them. The assumption is that
//! these values are cheaply cloneable (ideally, `Copy`), and some of
//! the interfaces are oriented around that assumption. If you just
//! want the classical "union-find" algorithm where you group things
//! into sets, use the `Value` type of `()`.
//!
//! When you have keys with non-trivial values, you must also define
//! how those values can be merged. As part of doing this, you can
//! define the "error" type to return on error; if errors are not
//! possible, use `NoError` (an uninstantiable struct). Using this
//! type also unlocks various more ergonomic methods (e.g., `union()`
//! in place of `unify_var_var()`).
//!
//! The best way to see how it is used is to read the `tests.rs` file;
//! search for e.g. `UnitKey`.
use std::marker;
use std::fmt::Debug;
mod backing_vec;
pub use self::backing_vec::{InPlace, UnificationStore};
#[cfg(feature = "persistent")]
pub use self::backing_vec::Persistent;
#[cfg(test)]
mod tests;
/// This trait is implemented by any type that can serve as a type
/// variable. We call such variables *unification keys*. For example,
/// this trait is implemented by `IntVid`, which represents integral
/// variables.
///
/// Each key type has an associated value type `V`. For example, for
/// `IntVid`, this is `Option<IntVarValue>`, representing some
/// (possibly not yet known) sort of integer.
///
/// Clients are expected to provide implementations of this trait; you
/// can see some examples in the `test` module.
pub trait UnifyKey: Copy + Clone + Debug + PartialEq {
    type Value: UnifyValue;
    /// Returns this key's dense index; must be the inverse of `from_index`.
    fn index(&self) -> u32;
    /// Rebuilds a key from its dense index; must be the inverse of `index`.
    fn from_index(u: u32) -> Self;
    /// Short name for this key type, used in debug logging.
    fn tag() -> &'static str;
    /// If true, then `self` should be preferred as root to `other`.
    /// Note that we assume a consistent partial ordering, so
    /// returning true implies that `other.prefer_as_root_to(self)`
    /// would return false. If there is no ordering between two keys
    /// (i.e., `a.prefer_as_root_to(b)` and `b.prefer_as_root_to(a)`
    /// both return false) then the rank will be used to determine the
    /// root in an optimal way.
    ///
    /// NB. The only reason to implement this method is if you want to
    /// control what value is returned from `find()`. In general, it
    /// is better to let the unification table determine the root,
    /// since overriding the rank can cause execution time to increase
    /// dramatically.
    #[allow(unused_variables)]
    fn order_roots(
        a: Self,
        a_value: &Self::Value,
        b: Self,
        b_value: &Self::Value,
    ) -> Option<(Self, Self)> {
        None
    }
}
/// Trait implemented for **values** associated with a unification
/// key. This trait defines how to merge the values from two keys that
/// are unioned together. This merging can be fallible. If you attempt
/// to union two keys whose values cannot be merged, then the error is
/// propagated up and the two keys are not unioned.
///
/// This crate provides implementations of `UnifyValue` for `()`
/// (which is infallible) and `Option<T>` (where `T: UnifyValue`). The
/// option implementation merges two sum-values using the `UnifyValue`
/// implementation of `T`.
///
/// See also `EqUnifyValue`, which is a convenience trait for cases
/// where the "merge" operation succeeds only if the two values are
/// equal.
pub trait UnifyValue: Clone + Debug {
    /// Defines the type to return when merging of two values fails.
    /// If merging is infallible, use the special struct `NoError`
    /// found in this crate, which unlocks various more convenient
    /// methods on the unification table.
    type Error;
    /// Given two values, produce a new value that combines them.
    /// If that is not possible, produce an error.
    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error>;
}
/// A convenient helper for unification values which must be equal or
/// else an error occurs. For example, if you are unifying types in a
/// simple functional language, this may be appropriate, since (e.g.)
/// you can't unify a type variable bound to `int` with one bound to
/// `float` (but you can unify two type variables both bound to
/// `int`).
///
/// Any type which implements `EqUnifyValue` automatically implements
/// `UnifyValue`; if the two values are equal, merging is permitted.
/// Otherwise, the error `(v1, v2)` is returned, where `v1` and `v2`
/// are the two unequal values.
pub trait EqUnifyValue: Eq + Clone + Debug {}
impl<T: EqUnifyValue> UnifyValue for T {
    type Error = (T, T);

    /// Merging succeeds only when both values compare equal; otherwise
    /// the unequal pair is returned as the error.
    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error> {
        if value1 != value2 {
            return Err((value1.clone(), value2.clone()));
        }
        Ok(value1.clone())
    }
}
/// A struct which can never be instantiated. Used
/// for the error type for infallible cases.
#[derive(Debug)]
pub struct NoError {
    // Private field prevents construction outside this module.
    _dummy: (),
}
/// Value of a unification key. We implement Tarjan's union-find
/// algorithm: when two keys are unified, one of them is converted
/// into a "redirect" pointing at the other. These redirects form a
/// DAG: the roots of the DAG (nodes that are not redirected) are each
/// associated with a value of type `V` and a rank. The rank is used
/// to keep the DAG relatively balanced, which helps keep the running
/// time of the algorithm under control. For more information, see
/// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>.
#[derive(PartialEq, Clone, Debug)]
pub struct VarValue<K: UnifyKey> { // FIXME pub
    parent: K, // if equal to self, this is a root
    value: K::Value, // value assigned (only relevant to root)
    rank: u32, // max depth (only relevant to root)
}
/// Table of unification keys and their values. You must define a key type K
/// that implements the `UnifyKey` trait. Unification tables can be used in two-modes:
///
/// - in-place (`UnificationTable<InPlace<K>>` or `InPlaceUnificationTable<K>`):
///   - This is the standard mutable mode, where the array is modified
///     in place.
///   - To do backtracking, you can employ the `snapshot` and `rollback_to`
///     methods.
/// - persistent (`UnificationTable<Persistent<K>>` or `PersistentUnificationTable<K>`):
///   - In this mode, we use a persistent vector to store the data, so that
///     cloning the table is an O(1) operation.
///   - This implies that ordinary operations are quite a bit slower though.
///   - Requires the `persistent` feature be selected in your Cargo.toml file.
#[derive(Clone, Debug, Default)]
pub struct UnificationTable<S: UnificationStore> {
    /// Indicates the current value of each key.
    values: S,
}
/// A unification table that uses an "in-place" vector.
#[allow(type_alias_bounds)] // bound is documentation only; not enforced on aliases
pub type InPlaceUnificationTable<K: UnifyKey> = UnificationTable<InPlace<K>>;
/// A unification table that uses a "persistent" vector.
#[cfg(feature = "persistent")]
#[allow(type_alias_bounds)]
pub type PersistentUnificationTable<K: UnifyKey> = UnificationTable<Persistent<K>>;
/// At any time, users may snapshot a unification table. The changes
/// made during the snapshot may either be *committed* or *rolled back*.
pub struct Snapshot<S: UnificationStore> {
    // Link snapshot to the unification store `S` of the table.
    marker: marker::PhantomData<S>,
    snapshot: S::Snapshot,
}
impl<K: UnifyKey> VarValue<K> {
    /// Record for a brand-new key: its own root (parent == key), rank 0.
    fn new_var(key: K, value: K::Value) -> VarValue<K> {
        VarValue::new(key, value, 0)
    }

    fn new(parent: K, value: K::Value, rank: u32) -> VarValue<K> {
        VarValue { parent, value, rank }
    }

    /// Turns this node into a child of `to`.
    fn redirect(&mut self, to: K) {
        self.parent = to;
    }

    /// Re-roots this node with the given rank and value.
    fn root(&mut self, rank: u32, value: K::Value) {
        self.rank = rank;
        self.value = value;
    }

    /// `Some(parent)` when this node is not a root, else `None`.
    fn parent(&self, self_key: K) -> Option<K> {
        self.if_not_self(self.parent, self_key)
    }

    fn if_not_self(&self, key: K, self_key: K) -> Option<K> {
        if key == self_key { None } else { Some(key) }
    }
}
// We can't use V:LatticeValue, much as I would like to,
// because frequently the pattern is that V=Option<U> for some
// other type parameter U, and we have no way to say
// Option<U>:LatticeValue.
impl<S: UnificationStore> UnificationTable<S> {
    pub fn new() -> Self {
        Self::default()
    }

    /// Starts a new snapshot. Each snapshot must be either
    /// rolled back or committed in a "LIFO" (stack) order.
    pub fn snapshot(&mut self) -> Snapshot<S> {
        Snapshot {
            marker: marker::PhantomData::<S>,
            snapshot: self.values.start_snapshot(),
        }
    }

    /// Reverses all changes since the last snapshot. Also
    /// removes any keys that have been created since then.
    pub fn rollback_to(&mut self, snapshot: Snapshot<S>) {
        debug!("{}: rollback_to()", S::tag());
        self.values.rollback_to(snapshot.snapshot);
    }

    /// Commits all changes since the last snapshot. Of course, they
    /// can still be undone if there is a snapshot further out.
    pub fn commit(&mut self, snapshot: Snapshot<S>) {
        debug!("{}: commit()", S::tag());
        self.values.commit(snapshot.snapshot);
    }

    /// Creates a fresh key with the given value.
    pub fn new_key(&mut self, value: S::Value) -> S::Key {
        let len = self.values.len();
        // Keys are dense indices: the next key is the current length.
        let key: S::Key = UnifyKey::from_index(len as u32);
        self.values.push(VarValue::new_var(key, value));
        debug!("{}: created new key: {:?}", S::tag(), key);
        key
    }

    /// Reserve memory for `num_new_keys` to be created. Does not
    /// actually create the new keys; you must then invoke `new_key`.
    pub fn reserve(&mut self, num_new_keys: usize) {
        self.values.reserve(num_new_keys);
    }

    /// Clears all unifications that have been performed, resetting to
    /// the initial state. The values of each variable are given by
    /// the closure.
    pub fn reset_unifications(
        &mut self,
        mut value: impl FnMut(S::Key) -> S::Value,
    ) {
        self.values.reset_unifications(|i| {
            let key = UnifyKey::from_index(i as u32);
            let value = value(key);
            // Every key becomes its own root again.
            VarValue::new_var(key, value)
        });
    }

    /// Returns the number of keys created so far.
    pub fn len(&self) -> usize {
        self.values.len()
    }

    /// Obtains the current value for a particular key.
    /// Not for end-users; they can use `probe_value`.
    fn value(&self, key: S::Key) -> &VarValue<S::Key> {
        &self.values[key.index() as usize]
    }

    /// Find the root node for `vid`. This uses the standard
    /// union-find algorithm with path compression:
    /// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>.
    ///
    /// NB. This is a building-block operation and you would probably
    /// prefer to call `probe` below.
    fn get_root_key(&mut self, vid: S::Key) -> S::Key {
        let redirect = {
            match self.value(vid).parent(vid) {
                None => return vid,
                Some(redirect) => redirect,
            }
        };
        let root_key: S::Key = self.get_root_key(redirect);
        if root_key != redirect {
            // Path compression
            self.update_value(vid, |value| value.parent = root_key);
        }
        root_key
    }

    fn update_value<OP>(&mut self, key: S::Key, op: OP)
    where
        OP: FnOnce(&mut VarValue<S::Key>),
    {
        self.values.update(key.index() as usize, op);
        debug!("Updated variable {:?} to {:?}", key, self.value(key));
    }

    /// Either redirects `node_a` to `node_b` or vice versa, depending
    /// on the relative rank. The value associated with the new root
    /// will be `new_value`.
    ///
    /// NB: This is the "union" operation of "union-find". It is
    /// really more of a building block. If the values associated with
    /// your key are non-trivial, you would probably prefer to call
    /// `unify_var_var` below.
    fn unify_roots(&mut self, key_a: S::Key, key_b: S::Key, new_value: S::Value) {
        debug!("unify(key_a={:?}, key_b={:?})", key_a, key_b);
        let rank_a = self.value(key_a).rank;
        let rank_b = self.value(key_b).rank;
        if let Some((new_root, redirected)) =
            S::Key::order_roots(
                key_a,
                &self.value(key_a).value,
                key_b,
                &self.value(key_b).value,
            ) {
            // The client expressed a root preference via `order_roots`.
            // compute the new rank for the new root that they chose;
            // this may not be the optimal choice.
            let new_rank = if new_root == key_a {
                debug_assert!(redirected == key_b);
                if rank_a > rank_b {
                    rank_a
                } else {
                    rank_b + 1
                }
            } else {
                debug_assert!(new_root == key_b);
                debug_assert!(redirected == key_a);
                if rank_b > rank_a {
                    rank_b
                } else {
                    rank_a + 1
                }
            };
            self.redirect_root(new_rank, redirected, new_root, new_value);
        } else if rank_a > rank_b {
            // a has greater rank, so a should become b's parent,
            // i.e., b should redirect to a.
            self.redirect_root(rank_a, key_b, key_a, new_value);
        } else if rank_a < rank_b {
            // b has greater rank, so a should redirect to b.
            self.redirect_root(rank_b, key_a, key_b, new_value);
        } else {
            // If equal, redirect one to the other and increment the
            // other's rank.
            self.redirect_root(rank_a + 1, key_a, key_b, new_value);
        }
    }

    /// Internal method to redirect `old_root_key` (which is currently
    /// a root) to a child of `new_root_key` (which will remain a
    /// root). The rank and value of `new_root_key` will be updated to
    /// `new_rank` and `new_value` respectively.
    fn redirect_root(
        &mut self,
        new_rank: u32,
        old_root_key: S::Key,
        new_root_key: S::Key,
        new_value: S::Value,
    ) {
        self.update_value(old_root_key, |old_root_value| {
            old_root_value.redirect(new_root_key);
        });
        self.update_value(new_root_key, |new_root_value| {
            new_root_value.root(new_rank, new_value);
        });
    }
}
/// ////////////////////////////////////////////////////////////////////////
/// Public API
// NOTE(review): the `'tcx` lifetime parameter below is unused by the
// impl — presumably retained for historical API reasons; confirm
// before removing.
impl<'tcx, S, K, V> UnificationTable<S>
where
    S: UnificationStore<Key = K, Value = V>,
    K: UnifyKey<Value = V>,
    V: UnifyValue,
{
    /// Unions two keys without the possibility of failure; only
    /// applicable when unify values use `NoError` as their error
    /// type.
    pub fn union<K1, K2>(&mut self, a_id: K1, b_id: K2)
    where
        K1: Into<K>,
        K2: Into<K>,
        V: UnifyValue<Error = NoError>,
    {
        self.unify_var_var(a_id, b_id).unwrap();
    }

    /// Unions a key and a value without the possibility of failure;
    /// only applicable when unify values use `NoError` as their error
    /// type.
    pub fn union_value<K1>(&mut self, id: K1, value: V)
    where
        K1: Into<K>,
        V: UnifyValue<Error = NoError>,
    {
        self.unify_var_value(id, value).unwrap();
    }

    /// Given two keys, indicates whether they have been unioned together.
    pub fn unioned<K1, K2>(&mut self, a_id: K1, b_id: K2) -> bool
    where
        K1: Into<K>,
        K2: Into<K>,
    {
        self.find(a_id) == self.find(b_id)
    }

    /// Given a key, returns the (current) root key.
    pub fn find<K1>(&mut self, id: K1) -> K
    where
        K1: Into<K>,
    {
        let id = id.into();
        self.get_root_key(id)
    }

    /// Unions together two variables, merging their values. If
    /// merging the values fails, the error is propagated and this
    /// method has no effect.
    pub fn unify_var_var<K1, K2>(&mut self, a_id: K1, b_id: K2) -> Result<(), V::Error>
    where
        K1: Into<K>,
        K2: Into<K>,
    {
        let a_id = a_id.into();
        let b_id = b_id.into();
        let root_a = self.get_root_key(a_id);
        let root_b = self.get_root_key(b_id);
        if root_a == root_b {
            return Ok(());
        }
        // Merge values *before* touching the DAG so a merge error
        // leaves the table unchanged.
        let combined = V::unify_values(&self.value(root_a).value, &self.value(root_b).value)?;
        Ok(self.unify_roots(root_a, root_b, combined))
    }

    /// Sets the value of the key `a_id` to `b`, attempting to merge
    /// with the previous value.
    pub fn unify_var_value<K1>(&mut self, a_id: K1, b: V) -> Result<(), V::Error>
    where
        K1: Into<K>,
    {
        let a_id = a_id.into();
        let root_a = self.get_root_key(a_id);
        let value = V::unify_values(&self.value(root_a).value, &b)?;
        self.update_value(root_a, |node| node.value = value);
        Ok(())
    }

    /// Returns the current value for the given key. If the key has
    /// been union'd, this will give the value from the current root.
    pub fn probe_value<K1>(&mut self, id: K1) -> V
    where
        K1: Into<K>,
    {
        let id = id.into();
        let id = self.get_root_key(id);
        self.value(id).value.clone()
    }
}
///////////////////////////////////////////////////////////////////////////
impl UnifyValue for () {
    type Error = NoError;

    /// Unifying two unit values always succeeds: `()` carries no data.
    fn unify_values(_: &(), _: &()) -> Result<(), NoError> {
        Ok(())
    }
}
impl<V: UnifyValue> UnifyValue for Option<V> {
    type Error = V::Error;

    /// `None` acts as an identity: merging with `None` keeps the other
    /// side; two `Some`s defer to `V`'s own merge.
    fn unify_values(a: &Option<V>, b: &Option<V>) -> Result<Self, V::Error> {
        match (a, b) {
            (&None, &None) => Ok(None),
            (&Some(ref v), &None) | (&None, &Some(ref v)) => Ok(Some(v.clone())),
            (&Some(ref a), &Some(ref b)) => V::unify_values(a, b).map(Some),
        }
    }
}

476
third_party/rust/ena/src/unify/tests.rs поставляемый
Просмотреть файл

@ -1,476 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Naming the benchmarks using uppercase letters helps them sort
// better.
#![allow(non_snake_case)]
#[cfg(feature = "bench")]
extern crate test;
#[cfg(feature = "bench")]
use self::test::Bencher;
use std::cmp;
use unify::{NoError, InPlace, InPlaceUnificationTable, UnifyKey, EqUnifyValue, UnifyValue};
use unify::{UnificationStore, UnificationTable};
#[cfg(feature = "persistent")]
use unify::Persistent;
/// Key whose associated value is `()`; exercises pure union-find behavior.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct UnitKey(u32);
impl UnifyKey for UnitKey {
type Value = ();
fn index(&self) -> u32 {
self.0
}
fn from_index(u: u32) -> UnitKey {
UnitKey(u)
}
fn tag() -> &'static str {
"UnitKey"
}
}
// Runs `$body` once per unification-store backend: always `InPlace`,
// plus `Persistent` when that cargo feature is enabled. `$name` is bound
// as the store type parameter inside `$body`, which must be a single
// brace-delimited token tree.
macro_rules! all_modes {
    ($name:ident for $t:ty => $body:tt) => {
        fn test_body<$name: UnificationStore<Key = $t, Value = <$t as UnifyKey>::Value>>() {
            $body
        }
        test_body::<InPlace<$t>>();
        #[cfg(feature = "persistent")]
        test_body::<Persistent<$t>>();
    }
}
// Unioning two fresh keys flips `unioned` from false to true.
#[test]
fn basic() {
    all_modes! {
        S for UnitKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(());
            let k2 = ut.new_key(());
            assert_eq!(ut.unioned(k1, k2), false);
            ut.union(k1, k2);
            assert_eq!(ut.unioned(k1, k2), true);
        }
    }
}
// Chains 2^15 keys pairwise into a single class and verifies every key
// ends up unioned with the first one.
#[test]
fn big_array() {
    all_modes! {
        S for UnitKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let mut keys = Vec::new();
            const MAX: usize = 1 << 15;
            for _ in 0..MAX {
                keys.push(ut.new_key(()));
            }
            for i in 1..MAX {
                let l = keys[i - 1];
                let r = keys[i];
                ut.union(l, r);
            }
            for i in 0..MAX {
                assert!(ut.unioned(keys[0], keys[i]));
            }
        }
    }
}
// Benchmark body shared by the `InPlace`/`Persistent` wrappers below:
// keys are created once outside the timed loop; union + membership
// checks are timed.
#[cfg(feature = "bench")]
fn big_array_bench_generic<S: UnificationStore<Key=UnitKey, Value=()>>(b: &mut Bencher) {
    let mut ut: UnificationTable<S> = UnificationTable::new();
    let mut keys = Vec::new();
    const MAX: usize = 1 << 15;
    for _ in 0..MAX {
        keys.push(ut.new_key(()));
    }
    b.iter(|| {
        for i in 1..MAX {
            let l = keys[i - 1];
            let r = keys[i];
            ut.union(l, r);
        }
        for i in 0..MAX {
            assert!(ut.unioned(keys[0], keys[i]));
        }
    })
}
// Benchmark instantiation for the in-place store.
#[cfg(feature = "bench")]
#[bench]
fn big_array_bench_InPlace(b: &mut Bencher) {
    big_array_bench_generic::<InPlace<UnitKey>>(b);
}
// Benchmark instantiation for the persistent store (feature-gated).
#[cfg(all(feature = "bench", feature = "persistent"))]
#[bench]
fn big_array_bench_Persistent(b: &mut Bencher) {
    big_array_bench_generic::<Persistent<UnitKey>>(b);
}
// Same workload as `big_array_bench_generic`, but each iteration runs
// inside a snapshot and rolls the table back afterwards.
#[cfg(feature = "bench")]
fn big_array_bench_in_snapshot_generic<S: UnificationStore<Key=UnitKey, Value=()>>(b: &mut Bencher) {
    let mut ut: UnificationTable<S> = UnificationTable::new();
    let mut keys = Vec::new();
    const MAX: usize = 1 << 15;
    for _ in 0..MAX {
        keys.push(ut.new_key(()));
    }
    b.iter(|| {
        let snapshot = ut.snapshot();
        for i in 1..MAX {
            let l = keys[i - 1];
            let r = keys[i];
            ut.union(l, r);
        }
        for i in 0..MAX {
            assert!(ut.unioned(keys[0], keys[i]));
        }
        ut.rollback_to(snapshot);
    })
}
// Snapshot-variant benchmark for the in-place store.
#[cfg(feature = "bench")]
#[bench]
fn big_array_bench_in_snapshot_InPlace(b: &mut Bencher) {
    big_array_bench_in_snapshot_generic::<InPlace<UnitKey>>(b);
}
// Snapshot-variant benchmark for the persistent store (feature-gated).
#[cfg(all(feature = "bench", feature = "persistent"))]
#[bench]
fn big_array_bench_in_snapshot_Persistent(b: &mut Bencher) {
    big_array_bench_in_snapshot_generic::<Persistent<UnitKey>>(b);
}
// Same workload again, but state is restored each iteration via a full
// clone of the table instead of a snapshot — measures clone cost.
#[cfg(feature = "bench")]
fn big_array_bench_clone_generic<S: UnificationStore<Key=UnitKey, Value=()>>(b: &mut Bencher) {
    let mut ut: UnificationTable<S> = UnificationTable::new();
    let mut keys = Vec::new();
    const MAX: usize = 1 << 15;
    for _ in 0..MAX {
        keys.push(ut.new_key(()));
    }
    b.iter(|| {
        let saved_table = ut.clone();
        for i in 1..MAX {
            let l = keys[i - 1];
            let r = keys[i];
            ut.union(l, r);
        }
        for i in 0..MAX {
            assert!(ut.unioned(keys[0], keys[i]));
        }
        ut = saved_table;
    })
}
// Clone-variant benchmark for the in-place store.
#[cfg(feature = "bench")]
#[bench]
fn big_array_bench_clone_InPlace(b: &mut Bencher) {
    big_array_bench_clone_generic::<InPlace<UnitKey>>(b);
}
// Clone-variant benchmark for the persistent store (feature-gated).
#[cfg(all(feature = "bench", feature = "persistent"))]
#[bench]
fn big_array_bench_clone_Persistent(b: &mut Bencher) {
    big_array_bench_clone_generic::<Persistent<UnitKey>>(b);
}
// Builds two interleaved classes (even indices and odd indices) by
// unioning each key with the one two positions back, then checks that
// adjacent keys are separate while same-parity keys are joined.
#[test]
fn even_odd() {
    all_modes! {
        S for UnitKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let mut keys = Vec::new();
            const MAX: usize = 1 << 10;
            for i in 0..MAX {
                let key = ut.new_key(());
                keys.push(key);
                if i >= 2 {
                    ut.union(key, keys[i - 2]);
                }
            }
            for i in 1..MAX {
                assert!(!ut.unioned(keys[i - 1], keys[i]));
            }
            for i in 2..MAX {
                assert!(ut.unioned(keys[i - 2], keys[i]));
            }
        }
    }
}
/// Key carrying an optional `i32` value; `None` means "unconstrained".
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct IntKey(u32);
// Straightforward index-based key; the interesting behavior lives in
// the `Option<i32>` value type.
impl UnifyKey for IntKey {
    type Value = Option<i32>;
    fn index(&self) -> u32 {
        self.0
    }
    fn from_index(u: u32) -> IntKey {
        IntKey(u)
    }
    fn tag() -> &'static str {
        "IntKey"
    }
}
// Marker impl: `EqUnifyValue` presumably derives equality-based
// unification for i32 — confirm against ena's unify module.
impl EqUnifyValue for i32 {}
// Assigning the same concrete value to two keys and then unioning them
// must succeed, and the merged class keeps that value.
#[test]
fn unify_same_int_twice() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_value(k2, Some(22)).is_ok());
            assert!(ut.unify_var_var(k1, k2).is_ok());
            assert_eq!(ut.probe_value(k1), Some(22));
        }
    }
}
// A value assigned to one member of a class is visible when probing
// another member.
#[test]
fn unify_vars_then_int_indirect() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            assert!(ut.unify_var_var(k1, k2).is_ok());
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert_eq!(ut.probe_value(k2), Some(22));
        }
    }
}
// Assigning a conflicting value through the other member of a unified
// class must fail.
#[test]
fn unify_vars_different_ints_1() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            assert!(ut.unify_var_var(k1, k2).is_ok());
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_value(k2, Some(23)).is_err());
        }
    }
}
// Same as unify_vars_different_ints_1, but with the union arguments
// reversed — the conflict must be detected regardless of root choice.
#[test]
fn unify_vars_different_ints_2() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            assert!(ut.unify_var_var(k2, k1).is_ok());
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_value(k2, Some(23)).is_err());
        }
    }
}
// Two keys already bound to distinct values cannot be unioned.
#[test]
fn unify_distinct_ints_then_vars() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_value(k2, Some(23)).is_ok());
            assert!(ut.unify_var_var(k2, k1).is_err());
        }
    }
}
// Conflicts must be detected even when the bound value lives on the
// root of a multi-key class.
#[test]
fn unify_root_value_1() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            let k3 = ut.new_key(None);
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_var(k1, k2).is_ok());
            assert!(ut.unify_var_value(k3, Some(23)).is_ok());
            assert!(ut.unify_var_var(k1, k3).is_err());
        }
    }
}
// Same as unify_root_value_1, with the first union reversed so the
// other key may become the root.
#[test]
fn unify_root_value_2() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            let k3 = ut.new_key(None);
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_var(k2, k1).is_ok());
            assert!(ut.unify_var_value(k3, Some(23)).is_ok());
            assert!(ut.unify_var_var(k1, k3).is_err());
        }
    }
}
/// Key type whose value is a rank; used to test custom root ordering.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct OrderedKey(u32);
/// Comparable rank value carried by `OrderedKey`; higher rank wins root.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct OrderedRank(u32);
// Overrides `order_roots` so that the key with the strictly greater
// rank becomes the root; equal ranks defer to the default policy (None).
impl UnifyKey for OrderedKey {
    type Value = OrderedRank;
    fn index(&self) -> u32 {
        self.0
    }
    fn from_index(u: u32) -> OrderedKey {
        OrderedKey(u)
    }
    fn tag() -> &'static str {
        "OrderedKey"
    }
    fn order_roots(
        a: OrderedKey,
        a_rank: &OrderedRank,
        b: OrderedKey,
        b_rank: &OrderedRank,
    ) -> Option<(OrderedKey, OrderedKey)> {
        // NOTE(review): leftover debug print — noisy under `--nocapture`;
        // consider removing upstream.
        println!("{:?} vs {:?}", a_rank, b_rank);
        if a_rank > b_rank {
            Some((a, b))
        } else if b_rank > a_rank {
            Some((b, a))
        } else {
            None
        }
    }
}
// Merging two ranks takes the maximum; this can never fail.
impl UnifyValue for OrderedRank {
    type Error = NoError;
    fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
        Ok(OrderedRank(cmp::max(value1.0, value2.0)))
    }
}
// With equal ranks everywhere, the higher-rank subtree built first
// should stay the root when a lower-rank class is merged in.
#[test]
fn ordered_key() {
    all_modes! {
        S for OrderedKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k0_1 = ut.new_key(OrderedRank(0));
            let k0_2 = ut.new_key(OrderedRank(0));
            let k0_3 = ut.new_key(OrderedRank(0));
            let k0_4 = ut.new_key(OrderedRank(0));
            ut.union(k0_1, k0_2); // rank of one of those will now be 1
            ut.union(k0_3, k0_4); // rank of new root also 1
            ut.union(k0_1, k0_3); // rank of new root now 2
            let k0_5 = ut.new_key(OrderedRank(0));
            let k0_6 = ut.new_key(OrderedRank(0));
            ut.union(k0_5, k0_6); // rank of new root now 1
            ut.union(k0_1, k0_5); // new root rank 2, should not be k0_5 or k0_6
            assert!(vec![k0_1, k0_2, k0_3, k0_4].contains(&ut.find(k0_1)));
        }
    }
}
// A key created with rank 1 should win the root election over a
// rank-0 class per `order_roots`.
// NOTE(review): the macro binds `S for UnitKey` but the body ignores
// `S` and hard-codes `InPlaceUnificationTable<OrderedKey>`, so only the
// in-place mode is actually exercised — confirm upstream intent.
#[test]
fn ordered_key_k1() {
    all_modes! {
        S for UnitKey => {
            let mut ut: InPlaceUnificationTable<OrderedKey> = UnificationTable::new();
            let k0_1 = ut.new_key(OrderedRank(0));
            let k0_2 = ut.new_key(OrderedRank(0));
            let k0_3 = ut.new_key(OrderedRank(0));
            let k0_4 = ut.new_key(OrderedRank(0));
            ut.union(k0_1, k0_2); // rank of one of those will now be 1
            ut.union(k0_3, k0_4); // rank of new root also 1
            ut.union(k0_1, k0_3); // rank of new root now 2
            let k1_5 = ut.new_key(OrderedRank(1));
            let k1_6 = ut.new_key(OrderedRank(1));
            ut.union(k1_5, k1_6); // rank of new root now 1
            ut.union(k0_1, k1_5); // even though k1 has lower rank, it wins
            assert!(
                vec![k1_5, k1_6].contains(&ut.find(k0_1)),
                "unexpected choice for root: {:?}",
                ut.find(k0_1)
            );
        }
    }
}
/// Test that we *can* clone, and that the clone is an independent
/// snapshot: mutations to the original after cloning are not visible
/// through the clone.
#[test]
fn clone_table() {
    all_modes! {
        S for IntKey => {
            let mut ut: UnificationTable<S> = UnificationTable::new();
            let k1 = ut.new_key(None);
            let k2 = ut.new_key(None);
            let k3 = ut.new_key(None);
            assert!(ut.unify_var_value(k1, Some(22)).is_ok());
            assert!(ut.unify_var_value(k2, Some(22)).is_ok());
            assert!(ut.unify_var_var(k1, k2).is_ok());
            assert_eq!(ut.probe_value(k3), None);
            let mut ut1 = ut.clone();
            assert_eq!(ut1.probe_value(k1), Some(22));
            assert_eq!(ut1.probe_value(k3), None);
            assert!(ut.unify_var_value(k3, Some(44)).is_ok());
            assert_eq!(ut1.probe_value(k1), Some(22));
            assert_eq!(ut1.probe_value(k3), None);
            assert_eq!(ut.probe_value(k3), Some(44));
        }
    }
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"8dfde677381046100ae39607ad71be900b925a46cda77e098edaf06d94bb955f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"ce592787ff2321feab698a4c612237f4378cc658ebb1d472913e5802cc47afb4","README.rst":"6d40c0a15609101185e03743f00f86c8c2778b558f184a135a1082fbeee73c0f","benches/benches.rs":"745803c7962409ba8a63635336ca5f6b971ef1dc8f46e2cdee2a8a0c6b86e9a9","src/lib.rs":"26b8c35989c1894545e3772114449e02b774bb3ffc91f421db61333a65299628","src/range.rs":"6c9fd2462e353221dcf63393a78783428995a9460de3e4c799bd00a273dda9d8"},"package":"85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478"}

24
third_party/rust/fixedbitset/Cargo.toml поставляемый
Просмотреть файл

@ -1,24 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "fixedbitset"
version = "0.1.8"
authors = ["bluss"]
description = "FixedBitSet is a simple bitset collection"
documentation = "https://docs.rs/fixedbitset/"
keywords = ["container", "data-structure", "bitvec", "bitset"]
categories = ["data-structures"]
license = "MIT/Apache-2.0"
repository = "https://github.com/bluss/fixedbitset"
[package.metadata.release]
no-dev-version = true

201
third_party/rust/fixedbitset/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/fixedbitset/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2015-2017
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

75
third_party/rust/fixedbitset/README.rst поставляемый
Просмотреть файл

@ -1,75 +0,0 @@
fixedbitset
===========
A simple bitset container for Rust
Please read the `API documentation here`__
__ https://docs.rs/fixedbitset/
|build_status|_ |crates|_
.. |build_status| image:: https://travis-ci.org/bluss/fixedbitset.svg?branch=master
.. _build_status: https://travis-ci.org/bluss/fixedbitset
.. |crates| image:: http://meritbadge.herokuapp.com/fixedbitset
.. _crates: https://crates.io/crates/fixedbitset
Recent Changes
--------------
- 0.1.8
+ Add missing ``#[inline]`` on the ones iterator
+ Fix docs for ``insert_range, set_range``
- 0.1.7
+ Add fast methods ``.insert_range``, ``.set_range`` by @kennytm
- 0.1.6
+ Add iterator ``.ones()`` by @mneumann
+ Fix bug with ``.count_ones()`` where it would erroneously have an
out-of-bounds panic for even block endpoints
- 0.1.5
+ Add method ``.count_ones(range)``.
- 0.1.4
+ Remove an assertion in ``.copy_bit(from, to)`` so that it is in line
with the documentation. The ``from`` bit does not need to be in bounds.
+ Improve ``.grow()`` to use ``Vec::resize`` internally.
- 0.1.3
+ Add method ``.put()`` to enable a bit and return previous value
- 0.1.2
+ Add method ``.copy_bit()`` (by fuine)
+ impl Default
- 0.1.1
+ Update documentation URL
- 0.1.0
+ Add method ``.grow()``
License
-------
Dual-licensed to be compatible with the Rust project.
Licensed under the Apache License, Version 2.0
http://www.apache.org/licenses/LICENSE-2.0 or the MIT license
http://opensource.org/licenses/MIT, at your
option. This file may not be copied, modified, or distributed
except according to those terms.

Просмотреть файл

@ -1,133 +0,0 @@
#![feature(test)]
extern crate test;
extern crate fixedbitset;
use test::Bencher;
use fixedbitset::{FixedBitSet};
use std::mem::size_of;
/// Baseline: visit every set bit by probing each position with
/// `contains`, invoking `f` on the indices that are enabled.
#[inline]
fn iter_ones_using_contains<F: FnMut(usize)>(fb: &FixedBitSet, f: &mut F) {
    (0..fb.len())
        .filter(|&bit| fb.contains(bit))
        .for_each(|bit| f(bit));
}
/// Alternative: walk the raw `u32` blocks, peeling set bits off the
/// low end of each block and invoking `f` with the absolute bit index.
#[inline]
fn iter_ones_using_slice_directly<F: FnMut(usize)>(fb: &FixedBitSet, f: &mut F) {
    for (block_idx, &stored) in fb.as_slice().iter().enumerate() {
        let base = block_idx * size_of::<u32>() * 8;
        let mut bits: u32 = stored;
        let mut offset = 0;
        while bits != 0 {
            if bits & 1 == 1 {
                f(base + offset);
            }
            bits >>= 1;
            offset += 1;
        }
    }
}
// Worst case for the `contains` probe: 1M bits, none set.
#[bench]
fn bench_iter_ones_using_contains_all_zeros(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let fb = FixedBitSet::with_capacity(N);
    b.iter(|| {
        let mut count = 0;
        iter_ones_using_contains(&fb, &mut |_bit| count += 1);
        count
    });
}
// `contains`-probe traversal over 1M bits, all set.
#[bench]
fn bench_iter_ones_using_contains_all_ones(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let mut fb = FixedBitSet::with_capacity(N);
    fb.insert_range(..);
    b.iter(|| {
        let mut count = 0;
        iter_ones_using_contains(&fb, &mut |_bit| count += 1);
        count
    });
}
// Block-walking traversal over 1M bits, none set (skips empty blocks fast).
#[bench]
fn bench_iter_ones_using_slice_directly_all_zero(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let fb = FixedBitSet::with_capacity(N);
    b.iter(|| {
        let mut count = 0;
        iter_ones_using_slice_directly(&fb, &mut |_bit| count += 1);
        count
    });
}
// Block-walking traversal over 1M bits, all set.
#[bench]
fn bench_iter_ones_using_slice_directly_all_ones(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let mut fb = FixedBitSet::with_capacity(N);
    fb.insert_range(..);
    b.iter(|| {
        let mut count = 0;
        iter_ones_using_slice_directly(&fb, &mut |_bit| count += 1);
        count
    });
}
// The crate's own `ones()` iterator over 1M bits, none set.
#[bench]
fn bench_iter_ones_all_zeros(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let fb = FixedBitSet::with_capacity(N);
    b.iter(|| {
        let mut count = 0;
        for _ in fb.ones() {
            count += 1;
        }
        count
    });
}
// The crate's own `ones()` iterator over 1M bits, all set.
#[bench]
fn bench_iter_ones_all_ones(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let mut fb = FixedBitSet::with_capacity(N);
    fb.insert_range(..);
    b.iter(|| {
        let mut count = 0;
        for _ in fb.ones() {
            count += 1;
        }
        count
    });
}
// Bulk enable via the block-masked `insert_range(..)` fast path.
#[bench]
fn bench_insert_range(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let mut fb = FixedBitSet::with_capacity(N);
    b.iter(|| {
        fb.insert_range(..)
    });
}
// Reference point for bench_insert_range: one `insert` call per bit.
#[bench]
fn bench_insert_range_using_loop(b: &mut Bencher) {
    const N: usize = 1_000_000;
    let mut fb = FixedBitSet::with_capacity(N);
    b.iter(|| {
        for i in 0..N {
            fb.insert(i);
        }
    });
}

565
third_party/rust/fixedbitset/src/lib.rs поставляемый
Просмотреть файл

@ -1,565 +0,0 @@
//! `FixedBitSet` is a simple fixed size set of bits.
#![doc(html_root_url="https://docs.rs/fixedbitset/0.1/")]
mod range;
use std::ops::Index;
use std::cmp::{Ord, Ordering};
pub use range::IndexRange;
// Shared bool instances returned by reference (presumably by an `Index`
// impl outside this view — confirm before relying on it).
static TRUE: bool = true;
static FALSE: bool = false;
// Number of bits per storage block.
const BITS: usize = 32;
type Block = u32;
/// Splits `x` into its quotient and remainder with respect to `d`.
#[inline]
fn div_rem(x: usize, d: usize) -> (usize, usize) {
    let quotient = x / d;
    let remainder = x % d;
    (quotient, remainder)
}
/// `FixedBitSet` is a simple fixed size set of bits that each can
/// be enabled (1 / **true**) or disabled (0 / **false**).
///
/// The bit set has a fixed capacity in terms of enabling bits (and the
/// capacity can grow using the `grow` method).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct FixedBitSet {
    // Backing storage, one u32 per 32 bits; bits past `length` in the
    // last block are expected to stay zero (see `as_mut_slice`).
    data: Vec<Block>,
    /// length in bits
    length: usize,
}
impl FixedBitSet
{
/// Create a new **FixedBitSet** with a specific number of bits,
/// all initially clear.
pub fn with_capacity(bits: usize) -> Self
{
let (mut blocks, rem) = div_rem(bits, BITS);
blocks += (rem > 0) as usize;
FixedBitSet {
data: vec![0; blocks],
length: bits,
}
}
/// Grow capacity to **bits**, all new bits initialized to zero
pub fn grow(&mut self, bits: usize) {
let (mut blocks, rem) = div_rem(bits, BITS);
blocks += (rem > 0) as usize;
if bits > self.length {
self.length = bits;
self.data.resize(blocks, 0);
}
}
/// Return the length of the `FixedBitSet` in bits.
#[inline]
pub fn len(&self) -> usize { self.length }
/// Return **true** if the bit is enabled in the **FixedBitSet**,
/// **false** otherwise.
///
/// Note: bits outside the capacity are always disabled.
///
/// Note: Also available with index syntax: `bitset[bit]`.
#[inline]
pub fn contains(&self, bit: usize) -> bool
{
let (block, i) = div_rem(bit, BITS);
match self.data.get(block) {
None => false,
Some(b) => (b & (1 << i)) != 0,
}
}
/// Clear all bits.
#[inline]
pub fn clear(&mut self)
{
for elt in &mut self.data[..] {
*elt = 0
}
}
/// Enable `bit`.
///
/// **Panics** if **bit** is out of bounds.
#[inline]
pub fn insert(&mut self, bit: usize)
{
assert!(bit < self.length);
let (block, i) = div_rem(bit, BITS);
unsafe {
*self.data.get_unchecked_mut(block) |= 1 << i;
}
}
/// Enable `bit`, and return its previous value.
///
/// **Panics** if **bit** is out of bounds.
#[inline]
pub fn put(&mut self, bit: usize) -> bool
{
assert!(bit < self.length);
let (block, i) = div_rem(bit, BITS);
unsafe {
let word = self.data.get_unchecked_mut(block);
let prev = *word & (1 << i) != 0;
*word |= 1 << i;
prev
}
}
/// **Panics** if **bit** is out of bounds.
#[inline]
pub fn set(&mut self, bit: usize, enabled: bool)
{
assert!(bit < self.length);
let (block, i) = div_rem(bit, BITS);
unsafe {
let elt = self.data.get_unchecked_mut(block);
if enabled {
*elt |= 1 << i;
} else {
*elt &= !(1 << i);
}
}
}
/// Copies boolean value from specified bit to the specified bit.
///
/// **Panics** if **to** is out of bounds.
#[inline]
pub fn copy_bit(&mut self, from: usize, to: usize)
{
assert!(to < self.length);
let (to_block, t) = div_rem(to, BITS);
let enabled = self.contains(from);
unsafe {
let to_elt = self.data.get_unchecked_mut(to_block);
if enabled {
*to_elt |= 1 << t;
} else {
*to_elt &= !(1 << t);
}
}
}
/// Count the number of set bits in the given bit range.
///
/// Use `..` to count the whole content of the bitset.
///
/// **Panics** if the range extends past the end of the bitset.
#[inline]
pub fn count_ones<T: IndexRange>(&self, range: T) -> usize
{
Masks::new(range, self.length)
.map(|(block, mask)| unsafe {
let value = *self.data.get_unchecked(block);
(value & mask).count_ones() as usize
})
.sum()
}
/// Sets every bit in the given range to the given state (`enabled`)
///
/// Use `..` to toggle the whole bitset.
///
/// **Panics** if the range extends past the end of the bitset.
#[inline]
pub fn set_range<T: IndexRange>(&mut self, range: T, enabled: bool)
{
    // `Masks::new` asserts the range lies within `self.length`, so the
    // unchecked block accesses below stay in bounds.
    for (block, mask) in Masks::new(range, self.length) {
        // SAFETY: every block index yielded by `Masks` refers to one of
        // the blocks backing `self.length` bits.
        unsafe {
            if enabled {
                *self.data.get_unchecked_mut(block) |= mask;
            } else {
                *self.data.get_unchecked_mut(block) &= !mask;
            }
        }
    }
}
/// Enables every bit in the given range.
///
/// Use `..` to make the whole bitset ones.
///
/// **Panics** if the range extends past the end of the bitset.
#[inline]
pub fn insert_range<T: IndexRange>(&mut self, range: T)
{
    // Thin convenience wrapper over `set_range`.
    self.set_range(range, true);
}
/// View the bitset as a slice of `u32` blocks
///
/// Bit 0 is the least-significant bit of block 0 (see `insert`/`set`,
/// which address bit `b` as `data[b / BITS] & (1 << (b % BITS))`).
#[inline]
pub fn as_slice(&self) -> &[u32]
{
    &self.data
}
/// View the bitset as a mutable slice of `u32` blocks. Writing past the
/// bitlength in the last block will cause `contains` to return potentially
/// incorrect results for bits past the bitlength.
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [u32]
{
    &mut self.data
}
/// Iterates over all enabled bits.
///
/// Iterator element is the index of the `1` bit, type `usize`.
#[inline]
pub fn ones(&self) -> Ones {
    // Peel off the first block (if any) for the iterator to scan directly,
    // keeping the rest as a slice to advance through; an empty bitset gets
    // an all-zero block and no remainder, which yields nothing.
    let (first, rest) = match self.as_slice().split_first() {
        Some((&block, rem)) => (block, rem),
        None => (0, &[][..]),
    };
    Ones {
        current_bit_idx: 0,
        current_block_idx: 0,
        current_block: first,
        remaining_blocks: rest,
    }
}
}
/// Helper iterator state describing which bits of which blocks fall inside
/// a bit range: its `Iterator` impl yields `(block_index, mask)` pairs —
/// partial masks for the first and last blocks, full masks in between.
struct Masks {
    first_block: usize, // block containing the range's start
    first_mask: Block,  // bits of that block at positions >= start
    last_block: usize,  // block containing the range's end
    last_mask: Block,   // bits of that block at positions < end
}
impl Masks {
    /// Build the mask sequence for `range` over a bitset of `length` bits.
    ///
    /// **Panics** unless `start <= end <= length`.
    #[inline]
    fn new<T: IndexRange>(range: T, length: usize) -> Masks {
        // An open start/end defaults to the bitset's own bounds.
        let start = range.start().unwrap_or(0);
        let end = range.end().unwrap_or(length);
        assert!(start <= end && end <= length);
        let (first_block, first_rem) = div_rem(start, BITS);
        let (last_block, last_rem) = div_rem(end, BITS);
        Masks {
            first_block: first_block as usize,
            // Keep only the bits at positions >= first_rem.
            first_mask: Block::max_value() << first_rem,
            last_block: last_block as usize,
            // Keep only the bits at positions < last_rem.
            last_mask: (Block::max_value() >> 1) >> (BITS - last_rem - 1),
            // this is equivalent to `MAX >> (BITS - x)` with correct semantics when x == 0.
        }
    }
}
impl Iterator for Masks {
    // Each item pairs a block index with the mask of in-range bits there.
    type Item = (usize, Block);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first_block.cmp(&self.last_block) {
            Ordering::Less => {
                // Not at the last block yet: emit the current mask, then
                // widen to a full-block mask for all interior blocks.
                let res = (self.first_block, self.first_mask);
                self.first_block += 1;
                self.first_mask = !0;
                Some(res)
            }
            Ordering::Equal => {
                // Final block: the start and end masks overlap here; an
                // empty overlap means the range contributes nothing.
                let mask = self.first_mask & self.last_mask;
                let res = if mask == 0 {
                    None
                } else {
                    Some((self.first_block, mask))
                };
                // Step past the last block so the iterator stays finished.
                self.first_block += 1;
                res
            }
            Ordering::Greater => None,
        }
    }
}
/// Iterator over the positions of the `1` bits of a `FixedBitSet`,
/// created by `FixedBitSet::ones`.
pub struct Ones<'a> {
    current_bit_idx: usize,        // bit position the scan will test next
    current_block_idx: usize,      // index (in blocks) of `current_block`
    remaining_blocks: &'a [Block], // blocks not yet scanned
    current_block: Block           // unscanned bits of the active block, shifted down
}
impl<'a> Iterator for Ones<'a> {
    type Item = usize; // the bit position of the '1'
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let mut block = self.current_block;
        let mut idx = self.current_bit_idx;
        loop {
            // Scan the active block one bit at a time, low bit first.
            loop {
                if (block & 1) == 1 {
                    // Save the scan position so the next call resumes just
                    // past the bit we are about to yield.
                    self.current_block = block >> 1;
                    self.current_bit_idx = idx + 1;
                    return Some(idx);
                }
                // reordering the two lines below makes a huge (2x) difference in performance!
                block = block >> 1;
                idx += 1;
                if block == 0 {
                    break;
                }
            }
            // go to next block
            match self.remaining_blocks.split_first() {
                Some((&next_block, rest)) => {
                    self.remaining_blocks = rest;
                    self.current_block_idx += 1;
                    // Restart the bit index at this block's first bit.
                    idx = self.current_block_idx * BITS;
                    block = next_block;
                }
                None => {
                    // last block => done
                    return None;
                }
            }
        }
    }
}
impl Clone for FixedBitSet
{
    /// Deep-copy the bitset: duplicates the block storage and the length.
    #[inline]
    fn clone(&self) -> Self
    {
        FixedBitSet {
            data: self.data.clone(),
            length: self.length,
        }
    }

    /// Clone into an existing bitset, reusing `self`'s block allocation
    /// when it has enough capacity instead of allocating a fresh buffer
    /// (the trait's default `clone_from` always allocates via `clone`).
    #[inline]
    fn clone_from(&mut self, source: &Self)
    {
        self.data.clone_from(&source.data);
        self.length = source.length;
    }
}
/// Return **true** if the bit is enabled in the bitset,
/// or **false** otherwise.
///
/// Note: bits outside the capacity are always disabled, and thus
/// indexing a FixedBitSet will not panic.
impl Index<usize> for FixedBitSet
{
    type Output = bool;

    #[inline]
    fn index(&self, bit: usize) -> &bool
    {
        // `Index` must return a reference, so borrow one of two statics
        // according to the bit's state.
        match self.contains(bit) {
            true => &TRUE,
            false => &FALSE,
        }
    }
}
#[test]
fn it_works() {
    // Smoke test: insert/set/contains agree, queries past the capacity are
    // always false, and `Index` mirrors `contains`.
    const N: usize = 50;
    let mut fb = FixedBitSet::with_capacity(N);
    println!("{:?}", fb);
    for i in 0..(N + 10) {
        assert_eq!(fb.contains(i), false);
    }
    fb.insert(10);
    fb.set(11, false);
    fb.set(12, false);
    fb.set(12, true);
    fb.set(N-1, true);
    println!("{:?}", fb);
    assert!(fb.contains(10));
    assert!(!fb.contains(11));
    assert!(fb.contains(12));
    assert!(fb.contains(N-1));
    for i in 0..N {
        let contain = i == 10 || i == 12 || i == N - 1;
        assert_eq!(contain, fb[i]);
    }
    fb.clear();
}
#[test]
fn grow() {
    // Growing preserves the existing bits and leaves the new tail cleared;
    // the grown region is then writable.
    let mut fb = FixedBitSet::with_capacity(48);
    for i in 0..fb.len() {
        fb.set(i, true);
    }
    let old_len = fb.len();
    fb.grow(72);
    for j in 0..fb.len() {
        assert_eq!(fb.contains(j), j < old_len);
    }
    fb.set(64, true);
    assert!(fb.contains(64));
}
#[test]
fn copy_bit() {
    // copy_bit transfers single-bit state in both directions, including
    // from an out-of-bounds source, which reads as false and clears the
    // destination.
    let mut fb = FixedBitSet::with_capacity(48);
    for i in 0..fb.len() {
        fb.set(i, true);
    }
    fb.set(42, false);
    fb.copy_bit(42, 2);
    assert!(!fb.contains(42));
    assert!(!fb.contains(2));
    assert!(fb.contains(1));
    fb.copy_bit(1, 42);
    assert!(fb.contains(42));
    fb.copy_bit(1024, 42);
    assert!(!fb[42]);
}
#[test]
fn count_ones() {
    // Exercise count_ones over bounded, open-ended, empty, and full
    // ranges, with set bits straddling several 32-bit blocks.
    let mut fb = FixedBitSet::with_capacity(100);
    fb.set(11, true);
    fb.set(12, true);
    fb.set(7, true);
    fb.set(35, true);
    fb.set(40, true);
    fb.set(77, true);
    fb.set(95, true);
    fb.set(50, true);
    fb.set(99, true);
    assert_eq!(fb.count_ones(..7), 0);
    assert_eq!(fb.count_ones(..8), 1);
    assert_eq!(fb.count_ones(..11), 1);
    assert_eq!(fb.count_ones(..12), 2);
    assert_eq!(fb.count_ones(..13), 3);
    assert_eq!(fb.count_ones(..35), 3);
    assert_eq!(fb.count_ones(..36), 4);
    assert_eq!(fb.count_ones(..40), 4);
    assert_eq!(fb.count_ones(..41), 5);
    assert_eq!(fb.count_ones(50..), 4);
    assert_eq!(fb.count_ones(70..95), 1);
    assert_eq!(fb.count_ones(70..96), 2);
    assert_eq!(fb.count_ones(70..99), 2);
    assert_eq!(fb.count_ones(..), 9);
    assert_eq!(fb.count_ones(0..100), 9);
    assert_eq!(fb.count_ones(0..0), 0);
    assert_eq!(fb.count_ones(100..100), 0);
    assert_eq!(fb.count_ones(7..), 9);
    assert_eq!(fb.count_ones(8..), 8);
}
#[test]
fn ones() {
    // `ones()` yields every set bit's position exactly once, in ascending
    // order, regardless of insertion order.
    let mut fb = FixedBitSet::with_capacity(100);
    fb.set(11, true);
    fb.set(12, true);
    fb.set(7, true);
    fb.set(35, true);
    fb.set(40, true);
    fb.set(77, true);
    fb.set(95, true);
    fb.set(50, true);
    fb.set(99, true);
    let ones: Vec<_> = fb.ones().collect();
    assert_eq!(vec![7, 11, 12, 35, 40, 50, 77, 95, 99], ones);
}
#[test]
fn iter_ones_range() {
    // `ones()` reproduces every contiguous inserted range exactly, for all
    // start and end offsets within a 100-bit set (covers block boundaries).
    fn test_range(from: usize, to: usize, capa: usize) {
        assert!(to <= capa);
        let mut fb = FixedBitSet::with_capacity(capa);
        for i in from..to {
            fb.insert(i);
        }
        let ones: Vec<_> = fb.ones().collect();
        let expected: Vec<_> = (from..to).collect();
        assert_eq!(expected, ones);
    }
    for i in 0..100 {
        test_range(i, 100, 100);
        test_range(0, i, 100);
    }
}
#[should_panic]
#[test]
fn count_ones_oob() {
    // A range whose end exceeds the capacity must panic.
    let fb = FixedBitSet::with_capacity(100);
    fb.count_ones(90..101);
}
#[should_panic]
#[test]
fn count_ones_negative_range() {
    // A reversed range (start > end) must panic.
    let fb = FixedBitSet::with_capacity(100);
    fb.count_ones(90..80);
}
#[test]
fn count_ones_panic() {
    // Every in-bounds sub-range of an empty bitset counts zero and never
    // panics, across capacities on both sides of the block boundary.
    for i in 1..128 {
        let fb = FixedBitSet::with_capacity(i);
        for j in 0..fb.len() + 1 {
            for k in j..fb.len() + 1 {
                assert_eq!(fb.count_ones(j..k), 0);
            }
        }
    }
}
#[test]
fn default() {
    // A default-constructed bitset is empty (zero length).
    let fb = FixedBitSet::default();
    assert_eq!(fb.len(), 0);
}
#[test]
fn insert_range() {
    // insert_range enables exactly the requested ranges — including ones
    // that span block boundaries — and nothing outside them.
    let mut fb = FixedBitSet::with_capacity(97);
    fb.insert_range(..3);
    fb.insert_range(9..32);
    fb.insert_range(37..81);
    fb.insert_range(90..);
    for i in 0..97 {
        assert_eq!(fb.contains(i), i<3 || 9<=i&&i<32 || 37<=i&&i<81 || 90<=i);
    }
    assert!(!fb.contains(97));
    assert!(!fb.contains(127));
    assert!(!fb.contains(128));
}
#[test]
fn set_range() {
    // set_range can clear and set arbitrary sub-ranges, including an
    // empty range (40..40), leaving all other bits unchanged.
    let mut fb = FixedBitSet::with_capacity(48);
    fb.insert_range(..);
    fb.set_range(..32, false);
    fb.set_range(37.., false);
    fb.set_range(5..9, true);
    fb.set_range(40..40, true);
    for i in 0..48 {
        assert_eq!(fb.contains(i), 5<=i&&i<9 || 32<=i&&i<37);
    }
    assert!(!fb.contains(48));
    assert!(!fb.contains(64));
}

39
third_party/rust/fixedbitset/src/range.rs поставляемый
Просмотреть файл

@ -1,39 +0,0 @@
use std::ops::{
RangeFull,
RangeFrom,
RangeTo,
Range,
};
// Taken from https://github.com/bluss/odds/blob/master/src/range.rs.
/// **IndexRange** is implemented by Rust's built-in range types, produced
/// by range syntax like `..`, `a..`, `..b` or `c..d`.
///
/// Both bounds are optional: a `None` start means "from the beginning"
/// and a `None` end means "to the container's length", as interpreted by
/// the caller.
pub trait IndexRange<T=usize> {
    #[inline]
    /// Start index (inclusive)
    fn start(&self) -> Option<T> { None }
    #[inline]
    /// End index (exclusive)
    fn end(&self) -> Option<T> { None }
}
// Each built-in range type reports exactly the bounds it carries; the
// trait's `None` defaults stand in for the missing ones.
impl<T> IndexRange<T> for RangeFull {}
impl<T: Copy> IndexRange<T> for RangeFrom<T> {
    #[inline]
    fn start(&self) -> Option<T> { Some(self.start) }
}
impl<T: Copy> IndexRange<T> for RangeTo<T> {
    #[inline]
    fn end(&self) -> Option<T> { Some(self.end) }
}
impl<T: Copy> IndexRange<T> for Range<T> {
    #[inline]
    fn start(&self) -> Option<T> { Some(self.start) }
    #[inline]
    fn end(&self) -> Option<T> { Some(self.end) }
}

1
third_party/rust/itertools-0.7.6/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{"Cargo.toml":"9aee0be6b22b8323448a4af38d5b923a0fe47245a3e01573d0ad51fa1c81ffa2","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7576269ea71f767b99297934c0b2367532690f8c4badc695edf8e04ab6a1e545","Makefile":"13f1c5b88a7b946b5813f7231df2933b6b19b223e9e2d3fa63ad681192f984b5","README.rst":"5d457160aa59d3e9ea1c7ad92e1f450ac43c867a72b854baa2e536152c1335c1","benches/bench1.rs":"695e4b00addf2e8e44a09d85a588cb333c97109829e59739bdab01e7342d47b5","benches/extra/mod.rs":"4c5b03e74fc5b02383500c9da9fd6550262706ee569d70d085700f6d0b5749ba","benches/extra/zipslices.rs":"108dd488de366b2d83fb6bcc603ecbf9a017e165ac19d03440074fa244af3fb2","benches/tuple_combinations.rs":"8c14e9341d92e5cfd5f9a067d11088b37b003e82635d1ab3a8e5290e3ef83eed","benches/tuples.rs":"412a952f08bb03695952d5cfd57949dcf28be8b99e3c6653994bdb8af9654653","custom.css":"03d2316d325a09f03f0fae54d24b64f784518a8249432edbd60e01436be900d5","examples/iris.data":"596ffd580471ca4d4880f8e439c7281f3b50d8249a5960353cb200b1490f63a0","examples/iris.rs":"3996ca0a62762aec2b102f0f4244fe90d4b4354286d68d80cdc40e35f4352ba3","src/adaptors/mod.rs":"63da1c2f3cf9a33cbfd19721c955b3c2f052b4f5cf8c609dcb37b2698f30720c","src/adaptors/multi_product.rs":"e51e36b6f6ff3bb820c3544f7f80f700b0d1a2c4c7a51cff4e39b78750341d3c","src/combinations.rs":"a9a3fc78eb5c9f3933ff60275a635d0c81f4864a73515dc052aeb2add4ad4909","src/concat_impl.rs":"276339b00588f54c25f8ffbe0ae3c0031f7e52fb53c6578554a0bde1681b58a5","src/cons_tuples_impl.rs":"24511088aa946fec4ed4c46cf2d64c0928eea9e8bc410d2fbc3e344b6c63074d","src/diff.rs":"921e2b867d7b32ffedc72a5eb780811322d14d1e0883a608b9028a2afcad0df2","src/either_or_both.rs":"4739b8644fa932b7992e565be6a6bc64dff9f93e345c52f90b3e1f8a67e6d18e","src/format.rs":"412fbe02f12311c6fbcec1044f57ad6991783f5a3f323b9c391accfe4915106f","src/free.rs":"ced78d79c0c78398ac53bf628e77ae84f214972d0dad0507b6687c2f88873aaa","src/groupbylazy.rs":"4de00767e6ec2aa27419bcf7a88a2e45f41a5e73103227d99e8b2d9d14
7dcf6b","src/impl_macros.rs":"eb0bb3f70ec1bcaffa6110ae4134c777951ed1e5f48d8c811dbf0a597dc48faa","src/intersperse.rs":"8f4053a203068b0d7ddab480d454910b508bec227bdf2c0d66f6f7ed7e56ce96","src/kmerge_impl.rs":"a15ed167c535f2a5700b1efd021adbc0b793a001773cd1f6648a5068b8a32245","src/lib.rs":"729a0718daad88348d32d623255a90a7656bf1a27301817108e8829debc18548","src/merge_join.rs":"d6952726da870aab6ee0607822384aad30cc4ff0ffc7223c9a707b125b1b8b5b","src/minmax.rs":"4668a7f824fbc133599f43ffb6f7283e5bd603e07df2d8176abc6f25d6af9db0","src/multipeek_impl.rs":"ebe9544d94d0bf7200f7625241a3b5a291b7b564091a08cad40ff08b51f1b1bf","src/pad_tail.rs":"078615a2892f8c6db665074cf6f1be1bef4cf5ee418bc174edcfd4dc703e163f","src/peeking_take_while.rs":"6aea3bb40fb480e9f3695ce2a7a3a2e2346d437ca846d20e6bb3c09beb0934f4","src/process_results_impl.rs":"214608f9a67a5a55b3c9c8e136355f78d0f8a9e0000c4523faaa44a05f070961","src/put_back_n_impl.rs":"d35858184c525372b22d14d42cdf63726cf0fd50f5bd42ec7a82d55a8e180e9f","src/rciter_impl.rs":"449262cb5c2d1e9affe0b4020f1c28aa316a3718afe094667b79bbff1557d5e6","src/repeatn.rs":"5192191b87efe68f60353f742114da33e4d490a302671036f1b262d1f0a004c1","src/size_hint.rs":"c1d35b422a696cf3d63e7c90d8f9fdf01a304cf8156e914287c4ef48fea62dd3","src/sources.rs":"9d7eb4dbd87f659d04b4980238c5fc72b71472e16861d17a32cab9b77a3df06a","src/tee.rs":"f8cf7fb07506b3a1f2d59a1ec5a6b858534af90df1dad745f38761e73eab6baf","src/tuple_impl.rs":"0a26201089b3ee6546e174fc7f89aadde415f4eb201160f3d9b42fe857d03946","src/unique_impl.rs":"601a231786f61b503e55c22def380fa8b026b615125bcf90356587c761892bc8","src/with_position.rs":"d922f045f6fa090a431be928f3221c6dc37ac6f9bb54461b3b84f99a7e91244a","src/zip_eq_impl.rs":"95e493deeadd640751f5c49f55008bd31218978f38396967bc4a356f6f11d209","src/zip_longest.rs":"8ab899b3bf0295cbc076770b003499b91e93310f02ce05d15b91d39aa218bdd2","src/ziptuple.rs":"bd75c3f9493a7b9f949f27310c2f0a7a3504baac988fd7399dd252389be1b39d","tests/merge_join.rs":"546eaffae40010f15a7bcf95bc53f5e9b67424c5b93df6f
fb0aaa1e48e8b90c0","tests/peeking_take_while.rs":"a2ae6474e09620a47bb8a6e3c62929261e72c52881370adb2d22e89aa9e9aec8","tests/quick.rs":"f80a6d47fc233fd23efaa0a3358419b403869d273cc0c82049e09114d0e0bcb5","tests/test_core.rs":"21037f9af8b5a69ebc02636fe2128f4d36a46ca9241c6bee74f32f6446afcaa8","tests/test_std.rs":"756db4247a7dae7ae599ab2796d6e1e2fc740102cc2e57a73bec0a2ead3335e8","tests/tuples.rs":"5323d15a7abf6545b2655167d3206b6cf6a947e9409a244ea6a8cf4ad8ceac64","tests/zip.rs":"fe213d70c4fa114cb4d1930a6b971f4af617a239041ddb87e6b5a9bbe62261b8"},"package":"b07332223953b5051bceb67e8c4700aa65291535568e1f12408c43c4a42c0394"}

Просмотреть файл

@ -11,48 +11,33 @@
# will likely look very different (and much more reasonable)
[package]
name = "ordermap"
version = "0.3.5"
name = "itertools"
version = "0.7.6"
authors = ["bluss"]
description = "A hash table with consistent order and fast iteration."
documentation = "https://docs.rs/ordermap/"
keywords = ["hashmap"]
categories = ["data-structures"]
license = "Apache-2.0/MIT"
repository = "https://github.com/bluss/ordermap"
[package.metadata.docs.rs]
features = ["serde-1"]
description = "Extra iterator adaptors, iterator methods, free functions, and macros."
documentation = "https://docs.rs/itertools/"
keywords = ["iterator", "data-structure", "zip", "product", "group-by"]
categories = ["algorithms", "rust-patterns"]
license = "MIT/Apache-2.0"
repository = "https://github.com/bluss/rust-itertools"
[package.metadata.release]
no-dev-version = true
[profile.bench]
debug = true
[lib]
test = false
bench = false
[dependencies.serde]
[dependencies.either]
version = "1.0"
optional = true
[dev-dependencies.fnv]
version = "1.0"
[dev-dependencies.itertools]
version = "0.7.0"
[dev-dependencies.lazy_static]
version = "1"
default-features = false
[dev-dependencies.permutohedron]
version = "0.2"
[dev-dependencies.quickcheck]
version = "0.6"
version = "0.5"
default-features = false
[dev-dependencies.rand]
version = "0.4"
[dev-dependencies.serde_test]
version = "1.0.5"
[features]
serde-1 = ["serde"]
test_debug = []
test_low_transition_point = []
default = ["use_std"]
use_std = []

Просмотреть файл

Просмотреть файл

Просмотреть файл

471
third_party/rust/itertools-0.7.6/README.rst поставляемый Normal file
Просмотреть файл

@ -0,0 +1,471 @@
Itertools
=========
Extra iterator adaptors, functions and macros.
Please read the `API documentation here`__
__ https://docs.rs/itertools/
|build_status|_ |crates|_
.. |build_status| image:: https://travis-ci.org/bluss/rust-itertools.svg?branch=master
.. _build_status: https://travis-ci.org/bluss/rust-itertools
.. |crates| image:: http://meritbadge.herokuapp.com/itertools
.. _crates: https://crates.io/crates/itertools
How to use with cargo:
.. code:: toml
[dependencies]
itertools = "0.7.3"
How to use in your crate:
.. code:: rust
#[macro_use] extern crate itertools;
use itertools::Itertools;
How to contribute:
- Fix a bug or implement a new thing
- Include tests for your new feature, preferably a quickcheck test
- Make a Pull Request
Recent Changes
--------------
- 0.7.6
- Add new adaptor ``.multi_cartesian_product()`` which is an n-ary product
iterator by @tobz1000
- Add new method ``.sorted_by_key()`` by @Xion
- Provide simpler and faster ``.count()`` for ``.unique()`` and ``.unique_by()``
- 0.7.5
- ``.multipeek()`` now implements ``PeekingNext``, by @nicopap.
- 0.7.4
- Add new adaptor ``.update()`` by @lucasem; this adaptor is used
to modify an element before passing it on in an iterator chain.
- 0.7.3
- Add new method ``.collect_tuple()`` by @matklad; it makes a tuple out of
the iterator's elements if the number of them matches **exactly**.
- Implement ``fold`` and ``collect`` for ``.map_results()`` which means
it reuses the code of the standard ``.map()`` for these methods.
- 0.7.2
- Add new adaptor ``.merge_join_by`` by @srijs; a heterogeneous merge join
for two ordered sequences.
- 0.7.1
- Iterator adaptors and iterators in itertools now use the same ``must_use``
reminder that the standard library adaptors do, by @matematikaedit and @bluss
*“iterator adaptors are lazy and do nothing unless consumed”*.
- 0.7.0
- Faster ``izip!()`` by @krdln
- ``izip!()`` is now a wrapper for repeated regular ``.zip()`` and
a single ``.map()``. This means it optimizes as well as the standard
library ``.zip()`` it uses.
**Note:** ``multizip`` and ``izip!()`` are now different! The former
has a named type but the latter optimizes better.
- Faster ``.unique()``
- ``no_std`` support, which is opt-in!
- Many lovable features are still there without std, like ``izip!()``
or ``.format()`` or ``.merge()``, but not those that use collections.
- Trait bounds were required up front instead of just on the type:
``group_by``'s ``PartialEq`` by @Phlosioneer and ``repeat_call``'s
``FnMut``.
- Removed deprecated constructor ``Zip::new`` — use ``izip!()`` or ``multizip()``
- 0.6.5
- Fix bug in ``.cartesian_product()``'s fold (which only was visible for
unfused iterators).
- 0.6.4
- Add specific ``fold`` implementations for ``.cartesian_product()`` and
``cons_tuples()``, which improves their performance in fold, foreach, and
iterator consumers derived from them.
- 0.6.3
- Add iterator adaptor ``.positions(predicate)`` by @tmccombs
- 0.6.2
- Add function ``process_results`` which can “lift” a function of the regular
values of an iterator so that it can process the ``Ok`` values from an
iterator of ``Results`` instead, by @shepmaster
- Add iterator method ``.concat()`` which combines all iterator elements
into a single collection using the ``Extend`` trait, by @srijs
- 0.6.1
- Better size hint testing and subsequent size hint bugfixes by @rkarp.
Fixes bugs in product, interleave_shortest size hints.
- New iterator method ``.all_equal()`` by @phimuemue
- 0.6.0
- Deprecated names were removed in favour of their replacements
- ``.flatten()`` does not implement double ended iteration anymore
- ``.fold_while()`` uses ``&mut self`` and returns ``FoldWhile<T>``, for
composability (#168)
- ``.foreach()`` and ``.fold1()`` use ``self``, like ``.fold()`` does.
- ``.combinations(0)`` now produces a single empty vector. (#174)
- 0.5.10
- Add itertools method ``.kmerge_by()`` (and corresponding free function)
- Relaxed trait requirement of ``.kmerge()`` and ``.minmax()`` to PartialOrd.
- 0.5.9
- Add multipeek method ``.reset_peek()``
- Add categories
- 0.5.8
- Add iterator adaptor ``.peeking_take_while()`` and its trait ``PeekingNext``.
- 0.5.7
- Add iterator adaptor ``.with_position()``
- Fix multipeek's performance for long peeks by using ``VecDeque``.
- 0.5.6
- Add ``.map_results()``
- 0.5.5
- Many more adaptors now implement ``Debug``
- Add free function constructor ``repeat_n``. ``RepeatN::new`` is now
deprecated.
- 0.5.4
- Add infinite generator function ``iterate``, that takes a seed and a
closure.
- 0.5.3
- Special-cased ``.fold()`` for flatten and put back. ``.foreach()``
now uses fold on the iterator, to pick up any iterator specific loop
implementation.
- ``.combinations(n)`` asserts up front that ``n != 0``, instead of
running into an error on the second iterator element.
- 0.5.2
- Add ``.tuples::<T>()`` that iterates by two, three or four elements at
a time (where ``T`` is a tuple type).
- Add ``.tuple_windows::<T>()`` that iterates using a window of the
two, three or four most recent elements.
- Add ``.next_tuple::<T>()`` method, that picks the next two, three or four
elements in one go.
- ``.interleave()`` now has an accurate size hint.
- 0.5.1
- Workaround module/function name clash that made racer crash on completing
itertools. Only internal changes needed.
- 0.5.0
- `Release announcement <http://bluss.github.io/rust/2016/09/26/itertools-0.5.0/>`_
- Renamed:
- combinations is now tuple_combinations
- combinations_n to combinations
- group_by_lazy, chunks_lazy to group_by, chunks
- Unfold::new to unfold()
- RepeatCall::new to repeat_call()
- Zip::new to multizip
- PutBack::new, PutBackN::new to put_back, put_back_n
- PutBack::with_value is now a builder setter, not a constructor
- MultiPeek::new, .multipeek() to multipeek()
- format to format_with and format_default to format
- .into_rc() to rciter
- ``Partition`` enum is now ``Either``
- Module reorganization:
- All iterator structs are under ``itertools::structs`` but also
reexported to the top level, for backwards compatibility
- All free functions are reexported at the root, ``itertools::free`` will
be removed in the next version
- Removed:
- ZipSlices, use .zip() instead
- .enumerate_from(), ZipTrusted, due to being unstable
- .mend_slices(), moved to crate odds
- Stride, StrideMut, moved to crate odds
- linspace(), moved to crate itertools-num
- .sort_by(), use .sorted_by()
- .is_empty_hint(), use .size_hint()
- .dropn(), use .dropping()
- .map_fn(), use .map()
- .slice(), use .take() / .skip()
- helper traits in misc
- ``new`` constructors on iterator structs, use Itertools trait or free
functions instead
- ``itertools::size_hint`` is now private
- Behaviour changes:
- format and format_with helpers now panic if you try to format them more
than once.
- ``repeat_call`` is not double ended anymore
- New features:
- tuple flattening iterator is constructible with ``cons_tuples``
- itertools reexports ``Either`` from the ``either`` crate. ``Either<L, R>``
is an iterator when ``L, R`` are.
- ``MinMaxResult`` now implements Copy and Clone
- tuple_combinations supports 1-4 tuples of combinations (previously just 2)
- 0.4.19
- Add ``.minmax_by()``
- Add ``itertools::free::cloned``
- Add ``itertools::free::rciter``
- Improve ``.step(n)`` slightly to take advantage of specialized Fuse better.
- 0.4.18
- Only changes related to the "unstable" crate feature. This feature is more
or less deprecated.
- Use deprecated warnings when unstable is enabled. .enumerate_from() will
be removed imminently since it's using a deprecated libstd trait.
- 0.4.17
- Fix bug in .kmerge() that caused it to often produce the wrong order (#134)
- 0.4.16
- Improve precision of the interleave_shortest adaptor's size hint (it is
now computed exactly when possible).
- 0.4.15
- Fixup on top of the workaround in 0.4.14. A function in itertools::free was
removed by mistake and now it is added back again.
- 0.4.14
- Workaround an upstream regression in a rust nightly build that broke
compilation of itertools::free::{interleave, merge}
- 0.4.13
- Add .minmax() and .minmax_by_key(), iterator methods for finding both minimum
and maximum in one scan.
- Add .format_default(), a simpler version of .format() (lazy formatting
for iterators).
- 0.4.12
- Add .zip_eq(), an adaptor like .zip() except it ensures iterators
of unequal length don't pass silently (instead it panics).
- Add .fold_while(), an iterator method that is a fold that
can short-circuit.
- Add .partition_map(), an iterator method that can separate elements
into two collections.
- 0.4.11
- Add .get() for Stride{,Mut} and .get_mut() for StrideMut
- 0.4.10
- Improve performance of .kmerge()
- 0.4.9
- Add k-ary merge adaptor .kmerge()
- Fix a bug in .islice() with ranges a..b where a > b.
- 0.4.8
- Implement Clone, Debug for Linspace
- 0.4.7
- Add function diff_with() that compares two iterators
- Add .combinations_n(), an n-ary combinations iterator
- Add methods PutBack::with_value and PutBack::into_parts.
- 0.4.6
- Add method .sorted()
- Add module ``itertools::free`` with free function variants of common
iterator adaptors and methods.
For example ``enumerate(iterable)``, ``rev(iterable)``, and so on.
- 0.4.5
- Add .flatten()
- 0.4.4
- Allow composing ZipSlices with itself
- 0.4.3
- Write iproduct!() as a single expression; this allows temporary values
in its arguments.
- 0.4.2
- Add .fold_options()
- Require Rust 1.1 or later
- 0.4.1
- Update .dropping() to take advantage of .nth()
- 0.4.0
- .merge(), .unique() and .dedup() now perform better due to not using
function pointers
- Add free functions enumerate() and rev()
- Breaking changes:
- Return types of .merge() and .merge_by() renamed and changed
- Method Merge::new removed
- .merge_by() now takes a closure that returns bool.
- Return type of .dedup() changed
- Return type of .mend_slices() changed
- Return type of .unique() changed
- Removed function times(), struct Times: use a range instead
- Removed deprecated macro icompr!()
- Removed deprecated FnMap and method .fn_map(): use .map_fn()
- .interleave_shortest() is no longer guaranteed to act like fused
- 0.3.25
- Rename .sort_by() to .sorted_by(). Old name is deprecated.
- Fix well-formedness warnings from RFC 1214, no user visible impact
- 0.3.24
- Improve performance of .merge()'s ordering function slightly
- 0.3.23
- Added .chunks(), similar to (and based on) .group_by_lazy().
- Tweak linspace to match numpy.linspace and make it double ended.
- 0.3.22
- Added ZipSlices, a fast zip for slices
- 0.3.21
- Remove `Debug` impl for `Format`, it will have different use later
- 0.3.20
- Optimize .group_by_lazy()
- 0.3.19
- Added .group_by_lazy(), a possibly nonallocating group by
- Added .format(), a nonallocating formatting helper for iterators
- Remove uses of RandomAccessIterator since it has been deprecated in rust.
- 0.3.17
- Added (adopted) Unfold from rust
- 0.3.16
- Added adaptors .unique(), .unique_by()
- 0.3.15
- Added method .sort_by()
- 0.3.14
- Added adaptor .while_some()
- 0.3.13
- Added adaptor .interleave_shortest()
- Added adaptor .pad_using()
- 0.3.11
- Added assert_equal function
- 0.3.10
- Bugfix .combinations() size_hint.
- 0.3.8
- Added source RepeatCall
- 0.3.7
- Added adaptor PutBackN
- Added adaptor .combinations()
- 0.3.6
- Added itertools::partition, partition a sequence in place based on a predicate.
- Deprecate icompr!() with no replacement.
- 0.3.5
- .map_fn() replaces deprecated .fn_map().
- 0.3.4
- .take_while_ref() *by-ref adaptor*
- .coalesce() *adaptor*
- .mend_slices() *adaptor*
- 0.3.3
- .dropping_back() *method*
- .fold1() *method*
- .is_empty_hint() *method*
License
-------
Dual-licensed to be compatible with the Rust project.
Licensed under the Apache License, Version 2.0
http://www.apache.org/licenses/LICENSE-2.0 or the MIT license
http://opensource.org/licenses/MIT, at your
option. This file may not be copied, modified, or distributed
except according to those terms.

735
third_party/rust/itertools-0.7.6/benches/bench1.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,735 @@
#![feature(test)]
extern crate test;
#[macro_use] extern crate itertools;
use test::{black_box};
use itertools::Itertools;
use itertools::free::cloned;
use std::iter::repeat;
use std::cmp;
use std::ops::Add;
mod extra;
use extra::ZipSlices;
#[bench]
fn slice_iter(b: &mut test::Bencher)
{
    // Baseline: forward iteration over a 20-element slice.
    let xs: Vec<_> = repeat(1i32).take(20).collect();
    b.iter(|| for elt in xs.iter() {
        test::black_box(elt);
    })
}
#[bench]
fn slice_iter_rev(b: &mut test::Bencher)
{
    // Baseline: reverse iteration over a 20-element slice.
    let xs: Vec<_> = repeat(1i32).take(20).collect();
    b.iter(|| for elt in xs.iter().rev() {
        test::black_box(elt);
    })
}
#[bench]
fn zip_default_zip(b: &mut test::Bencher)
{
    // std `.zip()` over two unequal-length vectors (stops at the shorter).
    let xs = vec![0; 1024];
    let ys = vec![0; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        for (&x, &y) in xs.iter().zip(&ys) {
            test::black_box(x);
            test::black_box(y);
        }
    })
}
#[bench]
fn zipdot_i32_default_zip(b: &mut test::Bencher)
{
    // i32 dot product computed with std `.zip()`.
    let xs = vec![2; 1024];
    let ys = vec![2; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        let mut s = 0;
        for (&x, &y) in xs.iter().zip(&ys) {
            s += x * y;
        }
        s
    })
}
#[bench]
fn zipdot_f32_default_zip(b: &mut test::Bencher)
{
    // f32 dot product computed with std `.zip()`.
    let xs = vec![2f32; 1024];
    let ys = vec![2f32; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        let mut s = 0.;
        for (&x, &y) in xs.iter().zip(&ys) {
            s += x * y;
        }
        s
    })
}
#[bench]
fn zip_default_zip3(b: &mut test::Bencher)
{
    // Three-way iteration via nested std `.zip()` calls.
    let xs = vec![0; 1024];
    let ys = vec![0; 768];
    let zs = vec![0; 766];
    let xs = black_box(xs);
    let ys = black_box(ys);
    let zs = black_box(zs);
    b.iter(|| {
        for ((&x, &y), &z) in xs.iter().zip(&ys).zip(&zs) {
            test::black_box(x);
            test::black_box(y);
            test::black_box(z);
        }
    })
}
/*
#[bench]
fn zip_slices_ziptuple(b: &mut test::Bencher)
{
let xs = vec![0; 1024];
let ys = vec![0; 768];
b.iter(|| {
let xs = black_box(&xs);
let ys = black_box(&ys);
for (&x, &y) in Zip::new((xs, ys)) {
test::black_box(x);
test::black_box(y);
}
})
}
*/
#[bench]
fn zipslices(b: &mut test::Bencher)
{
    // Same traversal through the local `ZipSlices` adaptor (see benches/extra).
    let xs = vec![0; 1024];
    let ys = vec![0; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        for (&x, &y) in ZipSlices::new(&xs, &ys) {
            test::black_box(x);
            test::black_box(y);
        }
    })
}
#[bench]
fn zipslices_mut(b: &mut test::Bencher)
{
    // `ZipSlices` with a mutable right-hand slice.
    let xs = vec![0; 1024];
    let ys = vec![0; 768];
    let xs = black_box(xs);
    let mut ys = black_box(ys);
    b.iter(|| {
        for (&x, &mut y) in ZipSlices::from_slices(&xs[..], &mut ys[..]) {
            test::black_box(x);
            test::black_box(y);
        }
    })
}
#[bench]
fn zipdot_i32_zipslices(b: &mut test::Bencher)
{
    // i32 dot product through the `ZipSlices` adaptor.
    let xs = vec![2; 1024];
    let ys = vec![2; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        let mut s = 0i32;
        for (&x, &y) in ZipSlices::new(&xs, &ys) {
            s += x * y;
        }
        s
    })
}
#[bench]
fn zipdot_f32_zipslices(b: &mut test::Bencher)
{
    // f32 dot product through the `ZipSlices` adaptor.
    let xs = vec![2f32; 1024];
    let ys = vec![2f32; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        let mut s = 0.;
        for (&x, &y) in ZipSlices::new(&xs, &ys) {
            s += x * y;
        }
        s
    })
}
#[bench]
fn zip_checked_counted_loop(b: &mut test::Bencher)
{
    // Indexed loop with up-front equal-length reslicing so the compiler
    // can elide the per-access bounds checks.
    let xs = vec![0; 1024];
    let ys = vec![0; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        // Must slice to equal lengths, and then bounds checks are eliminated!
        let len = cmp::min(xs.len(), ys.len());
        let xs = &xs[..len];
        let ys = &ys[..len];
        for i in 0..len {
            let x = xs[i];
            let y = ys[i];
            test::black_box(x);
            test::black_box(y);
        }
    })
}
#[bench]
fn zipdot_i32_checked_counted_loop(b: &mut test::Bencher)
{
    // i32 dot product with the equal-length reslicing trick.
    let xs = vec![2; 1024];
    let ys = vec![2; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        // Must slice to equal lengths, and then bounds checks are eliminated!
        let len = cmp::min(xs.len(), ys.len());
        let xs = &xs[..len];
        let ys = &ys[..len];
        let mut s = 0i32;
        for i in 0..len {
            s += xs[i] * ys[i];
        }
        s
    })
}
#[bench]
fn zipdot_f32_checked_counted_loop(b: &mut test::Bencher)
{
    // f32 dot product with the equal-length reslicing trick.
    let xs = vec![2f32; 1024];
    let ys = vec![2f32; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        // Must slice to equal lengths, and then bounds checks are eliminated!
        let len = cmp::min(xs.len(), ys.len());
        let xs = &xs[..len];
        let ys = &ys[..len];
        let mut s = 0.;
        for i in 0..len {
            s += xs[i] * ys[i];
        }
        s
    })
}
#[bench]
fn zipdot_f32_checked_counted_unrolled_loop(b: &mut test::Bencher)
{
    // f32 dot product, manually unrolled 8-wide with eight partial sums to
    // enable vectorization; a scalar tail loop handles the remainder.
    let xs = vec![2f32; 1024];
    let ys = vec![2f32; 768];
    let xs = black_box(xs);
    let ys = black_box(ys);
    b.iter(|| {
        // Must slice to equal lengths, and then bounds checks are eliminated!
        let len = cmp::min(xs.len(), ys.len());
        let mut xs = &xs[..len];
        let mut ys = &ys[..len];
        let mut s = 0.;
        let (mut p0, mut p1, mut p2, mut p3, mut p4, mut p5, mut p6, mut p7) =
            (0., 0., 0., 0., 0., 0., 0., 0.);
        // how to unroll and have bounds checks eliminated (by cristicbz)
        // split sum into eight parts to enable vectorization (by bluss)
        while xs.len() >= 8 {
            p0 += xs[0] * ys[0];
            p1 += xs[1] * ys[1];
            p2 += xs[2] * ys[2];
            p3 += xs[3] * ys[3];
            p4 += xs[4] * ys[4];
            p5 += xs[5] * ys[5];
            p6 += xs[6] * ys[6];
            p7 += xs[7] * ys[7];
            xs = &xs[8..];
            ys = &ys[8..];
        }
        s += p0 + p4;
        s += p1 + p5;
        s += p2 + p6;
        s += p3 + p7;
        for i in 0..xs.len() {
            s += xs[i] * ys[i];
        }
        s
    })
}
#[bench]
fn zip_unchecked_counted_loop(b: &mut test::Bencher)
{
let xs = vec![0; 1024];
let ys = vec![0; 768];
let xs = black_box(xs);
let ys = black_box(ys);
b.iter(|| {
let len = cmp::min(xs.len(), ys.len());
for i in 0..len {
unsafe {
let x = *xs.get_unchecked(i);
let y = *ys.get_unchecked(i);
test::black_box(x);
test::black_box(y);
}
}
})
}
#[bench]
fn zipdot_i32_unchecked_counted_loop(b: &mut test::Bencher)
{
let xs = vec![2; 1024];
let ys = vec![2; 768];
let xs = black_box(xs);
let ys = black_box(ys);
b.iter(|| {
let len = cmp::min(xs.len(), ys.len());
let mut s = 0i32;
for i in 0..len {
unsafe {
let x = *xs.get_unchecked(i);
let y = *ys.get_unchecked(i);
s += x * y;
}
}
s
})
}
#[bench]
fn zipdot_f32_unchecked_counted_loop(b: &mut test::Bencher)
{
let xs = vec![2.; 1024];
let ys = vec![2.; 768];
let xs = black_box(xs);
let ys = black_box(ys);
b.iter(|| {
let len = cmp::min(xs.len(), ys.len());
let mut s = 0f32;
for i in 0..len {
unsafe {
let x = *xs.get_unchecked(i);
let y = *ys.get_unchecked(i);
s += x * y;
}
}
s
})
}
#[bench]
fn zip_unchecked_counted_loop3(b: &mut test::Bencher)
{
let xs = vec![0; 1024];
let ys = vec![0; 768];
let zs = vec![0; 766];
let xs = black_box(xs);
let ys = black_box(ys);
let zs = black_box(zs);
b.iter(|| {
let len = cmp::min(xs.len(), cmp::min(ys.len(), zs.len()));
for i in 0..len {
unsafe {
let x = *xs.get_unchecked(i);
let y = *ys.get_unchecked(i);
let z = *zs.get_unchecked(i);
test::black_box(x);
test::black_box(y);
test::black_box(z);
}
}
})
}
// `group_by` over runs of 10 equal keys (data[i] = i / 10).
#[bench]
fn group_by_lazy_1(b: &mut test::Bencher) {
    let mut data = vec![0; 1024];
    for (index, elt) in data.iter_mut().enumerate() {
        *elt = index / 10;
    }
    let data = test::black_box(data);
    b.iter(|| {
        for (_key, group) in &data.iter().group_by(|elt| **elt) {
            for elt in group {
                test::black_box(elt);
            }
        }
    })
}
// Same as above but with short runs of 2 (data[i] = i / 2), stressing
// group-switch overhead.
#[bench]
fn group_by_lazy_2(b: &mut test::Bencher) {
    let mut data = vec![0; 1024];
    for (index, elt) in data.iter_mut().enumerate() {
        *elt = index / 2;
    }
    let data = test::black_box(data);
    b.iter(|| {
        for (_key, group) in &data.iter().group_by(|elt| **elt) {
            for elt in group {
                test::black_box(elt);
            }
        }
    })
}
// Baseline for chunking: the std slice `chunks` iterator.
#[bench]
fn slice_chunks(b: &mut test::Bencher) {
    let data = vec![0; 1024];
    let data = test::black_box(data);
    let sz = test::black_box(10);
    b.iter(|| {
        for group in data.chunks(sz) {
            for elt in group {
                test::black_box(elt);
            }
        }
    })
}
// itertools' lazy `chunks` adaptor, for comparison with `slice_chunks`.
#[bench]
fn chunks_lazy_1(b: &mut test::Bencher) {
    let data = vec![0; 1024];
    let data = test::black_box(data);
    let sz = test::black_box(10);
    b.iter(|| {
        for group in &data.iter().chunks(sz) {
            for elt in group {
                test::black_box(elt);
            }
        }
    })
}
// `itertools::equal` on two overlapping 1023-element views of the same data.
#[bench]
fn equal(b: &mut test::Bencher) {
    let data = vec![7; 1024];
    let l = data.len();
    let alpha = test::black_box(&data[1..]);
    let beta = test::black_box(&data[..l - 1]);
    b.iter(|| {
        itertools::equal(alpha, beta)
    })
}
// Two-way merge of two sorted sequences using the default (Ord) comparison.
// data1 is 0..1024; data2 starts at 0 and steps by 3 every third index
// (the vec is zero-initialized, so `+=` here is effectively assignment).
#[bench]
fn merge_default(b: &mut test::Bencher) {
    let mut data1 = vec![0; 1024];
    let mut data2 = vec![0; 800];
    let mut x = 0;
    for (_, elt) in data1.iter_mut().enumerate() {
        *elt = x;
        x += 1;
    }
    let mut y = 0;
    for (i, elt) in data2.iter_mut().enumerate() {
        *elt += y;
        if i % 3 == 0 {
            y += 3;
        } else {
            y += 0;
        }
    }
    let data1 = test::black_box(data1);
    let data2 = test::black_box(data2);
    b.iter(|| {
        data1.iter().merge(&data2).count()
    })
}
// Same merge, but supplying `PartialOrd::le` as the explicit predicate.
#[bench]
fn merge_by_cmp(b: &mut test::Bencher) {
    let mut data1 = vec![0; 1024];
    let mut data2 = vec![0; 800];
    let mut x = 0;
    for (_, elt) in data1.iter_mut().enumerate() {
        *elt = x;
        x += 1;
    }
    let mut y = 0;
    for (i, elt) in data2.iter_mut().enumerate() {
        *elt += y;
        if i % 3 == 0 {
            y += 3;
        } else {
            y += 0;
        }
    }
    let data1 = test::black_box(data1);
    let data2 = test::black_box(data2);
    b.iter(|| {
        data1.iter().merge_by(&data2, PartialOrd::le).count()
    })
}
// Same merge with a closure predicate, to compare against the fn-pointer form.
#[bench]
fn merge_by_lt(b: &mut test::Bencher) {
    let mut data1 = vec![0; 1024];
    let mut data2 = vec![0; 800];
    let mut x = 0;
    for (_, elt) in data1.iter_mut().enumerate() {
        *elt = x;
        x += 1;
    }
    let mut y = 0;
    for (i, elt) in data2.iter_mut().enumerate() {
        *elt += y;
        if i % 3 == 0 {
            y += 3;
        } else {
            y += 0;
        }
    }
    let data1 = test::black_box(data1);
    let data2 = test::black_box(data2);
    b.iter(|| {
        data1.iter().merge_by(&data2, |a, b| a <= b).count()
    })
}
// k-way merge (`kmerge`) with k = 2, same inputs as the merge benches.
#[bench]
fn kmerge_default(b: &mut test::Bencher) {
    let mut data1 = vec![0; 1024];
    let mut data2 = vec![0; 800];
    let mut x = 0;
    for (_, elt) in data1.iter_mut().enumerate() {
        *elt = x;
        x += 1;
    }
    let mut y = 0;
    for (i, elt) in data2.iter_mut().enumerate() {
        *elt += y;
        if i % 3 == 0 {
            y += 3;
        } else {
            y += 0;
        }
    }
    let data1 = test::black_box(data1);
    let data2 = test::black_box(data2);
    let its = &[data1.iter(), data2.iter()];
    b.iter(|| {
        its.iter().cloned().kmerge().count()
    })
}
// k-way merge over ~20-40 sorted chunks of pseudo-random data (10240
// elements split at pseudo-random boundaries of up to 512 elements).
#[bench]
fn kmerge_tenway(b: &mut test::Bencher) {
    let mut data = vec![0; 10240];
    // Small deterministic LCG-style PRNG so the benchmark input is stable.
    let mut state = 1729u16;
    fn rng(state: &mut u16) -> u16 {
        let new = state.wrapping_mul(31421) + 6927;
        *state = new;
        new
    }
    for elt in &mut data {
        *elt = rng(&mut state);
    }
    let mut chunks = Vec::new();
    let mut rest = &mut data[..];
    while rest.len() > 0 {
        let chunk_len = 1 + rng(&mut state) % 512;
        let chunk_len = cmp::min(rest.len(), chunk_len as usize);
        // `{rest}` moves the borrow so the remainder can be re-assigned.
        let (fst, tail) = {rest}.split_at_mut(chunk_len);
        fst.sort();
        chunks.push(fst.iter().cloned());
        rest = tail;
    }
    // println!("Chunk lengths: {}", chunks.iter().format_with(", ", |elt, f| f(&elt.len())));
    b.iter(|| {
        chunks.iter().cloned().kmerge().count()
    })
}
/// Sum every element of `iter`, starting from the item type's default
/// value (zero for the integer types used in these benches).
///
/// Equivalent to folding with `+` from `Default::default()`; written as an
/// explicit accumulator loop.
fn fast_integer_sum<I>(iter: I) -> I::Item
    where I: IntoIterator,
          I::Item: Default + Add<Output=I::Item>
{
    let mut total: I::Item = Default::default();
    for value in iter {
        total = total + value;
    }
    total
}
// `step(2)` over a Vec iterator (every 2nd element of 1024).
#[bench]
fn step_vec_2(b: &mut test::Bencher) {
    let v = vec![0; 1024];
    b.iter(|| {
        fast_integer_sum(cloned(v.iter().step(2)))
    });
}
// `step(10)` over a Vec iterator.
#[bench]
fn step_vec_10(b: &mut test::Bencher) {
    let v = vec![0; 1024];
    b.iter(|| {
        fast_integer_sum(cloned(v.iter().step(10)))
    });
}
// `step(2)` over a Range, which has a specialized iterator.
#[bench]
fn step_range_2(b: &mut test::Bencher) {
    let v = black_box(0..1024);
    b.iter(|| {
        fast_integer_sum(v.clone().step(2))
    });
}
// `step(10)` over a Range.
#[bench]
fn step_range_10(b: &mut test::Bencher) {
    let v = black_box(0..1024);
    b.iter(|| {
        fast_integer_sum(v.clone().step(10))
    });
}
// Triple cartesian product (16^3 combinations) consumed via `next()`.
#[bench]
fn cartesian_product_iterator(b: &mut test::Bencher)
{
    let xs = vec![0; 16];
    b.iter(|| {
        let mut sum = 0;
        for (&x, &y, &z) in iproduct!(&xs, &xs, &xs) {
            sum += x;
            sum += y;
            sum += z;
        }
        sum
    })
}
// Same product consumed via internal iteration (`fold`), which the adaptor
// can specialize.
#[bench]
fn cartesian_product_fold(b: &mut test::Bencher)
{
    let xs = vec![0; 16];
    b.iter(|| {
        let mut sum = 0;
        iproduct!(&xs, &xs, &xs).fold((), |(), (&x, &y, &z)| {
            sum += x;
            sum += y;
            sum += z;
        });
        sum
    })
}
// Runtime-arity product (`multi_cartesian_product`, Vec-based) via `next()`.
#[bench]
fn multi_cartesian_product_iterator(b: &mut test::Bencher)
{
    let xs = [vec![0; 16], vec![0; 16], vec![0; 16]];
    b.iter(|| {
        let mut sum = 0;
        for x in xs.into_iter().multi_cartesian_product() {
            sum += x[0];
            sum += x[1];
            sum += x[2];
        }
        sum
    })
}
// Runtime-arity product via `fold`.
#[bench]
fn multi_cartesian_product_fold(b: &mut test::Bencher)
{
    let xs = [vec![0; 16], vec![0; 16], vec![0; 16]];
    b.iter(|| {
        let mut sum = 0;
        xs.into_iter().multi_cartesian_product().fold((), |(), x| {
            sum += x[0];
            sum += x[1];
            sum += x[2];
        });
        sum
    })
}
// Hand-written nested loops: the lower bound the product adaptors chase.
#[bench]
fn cartesian_product_nested_for(b: &mut test::Bencher)
{
    let xs = vec![0; 16];
    b.iter(|| {
        let mut sum = 0;
        for &x in &xs {
            for &y in &xs {
                for &z in &xs {
                    sum += x;
                    sum += y;
                    sum += z;
                }
            }
        }
        sum
    })
}

4
third_party/rust/itertools-0.7.6/benches/extra/mod.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,4 @@
pub use self::zipslices::ZipSlices;
mod zipslices;

189
third_party/rust/itertools-0.7.6/benches/extra/zipslices.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,189 @@
use std::cmp;
// Note: There are different ways to implement ZipSlices.
// This version performed the best in benchmarks.
//
// I also implemented a version with three pointers (tptr, tend, uptr),
// that mimicked slice::Iter and only checked bounds by using tptr == tend,
// but that was inferior to this solution.
/// An iterator which iterates two slices simultaneously.
///
/// `ZipSlices` acts like a double-ended `.zip()` iterator.
///
/// It was intended to be more efficient than `.zip()`, and it was, then
/// rustc changed how it optimizes so it can not promise improved performance
/// at this time.
///
/// Note that elements past the end of the shortest of the two slices are ignored.
///
/// Iterator element type for `ZipSlices<T, U>` is `(T::Item, U::Item)`. For example,
/// for a `ZipSlices<&'a [A], &'b mut [B]>`, the element type is `(&'a A, &'b mut B)`.
#[derive(Clone)]
pub struct ZipSlices<T, U> {
    // The two zipped containers (anything implementing the local `Slice` trait).
    t: T,
    u: U,
    // Exclusive upper bound of the iteration; starts as the shorter length
    // and is decremented by `next_back`.
    len: usize,
    // Front cursor advanced by `next`; iteration ends when index == len.
    index: usize,
}
impl<'a, 'b, A, B> ZipSlices<&'a [A], &'b [B]> {
    /// Create a new `ZipSlices` from slices `a` and `b`.
    ///
    /// Act like a double-ended `.zip()` iterator, but more efficiently.
    ///
    /// Note that elements past the end of the shortest of the two slices are ignored.
    #[inline(always)]
    pub fn new(a: &'a [A], b: &'b [B]) -> Self {
        let minl = cmp::min(a.len(), b.len());
        ZipSlices {
            t: a,
            u: b,
            len: minl,
            index: 0,
        }
    }
}
impl<T, U> ZipSlices<T, U>
    where T: Slice,
          U: Slice
{
    /// Create a new `ZipSlices` from slices `a` and `b`.
    ///
    /// Act like a double-ended `.zip()` iterator, but more efficiently.
    ///
    /// Note that elements past the end of the shortest of the two slices are ignored.
    #[inline(always)]
    pub fn from_slices(a: T, b: U) -> Self {
        let minl = cmp::min(a.len(), b.len());
        ZipSlices {
            t: a,
            u: b,
            len: minl,
            index: 0,
        }
    }
}
impl<T, U> Iterator for ZipSlices<T, U>
    where T: Slice,
          U: Slice
{
    type Item = (T::Item, U::Item);
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: i < self.len, and self.len never exceeds the length of
        // either underlying slice (established in the constructors and only
        // ever decreased), so both unchecked accesses are in bounds.
        unsafe {
            if self.index >= self.len {
                None
            } else {
                let i = self.index;
                self.index += 1;
                Some((
                    self.t.get_unchecked(i),
                    self.u.get_unchecked(i)))
            }
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: exactly `len - index` elements remain.
        let len = self.len - self.index;
        (len, Some(len))
    }
}
impl<T, U> DoubleEndedIterator for ZipSlices<T, U>
    where T: Slice,
          U: Slice
{
    #[inline(always)]
    fn next_back(&mut self) -> Option<Self::Item> {
        // Consumes from the back by shrinking `len`; meets `index` when the
        // two ends cross.
        // SAFETY: after the decrement, i == self.len < original len, which is
        // within the bounds of both underlying slices.
        unsafe {
            if self.index >= self.len {
                None
            } else {
                self.len -= 1;
                let i = self.len;
                Some((
                    self.t.get_unchecked(i),
                    self.u.get_unchecked(i)))
            }
        }
    }
}
// Exact length is provided by the `size_hint` above.
impl<T, U> ExactSizeIterator for ZipSlices<T, U>
    where T: Slice,
          U: Slice
{}
// A ZipSlices is itself a `Slice` over pairs, so ZipSlices can be nested.
unsafe impl<T, U> Slice for ZipSlices<T, U>
    where T: Slice,
          U: Slice
{
    type Item = (T::Item, U::Item);
    fn len(&self) -> usize {
        self.len - self.index
    }
    unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item {
        (self.t.get_unchecked(i),
         self.u.get_unchecked(i))
    }
}
/// A helper trait to let `ZipSlices` accept both `&[T]` and `&mut [T]`.
///
/// Unsafe trait because:
///
/// - Implementors must guarantee that `get_unchecked` is valid for all indices `0..len()`.
pub unsafe trait Slice {
    /// The type of a reference to the slice's elements
    type Item;
    #[doc(hidden)]
    fn len(&self) -> usize;
    #[doc(hidden)]
    unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item;
}
unsafe impl<'a, T> Slice for &'a [T] {
    type Item = &'a T;
    #[inline(always)]
    fn len(&self) -> usize { (**self).len() }
    #[inline(always)]
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
        debug_assert!(i < self.len());
        (**self).get_unchecked(i)
    }
}
unsafe impl<'a, T> Slice for &'a mut [T] {
    type Item = &'a mut T;
    #[inline(always)]
    fn len(&self) -> usize { (**self).len() }
    #[inline(always)]
    unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
        debug_assert!(i < self.len());
        // override the lifetime constraints of &mut &'a mut [T]
        // (the caller must never request the same index twice, or aliasing
        // &mut references would result — upheld by ZipSlices' cursors).
        (*(*self as *mut [T])).get_unchecked_mut(i)
    }
}
// Sanity check: ZipSlices agrees with std `zip`, and the `&mut [T]` path
// really writes through to the destination slice.
#[test]
fn zipslices() {
    let xs = [1, 2, 3, 4, 5, 6];
    let ys = [1, 2, 3, 7];
    ::itertools::assert_equal(ZipSlices::new(&xs, &ys), xs.iter().zip(&ys));
    let xs = [1, 2, 3, 4, 5, 6];
    let mut ys = [0; 6];
    for (x, y) in ZipSlices::from_slices(&xs[..], &mut ys[..]) {
        *y = *x;
    }
    ::itertools::assert_equal(&xs, &ys);
}

97
third_party/rust/itertools-0.7.6/benches/tuple_combinations.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,97 @@
#![feature(test)]
extern crate test;
extern crate itertools;
use test::{black_box, Bencher};
use itertools::Itertools;
// approximately 100_000 iterations for each combination
const N1: usize = 100_000;
const N2: usize = 448;
const N3: usize = 86;
const N4: usize = 41;
// Baselines: hand-written nested loops enumerating 1- to 4-element
// combinations of 0..N.
#[bench]
fn comb_for1(b: &mut Bencher) {
    b.iter(|| {
        for i in 0..N1 {
            black_box(i);
        }
    });
}
#[bench]
fn comb_for2(b: &mut Bencher) {
    b.iter(|| {
        for i in 0..N2 {
            for j in (i + 1)..N2 {
                black_box(i + j);
            }
        }
    });
}
#[bench]
fn comb_for3(b: &mut Bencher) {
    b.iter(|| {
        for i in 0..N3 {
            for j in (i + 1)..N3 {
                for k in (j + 1)..N3 {
                    black_box(i + j + k);
                }
            }
        }
    });
}
#[bench]
fn comb_for4(b: &mut Bencher) {
    b.iter(|| {
        for i in 0..N4 {
            for j in (i + 1)..N4 {
                for k in (j + 1)..N4 {
                    for l in (k + 1)..N4 {
                        black_box(i + j + k + l);
                    }
                }
            }
        }
    });
}
// The same enumerations through itertools' `tuple_combinations` adaptor,
// measured against the loops above.
#[bench]
fn comb_c1(b: &mut Bencher) {
    b.iter(|| {
        for (i,) in (0..N1).tuple_combinations() {
            black_box(i);
        }
    });
}
#[bench]
fn comb_c2(b: &mut Bencher) {
    b.iter(|| {
        for (i, j) in (0..N2).tuple_combinations() {
            black_box(i + j);
        }
    });
}
#[bench]
fn comb_c3(b: &mut Bencher) {
    b.iter(|| {
        for (i, j, k) in (0..N3).tuple_combinations() {
            black_box(i + j + k);
        }
    });
}
#[bench]
fn comb_c4(b: &mut Bencher) {
    b.iter(|| {
        for (i, j, k, l) in (0..N4).tuple_combinations() {
            black_box(i + j + k + l);
        }
    });
}

190
third_party/rust/itertools-0.7.6/benches/tuples.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,190 @@
#![feature(test)]
extern crate test;
extern crate itertools;
use test::Bencher;
use itertools::Itertools;
// Arity-1..4 sums used as the per-group workload in the benchmarks below.
fn s1(a: u32) -> u32 {
    a
}
fn s2(a: u32, b: u32) -> u32 {
    a + b
}
fn s3(a: u32, b: u32, c: u32) -> u32 {
    a + b + c
}
fn s4(a: u32, b: u32, c: u32, d: u32) -> u32 {
    a + b + c + d
}
// Slice-based adapters: sum the first 1..4 elements of a slice.
fn sum_s1(s: &[u32]) -> u32 {
    s1(s[0])
}
fn sum_s2(s: &[u32]) -> u32 {
    s2(s[0], s[1])
}
fn sum_s3(s: &[u32]) -> u32 {
    s3(s[0], s[1], s[2])
}
fn sum_s4(s: &[u32]) -> u32 {
    s4(s[0], s[1], s[2], s[3])
}
// Tuple-of-references adapters, matching the element type produced by
// `tuples()` / `tuple_windows()`.
fn sum_t1(s: &(&u32, )) -> u32 {
    s1(*s.0)
}
fn sum_t2(s: &(&u32, &u32)) -> u32 {
    s2(*s.0, *s.1)
}
fn sum_t3(s: &(&u32, &u32, &u32)) -> u32 {
    s3(*s.0, *s.1, *s.2)
}
fn sum_t4(s: &(&u32, &u32, &u32, &u32)) -> u32 {
    s4(*s.0, *s.1, *s.2, *s.3)
}
// Generates, for a fixed group size $N, six benchmarks comparing
// itertools' `tuples()` / `tuple_windows()` against slice `chunks()` /
// `windows()` and hand-indexed loops.
macro_rules! def_benchs {
    ($N:expr;
     $TUPLE_FUN:ident,
     $TUPLES:ident,
     $TUPLE_WINDOWS:ident;
     $SLICE_FUN:ident,
     $CHUNKS:ident,
     $WINDOWS:ident;
     $FOR_CHUNKS:ident,
     $FOR_WINDOWS:ident
     ) => (
        // Hand-indexed chunking baseline.
        #[bench]
        fn $FOR_CHUNKS(b: &mut Bencher) {
            let v: Vec<u32> = (0.. $N * 1_000).collect();
            let mut s = 0;
            b.iter(|| {
                let mut j = 0;
                for _ in 0..1_000 {
                    s += $SLICE_FUN(&v[j..(j + $N)]);
                    j += $N;
                }
                s
            });
        }
        // Hand-indexed sliding-window baseline.
        #[bench]
        fn $FOR_WINDOWS(b: &mut Bencher) {
            let v: Vec<u32> = (0..1_000).collect();
            let mut s = 0;
            b.iter(|| {
                for i in 0..(1_000 - $N) {
                    s += $SLICE_FUN(&v[i..(i + $N)]);
                }
                s
            });
        }
        // itertools `tuples()`: non-overlapping groups as tuples.
        #[bench]
        fn $TUPLES(b: &mut Bencher) {
            let v: Vec<u32> = (0.. $N * 1_000).collect();
            let mut s = 0;
            b.iter(|| {
                for x in v.iter().tuples() {
                    s += $TUPLE_FUN(&x);
                }
                s
            });
        }
        // std slice `chunks()`: non-overlapping groups as slices.
        #[bench]
        fn $CHUNKS(b: &mut Bencher) {
            let v: Vec<u32> = (0.. $N * 1_000).collect();
            let mut s = 0;
            b.iter(|| {
                for x in v.chunks($N) {
                    s += $SLICE_FUN(x);
                }
                s
            });
        }
        // itertools `tuple_windows()`: overlapping windows as tuples.
        #[bench]
        fn $TUPLE_WINDOWS(b: &mut Bencher) {
            let v: Vec<u32> = (0..1_000).collect();
            let mut s = 0;
            b.iter(|| {
                for x in v.iter().tuple_windows() {
                    s += $TUPLE_FUN(&x);
                }
                s
            });
        }
        // std slice `windows()`: overlapping windows as slices.
        #[bench]
        fn $WINDOWS(b: &mut Bencher) {
            let v: Vec<u32> = (0..1_000).collect();
            let mut s = 0;
            b.iter(|| {
                for x in v.windows($N) {
                    s += $SLICE_FUN(x);
                }
                s
            });
        }
    )
}
// Instantiate the benchmark suites for group sizes 1 through 4.
def_benchs!{
    1;
    sum_t1,
    tuple_chunks_1,
    tuple_windows_1;
    sum_s1,
    slice_chunks_1,
    slice_windows_1;
    for_chunks_1,
    for_windows_1
}
def_benchs!{
    2;
    sum_t2,
    tuple_chunks_2,
    tuple_windows_2;
    sum_s2,
    slice_chunks_2,
    slice_windows_2;
    for_chunks_2,
    for_windows_2
}
def_benchs!{
    3;
    sum_t3,
    tuple_chunks_3,
    tuple_windows_3;
    sum_s3,
    slice_chunks_3,
    slice_windows_3;
    for_chunks_3,
    for_windows_3
}
def_benchs!{
    4;
    sum_t4,
    tuple_chunks_4,
    tuple_windows_4;
    sum_s4,
    slice_chunks_4,
    slice_windows_4;
    for_chunks_4,
    for_windows_4
}

Просмотреть файл

150
third_party/rust/itertools-0.7.6/examples/iris.data поставляемый Normal file
Просмотреть файл

@ -0,0 +1,150 @@
5.1,3.5,1.4,0.2,Iris-setosa
4.9,3.0,1.4,0.2,Iris-setosa
4.7,3.2,1.3,0.2,Iris-setosa
4.6,3.1,1.5,0.2,Iris-setosa
5.0,3.6,1.4,0.2,Iris-setosa
5.4,3.9,1.7,0.4,Iris-setosa
4.6,3.4,1.4,0.3,Iris-setosa
5.0,3.4,1.5,0.2,Iris-setosa
4.4,2.9,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.4,3.7,1.5,0.2,Iris-setosa
4.8,3.4,1.6,0.2,Iris-setosa
4.8,3.0,1.4,0.1,Iris-setosa
4.3,3.0,1.1,0.1,Iris-setosa
5.8,4.0,1.2,0.2,Iris-setosa
5.7,4.4,1.5,0.4,Iris-setosa
5.4,3.9,1.3,0.4,Iris-setosa
5.1,3.5,1.4,0.3,Iris-setosa
5.7,3.8,1.7,0.3,Iris-setosa
5.1,3.8,1.5,0.3,Iris-setosa
5.4,3.4,1.7,0.2,Iris-setosa
5.1,3.7,1.5,0.4,Iris-setosa
4.6,3.6,1.0,0.2,Iris-setosa
5.1,3.3,1.7,0.5,Iris-setosa
4.8,3.4,1.9,0.2,Iris-setosa
5.0,3.0,1.6,0.2,Iris-setosa
5.0,3.4,1.6,0.4,Iris-setosa
5.2,3.5,1.5,0.2,Iris-setosa
5.2,3.4,1.4,0.2,Iris-setosa
4.7,3.2,1.6,0.2,Iris-setosa
4.8,3.1,1.6,0.2,Iris-setosa
5.4,3.4,1.5,0.4,Iris-setosa
5.2,4.1,1.5,0.1,Iris-setosa
5.5,4.2,1.4,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
5.0,3.2,1.2,0.2,Iris-setosa
5.5,3.5,1.3,0.2,Iris-setosa
4.9,3.1,1.5,0.1,Iris-setosa
4.4,3.0,1.3,0.2,Iris-setosa
5.1,3.4,1.5,0.2,Iris-setosa
5.0,3.5,1.3,0.3,Iris-setosa
4.5,2.3,1.3,0.3,Iris-setosa
4.4,3.2,1.3,0.2,Iris-setosa
5.0,3.5,1.6,0.6,Iris-setosa
5.1,3.8,1.9,0.4,Iris-setosa
4.8,3.0,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.3,3.7,1.5,0.2,Iris-setosa
5.0,3.3,1.4,0.2,Iris-setosa
7.0,3.2,4.7,1.4,Iris-versicolor
6.4,3.2,4.5,1.5,Iris-versicolor
6.9,3.1,4.9,1.5,Iris-versicolor
5.5,2.3,4.0,1.3,Iris-versicolor
6.5,2.8,4.6,1.5,Iris-versicolor
5.7,2.8,4.5,1.3,Iris-versicolor
6.3,3.3,4.7,1.6,Iris-versicolor
4.9,2.4,3.3,1.0,Iris-versicolor
6.6,2.9,4.6,1.3,Iris-versicolor
5.2,2.7,3.9,1.4,Iris-versicolor
5.0,2.0,3.5,1.0,Iris-versicolor
5.9,3.0,4.2,1.5,Iris-versicolor
6.0,2.2,4.0,1.0,Iris-versicolor
6.1,2.9,4.7,1.4,Iris-versicolor
5.6,2.9,3.6,1.3,Iris-versicolor
6.7,3.1,4.4,1.4,Iris-versicolor
5.6,3.0,4.5,1.5,Iris-versicolor
5.8,2.7,4.1,1.0,Iris-versicolor
6.2,2.2,4.5,1.5,Iris-versicolor
5.6,2.5,3.9,1.1,Iris-versicolor
5.9,3.2,4.8,1.8,Iris-versicolor
6.1,2.8,4.0,1.3,Iris-versicolor
6.3,2.5,4.9,1.5,Iris-versicolor
6.1,2.8,4.7,1.2,Iris-versicolor
6.4,2.9,4.3,1.3,Iris-versicolor
6.6,3.0,4.4,1.4,Iris-versicolor
6.8,2.8,4.8,1.4,Iris-versicolor
6.7,3.0,5.0,1.7,Iris-versicolor
6.0,2.9,4.5,1.5,Iris-versicolor
5.7,2.6,3.5,1.0,Iris-versicolor
5.5,2.4,3.8,1.1,Iris-versicolor
5.5,2.4,3.7,1.0,Iris-versicolor
5.8,2.7,3.9,1.2,Iris-versicolor
6.0,2.7,5.1,1.6,Iris-versicolor
5.4,3.0,4.5,1.5,Iris-versicolor
6.0,3.4,4.5,1.6,Iris-versicolor
6.7,3.1,4.7,1.5,Iris-versicolor
6.3,2.3,4.4,1.3,Iris-versicolor
5.6,3.0,4.1,1.3,Iris-versicolor
5.5,2.5,4.0,1.3,Iris-versicolor
5.5,2.6,4.4,1.2,Iris-versicolor
6.1,3.0,4.6,1.4,Iris-versicolor
5.8,2.6,4.0,1.2,Iris-versicolor
5.0,2.3,3.3,1.0,Iris-versicolor
5.6,2.7,4.2,1.3,Iris-versicolor
5.7,3.0,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
5.1,2.5,3.0,1.1,Iris-versicolor
5.7,2.8,4.1,1.3,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3.0,5.9,2.1,Iris-virginica
6.3,2.9,5.6,1.8,Iris-virginica
6.5,3.0,5.8,2.2,Iris-virginica
7.6,3.0,6.6,2.1,Iris-virginica
4.9,2.5,4.5,1.7,Iris-virginica
7.3,2.9,6.3,1.8,Iris-virginica
6.7,2.5,5.8,1.8,Iris-virginica
7.2,3.6,6.1,2.5,Iris-virginica
6.5,3.2,5.1,2.0,Iris-virginica
6.4,2.7,5.3,1.9,Iris-virginica
6.8,3.0,5.5,2.1,Iris-virginica
5.7,2.5,5.0,2.0,Iris-virginica
5.8,2.8,5.1,2.4,Iris-virginica
6.4,3.2,5.3,2.3,Iris-virginica
6.5,3.0,5.5,1.8,Iris-virginica
7.7,3.8,6.7,2.2,Iris-virginica
7.7,2.6,6.9,2.3,Iris-virginica
6.0,2.2,5.0,1.5,Iris-virginica
6.9,3.2,5.7,2.3,Iris-virginica
5.6,2.8,4.9,2.0,Iris-virginica
7.7,2.8,6.7,2.0,Iris-virginica
6.3,2.7,4.9,1.8,Iris-virginica
6.7,3.3,5.7,2.1,Iris-virginica
7.2,3.2,6.0,1.8,Iris-virginica
6.2,2.8,4.8,1.8,Iris-virginica
6.1,3.0,4.9,1.8,Iris-virginica
6.4,2.8,5.6,2.1,Iris-virginica
7.2,3.0,5.8,1.6,Iris-virginica
7.4,2.8,6.1,1.9,Iris-virginica
7.9,3.8,6.4,2.0,Iris-virginica
6.4,2.8,5.6,2.2,Iris-virginica
6.3,2.8,5.1,1.5,Iris-virginica
6.1,2.6,5.6,1.4,Iris-virginica
7.7,3.0,6.1,2.3,Iris-virginica
6.3,3.4,5.6,2.4,Iris-virginica
6.4,3.1,5.5,1.8,Iris-virginica
6.0,3.0,4.8,1.8,Iris-virginica
6.9,3.1,5.4,2.1,Iris-virginica
6.7,3.1,5.6,2.4,Iris-virginica
6.9,3.1,5.1,2.3,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
6.8,3.2,5.9,2.3,Iris-virginica
6.7,3.3,5.7,2.5,Iris-virginica
6.7,3.0,5.2,2.3,Iris-virginica
6.3,2.5,5.0,1.9,Iris-virginica
6.5,3.0,5.2,2.0,Iris-virginica
6.2,3.4,5.4,2.3,Iris-virginica
5.9,3.0,5.1,1.8,Iris-virginica

141
third_party/rust/itertools-0.7.6/examples/iris.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,141 @@
///
/// This example parses, sorts and groups the iris dataset
/// and does some simple manipulations.
///
/// Iterators and itertools functionality are used throughout.
///
///
extern crate itertools;
use itertools::Itertools;
use std::collections::HashMap;
use std::iter::repeat;
use std::num::ParseFloatError;
use std::str::FromStr;
// The raw iris dataset, embedded at compile time (150 CSV rows of
// 4 measurements plus a species name).
static DATA: &'static str = include_str!("iris.data");
/// One dataset row: species name plus its four measurements.
#[derive(Clone, Debug)]
struct Iris {
    name: String,
    data: [f32; 4],
}
/// Errors produced while parsing a CSV row.
#[derive(Clone, Debug)]
enum ParseError {
    // A measurement field failed to parse as f32.
    Numeric(ParseFloatError),
    // Structural problem, e.g. a missing species name.
    Other(&'static str),
}
impl From<ParseFloatError> for ParseError {
    fn from(err: ParseFloatError) -> Self {
        ParseError::Numeric(err)
    }
}
/// Parse an Iris from a comma-separated line
impl FromStr for Iris {
    type Err = ParseError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut iris = Iris { name: "".into(), data: [0.; 4] };
        let mut parts = s.split(",").map(str::trim);
        // using Iterator::by_ref()
        // The first four fields are the measurements; `try!` (pre-`?` idiom)
        // propagates any float-parse failure via the From impl above.
        for (index, part) in parts.by_ref().take(4).enumerate() {
            iris.data[index] = try!(part.parse::<f32>());
        }
        // Whatever remains after the four numbers is the species name.
        if let Some(name) = parts.next() {
            iris.name = name.into();
        } else {
            return Err(ParseError::Other("Missing name"))
        }
        Ok(iris)
    }
}
// Parses the embedded dataset, prints each species group with a plot
// symbol, then renders ASCII scatter plots for every pair of columns.
fn main() {
    // using Itertools::fold_results to create the result of parsing
    // (collects parsed rows, short-circuiting on the first parse error).
    let irises = DATA.lines()
                     .map(str::parse)
                     .fold_results(Vec::new(), |mut v, iris: Iris| {
                         v.push(iris);
                         v
                     });
    let mut irises = match irises {
        Err(e) => {
            println!("Error parsing: {:?}", e);
            std::process::exit(1);
        }
        Ok(data) => data,
    };
    // Sort them and group them
    irises.sort_by(|a, b| Ord::cmp(&a.name, &b.name));
    // using Iterator::cycle()
    // Each species gets the next symbol from this endless sequence.
    let mut plot_symbols = "+ox".chars().cycle();
    let mut symbolmap = HashMap::new();
    // using Itertools::group_by
    // (requires the sort above: group_by only groups consecutive runs).
    for (species, species_group) in &irises.iter().group_by(|iris| &iris.name) {
        // assign a plot symbol
        symbolmap.entry(species).or_insert_with(|| {
            plot_symbols.next().unwrap()
        });
        println!("{} (symbol={})", species, symbolmap[species]);
        for iris in species_group {
            // using Itertools::format for lazy formatting
            println!("{:>3.1}", iris.data.iter().format(", "));
        }
    }
    // Look at all combinations of the four columns
    //
    // See https://en.wikipedia.org/wiki/Iris_flower_data_set
    //
    let n = 30; // plot size
    let mut plot = vec![' '; n * n];
    // using Itertools::tuple_combinations
    for (a, b) in (0..4).tuple_combinations() {
        println!("Column {} vs {}:", a, b);
        // Clear plot
        //
        // using std::iter::repeat;
        // using Itertools::set_from
        plot.iter_mut().set_from(repeat(' '));
        // using Itertools::minmax
        let min_max = |data: &[Iris], col| {
            data.iter()
                .map(|iris| iris.data[col])
                .minmax()
                .into_option()
                .expect("Can't find min/max of empty iterator")
        };
        let (min_x, max_x) = min_max(&irises, a);
        let (min_y, max_y) = min_max(&irises, b);
        // Plot the data points
        // Map a measurement into 0..n grid coordinates.
        let round_to_grid = |x, min, max| ((x - min) / (max - min) * ((n - 1) as f32)) as usize;
        let flip = |ix| n - 1 - ix; // reverse axis direction
        for iris in &irises {
            let ix = round_to_grid(iris.data[a], min_x, max_x);
            let iy = flip(round_to_grid(iris.data[b], min_y, max_y));
            plot[n * iy + ix] = symbolmap[&iris.name];
        }
        // render plot
        //
        // using Itertools::join
        for line in plot.chunks(n) {
            println!("{}", line.iter().join(" "))
        }
    }
}

1272
third_party/rust/itertools-0.7.6/src/adaptors/mod.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

219
third_party/rust/itertools-0.7.6/src/adaptors/multi_product.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,219 @@
#![cfg(feature = "use_std")]
use size_hint;
use Itertools;
#[derive(Clone)]
/// An iterator adaptor that iterates over the cartesian product of
/// multiple iterators of type `I`.
///
/// An iterator element type is `Vec<I>`.
///
/// See [`.multi_cartesian_product()`](../trait.Itertools.html#method.multi_cartesian_product)
/// for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct MultiProduct<I>(Vec<MultiProductIter<I>>)
    where I: Iterator + Clone,
          I::Item: Clone;
/// Create a new cartesian product iterator over an arbitrary number
/// of iterators of the same type.
///
/// Iterator element is of type `Vec<H::Item::Item>`.
pub fn multi_cartesian_product<H>(iters: H) -> MultiProduct<<H::Item as IntoIterator>::IntoIter>
    where H: Iterator,
          H::Item: IntoIterator,
          <H::Item as IntoIterator>::IntoIter: Clone,
          <H::Item as IntoIterator>::Item: Clone
{
    MultiProduct(iters.map(|i| MultiProductIter::new(i.into_iter())).collect())
}
#[derive(Clone)]
/// Holds the state of a single iterator within a MultiProduct.
struct MultiProductIter<I>
    where I: Iterator + Clone,
          I::Item: Clone
{
    // Most recently yielded item of this sub-iterator; None before the
    // first call and after exhaustion.
    cur: Option<I::Item>,
    // The working iterator, consumed during iteration.
    iter: I,
    // Pristine clone used to restart this position when it wraps around.
    iter_orig: I,
}
/// Holds the current state during an iteration of a MultiProduct.
enum MultiProductIterState {
    StartOfIter,
    MidIter { on_first_iter: bool },
}
impl<I> MultiProduct<I>
    where I: Iterator + Clone,
          I::Item: Clone
{
    /// Iterates the rightmost iterator, then recursively iterates iterators
    /// to the left if necessary.
    ///
    /// Returns true if the iteration succeeded, else false.
    ///
    /// This is odometer-style advancement: the last position ticks every
    /// step; when it wraps it is reset from its pristine clone and the
    /// position to its left ticks once.
    fn iterate_last(
        multi_iters: &mut [MultiProductIter<I>],
        mut state: MultiProductIterState
    ) -> bool {
        use self::MultiProductIterState::*;
        if let Some((last, rest)) = multi_iters.split_last_mut() {
            let on_first_iter = match state {
                StartOfIter => {
                    // Before any element has been yielded, each position must
                    // be primed (advanced once) rather than ticked.
                    let on_first_iter = !last.in_progress();
                    state = MidIter { on_first_iter: on_first_iter };
                    on_first_iter
                },
                MidIter { on_first_iter } => on_first_iter
            };
            if !on_first_iter {
                last.iterate();
            }
            if last.in_progress() {
                true
            } else if MultiProduct::iterate_last(rest, state) {
                last.reset();
                last.iterate();
                // If iterator is None twice consecutively, then iterator is
                // empty; whole product is empty.
                last.in_progress()
            } else {
                false
            }
        } else {
            // Reached end of iterator list. On initialisation, return true.
            // At end of iteration (final iterator finishes), finish.
            match state {
                StartOfIter => false,
                MidIter { on_first_iter } => on_first_iter
            }
        }
    }
    /// Returns the unwrapped value of the next iteration.
    fn curr_iterator(&self) -> Vec<I::Item> {
        self.0.iter().map(|multi_iter| {
            multi_iter.cur.clone().unwrap()
        }).collect()
    }
    /// Returns true if iteration has started and has not yet finished; false
    /// otherwise.
    fn in_progress(&self) -> bool {
        if let Some(last) = self.0.last() {
            last.in_progress()
        } else {
            false
        }
    }
}
impl<I> MultiProductIter<I>
    where I: Iterator + Clone,
          I::Item: Clone
{
    fn new(iter: I) -> Self {
        MultiProductIter {
            cur: None,
            // Keep a pristine clone so this position can restart after
            // wrapping around.
            iter: iter.clone(),
            iter_orig: iter
        }
    }
    /// Iterate the managed iterator.
    fn iterate(&mut self) {
        self.cur = self.iter.next();
    }
    /// Reset the managed iterator.
    fn reset(&mut self) {
        self.iter = self.iter_orig.clone();
    }
    /// Returns true if the current iterator has been started and has not yet
    /// finished; false otherwise.
    fn in_progress(&self) -> bool {
        self.cur.is_some()
    }
}
impl<I> Iterator for MultiProduct<I>
    where I: Iterator + Clone,
          I::Item: Clone
{
    type Item = Vec<I::Item>;
    fn next(&mut self) -> Option<Self::Item> {
        if MultiProduct::iterate_last(
            &mut self.0,
            MultiProductIterState::StartOfIter
        ) {
            Some(self.curr_iterator())
        } else {
            None
        }
    }
    fn count(self) -> usize {
        // An empty list of iterators yields no elements.
        if self.0.len() == 0 {
            return 0;
        }
        // Not yet started: the count is the product of all sub-counts.
        if !self.in_progress() {
            return self.0.into_iter().fold(1, |acc, multi_iter| {
                acc * multi_iter.iter.count()
            });
        }
        // Mid-iteration: interpret the remaining positions as a mixed-radix
        // number (digit = elements left in `iter`, radix = `iter_orig` count).
        self.0.into_iter().fold(
            0,
            |acc, MultiProductIter { iter, iter_orig, cur: _ }| {
                let total_count = iter_orig.count();
                let cur_count = iter.count();
                acc * total_count + cur_count
            }
        )
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Not ExactSizeIterator because size may be larger than usize
        if self.0.len() == 0 {
            return (0, Some(0));
        }
        if !self.in_progress() {
            return self.0.iter().fold((1, Some(1)), |acc, multi_iter| {
                size_hint::mul(acc, multi_iter.iter.size_hint())
            });
        }
        // Same mixed-radix computation as `count`, in size-hint arithmetic.
        self.0.iter().fold(
            (0, Some(0)),
            |acc, &MultiProductIter { ref iter, ref iter_orig, cur: _ }| {
                let cur_size = iter.size_hint();
                let total_size = iter_orig.size_hint();
                size_hint::add(size_hint::mul(acc, total_size), cur_size)
            }
        )
    }
    fn last(self) -> Option<Self::Item> {
        let iter_count = self.0.len();
        // The last product element pairs the last element of every
        // sub-iterator; `while_some` stops if any sub-iterator is empty.
        let lasts: Self::Item = self.0.into_iter()
            .map(|multi_iter| multi_iter.iter.last())
            .while_some()
            .collect();
        if lasts.len() == iter_count {
            Some(lasts)
        } else {
            None
        }
    }
}

165
third_party/rust/itertools-0.7.6/src/combinations.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,165 @@
use std::ops::Index;
use std::fmt;
/// An iterator to iterate through all the `n`-length combinations in an iterator.
///
/// See [`.combinations()`](../trait.Itertools.html#method.combinations) for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Combinations<I: Iterator> {
    // Size of each emitted combination.
    n: usize,
    // Current combination as n increasing indices into `pool`.
    indices: Vec<usize>,
    // Lazily-filled buffer of the source iterator's elements.
    pool: LazyBuffer<I>,
    // True until the first combination has been emitted.
    first: bool,
}
impl<I> fmt::Debug for Combinations<I>
    where I: Iterator + fmt::Debug,
          I::Item: fmt::Debug,
{
    debug_fmt_fields!(Combinations, n, indices, pool, first);
}
/// Create a new `Combinations` from a clonable iterator.
pub fn combinations<I>(iter: I, n: usize) -> Combinations<I>
    where I: Iterator
{
    // Start with the lexicographically first combination: 0, 1, ..., n-1.
    let mut indices: Vec<usize> = Vec::with_capacity(n);
    for i in 0..n {
        indices.push(i);
    }
    // Prefetch the first n elements so that the first combination (if any)
    // is available.
    let mut pool: LazyBuffer<I> = LazyBuffer::new(iter);
    for _ in 0..n {
        if !pool.get_next() {
            break;
        }
    }
    Combinations {
        n: n,
        indices: indices,
        pool: pool,
        first: true,
    }
}
impl<I> Iterator for Combinations<I>
    where I: Iterator,
          I::Item: Clone
{
    type Item = Vec<I::Item>;
    fn next(&mut self) -> Option<Self::Item> {
        let mut pool_len = self.pool.len();
        if self.pool.is_done() {
            if pool_len == 0 || self.n > pool_len {
                return None;
            }
        }
        if self.first {
            // Emit the initial combination 0..n as prepared by the
            // constructor.
            self.first = false;
        } else if self.n == 0 {
            // The single empty combination was already emitted above.
            return None;
        } else {
            // Scan from the end, looking for an index to increment
            let mut i: usize = self.n - 1;
            // Check if we need to consume more from the iterator
            if self.indices[i] == pool_len - 1 && !self.pool.is_done() {
                if self.pool.get_next() {
                    pool_len += 1;
                }
            }
            // An index is "maxed out" when it sits at its highest possible
            // position (i + pool_len - n); skip those from the right.
            while self.indices[i] == i + pool_len - self.n {
                if i > 0 {
                    i -= 1;
                } else {
                    // Reached the last combination
                    return None;
                }
            }
            // Increment index, and reset the ones to its right
            self.indices[i] += 1;
            let mut j = i + 1;
            while j < self.n {
                self.indices[j] = self.indices[j - 1] + 1;
                j += 1;
            }
        }
        // Create result vector based on the indices
        let mut result = Vec::with_capacity(self.n);
        for i in self.indices.iter() {
            result.push(self.pool[*i].clone());
        }
        Some(result)
    }
}
// A buffer that pulls from an iterator on demand and remembers every
// element, giving indexed access to an iterator's prefix.
#[derive(Debug)]
struct LazyBuffer<I: Iterator> {
    it: I,
    // Set once `it` returns None; no further pulls are attempted.
    done: bool,
    buffer: Vec<I::Item>,
}
impl<I> LazyBuffer<I>
    where I: Iterator
{
    // Creates the buffer and eagerly pulls one element to learn whether the
    // iterator is empty.
    pub fn new(it: I) -> LazyBuffer<I> {
        let mut it = it;
        let mut buffer = Vec::new();
        let done;
        if let Some(first) = it.next() {
            buffer.push(first);
            done = false;
        } else {
            done = true;
        }
        LazyBuffer {
            it: it,
            done: done,
            buffer: buffer,
        }
    }
    // Number of elements buffered so far (not the source's total length).
    pub fn len(&self) -> usize {
        self.buffer.len()
    }
    pub fn is_done(&self) -> bool {
        self.done
    }
    // Pulls one more element into the buffer; returns false once the source
    // is exhausted.
    pub fn get_next(&mut self) -> bool {
        if self.done {
            return false;
        }
        let next_item = self.it.next();
        match next_item {
            Some(x) => {
                self.buffer.push(x);
                true
            }
            None => {
                self.done = true;
                false
            }
        }
    }
}
impl<I> Index<usize> for LazyBuffer<I>
    where I: Iterator,
          I::Item: Sized
{
    type Output = I::Item;
    // Indexes into the buffered prefix only; panics if the element at
    // `_index` has not been pulled yet (note: the parameter IS used despite
    // its underscore-prefixed name).
    fn index<'b>(&'b self, _index: usize) -> &'b I::Item {
        self.buffer.index(_index)
    }
}

22
third_party/rust/itertools-0.7.6/src/concat_impl.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,22 @@
use Itertools;
/// Combine all an iterator's elements into one element by using `Extend`.
///
/// `IntoIterator`-enabled version of `.concat()`
///
/// The first yielded item is extended with each following item; an empty
/// iterator produces `I::Item`'s default value.
///
/// ```rust
/// use itertools::concat;
///
/// let input = vec![vec![1], vec![2, 3], vec![4, 5, 6]];
/// assert_eq!(concat(input), vec![1, 2, 3, 4, 5, 6]);
/// ```
pub fn concat<I>(iterable: I) -> I::Item
    where I: IntoIterator,
          I::Item: Extend<<<I as IntoIterator>::Item as IntoIterator>::Item> + IntoIterator + Default
{
    let mut parts = iterable.into_iter();
    match parts.next() {
        Some(mut acc) => {
            for part in parts {
                acc.extend(part);
            }
            acc
        }
        None => Default::default(),
    }
}

67
third_party/rust/itertools-0.7.6/src/cons_tuples_impl.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,67 @@
// Generates `Iterator`/`DoubleEndedIterator` impls for `ConsTuples` over
// input items `((A, …, N), X)`, flattening them to `(A, …, N, X)`.
// The recursion peels one type parameter per step and stops when two remain,
// so impls exist for inner tuples of 2 up to 7 elements.
macro_rules! impl_cons_iter(
    ($_A:ident, $_B:ident, ) => (); // stop
    ($A:ident, $($B:ident,)*) => (
        impl_cons_iter!($($B,)*);
        #[allow(non_snake_case)]
        impl<X, Iter, $($B),*> Iterator for ConsTuples<Iter, (($($B,)*), X)>
            where Iter: Iterator<Item = (($($B,)*), X)>,
        {
            type Item = ($($B,)* X, );
            fn next(&mut self) -> Option<Self::Item> {
                self.iter.next().map(|(($($B,)*), x)| ($($B,)* x, ))
            }

            fn size_hint(&self) -> (usize, Option<usize>) {
                self.iter.size_hint()
            }

            fn fold<Acc, Fold>(self, accum: Acc, mut f: Fold) -> Acc
                where Fold: FnMut(Acc, Self::Item) -> Acc,
            {
                self.iter.fold(accum, move |acc, (($($B,)*), x)| f(acc, ($($B,)* x, )))
            }
        }

        #[allow(non_snake_case)]
        impl<X, Iter, $($B),*> DoubleEndedIterator for ConsTuples<Iter, (($($B,)*), X)>
            where Iter: DoubleEndedIterator<Item = (($($B,)*), X)>,
        {
            fn next_back(&mut self) -> Option<Self::Item> {
                // Bug fix: this previously delegated to `next()`, so reverse
                // iteration yielded elements from the *front* of the
                // underlying iterator. It must delegate to `next_back()`.
                self.iter.next_back().map(|(($($B,)*), x)| ($($B,)* x, ))
            }
        }
    );
);

impl_cons_iter!(A, B, C, D, E, F, G, H,);

/// An iterator that maps an iterator of tuples like
/// `((A, B), C)` to an iterator of `(A, B, C)`.
///
/// Used by the `iproduct!()` macro.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct ConsTuples<I, J>
    where I: Iterator<Item=J>,
{
    iter: I,
}

// Manual impl: cloning only requires `I: Clone`, no bound on the item `J`.
impl<I, J> Clone for ConsTuples<I, J>
    where I: Clone + Iterator<Item=J>,
{
    fn clone(&self) -> Self {
        ConsTuples {
            iter: self.iter.clone(),
        }
    }
}

/// Create an iterator that maps for example iterators of
/// `((A, B), C)` to `(A, B, C)`.
pub fn cons_tuples<I, J>(iterable: I) -> ConsTuples<I, J>
    where I: Iterator<Item=J>
{
    ConsTuples { iter: iterable.into_iter() }
}

61
third_party/rust/itertools-0.7.6/src/diff.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,61 @@
//! "Diff"ing iterators for caching elements to sequential collections without requiring the new
//! elements' iterator to be `Clone`.
//!
//! - [**Diff**](./enum.Diff.html) (produced by the [**diff_with**](./fn.diff_with.html) function)
//! describes the difference between two non-`Clone` iterators `I` and `J` after breaking ASAP from
//! a lock-step comparison.
use free::put_back;
use structs::PutBack;
/// A type returned by the [`diff_with`](./fn.diff_with.html) function.
///
/// `Diff` represents the way in which the elements yielded by the iterator `I` differ to some
/// iterator `J`.
///
/// Each variant carries `PutBack` iterators so the first diverging elements
/// are not lost: they have been pushed back onto the remainder.
pub enum Diff<I, J>
    where I: Iterator,
          J: Iterator
{
    /// The index of the first non-matching element along with both iterator's remaining elements
    /// starting with the first mis-match.
    FirstMismatch(usize, PutBack<I>, PutBack<J>),
    /// The total number of elements that were in `J` along with the remaining elements of `I`.
    Shorter(usize, PutBack<I>),
    /// The total number of elements that were in `I` along with the remaining elements of `J`.
    Longer(usize, PutBack<J>),
}
/// Compares every element yielded by both `i` and `j` with the given function in lock-step and
/// returns a `Diff` which describes how `j` differs from `i`.
///
/// Returns `None` when both iterators run out at the same time with all
/// elements equal. Otherwise:
///
/// - `Diff::Shorter(count, remaining_i)` — `j` ran out after `count` elements;
/// - `Diff::FirstMismatch(index, remaining_i, remaining_j)` — the elements at
///   `index` differed; both remainders start with the mismatching pair;
/// - `Diff::Longer(count, remaining_j)` — `i` ran out after `count` elements.
pub fn diff_with<I, J, F>(i: I, j: J, is_equal: F)
    -> Option<Diff<I::IntoIter, J::IntoIter>>
    where I: IntoIterator,
          J: IntoIterator,
          F: Fn(&I::Item, &J::Item) -> bool
{
    let mut i = i.into_iter();
    let mut j = j.into_iter();
    let mut index = 0;
    loop {
        // Advance the left side; when it is exhausted, fall through to the
        // `Longer`/equal decision below.
        let i_elem = match i.next() {
            Some(elem) => elem,
            None => break,
        };
        // The right side ran out first: `j` is shorter.
        let j_elem = match j.next() {
            None => return Some(Diff::Shorter(index, put_back(i).with_value(i_elem))),
            Some(elem) => elem,
        };
        if !is_equal(&i_elem, &j_elem) {
            let remaining_i = put_back(i).with_value(i_elem);
            let remaining_j = put_back(j).with_value(j_elem);
            return Some(Diff::FirstMismatch(index, remaining_i, remaining_j));
        }
        index += 1;
    }
    // `i` is exhausted; if `j` still has an element, `j` is longer.
    j.next().map(|j_elem| Diff::Longer(index, put_back(j).with_value(j_elem)))
}

10
third_party/rust/itertools-0.7.6/src/either_or_both.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,10 @@
/// Value that either holds a single A or B, or both.
///
/// A sum type over the three combinations of presence of an `A` and a `B`;
/// the "neither" case is deliberately not representable.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EitherOrBoth<A, B> {
    /// Both values are present.
    Both(A, B),
    /// Only the left value of type `A` is present.
    Left(A),
    /// Only the right value of type `B` is present.
    Right(B),
}

113
third_party/rust/itertools-0.7.6/src/format.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,113 @@
use std::fmt;
use std::cell::RefCell;
/// Format all iterator elements lazily, separated by `sep`.
///
/// The format value can only be formatted once, after that the iterator is
/// exhausted.
///
/// See [`.format_with()`](../trait.Itertools.html#method.format_with) for more information.
pub struct FormatWith<'a, I, F> {
    // Separator written between elements; may be empty.
    sep: &'a str,
    /// FormatWith uses interior mutability because Display::fmt takes &self.
    inner: RefCell<Option<(I, F)>>,
}
/// Format all iterator elements lazily, separated by `sep`.
///
/// The format value can only be formatted once, after that the iterator is
/// exhausted.
///
/// See [`.format()`](../trait.Itertools.html#method.format)
/// for more information.
#[derive(Clone)]
pub struct Format<'a, I> {
    // Separator written between elements; may be empty.
    sep: &'a str,
    /// Format uses interior mutability because Display::fmt takes &self.
    inner: RefCell<Option<I>>,
}
/// Create a `FormatWith` that lazily formats `iter`, calling `f` for each
/// element and writing `separator` between elements.
///
/// Used by [`.format_with()`](../trait.Itertools.html#method.format_with).
pub fn new_format<'a, I, F>(iter: I, separator: &'a str, f: F) -> FormatWith<'a, I, F>
    where I: Iterator,
          F: FnMut(I::Item, &mut FnMut(&fmt::Display) -> fmt::Result) -> fmt::Result
{
    FormatWith {
        sep: separator,
        inner: RefCell::new(Some((iter, f))),
    }
}
/// Create a `Format` that lazily formats `iter` with each element's own
/// formatting trait, writing `separator` between elements.
///
/// Used by [`.format()`](../trait.Itertools.html#method.format).
pub fn new_format_default<'a, I>(iter: I, separator: &'a str) -> Format<'a, I>
    where I: Iterator,
{
    Format {
        sep: separator,
        inner: RefCell::new(Some(iter)),
    }
}
impl<'a, I, F> fmt::Display for FormatWith<'a, I, F>
    where I: Iterator,
          F: FnMut(I::Item, &mut FnMut(&fmt::Display) -> fmt::Result) -> fmt::Result
{
    /// Write every remaining element, separated by `self.sep`.
    ///
    /// Consumes the stored iterator/closure pair; formatting the same value
    /// a second time panics.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let (mut iter, mut format) =
            self.inner.borrow_mut().take().expect("FormatWith: was already formatted once");
        match iter.next() {
            None => Ok(()),
            Some(first) => {
                // First element gets no leading separator.
                format(first, &mut |disp: &fmt::Display| disp.fmt(f))?;
                for elt in iter {
                    if !self.sep.is_empty() {
                        f.write_str(self.sep)?;
                    }
                    format(elt, &mut |disp: &fmt::Display| disp.fmt(f))?;
                }
                Ok(())
            }
        }
    }
}
impl<'a, I> Format<'a, I>
where I: Iterator,
{
fn format<F>(&self, f: &mut fmt::Formatter, mut cb: F) -> fmt::Result
where F: FnMut(&I::Item, &mut fmt::Formatter) -> fmt::Result,
{
let mut iter = match self.inner.borrow_mut().take() {
Some(t) => t,
None => panic!("Format: was already formatted once"),
};
if let Some(fst) = iter.next() {
try!(cb(&fst, f));
for elt in iter {
if self.sep.len() > 0 {
try!(f.write_str(self.sep));
}
try!(cb(&elt, f));
}
}
Ok(())
}
}
// Generate forwarding impls of the listed std formatting traits for
// `Format`, each funneling through `Format::format` with that trait's
// `fmt` function as the per-element callback.
macro_rules! impl_format {
    ($($fmt_trait:ident)*) => {
        $(
            impl<'a, I> fmt::$fmt_trait for Format<'a, I>
                where I: Iterator,
                      I::Item: fmt::$fmt_trait,
            {
                fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                    self.format(f, fmt::$fmt_trait::fmt)
                }
            }
        )*
    }
}

impl_format!{Display Debug
             UpperExp LowerExp UpperHex LowerHex Octal Binary Pointer}

231
third_party/rust/itertools-0.7.6/src/free.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,231 @@
//! Free functions that create iterator adaptors or call iterator methods.
//!
//! The benefit of free functions is that they accept any `IntoIterator` as
//! argument, so the resulting code may be easier to read.
#[cfg(feature = "use_std")]
use std::fmt::Display;
use std::iter::{self, Zip};
#[cfg(feature = "use_std")]
use Itertools;
pub use adaptors::{
interleave,
merge,
put_back,
};
#[cfg(feature = "use_std")]
pub use put_back_n_impl::put_back_n;
#[cfg(feature = "use_std")]
pub use multipeek_impl::multipeek;
#[cfg(feature = "use_std")]
pub use kmerge_impl::kmerge;
pub use zip_eq_impl::zip_eq;
pub use merge_join::merge_join_by;
#[cfg(feature = "use_std")]
pub use rciter_impl::rciter;
/// Iterate `iterable` with a running index.
///
/// `IntoIterator` enabled version of `.enumerate()`.
///
/// ```
/// use itertools::enumerate;
///
/// for (i, elt) in enumerate(&[1, 2, 3]) {
///     /* loop body */
/// }
/// ```
pub fn enumerate<I>(iterable: I) -> iter::Enumerate<I::IntoIter>
    where I: IntoIterator
{
    let inner = iterable.into_iter();
    inner.enumerate()
}

/// Iterate `iterable` in reverse.
///
/// `IntoIterator` enabled version of `.rev()`.
///
/// ```
/// use itertools::rev;
///
/// for elt in rev(&[1, 2, 3]) {
///     /* loop body */
/// }
/// ```
pub fn rev<I>(iterable: I) -> iter::Rev<I::IntoIter>
    where I: IntoIterator,
          I::IntoIter: DoubleEndedIterator
{
    let inner = iterable.into_iter();
    inner.rev()
}

/// Iterate `i` and `j` in lock step.
///
/// `IntoIterator` enabled version of `i.zip(j)`.
///
/// ```
/// use itertools::zip;
///
/// let data = [1, 2, 3, 4, 5];
/// for (a, b) in zip(&data, &data[1..]) {
///     /* loop body */
/// }
/// ```
pub fn zip<I, J>(i: I, j: J) -> Zip<I::IntoIter, J::IntoIter>
    where I: IntoIterator,
          J: IntoIterator
{
    let left = i.into_iter();
    left.zip(j)
}
/// Create an iterator that first iterates `i` and then `j`.
///
/// `IntoIterator` enabled version of `i.chain(j)`.
///
/// ```
/// use itertools::chain;
///
/// for elt in chain(&[1, 2, 3], &[4]) {
///     /* loop body */
/// }
/// ```
pub fn chain<I, J>(i: I, j: J) -> iter::Chain<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
    where I: IntoIterator,
          J: IntoIterator<Item = I::Item>
{
    let first = i.into_iter();
    first.chain(j)
}

/// Create an iterator that clones each element from &T to T
///
/// `IntoIterator` enabled version of `i.cloned()`.
///
/// ```
/// use itertools::cloned;
///
/// assert_eq!(cloned(b"abc").next(), Some(b'a'));
/// ```
pub fn cloned<'a, I, T: 'a>(iterable: I) -> iter::Cloned<I::IntoIter>
    where I: IntoIterator<Item=&'a T>,
          T: Clone,
{
    let refs = iterable.into_iter();
    refs.cloned()
}

/// Perform a fold operation over the iterable.
///
/// `IntoIterator` enabled version of `i.fold(init, f)`
///
/// ```
/// use itertools::fold;
///
/// assert_eq!(fold(&[1., 2., 3.], 0., |a, &b| f32::max(a, b)), 3.);
/// ```
pub fn fold<I, B, F>(iterable: I, init: B, f: F) -> B
    where I: IntoIterator,
          F: FnMut(B, I::Item) -> B
{
    let items = iterable.into_iter();
    items.fold(init, f)
}
/// Test whether the predicate holds for all elements in the iterable.
///
/// `IntoIterator` enabled version of `i.all(f)`
///
/// ```
/// use itertools::all;
///
/// assert!(all(&[1, 2, 3], |elt| *elt > 0));
/// ```
pub fn all<I, F>(iterable: I, f: F) -> bool
    where I: IntoIterator,
          F: FnMut(I::Item) -> bool
{
    let mut items = iterable.into_iter();
    items.all(f)
}

/// Test whether the predicate holds for any elements in the iterable.
///
/// `IntoIterator` enabled version of `i.any(f)`
///
/// ```
/// use itertools::any;
///
/// assert!(any(&[0, -1, 2], |elt| *elt > 0));
/// ```
pub fn any<I, F>(iterable: I, f: F) -> bool
    where I: IntoIterator,
          F: FnMut(I::Item) -> bool
{
    let mut items = iterable.into_iter();
    items.any(f)
}

/// Return the maximum value of the iterable.
///
/// `IntoIterator` enabled version of `i.max()`.
///
/// ```
/// use itertools::max;
///
/// assert_eq!(max(0..10), Some(9));
/// ```
pub fn max<I>(iterable: I) -> Option<I::Item>
    where I: IntoIterator,
          I::Item: Ord
{
    let items = iterable.into_iter();
    items.max()
}

/// Return the minimum value of the iterable.
///
/// `IntoIterator` enabled version of `i.min()`.
///
/// ```
/// use itertools::min;
///
/// assert_eq!(min(0..10), Some(0));
/// ```
pub fn min<I>(iterable: I) -> Option<I::Item>
    where I: IntoIterator,
          I::Item: Ord
{
    let items = iterable.into_iter();
    items.min()
}
/// Combine all iterator elements into one String, separated by `sep`.
///
/// `IntoIterator` enabled version of `iterable.join(sep)`.
///
/// ```
/// use itertools::join;
///
/// assert_eq!(join(&[1, 2, 3], ", "), "1, 2, 3");
/// ```
#[cfg(feature = "use_std")]
pub fn join<I>(iterable: I, sep: &str) -> String
    where I: IntoIterator,
          I::Item: Display
{
    // Inlined equivalent of `Itertools::join`: write each element with
    // `Display`, inserting `sep` between consecutive elements.
    let mut iter = iterable.into_iter();
    let mut result = String::new();
    if let Some(first) = iter.next() {
        result.push_str(&format!("{}", first));
        for elt in iter {
            result.push_str(sep);
            result.push_str(&format!("{}", elt));
        }
    }
    result
}
/// Collect all the iterable's elements into a sorted vector in ascending order.
///
/// `IntoIterator` enabled version of `iterable.sorted()`.
///
/// ```
/// use itertools::sorted;
/// use itertools::assert_equal;
///
/// assert_equal(sorted("rust".chars()), "rstu".chars());
/// ```
#[cfg(feature = "use_std")]
pub fn sorted<I>(iterable: I) -> Vec<I::Item>
    where I: IntoIterator,
          I::Item: Ord
{
    // Inlined equivalent of `Itertools::sorted`: collect, then stable sort.
    let mut v: Vec<I::Item> = iterable.into_iter().collect();
    v.sort();
    v
}

570
third_party/rust/itertools-0.7.6/src/groupbylazy.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,570 @@
use std::cell::{Cell, RefCell};
use std::vec;
/// A trait to unify FnMut for GroupBy with the chunk key in IntoChunks
trait KeyFunction<A> {
    type Key;
    /// Compute the grouping key for one element.
    fn call_mut(&mut self, arg: A) -> Self::Key;
}

/// Every `FnMut(A) -> K` closure is usable as a key function.
impl<'a, A, K, F: ?Sized> KeyFunction<A> for F
    where F: FnMut(A) -> K
{
    type Key = K;
    #[inline]
    fn call_mut(&mut self, arg: A) -> Self::Key {
        (*self)(arg)
    }
}

/// ChunkIndex acts like the grouping key function for IntoChunks
struct ChunkIndex {
    size: usize,
    index: usize,
    key: usize,
}

impl ChunkIndex {
    #[inline(always)]
    fn new(size: usize) -> Self {
        ChunkIndex { size: size, index: 0, key: 0 }
    }
}

impl<'a, A> KeyFunction<A> for ChunkIndex {
    type Key = usize;
    #[inline(always)]
    fn call_mut(&mut self, _elt: A) -> Self::Key {
        // Roll over to the next chunk key after `size` elements;
        // the element value itself is ignored.
        let chunk_full = self.index == self.size;
        if chunk_full {
            self.key += 1;
            self.index = 0;
        }
        self.index += 1;
        self.key
    }
}
/// Shared state for `GroupBy`/`IntoChunks`: the source iterator plus
/// per-group buffering bookkeeping.
struct GroupInner<K, I, F>
    where I: Iterator
{
    /// Key function (an `FnMut` for `GroupBy`, a `ChunkIndex` for chunks).
    key: F,
    /// The source iterator.
    iter: I,
    /// Key of the group currently being read, if any.
    current_key: Option<K>,
    /// Read-ahead element: the first element of the next group.
    current_elt: Option<I::Item>,
    /// flag set if iterator is exhausted
    done: bool,
    /// Index of group we are currently buffering or visiting
    top_group: usize,
    /// Least index for which we still have elements buffered
    oldest_buffered_group: usize,
    /// Group index for `buffer[0]` -- the slots
    /// bottom_group..oldest_buffered_group are unused and will be erased when
    /// that range is large enough.
    bottom_group: usize,
    /// Buffered groups, from `bottom_group` (index 0) to `top_group`.
    buffer: Vec<vec::IntoIter<I::Item>>,
    /// index of last group iter that was dropped, usize::MAX == none
    dropped_group: usize,
}
impl<K, I, F> GroupInner<K, I, F>
    where I: Iterator,
          F: for<'a> KeyFunction<&'a I::Item, Key=K>,
          K: PartialEq,
{
    /// `client`: Index of group that requests next element
    #[inline(always)]
    fn step(&mut self, client: usize) -> Option<I::Item> {
        /*
        println!("client={}, bottom_group={}, oldest_buffered_group={}, top_group={}, buffers=[{}]",
                 client, self.bottom_group, self.oldest_buffered_group,
                 self.top_group,
                 self.buffer.iter().map(|elt| elt.len()).format(", "));
        */
        // Dispatch on where `client` falls relative to the buffered range:
        // behind it (already drained), inside it (serve from buffer), at the
        // live frontier (step the source), or ahead of it (buffer forward).
        if client < self.oldest_buffered_group {
            None
        } else if client < self.top_group ||
            (client == self.top_group &&
             self.buffer.len() > self.top_group - self.bottom_group)
        {
            self.lookup_buffer(client)
        } else if self.done {
            None
        } else if self.top_group == client {
            self.step_current()
        } else {
            self.step_buffering(client)
        }
    }

    /// Serve one element of group `client` from the buffer, compacting the
    /// buffer's dead prefix when the oldest group runs out.
    #[inline(never)]
    fn lookup_buffer(&mut self, client: usize) -> Option<I::Item> {
        // if `bufidx` doesn't exist in self.buffer, it might be empty
        let bufidx = client - self.bottom_group;
        if client < self.oldest_buffered_group {
            return None;
        }
        let elt = self.buffer.get_mut(bufidx).and_then(|queue| queue.next());
        if elt.is_none() && client == self.oldest_buffered_group {
            // FIXME: VecDeque is unfortunately not zero allocation when empty,
            // so we do this job manually.
            // `bottom_group..oldest_buffered_group` is unused, and if it's large enough, erase it.
            self.oldest_buffered_group += 1;
            // skip forward further empty queues too
            while self.buffer.get(self.oldest_buffered_group - self.bottom_group)
                             .map_or(false, |buf| buf.len() == 0)
            {
                self.oldest_buffered_group += 1;
            }
            let nclear = self.oldest_buffered_group - self.bottom_group;
            // Only pay for the compaction when at least half the buffer is dead.
            if nclear > 0 && nclear >= self.buffer.len() / 2 {
                let mut i = 0;
                self.buffer.retain(|buf| {
                    i += 1;
                    debug_assert!(buf.len() == 0 || i > nclear);
                    i > nclear
                });
                self.bottom_group = self.oldest_buffered_group;
            }
        }
        elt
    }

    /// Take the next element from the iterator, and set the done
    /// flag if exhausted. Must not be called after done.
    #[inline(always)]
    fn next_element(&mut self) -> Option<I::Item> {
        debug_assert!(!self.done);
        match self.iter.next() {
            None => { self.done = true; None }
            otherwise => otherwise,
        }
    }

    #[inline(never)]
    fn step_buffering(&mut self, client: usize) -> Option<I::Item> {
        // requested a later group -- walk through the current group up to
        // the requested group index, and buffer the elements (unless
        // the group is marked as dropped).
        // Because the `Groups` iterator is always the first to request
        // each group index, client is the next index after top_group.
        debug_assert!(self.top_group + 1 == client);
        let mut group = Vec::new();
        // Any read-ahead element belongs to the group being buffered.
        if let Some(elt) = self.current_elt.take() {
            if self.top_group != self.dropped_group {
                group.push(elt);
            }
        }
        let mut first_elt = None; // first element of the next group
        while let Some(elt) = self.next_element() {
            let key = self.key.call_mut(&elt);
            match self.current_key.take() {
                None => {}
                Some(old_key) => if old_key != key {
                    // Key changed: `elt` opens the next group.
                    self.current_key = Some(key);
                    first_elt = Some(elt);
                    break;
                },
            }
            self.current_key = Some(key);
            if self.top_group != self.dropped_group {
                group.push(elt);
            }
        }
        if self.top_group != self.dropped_group {
            self.push_next_group(group);
        }
        if first_elt.is_some() {
            self.top_group += 1;
            debug_assert!(self.top_group == client);
        }
        first_elt
    }

    /// Append a finished group to the buffer, padding any skipped
    /// (dropped, never-buffered) slots with empty iterators.
    fn push_next_group(&mut self, group: Vec<I::Item>) {
        // When we add a new buffered group, fill up slots between oldest_buffered_group and top_group
        while self.top_group - self.bottom_group > self.buffer.len() {
            if self.buffer.is_empty() {
                self.bottom_group += 1;
                self.oldest_buffered_group += 1;
            } else {
                self.buffer.push(Vec::new().into_iter());
            }
        }
        self.buffer.push(group.into_iter());
        debug_assert!(self.top_group + 1 - self.bottom_group == self.buffer.len());
    }

    /// This is the immediate case, where we use no buffering
    #[inline]
    fn step_current(&mut self) -> Option<I::Item> {
        debug_assert!(!self.done);
        // Serve the read-ahead element first, if one was stashed.
        if let elt @ Some(..) = self.current_elt.take() {
            return elt;
        }
        match self.next_element() {
            None => None,
            Some(elt) => {
                let key = self.key.call_mut(&elt);
                match self.current_key.take() {
                    None => {}
                    Some(old_key) => if old_key != key {
                        // Key changed: stash `elt` for the next group and
                        // signal end-of-group with `None`.
                        self.current_key = Some(key);
                        self.current_elt = Some(elt);
                        self.top_group += 1;
                        return None;
                    },
                }
                self.current_key = Some(key);
                Some(elt)
            }
        }
    }

    /// Request the just started groups' key.
    ///
    /// `client`: Index of group
    ///
    /// **Panics** if no group key is available.
    fn group_key(&mut self, client: usize) -> K {
        // This can only be called after we have just returned the first
        // element of a group.
        // Perform this by simply buffering one more element, grabbing the
        // next key.
        debug_assert!(!self.done);
        debug_assert!(client == self.top_group);
        debug_assert!(self.current_key.is_some());
        debug_assert!(self.current_elt.is_none());
        let old_key = self.current_key.take().unwrap();
        if let Some(elt) = self.next_element() {
            let key = self.key.call_mut(&elt);
            if old_key != key {
                self.top_group += 1;
            }
            self.current_key = Some(key);
            self.current_elt = Some(elt);
        }
        old_key
    }
}
impl<K, I, F> GroupInner<K, I, F>
    where I: Iterator,
{
    /// Called when a group is dropped
    fn drop_group(&mut self, client: usize) {
        // It's only useful to track the maximal index
        // (`!0`, i.e. usize::MAX, is the "nothing dropped yet" sentinel).
        if self.dropped_group == !0 || client > self.dropped_group {
            self.dropped_group = client;
        }
    }
}
/// `GroupBy` is the storage for the lazy grouping operation.
///
/// If the groups are consumed in their original order, or if each
/// group is dropped without keeping it around, then `GroupBy` uses
/// no allocations. It needs allocations only if several group iterators
/// are alive at the same time.
///
/// This type implements `IntoIterator` (it is **not** an iterator
/// itself), because the group iterators need to borrow from this
/// value. It should be stored in a local variable or temporary and
/// iterated.
///
/// See [`.group_by()`](../trait.Itertools.html#method.group_by) for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct GroupBy<K, I, F>
    where I: Iterator,
{
    // Shared buffering state, borrowed mutably by each group via RefCell.
    inner: RefCell<GroupInner<K, I, F>>,
    // the group iterator's current index. Keep this in the main value
    // so that simultaneous iterators all use the same state.
    index: Cell<usize>,
}
/// Create a new `GroupBy` over `iter`, grouping elements by the key
/// computed by `f`.
pub fn new<K, J, F>(iter: J, f: F) -> GroupBy<K, J::IntoIter, F>
    where J: IntoIterator,
          F: FnMut(&J::Item) -> K,
{
    GroupBy {
        inner: RefCell::new(GroupInner {
            key: f,
            iter: iter.into_iter(),
            current_key: None,
            current_elt: None,
            done: false,
            top_group: 0,
            oldest_buffered_group: 0,
            bottom_group: 0,
            buffer: Vec::new(),
            // `!0` (usize::MAX) means "no group dropped yet".
            dropped_group: !0,
        }),
        index: Cell::new(0),
    }
}
impl<K, I, F> GroupBy<K, I, F>
    where I: Iterator,
{
    /// `client`: Index of group that requests next element
    fn step(&self, client: usize) -> Option<I::Item>
        where F: FnMut(&I::Item) -> K,
              K: PartialEq,
    {
        // Forward to the shared state; `&self` suffices thanks to RefCell.
        self.inner.borrow_mut().step(client)
    }

    /// `client`: Index of group
    fn drop_group(&self, client: usize) {
        self.inner.borrow_mut().drop_group(client)
    }
}
// Iteration borrows the `GroupBy` so that several `Group`s can share the
// buffering state stored in `inner`.
impl<'a, K, I, F> IntoIterator for &'a GroupBy<K, I, F>
    where I: Iterator,
          I::Item: 'a,
          F: FnMut(&I::Item) -> K,
          K: PartialEq
{
    type Item = (K, Group<'a, K, I, F>);
    type IntoIter = Groups<'a, K, I, F>;

    fn into_iter(self) -> Self::IntoIter {
        Groups { parent: self }
    }
}
/// An iterator that yields the Group iterators.
///
/// Iterator element type is `(K, Group)`:
/// the group's key `K` and the group's iterator.
///
/// See [`.group_by()`](../trait.Itertools.html#method.group_by) for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Groups<'a, K: 'a, I: 'a, F: 'a>
    where I: Iterator,
          I::Item: 'a
{
    // Borrow of the `GroupBy` holding the shared state.
    parent: &'a GroupBy<K, I, F>,
}
impl<'a, K, I, F> Iterator for Groups<'a, K, I, F>
    where I: Iterator,
          I::Item: 'a,
          F: FnMut(&I::Item) -> K,
          K: PartialEq
{
    type Item = (K, Group<'a, K, I, F>);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Claim the next group index; `Groups` is always the first client
        // to request any given index.
        let index = self.parent.index.get();
        self.parent.index.set(index + 1);
        let inner = &mut *self.parent.inner.borrow_mut();
        // `step` yields the group's first element; `group_key` then reads
        // the key that was computed for it.
        inner.step(index).map(|elt| {
            let key = inner.group_key(index);
            (key, Group {
                parent: self.parent,
                index: index,
                first: Some(elt),
            })
        })
    }
}
/// An iterator for the elements in a single group.
///
/// Iterator element type is `I::Item`.
pub struct Group<'a, K: 'a, I: 'a, F: 'a>
    where I: Iterator,
          I::Item: 'a,
{
    parent: &'a GroupBy<K, I, F>,
    // Which group index this iterator serves.
    index: usize,
    // First element of the group, pre-fetched by `Groups::next`.
    first: Option<I::Item>,
}

impl<'a, K, I, F> Drop for Group<'a, K, I, F>
    where I: Iterator,
          I::Item: 'a,
{
    fn drop(&mut self) {
        // Tell the shared state this group no longer needs buffering.
        self.parent.drop_group(self.index);
    }
}
impl<'a, K, I, F> Iterator for Group<'a, K, I, F>
    where I: Iterator,
          I::Item: 'a,
          F: FnMut(&I::Item) -> K,
          K: PartialEq,
{
    type Item = I::Item;

    /// Serve the pre-fetched first element, then pull the rest of the
    /// group from the shared parent state.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first.take() {
            Some(elt) => Some(elt),
            None => self.parent.step(self.index),
        }
    }
}
///// IntoChunks /////

/// Create a new `IntoChunks`: partition `iter` into consecutive runs of
/// `size` elements, reusing the `GroupInner` machinery with a counting
/// key function (`ChunkIndex`).
pub fn new_chunks<J>(iter: J, size: usize) -> IntoChunks<J::IntoIter>
    where J: IntoIterator,
{
    IntoChunks {
        inner: RefCell::new(GroupInner {
            key: ChunkIndex::new(size),
            iter: iter.into_iter(),
            current_key: None,
            current_elt: None,
            done: false,
            top_group: 0,
            oldest_buffered_group: 0,
            bottom_group: 0,
            buffer: Vec::new(),
            // `!0` (usize::MAX) means "no chunk dropped yet".
            dropped_group: !0,
        }),
        index: Cell::new(0),
    }
}
/// `ChunkLazy` is the storage for a lazy chunking operation.
///
/// `IntoChunks` behaves just like `GroupBy`: it is iterable, and
/// it only buffers if several chunk iterators are alive at the same time.
///
/// This type implements `IntoIterator` (it is **not** an iterator
/// itself), because the chunk iterators need to borrow from this
/// value. It should be stored in a local variable or temporary and
/// iterated.
///
/// Iterator element type is `Chunk`, each chunk's iterator.
///
/// See [`.chunks()`](../trait.Itertools.html#method.chunks) for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct IntoChunks<I>
    where I: Iterator,
{
    // Shared buffering state; the chunk key is the counting `ChunkIndex`.
    inner: RefCell<GroupInner<usize, I, ChunkIndex>>,
    // the chunk iterator's current index. Keep this in the main value
    // so that simultaneous iterators all use the same state.
    index: Cell<usize>,
}
impl<I> IntoChunks<I>
    where I: Iterator,
{
    /// `client`: Index of chunk that requests next element
    fn step(&self, client: usize) -> Option<I::Item> {
        // Forward to the shared state; `&self` suffices thanks to RefCell.
        self.inner.borrow_mut().step(client)
    }

    /// `client`: Index of chunk
    fn drop_group(&self, client: usize) {
        self.inner.borrow_mut().drop_group(client)
    }
}
// Iteration borrows the `IntoChunks` so that several `Chunk`s can share
// the buffering state stored in `inner`.
impl<'a, I> IntoIterator for &'a IntoChunks<I>
    where I: Iterator,
          I::Item: 'a,
{
    type Item = Chunk<'a, I>;
    type IntoIter = Chunks<'a, I>;

    fn into_iter(self) -> Self::IntoIter {
        Chunks {
            parent: self,
        }
    }
}
/// An iterator that yields the Chunk iterators.
///
/// Iterator element type is `Chunk`.
///
/// See [`.chunks()`](../trait.Itertools.html#method.chunks) for more information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Chunks<'a, I: 'a>
    where I: Iterator,
          I::Item: 'a,
{
    // Borrow of the `IntoChunks` holding the shared state.
    parent: &'a IntoChunks<I>,
}
impl<'a, I> Iterator for Chunks<'a, I>
    where I: Iterator,
          I::Item: 'a,
{
    type Item = Chunk<'a, I>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Claim the next chunk index; the counter lives on the parent so
        // simultaneous iterators agree on the numbering.
        let index = self.parent.index.get();
        self.parent.index.set(index + 1);
        let inner = &mut *self.parent.inner.borrow_mut();
        // `step` yields the chunk's first element, which is handed to the
        // `Chunk` so it is not lost.
        inner.step(index).map(|elt| {
            Chunk {
                parent: self.parent,
                index: index,
                first: Some(elt),
            }
        })
    }
}
/// An iterator for the elements in a single chunk.
///
/// Iterator element type is `I::Item`.
pub struct Chunk<'a, I: 'a>
    where I: Iterator,
          I::Item: 'a,
{
    parent: &'a IntoChunks<I>,
    // Which chunk index this iterator serves.
    index: usize,
    // First element of the chunk, pre-fetched by `Chunks::next`.
    first: Option<I::Item>,
}

impl<'a, I> Drop for Chunk<'a, I>
    where I: Iterator,
          I::Item: 'a,
{
    fn drop(&mut self) {
        // Tell the shared state this chunk no longer needs buffering.
        self.parent.drop_group(self.index);
    }
}
impl<'a, I> Iterator for Chunk<'a, I>
    where I: Iterator,
          I::Item: 'a,
{
    type Item = I::Item;

    /// Serve the pre-fetched first element, then pull the rest of the
    /// chunk from the shared parent state.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first.take() {
            Some(elt) => Some(elt),
            None => self.parent.step(self.index),
        }
    }
}

14
third_party/rust/itertools-0.7.6/src/impl_macros.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,14 @@
//!
//! Implementation's internal macros
// Generate a `fmt::Debug`-style `fmt` method body for type `$tyname`,
// printing the listed fields (dotted paths allowed) via `debug_struct`.
macro_rules! debug_fmt_fields {
    ($tyname:ident, $($($field:ident).+),*) => {
        fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
            f.debug_struct(stringify!($tyname))
                $(
                    .field(stringify!($($field).+), &self.$($field).+)
                )*
                .finish()
        }
    }
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше