Merge mozilla-central to autoland. a=merge CLOSED TREE

This commit is contained in:
Brindusan Cristian 2018-10-14 12:28:55 +03:00
Parent 5e9b16a94e 3a54520d8d
Commit 25e58a63c5
282 changed files: 15709 additions and 66585 deletions

Cargo.lock (generated)
View file

@ -72,16 +72,6 @@ name = "atomic_refcell"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "atty"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "atty"
version = "0.2.11"
@ -200,6 +190,19 @@ name = "binary-space-partition"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "binast"
version = "0.1.1"
dependencies = [
"binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bincode"
version = "1.0.0"
@ -229,39 +232,26 @@ dependencies = [
[[package]]
name = "binjs_meta"
version = "0.3.10"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"Inflector 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "binsource"
version = "0.1.0"
dependencies = [
"binjs_meta 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bit-set"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bit-vec"
version = "0.4.4"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -283,6 +273,15 @@ dependencies = [
"constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "block-buffer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "boxfnonce"
version = "0.0.3"
@ -293,6 +292,11 @@ name = "build_const"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byte-tools"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "1.2.1"
@ -711,6 +715,14 @@ name = "diff"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "digest"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "dirs"
version = "1.0.4"
@ -772,8 +784,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ena"
version = "0.5.0"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "encoding_c"
@ -847,6 +862,11 @@ dependencies = [
"synstructure 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "fake-simd"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "fallible"
version = "0.0.1"
@ -979,6 +999,14 @@ dependencies = [
"style_traits 0.0.1",
]
[[package]]
name = "generic-array"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "gkrust"
version = "0.1.0"
@ -1040,7 +1068,7 @@ dependencies = [
[[package]]
name = "gleam"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"gl_generator 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1221,48 +1249,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lalrpop"
version = "0.15.1"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-snap 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lalrpop-intern"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lalrpop-snap"
version = "0.15.1"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1271,7 +1295,7 @@ dependencies = [
[[package]]
name = "lalrpop-util"
version = "0.15.1"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -1773,7 +1797,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "petgraph"
version = "0.4.11"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2214,6 +2238,17 @@ dependencies = [
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "sha2"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "simd"
version = "0.2.3"
@ -2714,6 +2749,11 @@ name = "try-lock"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "typenum"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "u2fhid"
version = "0.2.2"
@ -2873,11 +2913,11 @@ dependencies = [
[[package]]
name = "webidl"
version = "0.6.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lalrpop 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2896,7 +2936,7 @@ dependencies = [
"euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
"freetype 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2940,7 +2980,7 @@ dependencies = [
"euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
"foreign-types 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"nsstring 0.1.0",
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3078,7 +3118,6 @@ dependencies = [
"checksum arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0ef4a9820019a0c91d918918c93dc71d469f581a49b47ddc1d285d4270bbe2"
"checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2"
"checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21"
"checksum atty 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d0fd4c0631f06448cc45a6bbb3b710ebb7ff8ccb96a0800c994afe23a70d5df2"
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
"checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
"checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
@ -3086,14 +3125,16 @@ dependencies = [
"checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
"checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
"checksum bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eac4ed5f2de9efc3c87cb722468fa49d0763e98f999d539bfc5e452c13d85c91"
"checksum binjs_meta 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "cc0956bac41c458cf38340699dbb54c2220c91cdbfa33be19670fe69e0a6ac9b"
"checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
"checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f"
"checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"
"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
"checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
"checksum bitreader 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "80b13e2ab064ff3aa0bdbf1eff533f9822dc37899821f5f98c67f263eab51707"
"checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400"
"checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
"checksum boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8380105befe91099e6f69206164072c05bc92427ff6aa8a5171388317346dd75"
"checksum build_const 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e90dc84f5e62d2ebe7676b83c22d33b6db8bd27340fb6ffbff0a364efa0cb9c9"
"checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"
"checksum byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "652805b7e73fada9d85e9a6682a4abd490cb52d96aeecc12e33a0de34dfd0d23"
"checksum bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e178b8e0e239e844b083d5a0d4a156b2654e67f9f80144d48398fcd736a24fb8"
"checksum bzip2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3eafc42c44e0d827de6b1c131175098fe7fb53b8ce8a47e65cb3ea94688be24"
@ -3139,13 +3180,14 @@ dependencies = [
"checksum darling_macro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb69a38fdeaeaf3db712e1df170de67ee9dfc24fb88ca3e9d21e703ec25a4d8e"
"checksum devd-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e7c9ac481c38baf400d3b732e4a06850dfaa491d1b6379a249d9d40d14c2434c"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
"checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d"
"checksum dwrote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "30a998e9ff70cd208ccdc4f864e998688bf61d7b897dccec8e17a884d17358bf"
"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
"checksum ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cabe5a5078ac8c506d3e4430763b1ba9b609b1286913e7d08e581d1c2de9b7e5"
"checksum ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"
"checksum encoding_c 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "769ecb8b33323998e482b218c0d13cd64c267609023b4b7ec3ee740714c318ee"
"checksum encoding_rs 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f1a79fa56c329a5b087de13748054fb3b974c4a672c12c71f0b66e35c5addec5"
"checksum env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0561146661ae44c579e993456bc76d11ce1e0c7d745e57b2fa7146b6e49fa2ad"
@ -3153,6 +3195,7 @@ dependencies = [
"checksum euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "70a2ebdf55fb9d6329046e026329a55ef8fbaae5ea833f56e170beb3125a8a5f"
"checksum failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7efb22686e4a466b1ec1a15c2898f91fa9cb340452496dca654032de20ff95b9"
"checksum failure_derive 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "946d0e98a50d9831f5d589038d2ca7f8f455b1c21028c0db0e84116a12696426"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
"checksum fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478"
"checksum flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fac2277e84e5e858483756647a9d0aa8d9a2b7cba517fd84325a0aaa69a0909"
"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
@ -3165,8 +3208,9 @@ dependencies = [
"checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4"
"checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
"checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum gl_generator 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a795170cbd85b5a7baa58d6d7525cae6a03e486859860c220f7ebbbdd379d0a"
"checksum gleam 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ba6539d49223f6bca4f076d9490c001bdbfe07d59cb0ad4079033c75bdc92d"
"checksum gleam 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2260952cc0393ca6f183e1a91a035c65c85ddb02505f3d53e5a775eb05946f44"
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum goblin 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5911d7df7b8f65ab676c5327b50acea29d3c6a1a4ad05e444cf5dce321b26db2"
"checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c"
@ -3182,10 +3226,9 @@ dependencies = [
"checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum khronos_api 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "037ab472c33f67b5fbd3e9163a2645319e5356fcd355efa6d4eb7fff4bbcb554"
"checksum lalrpop 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88035943c3cfbb897a499a556212b2b053574f32b4238b71b61625bc470f80aa"
"checksum lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4fd87be4a815fd373e02773983940f0d75fb26fde8c098e9e45f7af03154c0"
"checksum lalrpop-snap 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f244285324e4e33d486910b66fd3b7cb37e2072c5bf63319f506fe99ed72650"
"checksum lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "de408fd50dea8ad7a77107144983a25c7fdabf5f8faf707a6e020d68874ed06c"
"checksum lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f7014afd5642680074fd5dcc624d544f9eabfa281cba2c3ac56c3db6d21ad1b"
"checksum lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0b85aa455529344133d7ecaaac04c01ed87f459deeaa0fe5422885e2095d8cdc"
"checksum lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2400aeebcd11259370d038c24821b93218dd2f33a53f53e9c8fcccca70be6696"
"checksum lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e6412c5e2ad9584b0b8e979393122026cdd6d2a80b933f890dcd694ddbe73739"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a6f08839bc70ef4a3fe1d566d5350f519c5912ea86be0df1740a7d247c7fc0ef"
@ -3231,7 +3274,7 @@ dependencies = [
"checksum parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa"
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
"checksum petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7e5234c228fbfa874c86a77f685886127f82e0aef602ad1d48333fcac6ad61"
"checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f"
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
@ -3279,6 +3322,7 @@ dependencies = [
"checksum serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adb6e51a6b3696b301bc221d785f898b4457c619b51d7ce195a6d20baecb37b3"
"checksum serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)" = "<none>"
"checksum serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)" = "44dd2cfde475037451fa99b7e5df77aa3cfd1536575fa8e7a538ab36dcde49ae"
"checksum sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0"
"checksum simd 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0048b17eb9577ac545c61d85c3559b41dfb4cbea41c9bd9ca6a4f73ff05fda84"
"checksum siphasher 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ffc669b726f2bc9a3bcff66e5e23b56ba6bf70e22a34c3d7b6d0b3450b65b84"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
@ -3323,6 +3367,7 @@ dependencies = [
"checksum tokio-uds 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "65ae5d255ce739e8537221ed2942e0445f4b3b813daebac1c0050ddaaa3587f9"
"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e"
"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
"checksum uluru 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d2606e9192f308ddc4f0b3c5d1bf3400e28a70fff956e9d9f46d23b094746d9f"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
@ -3341,7 +3386,7 @@ dependencies = [
"checksum walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "63636bd0eb3d00ccb8b9036381b526efac53caf112b7783b730ab3f8e44da369"
"checksum want 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "797464475f30ddb8830cc529aaaae648d581f99e2036a928877dfde027ddf6b3"
"checksum wasmparser 0.17.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fed18a63a6796175be2254fccca1da4e8b8fec0abca37ad155aea345feb50798"
"checksum webidl 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc14e4b71f94b5bb4c6d696e3b3be4d2e9ee6750a60870ecae09ff7138a131a7"
"checksum webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0f807f7488d680893f7188aa09d7672a3a0a8461975a098a2edf0a52e3fee29"
"checksum which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4be6cfa54dab45266e98b5d7be2f8ce959ddd49abd141a05d52dce4b07f803bb"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"

View file

@ -95,6 +95,7 @@ skip-if = (verify && (os == 'linux' || os == 'mac'))
[browser_ext_devtools_page.js]
[browser_ext_devtools_panel.js]
[browser_ext_devtools_panels_elements.js]
skip-if = true # Bug 1393760
[browser_ext_devtools_panels_elements_sidebar.js]
support-files =
../../../../../devtools/client/inspector/extensions/test/head_devtools_inspector_sidebar.js

View file

@ -21,7 +21,7 @@ const FILE1 = createFilePath("346337_test1.file");
const FILE2 = createFilePath("346337_test2.file");
const FIELDS = {
"//input[@name='input']": Date.now().toString(16),
"//input[@name='input']": Date.now().toString(),
"//input[@name='spaced 1']": Math.random().toString(),
"//input[3]": "three",
"//input[@type='checkbox']": true,

View file

@ -82,6 +82,7 @@ skip-if = os == "mac" # Full keyboard navigation on OSX only works if Full Keybo
[browser_inspector_highlighter-cssshape_02.js]
[browser_inspector_highlighter-cssshape_03.js]
[browser_inspector_highlighter-cssshape_04.js]
skip-if = (os == 'win' && (debug||asan)) || (os == 'linux' && bits == 64 && !debug) # Bug 1453214
[browser_inspector_highlighter-cssshape_05.js]
[browser_inspector_highlighter-cssshape_06-scale.js]
[browser_inspector_highlighter-cssshape_06-translate.js]

View file

@ -8851,6 +8851,7 @@ exports.CSS_PROPERTIES = {
"supports": [],
"values": [
"capitalize",
"full-size-kana",
"full-width",
"inherit",
"initial",

View file

@ -27,7 +27,7 @@ byteorder = "1.0"
cfg-if = "0.1.2"
euclid = "0.19"
fxhash = "0.2.1"
gleam = "0.6.2"
gleam = "0.6.3"
image = { optional = true, version = "0.19" }
lazy_static = "1"
log = "0.4"

View file

@ -123,7 +123,7 @@ impl DebugRenderer {
let line_vao = device.create_vao(&DESC_COLOR);
let tri_vao = device.create_vao(&DESC_COLOR);
let font_texture = device.create_texture::<u8>(
let font_texture = device.create_texture(
TextureTarget::Array,
ImageFormat::R8,
debug_font_data::BMP_WIDTH,
@ -131,7 +131,10 @@ impl DebugRenderer {
TextureFilter::Linear,
None,
1,
Some(&debug_font_data::FONT_BITMAP),
);
device.upload_texture_immediate(
&font_texture,
&debug_font_data::FONT_BITMAP
);
Ok(DebugRenderer {

View file

@ -14,6 +14,7 @@ use internal_types::{FastHashMap, RenderTargetInfo};
use log::Level;
use smallvec::SmallVec;
use std::cell::RefCell;
use std::cmp;
use std::fs::File;
use std::io::Read;
use std::marker::PhantomData;
@ -45,12 +46,6 @@ impl Add<usize> for FrameId {
}
}
const GL_FORMAT_RGBA: gl::GLuint = gl::RGBA;
const GL_FORMAT_BGRA_GL: gl::GLuint = gl::BGRA;
const GL_FORMAT_BGRA_GLES: gl::GLuint = gl::BGRA_EXT;
const SHADER_VERSION_GL: &str = "#version 150\n";
const SHADER_VERSION_GLES: &str = "#version 300 es\n";
@ -737,7 +732,8 @@ pub struct Device {
#[cfg(feature = "debug_renderer")]
capabilities: Capabilities,
bgra_format: gl::GLuint,
bgra_format_internal: gl::GLuint,
bgra_format_external: gl::GLuint,
// debug
inside_frame: bool,
@ -753,6 +749,12 @@ pub struct Device {
// frames and GPU frames.
frame_id: FrameId,
/// Whether glTexStorage* is supported. We prefer this over glTexImage*
/// because it guarantees that mipmaps won't be generated (which they
/// otherwise are on some drivers, particularly ANGLE). If it's not
/// supported, we fall back to glTexImage*.
supports_texture_storage: bool,
// GL extensions
extensions: Vec<String>,
}
@ -781,14 +783,33 @@ impl Device {
extensions.push(gl.get_string_i(gl::EXTENSIONS, i));
}
// Our common-case image data in Firefox is BGRA, so we make an effort
// to use BGRA as the internal texture storage format to avoid the need
// to swizzle during upload. Currently we only do this on GLES (and thus
// for Windows, via ANGLE).
//
// On Mac, Apple docs [1] claim that BGRA is a more efficient internal
// format, so we may want to consider doing that at some point, since it
// would give us both a more efficient internal format and avoid the
// swizzling in the common case.
//
// We also need our internal format types to be sized, since glTexStorage*
// will reject non-sized internal format types.
//
// [1] https://developer.apple.com/library/archive/documentation/
// GraphicsImaging/Conceptual/OpenGL-MacProgGuide/opengl_texturedata/
// opengl_texturedata.html#//apple_ref/doc/uid/TP40001987-CH407-SW22
let supports_bgra = supports_extension(&extensions, "GL_EXT_texture_format_BGRA8888");
let bgra_format = match gl.get_type() {
gl::GlType::Gl => GL_FORMAT_BGRA_GL,
gl::GlType::Gles => if supports_bgra {
GL_FORMAT_BGRA_GLES
} else {
GL_FORMAT_RGBA
}
let (bgra_format_internal, bgra_format_external) = if supports_bgra {
assert_eq!(gl.get_type(), gl::GlType::Gles, "gleam only detects bgra on gles");
(gl::BGRA8_EXT, gl::BGRA_EXT)
} else {
(gl::RGBA8, gl::BGRA)
};
let supports_texture_storage = match gl.get_type() {
gl::GlType::Gl => supports_extension(&extensions, "GL_ARB_texture_storage"),
gl::GlType::Gles => true,
};
Device {
@ -805,7 +826,8 @@ impl Device {
supports_multisampling: false, //TODO
},
bgra_format,
bgra_format_internal,
bgra_format_external,
bound_textures: [0; 16],
bound_program: 0,
@ -821,6 +843,7 @@ impl Device {
cached_programs,
frame_id: FrameId(0),
extensions,
supports_texture_storage,
}
}
@ -1182,7 +1205,7 @@ impl Device {
}
}
pub fn create_texture<T: Texel>(
pub fn create_texture(
&mut self,
target: TextureTarget,
format: ImageFormat,
@ -1191,7 +1214,6 @@ impl Device {
filter: TextureFilter,
render_target: Option<RenderTargetInfo>,
layer_count: i32,
pixels: Option<&[T]>,
) -> Texture {
debug_assert!(self.inside_frame);
@ -1220,36 +1242,69 @@ impl Device {
// Allocate storage.
let desc = self.gl_describe_format(texture.format);
match texture.target {
gl::TEXTURE_2D_ARRAY => {
let is_array = match texture.target {
gl::TEXTURE_2D_ARRAY => true,
gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => false,
_ => panic!("BUG: Unexpected texture target!"),
};
assert!(is_array || texture.layer_count == 1);
// Firefox doesn't use mipmaps, but Servo uses them for standalone image
// textures larger than 512 pixels. This is the only case where
// we set the filter to trilinear.
let mipmap_levels = if texture.filter == TextureFilter::Trilinear {
let max_dimension = cmp::max(width, height);
((max_dimension) as f64).log2() as gl::GLint + 1
} else {
1
};
// Use glTexStorage where available, since it avoids allocating
// unnecessary mipmap storage and generally improves performance with
// stronger invariants.
match (self.supports_texture_storage, is_array) {
(true, true) =>
self.gl.tex_storage_3d(
gl::TEXTURE_2D_ARRAY,
mipmap_levels,
desc.internal,
texture.width as gl::GLint,
texture.height as gl::GLint,
texture.layer_count,
),
(true, false) =>
self.gl.tex_storage_2d(
texture.target,
mipmap_levels,
desc.internal,
texture.width as gl::GLint,
texture.height as gl::GLint,
),
(false, true) =>
self.gl.tex_image_3d(
gl::TEXTURE_2D_ARRAY,
0,
desc.internal,
texture.width as _,
texture.height as _,
desc.internal as gl::GLint,
texture.width as gl::GLint,
texture.height as gl::GLint,
texture.layer_count,
0,
desc.external,
desc.pixel_type,
pixels.map(texels_to_u8_slice),
)
}
gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES => {
assert_eq!(texture.layer_count, 1);
None,
),
(false, false) =>
self.gl.tex_image_2d(
texture.target,
0,
desc.internal,
texture.width as _,
texture.height as _,
desc.internal as gl::GLint,
texture.width as gl::GLint,
texture.height as gl::GLint,
0,
desc.external,
desc.pixel_type,
pixels.map(texels_to_u8_slice),
)
},
_ => panic!("BUG: Unexpected texture target!"),
None,
),
}
// Set up FBOs, if required.
@ -1302,6 +1357,26 @@ impl Device {
self.bind_read_target(None);
}
/// Notifies the device that the contents of a render target are no longer
/// needed.
pub fn invalidate_render_target(&mut self, texture: &Texture) {
let attachments: &[gl::GLenum] = if texture.has_depth() {
&[gl::COLOR_ATTACHMENT0, gl::DEPTH_ATTACHMENT]
} else {
&[gl::COLOR_ATTACHMENT0]
};
let original_bound_fbo = self.bound_draw_fbo;
for fbo_id in texture.fbo_ids.iter() {
// Note: The invalidate extension may not be supported, in which
// case this is a no-op. That's ok though, because it's just a
// hint.
self.bind_external_draw_target(*fbo_id);
self.gl.invalidate_framebuffer(gl::FRAMEBUFFER, attachments);
}
self.bind_external_draw_target(original_bound_fbo);
}
/// Notifies the device that a render target is about to be reused.
///
/// This method adds or removes a depth target as necessary.
@ -1579,7 +1654,7 @@ impl Device {
TextureUploader {
target: UploadTarget {
gl: &*self.gl,
bgra_format: self.bgra_format,
bgra_format: self.bgra_format_external,
texture,
},
buffer,
@ -1587,6 +1662,45 @@ impl Device {
}
}
/// Performs an immediate (non-PBO) texture upload.
pub fn upload_texture_immediate<T: Texel>(
&mut self,
texture: &Texture,
pixels: &[T]
) {
self.bind_texture(DEFAULT_TEXTURE, texture);
let desc = self.gl_describe_format(texture.format);
match texture.target {
gl::TEXTURE_2D | gl::TEXTURE_RECTANGLE | gl::TEXTURE_EXTERNAL_OES =>
self.gl.tex_sub_image_2d(
texture.target,
0,
0,
0,
texture.width as gl::GLint,
texture.height as gl::GLint,
desc.external,
desc.pixel_type,
texels_to_u8_slice(pixels),
),
gl::TEXTURE_2D_ARRAY =>
self.gl.tex_sub_image_3d(
texture.target,
0,
0,
0,
0,
texture.width as gl::GLint,
texture.height as gl::GLint,
texture.layer_count as gl::GLint,
desc.external,
desc.pixel_type,
texels_to_u8_slice(pixels),
),
_ => panic!("BUG: Unexpected texture target!"),
}
}
#[cfg(any(feature = "debug_renderer", feature = "capture"))]
pub fn read_pixels(&mut self, img_desc: &ImageDescriptor) -> Vec<u8> {
let desc = self.gl_describe_format(img_desc.format);
@ -1613,7 +1727,7 @@ impl Device {
ReadPixelsFormat::Rgba8 => {
(4, FormatDesc {
external: gl::RGBA,
internal: gl::RGBA8 as _,
internal: gl::RGBA8,
pixel_type: gl::UNSIGNED_BYTE,
})
}
@ -2170,38 +2284,34 @@ impl Device {
fn gl_describe_format(&self, format: ImageFormat) -> FormatDesc {
match format {
ImageFormat::R8 => FormatDesc {
internal: gl::R8 as _,
internal: gl::R8,
external: gl::RED,
pixel_type: gl::UNSIGNED_BYTE,
},
ImageFormat::R16 => FormatDesc {
internal: gl::R16 as _,
internal: gl::R16,
external: gl::RED,
pixel_type: gl::UNSIGNED_SHORT,
},
ImageFormat::BGRA8 => {
let external = self.bgra_format;
FormatDesc {
internal: match self.gl.get_type() {
gl::GlType::Gl => gl::RGBA as _,
gl::GlType::Gles => external as _,
},
external,
internal: self.bgra_format_internal,
external: self.bgra_format_external,
pixel_type: gl::UNSIGNED_BYTE,
}
},
ImageFormat::RGBAF32 => FormatDesc {
internal: gl::RGBA32F as _,
internal: gl::RGBA32F,
external: gl::RGBA,
pixel_type: gl::FLOAT,
},
ImageFormat::RGBAI32 => FormatDesc {
internal: gl::RGBA32I as _,
internal: gl::RGBA32I,
external: gl::RGBA_INTEGER,
pixel_type: gl::INT,
},
ImageFormat::RG8 => FormatDesc {
internal: gl::RG8 as _,
internal: gl::RG8,
external: gl::RG,
pixel_type: gl::UNSIGNED_BYTE,
},
@ -2210,7 +2320,7 @@ impl Device {
}
struct FormatDesc {
internal: gl::GLint,
internal: gl::GLenum,
external: gl::GLuint,
pixel_type: gl::GLuint,
}
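
The create_texture() change above drops the typed pixels parameter (and the <T: Texel> bound) in favor of a separate upload step, which the callers later in this commit switch to. A minimal sketch of the resulting call pattern, assuming the Device, Texture, TextureTarget, ImageFormat and TextureFilter types from this diff; the 8x8 R8 array texture is an arbitrary illustrative choice:

// Sketch only: storage allocation and data upload are now two separate calls.
fn create_and_fill_texture(device: &mut Device, pixels: &[u8]) -> Texture {
    // Allocates storage via glTexStorage* when supported, otherwise
    // glTexImage* with a null data pointer (see create_texture above).
    let texture = device.create_texture(
        TextureTarget::Array,
        ImageFormat::R8,
        8,                      // width
        8,                      // height
        TextureFilter::Nearest,
        None,                   // not a render target
        1,                      // layer count
    );
    // Immediate (non-PBO) upload of the initial contents.
    device.upload_texture_immediate(&texture, pixels);
    texture
}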

View file

@ -62,8 +62,8 @@ impl GpuGlyphRenderer {
TextureFilter::Linear,
None,
1,
Some(area_lut_pixels)
);
device.upload_texture_immediate(&area_lut_texture, area_lut_pixels);
let vector_stencil_vao =
device.create_vao_with_new_instances(&renderer::desc::VECTOR_STENCIL, prim_vao);
@ -110,7 +110,7 @@ impl Renderer {
let _timer = self.gpu_profile.start_timer(GPU_TAG_GLYPH_STENCIL);
let texture = self.device.create_texture::<f32>(
let texture = self.device.create_texture(
TextureTarget::Default,
ImageFormat::RGBAF32,
target_size.width,
@ -120,7 +120,6 @@ impl Renderer {
has_depth: false,
}),
1,
None
);
// Initialize temporary framebuffer.
@ -188,8 +187,8 @@ impl Renderer {
TextureFilter::Nearest,
None,
1,
Some(&path_info_texels)
);
self.device.upload_texture_immediate(&path_info_texture, &path_info_texels);
self.gpu_glyph_renderer.vector_stencil.bind(&mut self.device,
projection,

View file

@ -813,7 +813,7 @@ struct TextureResolver {
impl TextureResolver {
fn new(device: &mut Device) -> TextureResolver {
let dummy_cache_texture = device
.create_texture::<u8>(
.create_texture(
TextureTarget::Array,
ImageFormat::BGRA8,
1,
@ -821,7 +821,6 @@ impl TextureResolver {
TextureFilter::Linear,
None,
1,
None,
);
TextureResolver {
@ -855,9 +854,11 @@ impl TextureResolver {
fn end_frame(&mut self, device: &mut Device, frame_id: FrameId) {
// return the cached targets to the pool
self.end_pass(None, None);
self.end_pass(device, None, None);
// return the saved targets as well
self.render_target_pool.extend(self.saved_targets.drain(..));
while let Some(target) = self.saved_targets.pop() {
self.return_to_pool(device, target);
}
// GC the render target pool.
//
@ -871,6 +872,12 @@ impl TextureResolver {
self.retain_targets(device, |texture| texture.used_recently(frame_id, 30));
}
/// Transfers ownership of a render target back to the pool.
fn return_to_pool(&mut self, device: &mut Device, target: Texture) {
device.invalidate_render_target(&target);
self.render_target_pool.push(target);
}
/// Drops all targets from the render target pool that do not satisfy the predicate.
pub fn retain_targets<F: Fn(&Texture) -> bool>(&mut self, device: &mut Device, f: F) {
// We can't just use retain() because `Texture` requires manual cleanup.
@ -887,6 +894,7 @@ impl TextureResolver {
fn end_pass(
&mut self,
device: &mut Device,
a8_texture: Option<ActiveTexture>,
rgba8_texture: Option<ActiveTexture>,
) {
@ -899,7 +907,7 @@ impl TextureResolver {
assert_eq!(self.saved_targets.len(), index.0);
self.saved_targets.push(at.texture);
} else {
self.render_target_pool.push(at.texture);
self.return_to_pool(device, at.texture);
}
}
if let Some(at) = self.prev_pass_alpha.take() {
@ -907,7 +915,7 @@ impl TextureResolver {
assert_eq!(self.saved_targets.len(), index.0);
self.saved_targets.push(at.texture);
} else {
self.render_target_pool.push(at.texture);
self.return_to_pool(device, at.texture);
}
}
@ -1103,7 +1111,7 @@ impl GpuCacheTexture {
}
// Create the new texture.
let mut texture = device.create_texture::<u8>(
let mut texture = device.create_texture(
TextureTarget::Default,
ImageFormat::RGBAF32,
new_size.width,
@ -1111,7 +1119,6 @@ impl GpuCacheTexture {
TextureFilter::Nearest,
rt_info,
1,
None,
);
// Blit the contents of the previous texture, if applicable.
@ -1400,7 +1407,7 @@ impl VertexDataTexture {
}
let new_height = (needed_height + 127) & !127;
let texture = device.create_texture::<u8>(
let texture = device.create_texture(
TextureTarget::Default,
self.format,
width,
@ -1408,7 +1415,6 @@ impl VertexDataTexture {
TextureFilter::Nearest,
None,
1,
None,
);
self.texture = Some(texture);
}
@ -1756,7 +1762,7 @@ impl Renderer {
21,
];
let mut texture = device.create_texture::<u8>(
let mut texture = device.create_texture(
TextureTarget::Default,
ImageFormat::R8,
8,
@ -1764,8 +1770,8 @@ impl Renderer {
TextureFilter::Nearest,
None,
1,
Some(&dither_matrix),
);
device.upload_texture_immediate(&texture, &dither_matrix);
Some(texture)
} else {
@ -2774,7 +2780,7 @@ impl Renderer {
//
// Ensure no PBO is bound when creating the texture storage,
// or GL will attempt to read data from there.
let texture = self.device.create_texture::<u8>(
let texture = self.device.create_texture(
TextureTarget::Array,
format,
width,
@ -2782,7 +2788,6 @@ impl Renderer {
filter,
render_target,
layer_count,
None,
);
self.texture_resolver.texture_cache_map.insert(update.id, texture);
}
@ -3815,7 +3820,7 @@ impl Renderer {
t
} else {
counters.targets_created.inc();
let mut t = self.device.create_texture::<u8>(
self.device.create_texture(
TextureTarget::Array,
list.format,
list.max_size.width,
@ -3823,9 +3828,7 @@ impl Renderer {
TextureFilter::Linear,
Some(rt_info),
list.targets.len() as _,
None,
);
t
)
};
list.check_ready(&texture);
@ -4016,6 +4019,7 @@ impl Renderer {
};
self.texture_resolver.end_pass(
&mut self.device,
cur_alpha,
cur_color,
);
@ -4763,8 +4767,8 @@ impl Renderer {
plain.filter,
plain.render_target,
plain.size.2,
Some(texels.as_slice()),
);
device.upload_texture_immediate(&texture, &texels);
(texture, texels)
}

View file

@ -9,7 +9,7 @@ rayon = "1"
thread_profiler = "0.1.1"
euclid = { version = "0.19", features = ["serde"] }
app_units = "0.7"
gleam = "0.6.2"
gleam = "0.6.3"
log = "0.4"
nsstring = { path = "../../servo/support/gecko/nsstring" }
bincode = "1.0"

View file

@ -1 +1 @@
7aa1d42ad41097b68e8026e3384127242601c95b
5adc86c19cbef6697975ea078fa0d10635e5d660

View file

@ -41,14 +41,16 @@ public:
void BlockUntilDecodedAndFinishObserving()
{
// Use GetFrame() to block until our image finishes decoding.
RefPtr<SourceSurface> surface =
mImage->GetFrame(imgIContainer::FRAME_CURRENT,
imgIContainer::FLAG_SYNC_DECODE);
// Use RequestDecodeForSize() to block until our image finishes decoding.
// The size is ignored because we don't pass the FLAG_HIGH_QUALITY_SCALING
// flag.
mImage->RequestDecodeForSize(gfx::IntSize(0, 0),
imgIContainer::FLAG_SYNC_DECODE);
// GetFrame() should've sent synchronous notifications that would have
// caused us to call FinishObserving() (and null out mImage) already. If for
// some reason it didn't, we should do so here.
// RequestDecodeForSize() should've sent synchronous notifications that
// would have caused us to call FinishObserving() (and null out mImage)
// already. If for some reason it didn't, we should do so here.
if (mImage) {
FinishObserving();
}

View file

@ -2087,7 +2087,7 @@ BinASTParser<Tok>::parseInterfaceAssertedBlockScope(const size_t start, const Bi
/*
interface AssertedBoundName : Node {
IdentifierName name;
[IdentifierName] string name;
bool isCaptured;
}
*/
@ -2124,7 +2124,7 @@ BinASTParser<Tok>::parseInterfaceAssertedBoundName(const size_t start, const Bin
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
ParseContext::Scope* scope;
@ -2192,7 +2192,7 @@ BinASTParser<Tok>::parseInterfaceAssertedBoundNamesScope(const size_t start, con
/*
interface AssertedDeclaredName : Node {
IdentifierName name;
[IdentifierName] string name;
AssertedDeclaredKind kind;
bool isCaptured;
}
@ -2230,7 +2230,7 @@ BinASTParser<Tok>::parseInterfaceAssertedDeclaredName(const size_t start, const
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
BINJS_MOZ_TRY_DECL(kind_, parseAssertedDeclaredKind());
@ -2257,7 +2257,7 @@ BinASTParser<Tok>::parseInterfaceAssertedParameterName(const size_t start, const
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
ParseContext::Scope* scope;
@ -2344,7 +2344,7 @@ BinASTParser<Tok>::parseInterfaceAssertedPositionalParameterName(const size_t st
BINJS_MOZ_TRY_DECL(index, tokenizer_->readUnsignedLong());
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
// FIXME: The following checks should be performed inside
// checkPositionalParameterIndices to match the spec's order
// (bug 1490976).
@ -2378,7 +2378,7 @@ BinASTParser<Tok>::parseInterfaceAssertedRestParameterName(const size_t start, c
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
BINJS_MOZ_TRY_DECL(isCaptured, tokenizer_->readBool());
ParseContext::Scope* scope;
@ -2519,7 +2519,7 @@ BinASTParser<Tok>::parseInterfaceAssignmentExpression(const size_t start, const
/*
interface AssignmentTargetIdentifier : Node {
Identifier name;
[IdentifierName] string name;
}
*/
template<typename Tok> JS::Result<ParseNode*>
@ -2552,7 +2552,7 @@ BinASTParser<Tok>::parseInterfaceAssignmentTargetIdentifier(const size_t start,
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
if (!IsIdentifier(name)) {
return raiseError("Invalid identifier");
@ -2702,7 +2702,7 @@ BinASTParser<Tok>::parseInterfaceBinaryExpression(const size_t start, const BinK
/*
interface BindingIdentifier : Node {
Identifier name;
[IdentifierName] string name;
}
*/
template<typename Tok> JS::Result<ParseNode*>
@ -2735,7 +2735,7 @@ BinASTParser<Tok>::parseInterfaceBindingIdentifier(const size_t start, const Bin
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
if (!IsIdentifier(name)) {
return raiseError("Invalid identifier");
@ -3541,8 +3541,8 @@ BinASTParser<Tok>::parseInterfaceExportFrom(const size_t start, const BinKind ki
/*
interface ExportFromSpecifier : Node {
IdentifierName name;
IdentifierName? exportedName;
[IdentifierName] string name;
[IdentifierName] string? exportedName;
}
*/
template<typename Tok> JS::Result<ParseNode*>
@ -3573,7 +3573,7 @@ BinASTParser<Tok>::parseInterfaceExportFromSpecifier(const size_t start, const B
/*
interface ExportLocalSpecifier : Node {
IdentifierExpression name;
IdentifierName? exportedName;
[PropertyKey] string? exportedName;
}
*/
template<typename Tok> JS::Result<ParseNode*>
@ -3980,7 +3980,7 @@ BinASTParser<Tok>::parseInterfaceGetterContents(const size_t start, const BinKin
/*
interface IdentifierExpression : Node {
Identifier name;
[IdentifierName] string name;
}
*/
template<typename Tok> JS::Result<ParseNode*>
@ -4013,7 +4013,7 @@ BinASTParser<Tok>::parseInterfaceIdentifierExpression(const size_t start, const
#endif // defined(DEBUG)
RootedAtom name(cx_);
MOZ_TRY_VAR(name, tokenizer_->readAtom());
MOZ_TRY_VAR(name, tokenizer_->readIdentifierName());
if (!IsIdentifier(name)) {
return raiseError("Invalid identifier");
@ -4059,7 +4059,7 @@ BinASTParser<Tok>::parseInterfaceImportNamespace(const size_t start, const BinKi
/*
interface ImportSpecifier : Node {
IdentifierName? name;
[PropertyKey] string? name;
BindingIdentifier binding;
}
*/
@ -4600,7 +4600,7 @@ BinASTParser<Tok>::parseInterfaceStaticMemberAssignmentTarget(const size_t start
RootedAtom property(cx_);
{
nameStart = tokenizer_->offset();
MOZ_TRY_VAR(property, tokenizer_->readAtom());
MOZ_TRY_VAR(property, tokenizer_->readPropertyKey());
}
@ -4625,7 +4625,7 @@ BinASTParser<Tok>::parseInterfaceStaticMemberExpression(const size_t start, cons
RootedAtom property(cx_);
{
nameStart = tokenizer_->offset();
MOZ_TRY_VAR(property, tokenizer_->readAtom());
MOZ_TRY_VAR(property, tokenizer_->readPropertyKey());
}

View file

@ -4,6 +4,7 @@ typedef FrozenArray<(SpreadElement or Expression)> Arguments;
typedef DOMString string;
typedef string Identifier;
typedef string IdentifierName;
typedef string PropertyKey;
typedef string Label;
enum VariableDeclarationKind {

View file

@ -218,11 +218,13 @@ namespace frontend {
F(OptionalExpression, "OptionalExpression") \
F(OptionalIdentifierName, "OptionalIdentifierName") \
F(OptionalLabel, "OptionalLabel") \
F(OptionalPropertyKey, "OptionalPropertyKey") \
F(OptionalSpreadElementOrExpression, "OptionalSpreadElementOrExpression") \
F(OptionalStatement, "OptionalStatement") \
F(OptionalVariableDeclarationOrExpression, "OptionalVariableDeclarationOrExpression") \
F(Parameter, "Parameter") \
F(Program, "Program") \
F(PropertyKey, "PropertyKey") \
F(PropertyName, "PropertyName") \
F(ReturnStatement, "ReturnStatement") \
F(Script, "Script") \
@ -267,7 +269,7 @@ enum class BinKind {
};
// The number of distinct values of BinKind.
const size_t BINKIND_LIMIT = 198;
const size_t BINKIND_LIMIT = 200;

View file

@ -286,6 +286,26 @@ BinTokenReaderMultipart::readAtom()
return maybe;
}
JS::Result<JSAtom*>
BinTokenReaderMultipart::readMaybeIdentifierName() {
return readMaybeAtom();
}
JS::Result<JSAtom*>
BinTokenReaderMultipart::readIdentifierName() {
return readAtom();
}
JS::Result<JSAtom*>
BinTokenReaderMultipart::readMaybePropertyKey() {
return readMaybeAtom();
}
JS::Result<JSAtom*>
BinTokenReaderMultipart::readPropertyKey() {
return readAtom();
}
JS::Result<Ok>
BinTokenReaderMultipart::readChars(Chars& out)
{

View file

@ -97,6 +97,17 @@ class MOZ_STACK_CLASS BinTokenReaderMultipart: public BinTokenReaderBase
MOZ_MUST_USE JS::Result<JSAtom*> readMaybeAtom();
MOZ_MUST_USE JS::Result<JSAtom*> readAtom();
/**
* Read a single IdentifierName value.
*/
MOZ_MUST_USE JS::Result<JSAtom*> readMaybeIdentifierName();
MOZ_MUST_USE JS::Result<JSAtom*> readIdentifierName();
/**
* Read a single PropertyKey value.
*/
MOZ_MUST_USE JS::Result<JSAtom*> readMaybePropertyKey();
MOZ_MUST_USE JS::Result<JSAtom*> readPropertyKey();
/**
* Read a single `string | null` value.

View file

@ -140,6 +140,26 @@ BinTokenReaderTester::readMaybeAtom()
return result.get();
}
JS::Result<JSAtom*>
BinTokenReaderTester::readMaybeIdentifierName() {
return readMaybeAtom();
}
JS::Result<JSAtom*>
BinTokenReaderTester::readIdentifierName() {
return readAtom();
}
JS::Result<JSAtom*>
BinTokenReaderTester::readMaybePropertyKey() {
return readMaybeAtom();
}
JS::Result<JSAtom*>
BinTokenReaderTester::readPropertyKey() {
return readAtom();
}
// Nullable strings:
// - "<string>" (not counted in byte length)

View file

@ -127,6 +127,18 @@ class MOZ_STACK_CLASS BinTokenReaderTester: public BinTokenReaderBase
*/
MOZ_MUST_USE JS::Result<JSAtom*> readAtom();
/**
* Read a single IdentifierName value.
*/
MOZ_MUST_USE JS::Result<JSAtom*> readMaybeIdentifierName();
MOZ_MUST_USE JS::Result<JSAtom*> readIdentifierName();
/**
* Read a single PropertyKey value.
*/
MOZ_MUST_USE JS::Result<JSAtom*> readMaybePropertyKey();
MOZ_MUST_USE JS::Result<JSAtom*> readPropertyKey();
/**
* Read a single `string | null` value.

View file

@ -1,13 +1,13 @@
[package]
name = "binsource"
version = "0.1.0"
name = "binast"
version = "0.1.1"
authors = ["David Teller <D.O.Teller@gmail.com>"]
[dependencies]
binjs_meta = "^0.3.10"
binjs_meta = "^0.4.3"
clap = "^2"
env_logger = "^0.5.6"
itertools = "^0.7.6"
log = "0.4"
yaml-rust = "^0.4"
webidl = "^0.6.0"
webidl = "^0.8"

View file

@ -549,7 +549,7 @@ impl CPPExporter {
if typedef.is_optional() {
let content_name = TypeName::type_spec(typedef.spec());
let content_node_name = syntax.get_node_name(&content_name)
.unwrap_or_else(|| panic!("While generating an option parser, could not find node name {}", content_name))
.unwrap_or_else(|| panic!("While generating an option parser, could not find node name \"{}\"", content_name))
.clone();
debug!(target: "generate_spidermonkey", "CPPExporter::new adding optional typedef {:?} => {:?} => {:?}",
parser_node_name,
@ -713,6 +713,10 @@ impl CPPExporter {
refgraph.insert(string_from_nodename(&parser.name), edges);
}
// 6. Primitive values.
refgraph.insert(Rc::new("IdentifierName".to_string()), HashSet::new());
refgraph.insert(Rc::new("PropertyKey".to_string()), HashSet::new());
self.refgraph = refgraph;
}
@ -1470,6 +1474,65 @@ impl CPPExporter {
return result;
}}
",
first_line = first_line,
build = build_result,
));
}
}
&TypeSpec::IdentifierName => {
let build_result = rules_for_this_node.init.reindent(" ");
let first_line = self.get_method_definition_start(&parser.name, "", "",
&extra_params);
if build_result.len() == 0 {
buffer.push_str(&format!("{first_line}
{{
return raiseError(\"FIXME: Not implemented yet ({kind})\");
}}
",
first_line = first_line,
kind = parser.name.to_str()));
} else {
buffer.push_str(&format!("{first_line}
{{
BINJS_MOZ_TRY_DECL(result, tokenizer_->readMaybeIdentifierName());
{build}
return result;
}}
",
first_line = first_line,
build = build_result,
));
}
}
&TypeSpec::PropertyKey => {
debug!(target: "generate_spidermonkey", "Generating method for PropertyKey: {:?}", parser.name);
let build_result = rules_for_this_node.init.reindent(" ");
let first_line = self.get_method_definition_start(&parser.name, "", "",
&extra_params);
if build_result.len() == 0 {
buffer.push_str(&format!("{first_line}
{{
return raiseError(\"FIXME: Not implemented yet ({kind})\");
}}
",
first_line = first_line,
kind = parser.name.to_str()));
} else {
buffer.push_str(&format!("{first_line}
{{
BINJS_MOZ_TRY_DECL(result, tokenizer_->readMaybePropertyKey());
{build}
return result;
}}
",
first_line = first_line,
build = build_result,
@ -1604,6 +1667,26 @@ impl CPPExporter {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readAtom());", var_name = var_name)))
}
Some(IsNullable { is_nullable: false, content: Primitive::IdentifierName }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readIdentifierName());", var_name = var_name)))
}
Some(IsNullable { is_nullable: false, content: Primitive::PropertyKey }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readPropertyKey());", var_name = var_name)))
}
Some(IsNullable { is_nullable: true, content: Primitive::String }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybeAtom());", var_name = var_name)))
}
Some(IsNullable { is_nullable: true, content: Primitive::IdentifierName }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybeIdentifierName());", var_name = var_name)))
}
Some(IsNullable { is_nullable: true, content: Primitive::PropertyKey }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readMaybePropertyKey());", var_name = var_name)))
}
_else => {
let typename = TypeName::type_(field.type_());
let name = self.syntax.get_node_name(typename.to_str())
@ -1626,7 +1709,6 @@ impl CPPExporter {
call_kind)))
}
};
let rendered = {
if rules_for_this_field.replace.is_some() {
for &(condition, rule_name) in &[


@ -607,6 +607,48 @@ nsCaseTransformTextRunFactory::TransformString(
ch = mozilla::unicode::GetFullWidth(ch);
break;
case NS_STYLE_TEXT_TRANSFORM_FULL_SIZE_KANA: {
static const uint16_t kSmallKanas[] = {
// ぁ ぃ ぅ ぇ ぉ っ ゃ ゅ ょ
0x3041, 0x3043, 0x3045, 0x3047, 0x3049, 0x3063, 0x3083, 0x3085, 0x3087,
// ゎ ゕ ゖ
0x308E, 0x3095, 0x3096,
// ァ ィ ゥ ェ ォ ッ ャ ュ ョ
0x30A1, 0x30A3, 0x30A5, 0x30A7, 0x30A9, 0x30C3, 0x30E3, 0x30E5, 0x30E7,
// ヮ ヵ ヶ ㇰ ㇱ ㇲ ㇳ ㇴ ㇵ
0x30EE, 0x30F5, 0x30F6, 0x31F0, 0x31F1, 0x31F2, 0x31F3, 0x31F4, 0x31F5,
// ㇶ ㇷ ㇸ ㇹ ㇺ ㇻ ㇼ ㇽ ㇾ
0x31F6, 0x31F7, 0x31F8, 0x31F9, 0x31FA, 0x31FB, 0x31FC, 0x31FD, 0x31FE,
// ㇿ
0x31FF,
// ァ ィ ゥ ェ ォ ャ ュ ョ ッ
0xFF67, 0xFF68, 0xFF69, 0xFF6A, 0xFF6B, 0xFF6C, 0xFF6D, 0xFF6E, 0xFF6F
};
static const uint16_t kFullSizeKanas[] = {
// あ い う え お つ や ゆ よ
0x3042, 0x3044, 0x3046, 0x3048, 0x304A, 0x3064, 0x3084, 0x3086, 0x3088,
// わ か け
0x308F, 0x304B, 0x3051,
// ア イ ウ エ オ ツ ヤ ユ ヨ
0x30A2, 0x30A4, 0x30A6, 0x30A8, 0x30AA, 0x30C4, 0x30E4, 0x30E6, 0x30E8,
// ワ カ ケ ク シ ス ト ヌ ハ
0x30EF, 0x30AB, 0x30B1, 0x30AF, 0x30B7, 0x30B9, 0x30C8, 0x30CC, 0x30CF,
// ヒ フ ヘ ホ ム ラ リ ル レ
0x30D2, 0x30D5, 0x30D8, 0x30DB, 0x30E0, 0x30E9, 0x30EA, 0x30EB, 0x30EC,
// ロ
0x30ED,
// ア イ ウ エ オ ヤ ユ ヨ ツ
0xFF71, 0xFF72, 0xFF73, 0xFF74, 0xFF75, 0xFF94, 0xFF95, 0xFF96, 0xFF82
};
size_t index;
const uint16_t len = MOZ_ARRAY_LENGTH(kSmallKanas);
if (mozilla::BinarySearch(kSmallKanas, 0, len, ch, &index)) {
ch = kFullSizeKanas[index];
}
break;
}
default:
break;
}
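The lookup above depends on kSmallKanas being sorted by code point, so that mozilla::BinarySearch returns the index of the matching entry in the parallel kFullSizeKanas table. Below is a standalone sketch of the same parallel-table idea using std::lower_bound; the table is a reduced subset of the arrays above and the helper name is illustrative only.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <iterator>

// Reduced, sorted small-kana table and its parallel full-size table
// (subset of the arrays added above; illustrative only).
static const uint16_t kSmall[] = {0x3041, 0x3043, 0x3063, 0x30A1, 0x30C3};
static const uint16_t kFull[]  = {0x3042, 0x3044, 0x3064, 0x30A2, 0x30C4};

// Map a small kana to its full-size form, or return it unchanged.
static uint16_t ToFullSizeKana(uint16_t ch) {
  const uint16_t* begin = std::begin(kSmall);
  const uint16_t* end = std::end(kSmall);
  const uint16_t* it = std::lower_bound(begin, end, ch);
  if (it != end && *it == ch) {
    return kFull[it - begin];  // same index in the parallel table
  }
  return ch;
}

int main() {
  // U+3063 (small tsu) maps to U+3064 (full-size tsu).
  std::printf("%04X -> %04X\n", 0x3063u, (unsigned)ToFullSizeKana(0x3063));
  return 0;
}
```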


@ -806,6 +806,7 @@ enum class StyleGridTrackBreadth : uint8_t {
#define NS_STYLE_TEXT_TRANSFORM_LOWERCASE 2
#define NS_STYLE_TEXT_TRANSFORM_UPPERCASE 3
#define NS_STYLE_TEXT_TRANSFORM_FULL_WIDTH 4
#define NS_STYLE_TEXT_TRANSFORM_FULL_SIZE_KANA 5
// See nsStyleDisplay
#define NS_STYLE_TOUCH_ACTION_NONE (1 << 0)


@ -4585,7 +4585,7 @@ var gCSSProperties = {
applies_to_first_line: true,
applies_to_placeholder: true,
initial_values: [ "none" ],
other_values: [ "capitalize", "uppercase", "lowercase", "full-width" ],
other_values: [ "capitalize", "uppercase", "lowercase", "full-width", "full-size-kana" ],
invalid_values: []
},
"top": {


@ -23,7 +23,7 @@ ${helpers.predefined_type(
${helpers.single_keyword(
"text-transform",
"none capitalize uppercase lowercase",
extra_gecko_values="full-width",
extra_gecko_values="full-width full-size-kana",
animation_value_type="discrete",
flags="APPLIES_TO_FIRST_LETTER APPLIES_TO_FIRST_LINE APPLIES_TO_PLACEHOLDER",
spec="https://drafts.csswg.org/css-text/#propdef-text-transform",


@ -8,10 +8,10 @@ config = {
"emulator_manifest": """
[
{
"size": 135064025,
"digest": "125678c5b0d93ead8bbf01ba94253e532909417b40637460624cfca34e92f431534fc77a0225e9c4728dcbcf2884a8f7fa1ee059efdfa82d827ca20477d41705",
"size": 131698372,
"digest": "2f62e4f39e2bd858f640b53bbb6cd33de6646f21419d1a9531d9ab5528a7ca6ab6f4cfe370cbb72c4fd475cb9db842a89acdbb9b647d9c0861ee85bc5901dfed",
"algorithm": "sha512",
"filename": "android-sdk_r27.1.12-linux-x86emu.tar.gz",
"filename": "android-sdk_r27.3.10-linux-x86emu.tar.gz",
"unpack": "True"
}
] """,


@ -0,0 +1,4 @@
[MediaStream-MediaElement-srcObject.https.html]
[Tests that a MediaStream can be assigned to a video element with srcObject]
expected: FAIL


@ -1,2 +1,2 @@
local: 1e4ae874837c1012d1d4c982008196cc3bb73e17
upstream: f6bca7b6218f591edc1bcb87c9ab0837ca41970b
local: 05a38ce912d57ee269bd0af8f0b3ced1bb5f068b
upstream: 9f2daa2a5d08d0d44f680501af2b62292f6264f0


@ -0,0 +1,4 @@
[audioworklet-postmessage-sharedarraybuffer.https.html]
[\n Test passing SharedArrayBuffer to an AudioWorklet\n ]
expected: FAIL


@ -0,0 +1,50 @@
<!DOCTYPE html>
<title>Reference for multicolumn under position:sticky should be positioned correctly</title>
<style>
body {
margin: 0;
}
#scroller {
overflow-y: scroll;
width: 200px;
height: 200px;
}
#relative {
position: relative;
top: 100px;
margin: 10px;
}
#child {
width: 100px;
height: 100px;
background: green;
}
#contents {
position: relative;
top: 10%;
left: 10%;
width: 80%;
height: 80%;
background: lightgreen;
}
#spacer {
height: 400px;
}
</style>
<div id="scroller">
<div id="relative">
<div id="child">
<div id="contents"></div>
</div>
</div>
<div id="spacer"></div>
</div>
<div>You should see a light green box above with a dark green border.</div>
<script>
window.addEventListener('load', function() {
scroller.scrollTop = 100;
});
</script>


@ -0,0 +1,55 @@
<!DOCTYPE html>
<title>Multicolumn under position:sticky should be positioned correctly</title>
<link rel="help" href="https://www.w3.org/TR/css-position-3/#sticky-pos" />
<link rel="match" href="position-sticky-child-multicolumn-ref.html" />
<link rel="author" title="Philip Rogers" href="mailto:pdr@chromium.org" />
<meta name="assert" content="This test checks that a multicolumn element is positioned relative to a sticky position" />
<style>
body {
margin: 0;
}
#scroller {
overflow-y: scroll;
width: 200px;
height: 200px;
}
#sticky {
position: sticky;
top: 10px;
margin: 10px;
}
#multicolumn {
width: 100px;
height: 100px;
background: green;
columns: 1;
}
#contents {
margin-left: 10%;
margin-top: 10%;
width: 80%;
height: 80%;
background: lightgreen;
}
#spacer {
height: 400px;
}
</style>
<div id="scroller">
<div id="sticky">
<div id="multicolumn">
<div id="contents"></div>
</div>
</div>
<div id="spacer"></div>
</div>
<div>You should see a light green box above with a dark green border.</div>
<script>
window.addEventListener('load', function() {
scroller.scrollTop = 100;
});
</script>


@ -0,0 +1,72 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>CSS3 Text, text transform: full-size-kana, small kanas</title>
<meta name="assert" content="For small kanas, text-transform: full-size-kana puts all kanas in full-size kanas.">
<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
<style type='text/css'>
.test, .ref { font-size: 200%; line-height: 2.5em; }
.test span, .ref span { margin-right: 1em; white-space: nowrap; }
/* the CSS above is not part of the test */
</style>
</head>
<body>
<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
<div class="test">
<span title="U+3041">&#x3042; &#x3042;</span>
<span title="U+3043">&#x3044; &#x3044;</span>
<span title="U+3045">&#x3046; &#x3046;</span>
<span title="U+3047">&#x3048; &#x3048;</span>
<span title="U+3049">&#x304A; &#x304A;</span>
<span title="U+3095">&#x304B; &#x304B;</span>
<span title="U+3096">&#x3051; &#x3051;</span>
<span title="U+3063">&#x3064; &#x3064;</span>
<span title="U+3083">&#x3084; &#x3084;</span>
<span title="U+3085">&#x3086; &#x3086;</span>
<span title="U+3087">&#x3088; &#x3088;</span>
<span title="U+308E">&#x308F; &#x308F;</span>
<span title="U+30A1">&#x30A2; &#x30A2;</span>
<span title="U+30A3">&#x30A4; &#x30A4;</span>
<span title="U+30A5">&#x30A6; &#x30A6;</span>
<span title="U+30A7">&#x30A8; &#x30A8;</span>
<span title="U+30A9">&#x30AA; &#x30AA;</span>
<span title="U+30F5">&#x30AB; &#x30AB;</span>
<span title="U+31F0">&#x30AF; &#x30AF;</span>
<span title="U+30F6">&#x30B1; &#x30B1;</span>
<span title="U+31F1">&#x30B7; &#x30B7;</span>
<span title="U+31F2">&#x30B9; &#x30B9;</span>
<span title="U+30C3">&#x30C4; &#x30C4;</span>
<span title="U+31F3">&#x30C8; &#x30C8;</span>
<span title="U+31F4">&#x30CC; &#x30CC;</span>
<span title="U+31F5">&#x30CF; &#x30CF;</span>
<span title="U+31F6">&#x30D2; &#x30D2;</span>
<span title="U+31F7">&#x30D5; &#x30D5;</span>
<span title="U+31F8">&#x30D8; &#x30D8;</span>
<span title="U+31F9">&#x30DB; &#x30DB;</span>
<span title="U+31FA">&#x30E0; &#x30E0;</span>
<span title="U+30E3">&#x30E4; &#x30E4;</span>
<span title="U+30E5">&#x30E6; &#x30E6;</span>
<span title="U+30E7">&#x30E8; &#x30E8;</span>
<span title="U+31FB">&#x30E9; &#x30E9;</span>
<span title="U+31FC">&#x30EA; &#x30EA;</span>
<span title="U+31FD">&#x30EB; &#x30EB;</span>
<span title="U+31FE">&#x30EC; &#x30EC;</span>
<span title="U+31FF">&#x30ED; &#x30ED;</span>
<span title="U+30EE">&#x30EF; &#x30EF;</span>
<span title="U+FF67">&#xFF71; &#xFF71;</span>
<span title="U+FF68">&#xFF72; &#xFF72;</span>
<span title="U+FF69">&#xFF73; &#xFF73;</span>
<span title="U+FF6A">&#xFF74; &#xFF74;</span>
<span title="U+FF6B">&#xFF75; &#xFF75;</span>
<span title="U+FF6F">&#xFF82; &#xFF82;</span>
<span title="U+FF6C">&#xFF94; &#xFF94;</span>
<span title="U+FF6D">&#xFF95; &#xFF95;</span>
<span title="U+FF6E">&#xFF96; &#xFF96;</span>
</div>
<!--Notes:
Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
-->
</body>
</html>


@ -0,0 +1,228 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>CSS3 Text, text transform: full-size-kana, full-size kanas</title>
<meta name="assert" content="For full-size kanas, text-transform: full-size-kana leaves all kanas unaltered.">
<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
<style type='text/css'>
.test, .ref { font-size: 200%; line-height: 2.5em; }
.test span, .ref span { margin-right: 1em; white-space: nowrap; }
/* the CSS above is not part of the test */
</style>
</head>
<body>
<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
<div class="test">
<span title="U+30FC">&#x30FC; &#x30FC;</span>
<span title="U+3042">&#x3042; &#x3042;</span>
<span title="U+3044">&#x3044; &#x3044;</span>
<span title="U+3046">&#x3046; &#x3046;</span>
<span title="U+3048">&#x3048; &#x3048;</span>
<span title="U+304A">&#x304A; &#x304A;</span>
<span title="U+304B">&#x304B; &#x304B;</span>
<span title="U+304C">&#x304C; &#x304C;</span>
<span title="U+304D">&#x304D; &#x304D;</span>
<span title="U+304E">&#x304E; &#x304E;</span>
<span title="U+304F">&#x304F; &#x304F;</span>
<span title="U+3050">&#x3050; &#x3050;</span>
<span title="U+3051">&#x3051; &#x3051;</span>
<span title="U+3052">&#x3052; &#x3052;</span>
<span title="U+3053">&#x3053; &#x3053;</span>
<span title="U+3054">&#x3054; &#x3054;</span>
<span title="U+3055">&#x3055; &#x3055;</span>
<span title="U+3056">&#x3056; &#x3056;</span>
<span title="U+3057">&#x3057; &#x3057;</span>
<span title="U+3058">&#x3058; &#x3058;</span>
<span title="U+3059">&#x3059; &#x3059;</span>
<span title="U+305A">&#x305A; &#x305A;</span>
<span title="U+305B">&#x305B; &#x305B;</span>
<span title="U+305C">&#x305C; &#x305C;</span>
<span title="U+305D">&#x305D; &#x305D;</span>
<span title="U+305E">&#x305E; &#x305E;</span>
<span title="U+305F">&#x305F; &#x305F;</span>
<span title="U+3060">&#x3060; &#x3060;</span>
<span title="U+3061">&#x3061; &#x3061;</span>
<span title="U+3062">&#x3062; &#x3062;</span>
<span title="U+3064">&#x3064; &#x3064;</span>
<span title="U+3065">&#x3065; &#x3065;</span>
<span title="U+3066">&#x3066; &#x3066;</span>
<span title="U+3067">&#x3067; &#x3067;</span>
<span title="U+3068">&#x3068; &#x3068;</span>
<span title="U+3069">&#x3069; &#x3069;</span>
<span title="U+306A">&#x306A; &#x306A;</span>
<span title="U+306B">&#x306B; &#x306B;</span>
<span title="U+306C">&#x306C; &#x306C;</span>
<span title="U+306D">&#x306D; &#x306D;</span>
<span title="U+306E">&#x306E; &#x306E;</span>
<span title="U+306F">&#x306F; &#x306F;</span>
<span title="U+3070">&#x3070; &#x3070;</span>
<span title="U+3071">&#x3071; &#x3071;</span>
<span title="U+3072">&#x3072; &#x3072;</span>
<span title="U+3073">&#x3073; &#x3073;</span>
<span title="U+3074">&#x3074; &#x3074;</span>
<span title="U+3075">&#x3075; &#x3075;</span>
<span title="U+3076">&#x3076; &#x3076;</span>
<span title="U+3077">&#x3077; &#x3077;</span>
<span title="U+3078">&#x3078; &#x3078;</span>
<span title="U+3079">&#x3079; &#x3079;</span>
<span title="U+307A">&#x307A; &#x307A;</span>
<span title="U+307B">&#x307B; &#x307B;</span>
<span title="U+307C">&#x307C; &#x307C;</span>
<span title="U+307D">&#x307D; &#x307D;</span>
<span title="U+307E">&#x307E; &#x307E;</span>
<span title="U+307F">&#x307F; &#x307F;</span>
<span title="U+3080">&#x3080; &#x3080;</span>
<span title="U+3081">&#x3081; &#x3081;</span>
<span title="U+3082">&#x3082; &#x3082;</span>
<span title="U+3084">&#x3084; &#x3084;</span>
<span title="U+3086">&#x3086; &#x3086;</span>
<span title="U+3088">&#x3088; &#x3088;</span>
<span title="U+3089">&#x3089; &#x3089;</span>
<span title="U+308A">&#x308A; &#x308A;</span>
<span title="U+308B">&#x308B; &#x308B;</span>
<span title="U+308C">&#x308C; &#x308C;</span>
<span title="U+308D">&#x308D; &#x308D;</span>
<span title="U+308F">&#x308F; &#x308F;</span>
<span title="U+3090">&#x3090; &#x3090;</span>
<span title="U+3091">&#x3091; &#x3091;</span>
<span title="U+3092">&#x3092; &#x3092;</span>
<span title="U+3093">&#x3093; &#x3093;</span>
<span title="U+30A2">&#x30A2; &#x30A2;</span>
<span title="U+30A4">&#x30A4; &#x30A4;</span>
<span title="U+30A6">&#x30A6; &#x30A6;</span>
<span title="U+30A8">&#x30A8; &#x30A8;</span>
<span title="U+30AA">&#x30AA; &#x30AA;</span>
<span title="U+30AB">&#x30AB; &#x30AB;</span>
<span title="U+30AC">&#x30AC; &#x30AC;</span>
<span title="U+30AD">&#x30AD; &#x30AD;</span>
<span title="U+30AE">&#x30AE; &#x30AE;</span>
<span title="U+30AF">&#x30AF; &#x30AF;</span>
<span title="U+30B0">&#x30B0; &#x30B0;</span>
<span title="U+30B1">&#x30B1; &#x30B1;</span>
<span title="U+30B2">&#x30B2; &#x30B2;</span>
<span title="U+30B3">&#x30B3; &#x30B3;</span>
<span title="U+30B4">&#x30B4; &#x30B4;</span>
<span title="U+30B5">&#x30B5; &#x30B5;</span>
<span title="U+30B6">&#x30B6; &#x30B6;</span>
<span title="U+30B7">&#x30B7; &#x30B7;</span>
<span title="U+30B8">&#x30B8; &#x30B8;</span>
<span title="U+30B9">&#x30B9; &#x30B9;</span>
<span title="U+30BA">&#x30BA; &#x30BA;</span>
<span title="U+30BB">&#x30BB; &#x30BB;</span>
<span title="U+30BC">&#x30BC; &#x30BC;</span>
<span title="U+30BD">&#x30BD; &#x30BD;</span>
<span title="U+30BE">&#x30BE; &#x30BE;</span>
<span title="U+30BF">&#x30BF; &#x30BF;</span>
<span title="U+30C0">&#x30C0; &#x30C0;</span>
<span title="U+30C1">&#x30C1; &#x30C1;</span>
<span title="U+30C2">&#x30C2; &#x30C2;</span>
<span title="U+30C4">&#x30C4; &#x30C4;</span>
<span title="U+30C5">&#x30C5; &#x30C5;</span>
<span title="U+30C6">&#x30C6; &#x30C6;</span>
<span title="U+30C7">&#x30C7; &#x30C7;</span>
<span title="U+30C8">&#x30C8; &#x30C8;</span>
<span title="U+30C9">&#x30C9; &#x30C9;</span>
<span title="U+30CA">&#x30CA; &#x30CA;</span>
<span title="U+30CB">&#x30CB; &#x30CB;</span>
<span title="U+30CC">&#x30CC; &#x30CC;</span>
<span title="U+30CD">&#x30CD; &#x30CD;</span>
<span title="U+30CE">&#x30CE; &#x30CE;</span>
<span title="U+30CF">&#x30CF; &#x30CF;</span>
<span title="U+30D0">&#x30D0; &#x30D0;</span>
<span title="U+30D1">&#x30D1; &#x30D1;</span>
<span title="U+30D2">&#x30D2; &#x30D2;</span>
<span title="U+30D3">&#x30D3; &#x30D3;</span>
<span title="U+30D4">&#x30D4; &#x30D4;</span>
<span title="U+30D5">&#x30D5; &#x30D5;</span>
<span title="U+30D6">&#x30D6; &#x30D6;</span>
<span title="U+30D7">&#x30D7; &#x30D7;</span>
<span title="U+30D8">&#x30D8; &#x30D8;</span>
<span title="U+30D9">&#x30D9; &#x30D9;</span>
<span title="U+30DA">&#x30DA; &#x30DA;</span>
<span title="U+30DB">&#x30DB; &#x30DB;</span>
<span title="U+30DC">&#x30DC; &#x30DC;</span>
<span title="U+30DD">&#x30DD; &#x30DD;</span>
<span title="U+30DE">&#x30DE; &#x30DE;</span>
<span title="U+30DF">&#x30DF; &#x30DF;</span>
<span title="U+30E0">&#x30E0; &#x30E0;</span>
<span title="U+30E1">&#x30E1; &#x30E1;</span>
<span title="U+30E2">&#x30E2; &#x30E2;</span>
<span title="U+30E4">&#x30E4; &#x30E4;</span>
<span title="U+30E6">&#x30E6; &#x30E6;</span>
<span title="U+30E8">&#x30E8; &#x30E8;</span>
<span title="U+30E9">&#x30E9; &#x30E9;</span>
<span title="U+30EA">&#x30EA; &#x30EA;</span>
<span title="U+30EB">&#x30EB; &#x30EB;</span>
<span title="U+30EC">&#x30EC; &#x30EC;</span>
<span title="U+30ED">&#x30ED; &#x30ED;</span>
<span title="U+30EF">&#x30EF; &#x30EF;</span>
<span title="U+30F0">&#x30F0; &#x30F0;</span>
<span title="U+30F1">&#x30F1; &#x30F1;</span>
<span title="U+30F2">&#x30F2; &#x30F2;</span>
<span title="U+30F3">&#x30F3; &#x30F3;</span>
<span title="U+30F4">&#x30F4; &#x30F4;</span>
<span title="U+309B">&#x309B; &#x309B;</span>
<span title="U+309C">&#x309C; &#x309C;</span>
<span title="U+FF60">&#xFF60; &#xFF60;</span>
<span title="U+FF61">&#xFF61; &#xFF61;</span>
<span title="U+FF62">&#xFF62; &#xFF62;</span>
<span title="U+FF63">&#xFF63; &#xFF63;</span>
<span title="U+FF64">&#xFF64; &#xFF64;</span>
<span title="U+FF65">&#xFF65; &#xFF65;</span>
<span title="U+FF66">&#xFF66; &#xFF66;</span>
<span title="U+FF70">&#xFF70; &#xFF70;</span>
<span title="U+FF71">&#xFF71; &#xFF71;</span>
<span title="U+FF72">&#xFF72; &#xFF72;</span>
<span title="U+FF73">&#xFF73; &#xFF73;</span>
<span title="U+FF74">&#xFF74; &#xFF74;</span>
<span title="U+FF75">&#xFF75; &#xFF75;</span>
<span title="U+FF76">&#xFF76; &#xFF76;</span>
<span title="U+FF77">&#xFF77; &#xFF77;</span>
<span title="U+FF78">&#xFF78; &#xFF78;</span>
<span title="U+FF79">&#xFF79; &#xFF79;</span>
<span title="U+FF7A">&#xFF7A; &#xFF7A;</span>
<span title="U+FF7B">&#xFF7B; &#xFF7B;</span>
<span title="U+FF7C">&#xFF7C; &#xFF7C;</span>
<span title="U+FF7D">&#xFF7D; &#xFF7D;</span>
<span title="U+FF7E">&#xFF7E; &#xFF7E;</span>
<span title="U+FF7F">&#xFF7F; &#xFF7F;</span>
<span title="U+FF80">&#xFF80; &#xFF80;</span>
<span title="U+FF81">&#xFF81; &#xFF81;</span>
<span title="U+FF82">&#xFF82; &#xFF82;</span>
<span title="U+FF83">&#xFF83; &#xFF83;</span>
<span title="U+FF84">&#xFF84; &#xFF84;</span>
<span title="U+FF85">&#xFF85; &#xFF85;</span>
<span title="U+FF86">&#xFF86; &#xFF86;</span>
<span title="U+FF87">&#xFF87; &#xFF87;</span>
<span title="U+FF88">&#xFF88; &#xFF88;</span>
<span title="U+FF89">&#xFF89; &#xFF89;</span>
<span title="U+FF8A">&#xFF8A; &#xFF8A;</span>
<span title="U+FF8B">&#xFF8B; &#xFF8B;</span>
<span title="U+FF8C">&#xFF8C; &#xFF8C;</span>
<span title="U+FF8D">&#xFF8D; &#xFF8D;</span>
<span title="U+FF8E">&#xFF8E; &#xFF8E;</span>
<span title="U+FF8F">&#xFF8F; &#xFF8F;</span>
<span title="U+FF90">&#xFF90; &#xFF90;</span>
<span title="U+FF91">&#xFF91; &#xFF91;</span>
<span title="U+FF92">&#xFF92; &#xFF92;</span>
<span title="U+FF93">&#xFF93; &#xFF93;</span>
<span title="U+FF94">&#xFF94; &#xFF94;</span>
<span title="U+FF95">&#xFF95; &#xFF95;</span>
<span title="U+FF96">&#xFF96; &#xFF96;</span>
<span title="U+FF97">&#xFF97; &#xFF97;</span>
<span title="U+FF98">&#xFF98; &#xFF98;</span>
<span title="U+FF99">&#xFF99; &#xFF99;</span>
<span title="U+FF9A">&#xFF9A; &#xFF9A;</span>
<span title="U+FF9B">&#xFF9B; &#xFF9B;</span>
<span title="U+FF9C">&#xFF9C; &#xFF9C;</span>
<span title="U+FF9D">&#xFF9D; &#xFF9D;</span>
<span title="U+FF9E">&#xFF9E; &#xFF9E;</span>
<span title="U+FF9F">&#xFF9F; &#xFF9F;</span>
</div>
<!--Notes:
Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
-->
</body>
</html>


@ -0,0 +1,74 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>CSS3 Text, text transform: full-size-kana, small kanas</title>
<meta name="assert" content="For small kanas, text-transform: full-size-kana puts all kanas in full-size kanas.">
<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
<link rel="match" href="reference/text-transform-full-size-kana-001-ref.html">
<style type='text/css'>
.test, .ref { font-size: 200%; line-height: 2.5em; }
.test span, .ref span { margin-right: 1em; white-space: nowrap; }
/* the CSS above is not part of the test */
.test { text-transform: full-size-kana; }
</style>
</head>
<body>
<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
<div class="test">
<span title="U+3041">&#x3041; &#x3042;</span>
<span title="U+3043">&#x3043; &#x3044;</span>
<span title="U+3045">&#x3045; &#x3046;</span>
<span title="U+3047">&#x3047; &#x3048;</span>
<span title="U+3049">&#x3049; &#x304A;</span>
<span title="U+3095">&#x3095; &#x304B;</span>
<span title="U+3096">&#x3096; &#x3051;</span>
<span title="U+3063">&#x3063; &#x3064;</span>
<span title="U+3083">&#x3083; &#x3084;</span>
<span title="U+3085">&#x3085; &#x3086;</span>
<span title="U+3087">&#x3087; &#x3088;</span>
<span title="U+308E">&#x308E; &#x308F;</span>
<span title="U+30A1">&#x30A1; &#x30A2;</span>
<span title="U+30A3">&#x30A3; &#x30A4;</span>
<span title="U+30A5">&#x30A5; &#x30A6;</span>
<span title="U+30A7">&#x30A7; &#x30A8;</span>
<span title="U+30A9">&#x30A9; &#x30AA;</span>
<span title="U+30F5">&#x30F5; &#x30AB;</span>
<span title="U+31F0">&#x31F0; &#x30AF;</span>
<span title="U+30F6">&#x30F6; &#x30B1;</span>
<span title="U+31F1">&#x31F1; &#x30B7;</span>
<span title="U+31F2">&#x31F2; &#x30B9;</span>
<span title="U+30C3">&#x30C3; &#x30C4;</span>
<span title="U+31F3">&#x31F3; &#x30C8;</span>
<span title="U+31F4">&#x31F4; &#x30CC;</span>
<span title="U+31F5">&#x31F5; &#x30CF;</span>
<span title="U+31F6">&#x31F6; &#x30D2;</span>
<span title="U+31F7">&#x31F7; &#x30D5;</span>
<span title="U+31F8">&#x31F8; &#x30D8;</span>
<span title="U+31F9">&#x31F9; &#x30DB;</span>
<span title="U+31FA">&#x31FA; &#x30E0;</span>
<span title="U+30E3">&#x30E3; &#x30E4;</span>
<span title="U+30E5">&#x30E5; &#x30E6;</span>
<span title="U+30E7">&#x30E7; &#x30E8;</span>
<span title="U+31FB">&#x31FB; &#x30E9;</span>
<span title="U+31FC">&#x31FC; &#x30EA;</span>
<span title="U+31FD">&#x31FD; &#x30EB;</span>
<span title="U+31FE">&#x31FE; &#x30EC;</span>
<span title="U+31FF">&#x31FF; &#x30ED;</span>
<span title="U+30EE">&#x30EE; &#x30EF;</span>
<span title="U+FF67">&#xFF67; &#xFF71;</span>
<span title="U+FF68">&#xFF68; &#xFF72;</span>
<span title="U+FF69">&#xFF69; &#xFF73;</span>
<span title="U+FF6A">&#xFF6A; &#xFF74;</span>
<span title="U+FF6B">&#xFF6B; &#xFF75;</span>
<span title="U+FF6F">&#xFF6F; &#xFF82;</span>
<span title="U+FF6C">&#xFF6C; &#xFF94;</span>
<span title="U+FF6D">&#xFF6D; &#xFF95;</span>
<span title="U+FF6E">&#xFF6E; &#xFF96;</span>
</div>
<!--Notes:
Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
-->
</body>
</html>


@ -0,0 +1,230 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>CSS3 Text, text transform: full-size-kana, full-size kanas</title>
<meta name="assert" content="For full-size kanas, text-transform: full-size-kana leaves all kanas unaltered.">
<link rel='author' title='Diego Pino Garcia' href='mailto:dpino@igalia.com'>
<link rel='help' href='https://drafts.csswg.org/css-text-3/#text-transform'>
<link rel="match" href="reference/text-transform-full-size-kana-002-ref.html">
<style type='text/css'>
.test, .ref { font-size: 200%; line-height: 2.5em; }
.test span, .ref span { margin-right: 1em; white-space: nowrap; }
/* the CSS above is not part of the test */
.test { text-transform: full-size-kana; }
</style>
</head>
<body>
<p class="instructions">Test passes if both characters in each pair match. If you are missing a font glyph for a character, ignore that pair, but report which characters were ignored.</p>
<div class="test">
<span title="U+30FC">&#x30FC; &#x30FC;</span>
<span title="U+3042">&#x3042; &#x3042;</span>
<span title="U+3044">&#x3044; &#x3044;</span>
<span title="U+3046">&#x3046; &#x3046;</span>
<span title="U+3048">&#x3048; &#x3048;</span>
<span title="U+304A">&#x304A; &#x304A;</span>
<span title="U+304B">&#x304B; &#x304B;</span>
<span title="U+304C">&#x304C; &#x304C;</span>
<span title="U+304D">&#x304D; &#x304D;</span>
<span title="U+304E">&#x304E; &#x304E;</span>
<span title="U+304F">&#x304F; &#x304F;</span>
<span title="U+3050">&#x3050; &#x3050;</span>
<span title="U+3051">&#x3051; &#x3051;</span>
<span title="U+3052">&#x3052; &#x3052;</span>
<span title="U+3053">&#x3053; &#x3053;</span>
<span title="U+3054">&#x3054; &#x3054;</span>
<span title="U+3055">&#x3055; &#x3055;</span>
<span title="U+3056">&#x3056; &#x3056;</span>
<span title="U+3057">&#x3057; &#x3057;</span>
<span title="U+3058">&#x3058; &#x3058;</span>
<span title="U+3059">&#x3059; &#x3059;</span>
<span title="U+305A">&#x305A; &#x305A;</span>
<span title="U+305B">&#x305B; &#x305B;</span>
<span title="U+305C">&#x305C; &#x305C;</span>
<span title="U+305D">&#x305D; &#x305D;</span>
<span title="U+305E">&#x305E; &#x305E;</span>
<span title="U+305F">&#x305F; &#x305F;</span>
<span title="U+3060">&#x3060; &#x3060;</span>
<span title="U+3061">&#x3061; &#x3061;</span>
<span title="U+3062">&#x3062; &#x3062;</span>
<span title="U+3064">&#x3064; &#x3064;</span>
<span title="U+3065">&#x3065; &#x3065;</span>
<span title="U+3066">&#x3066; &#x3066;</span>
<span title="U+3067">&#x3067; &#x3067;</span>
<span title="U+3068">&#x3068; &#x3068;</span>
<span title="U+3069">&#x3069; &#x3069;</span>
<span title="U+306A">&#x306A; &#x306A;</span>
<span title="U+306B">&#x306B; &#x306B;</span>
<span title="U+306C">&#x306C; &#x306C;</span>
<span title="U+306D">&#x306D; &#x306D;</span>
<span title="U+306E">&#x306E; &#x306E;</span>
<span title="U+306F">&#x306F; &#x306F;</span>
<span title="U+3070">&#x3070; &#x3070;</span>
<span title="U+3071">&#x3071; &#x3071;</span>
<span title="U+3072">&#x3072; &#x3072;</span>
<span title="U+3073">&#x3073; &#x3073;</span>
<span title="U+3074">&#x3074; &#x3074;</span>
<span title="U+3075">&#x3075; &#x3075;</span>
<span title="U+3076">&#x3076; &#x3076;</span>
<span title="U+3077">&#x3077; &#x3077;</span>
<span title="U+3078">&#x3078; &#x3078;</span>
<span title="U+3079">&#x3079; &#x3079;</span>
<span title="U+307A">&#x307A; &#x307A;</span>
<span title="U+307B">&#x307B; &#x307B;</span>
<span title="U+307C">&#x307C; &#x307C;</span>
<span title="U+307D">&#x307D; &#x307D;</span>
<span title="U+307E">&#x307E; &#x307E;</span>
<span title="U+307F">&#x307F; &#x307F;</span>
<span title="U+3080">&#x3080; &#x3080;</span>
<span title="U+3081">&#x3081; &#x3081;</span>
<span title="U+3082">&#x3082; &#x3082;</span>
<span title="U+3084">&#x3084; &#x3084;</span>
<span title="U+3086">&#x3086; &#x3086;</span>
<span title="U+3088">&#x3088; &#x3088;</span>
<span title="U+3089">&#x3089; &#x3089;</span>
<span title="U+308A">&#x308A; &#x308A;</span>
<span title="U+308B">&#x308B; &#x308B;</span>
<span title="U+308C">&#x308C; &#x308C;</span>
<span title="U+308D">&#x308D; &#x308D;</span>
<span title="U+308F">&#x308F; &#x308F;</span>
<span title="U+3090">&#x3090; &#x3090;</span>
<span title="U+3091">&#x3091; &#x3091;</span>
<span title="U+3092">&#x3092; &#x3092;</span>
<span title="U+3093">&#x3093; &#x3093;</span>
<span title="U+30A2">&#x30A2; &#x30A2;</span>
<span title="U+30A4">&#x30A4; &#x30A4;</span>
<span title="U+30A6">&#x30A6; &#x30A6;</span>
<span title="U+30A8">&#x30A8; &#x30A8;</span>
<span title="U+30AA">&#x30AA; &#x30AA;</span>
<span title="U+30AB">&#x30AB; &#x30AB;</span>
<span title="U+30AC">&#x30AC; &#x30AC;</span>
<span title="U+30AD">&#x30AD; &#x30AD;</span>
<span title="U+30AE">&#x30AE; &#x30AE;</span>
<span title="U+30AF">&#x30AF; &#x30AF;</span>
<span title="U+30B0">&#x30B0; &#x30B0;</span>
<span title="U+30B1">&#x30B1; &#x30B1;</span>
<span title="U+30B2">&#x30B2; &#x30B2;</span>
<span title="U+30B3">&#x30B3; &#x30B3;</span>
<span title="U+30B4">&#x30B4; &#x30B4;</span>
<span title="U+30B5">&#x30B5; &#x30B5;</span>
<span title="U+30B6">&#x30B6; &#x30B6;</span>
<span title="U+30B7">&#x30B7; &#x30B7;</span>
<span title="U+30B8">&#x30B8; &#x30B8;</span>
<span title="U+30B9">&#x30B9; &#x30B9;</span>
<span title="U+30BA">&#x30BA; &#x30BA;</span>
<span title="U+30BB">&#x30BB; &#x30BB;</span>
<span title="U+30BC">&#x30BC; &#x30BC;</span>
<span title="U+30BD">&#x30BD; &#x30BD;</span>
<span title="U+30BE">&#x30BE; &#x30BE;</span>
<span title="U+30BF">&#x30BF; &#x30BF;</span>
<span title="U+30C0">&#x30C0; &#x30C0;</span>
<span title="U+30C1">&#x30C1; &#x30C1;</span>
<span title="U+30C2">&#x30C2; &#x30C2;</span>
<span title="U+30C4">&#x30C4; &#x30C4;</span>
<span title="U+30C5">&#x30C5; &#x30C5;</span>
<span title="U+30C6">&#x30C6; &#x30C6;</span>
<span title="U+30C7">&#x30C7; &#x30C7;</span>
<span title="U+30C8">&#x30C8; &#x30C8;</span>
<span title="U+30C9">&#x30C9; &#x30C9;</span>
<span title="U+30CA">&#x30CA; &#x30CA;</span>
<span title="U+30CB">&#x30CB; &#x30CB;</span>
<span title="U+30CC">&#x30CC; &#x30CC;</span>
<span title="U+30CD">&#x30CD; &#x30CD;</span>
<span title="U+30CE">&#x30CE; &#x30CE;</span>
<span title="U+30CF">&#x30CF; &#x30CF;</span>
<span title="U+30D0">&#x30D0; &#x30D0;</span>
<span title="U+30D1">&#x30D1; &#x30D1;</span>
<span title="U+30D2">&#x30D2; &#x30D2;</span>
<span title="U+30D3">&#x30D3; &#x30D3;</span>
<span title="U+30D4">&#x30D4; &#x30D4;</span>
<span title="U+30D5">&#x30D5; &#x30D5;</span>
<span title="U+30D6">&#x30D6; &#x30D6;</span>
<span title="U+30D7">&#x30D7; &#x30D7;</span>
<span title="U+30D8">&#x30D8; &#x30D8;</span>
<span title="U+30D9">&#x30D9; &#x30D9;</span>
<span title="U+30DA">&#x30DA; &#x30DA;</span>
<span title="U+30DB">&#x30DB; &#x30DB;</span>
<span title="U+30DC">&#x30DC; &#x30DC;</span>
<span title="U+30DD">&#x30DD; &#x30DD;</span>
<span title="U+30DE">&#x30DE; &#x30DE;</span>
<span title="U+30DF">&#x30DF; &#x30DF;</span>
<span title="U+30E0">&#x30E0; &#x30E0;</span>
<span title="U+30E1">&#x30E1; &#x30E1;</span>
<span title="U+30E2">&#x30E2; &#x30E2;</span>
<span title="U+30E4">&#x30E4; &#x30E4;</span>
<span title="U+30E6">&#x30E6; &#x30E6;</span>
<span title="U+30E8">&#x30E8; &#x30E8;</span>
<span title="U+30E9">&#x30E9; &#x30E9;</span>
<span title="U+30EA">&#x30EA; &#x30EA;</span>
<span title="U+30EB">&#x30EB; &#x30EB;</span>
<span title="U+30EC">&#x30EC; &#x30EC;</span>
<span title="U+30ED">&#x30ED; &#x30ED;</span>
<span title="U+30EF">&#x30EF; &#x30EF;</span>
<span title="U+30F0">&#x30F0; &#x30F0;</span>
<span title="U+30F1">&#x30F1; &#x30F1;</span>
<span title="U+30F2">&#x30F2; &#x30F2;</span>
<span title="U+30F3">&#x30F3; &#x30F3;</span>
<span title="U+30F4">&#x30F4; &#x30F4;</span>
<span title="U+309B">&#x309B; &#x309B;</span>
<span title="U+309C">&#x309C; &#x309C;</span>
<span title="U+FF60">&#xFF60; &#xFF60;</span>
<span title="U+FF61">&#xFF61; &#xFF61;</span>
<span title="U+FF62">&#xFF62; &#xFF62;</span>
<span title="U+FF63">&#xFF63; &#xFF63;</span>
<span title="U+FF64">&#xFF64; &#xFF64;</span>
<span title="U+FF65">&#xFF65; &#xFF65;</span>
<span title="U+FF66">&#xFF66; &#xFF66;</span>
<span title="U+FF70">&#xFF70; &#xFF70;</span>
<span title="U+FF71">&#xFF71; &#xFF71;</span>
<span title="U+FF72">&#xFF72; &#xFF72;</span>
<span title="U+FF73">&#xFF73; &#xFF73;</span>
<span title="U+FF74">&#xFF74; &#xFF74;</span>
<span title="U+FF75">&#xFF75; &#xFF75;</span>
<span title="U+FF76">&#xFF76; &#xFF76;</span>
<span title="U+FF77">&#xFF77; &#xFF77;</span>
<span title="U+FF78">&#xFF78; &#xFF78;</span>
<span title="U+FF79">&#xFF79; &#xFF79;</span>
<span title="U+FF7A">&#xFF7A; &#xFF7A;</span>
<span title="U+FF7B">&#xFF7B; &#xFF7B;</span>
<span title="U+FF7C">&#xFF7C; &#xFF7C;</span>
<span title="U+FF7D">&#xFF7D; &#xFF7D;</span>
<span title="U+FF7E">&#xFF7E; &#xFF7E;</span>
<span title="U+FF7F">&#xFF7F; &#xFF7F;</span>
<span title="U+FF80">&#xFF80; &#xFF80;</span>
<span title="U+FF81">&#xFF81; &#xFF81;</span>
<span title="U+FF82">&#xFF82; &#xFF82;</span>
<span title="U+FF83">&#xFF83; &#xFF83;</span>
<span title="U+FF84">&#xFF84; &#xFF84;</span>
<span title="U+FF85">&#xFF85; &#xFF85;</span>
<span title="U+FF86">&#xFF86; &#xFF86;</span>
<span title="U+FF87">&#xFF87; &#xFF87;</span>
<span title="U+FF88">&#xFF88; &#xFF88;</span>
<span title="U+FF89">&#xFF89; &#xFF89;</span>
<span title="U+FF8A">&#xFF8A; &#xFF8A;</span>
<span title="U+FF8B">&#xFF8B; &#xFF8B;</span>
<span title="U+FF8C">&#xFF8C; &#xFF8C;</span>
<span title="U+FF8D">&#xFF8D; &#xFF8D;</span>
<span title="U+FF8E">&#xFF8E; &#xFF8E;</span>
<span title="U+FF8F">&#xFF8F; &#xFF8F;</span>
<span title="U+FF90">&#xFF90; &#xFF90;</span>
<span title="U+FF91">&#xFF91; &#xFF91;</span>
<span title="U+FF92">&#xFF92; &#xFF92;</span>
<span title="U+FF93">&#xFF93; &#xFF93;</span>
<span title="U+FF94">&#xFF94; &#xFF94;</span>
<span title="U+FF95">&#xFF95; &#xFF95;</span>
<span title="U+FF96">&#xFF96; &#xFF96;</span>
<span title="U+FF97">&#xFF97; &#xFF97;</span>
<span title="U+FF98">&#xFF98; &#xFF98;</span>
<span title="U+FF99">&#xFF99; &#xFF99;</span>
<span title="U+FF9A">&#xFF9A; &#xFF9A;</span>
<span title="U+FF9B">&#xFF9B; &#xFF9B;</span>
<span title="U+FF9C">&#xFF9C; &#xFF9C;</span>
<span title="U+FF9D">&#xFF9D; &#xFF9D;</span>
<span title="U+FF9E">&#xFF9E; &#xFF9E;</span>
<span title="U+FF9F">&#xFF9F; &#xFF9F;</span>
</div>
<!--Notes:
Tip: To identify the characters where differences occur, in order to report problem characters, either mouse over to reveal a tooltip, or copy and paste the sequence into a tool such as <a href='http://r12a.github.io/uniview/' target='_blank'>UniView</a> or the <a href='http://r12a.github.io/apps/conversion/' target='_blank'>Unicode Conversion Tool</a>.
-->
</body>
</html>


@ -22,7 +22,7 @@ Now, run the tests using the `safari` product:
```
This will use the `safaridriver` found on the path, which will be stable Safari.
To run Safari Technology Preview instead, use the `--webdriver-binary` argument:
To run Safari Technology Preview instead, use the `--channel=preview` argument:
```
./wpt run --webdriver-binary "/Applications/Safari Technology Preview.app/Contents/MacOS/safaridriver" safari [test_list]
./wpt run --channel=preview safari [test_list]
```


@ -40,8 +40,10 @@ test (t => {
'noOpenER',
' NOopener',
'=NOOPENER',
'noopener=NOOPENER', // => ('noopener', 'noopener')
'NOOPENER=noopener' // => ('noopener', 'noopener')
'noopener=1',
'NOOPENER=1',
'NOOPENER=yes',
'noopener=YES',
];
featureVariants.forEach(feature => {
var win = window.open(windowURL, '', feature);
@ -82,9 +84,9 @@ test (t => {
'noopener==,',
'noopener=\n ,',
'noopener = \t ,',
'noopener\n=\r noopener,', // => ('noopener', 'noopener')
'noopener\n=\r 1,', // => ('noopener', '1')
'noopener=,yes', // => ('noopener'), ('yes')
'noopener= foo=,', // => ('noopener', 'foo')
'noopener= yes=,', // => ('noopener', 'yes')
'noopener = \u000Cyes' // => ('noopener', 'yes')
];
featureVariants.forEach(feature => {
@ -96,14 +98,14 @@ test (t => {
test (t => {
// Tokenizing `value` should collect any non-separator code points until first separator
var featureVariants = [
'noopener=noopener', // => ('noopener', 'noopener')
'noopener=1', // => ('noopener', '1')
'noopener=yes', // => ('noopener', 'yes')
'noopener = yes ,', // => ('noopener', 'yes')
'noopener=\nyes ,', // => ('noopener', 'yes')
'noopener=yes yes', // => ('noopener', 'yes'), ('yes', '')
'noopener=yes\ts', // => ('noopener', 'yes'), ('s', '')
'noopener==', // => ('noopener', '')
'noopener=0\n,', // => ('noopener', '0')
'noopener=1\n,', // => ('noopener', '1')
'==noopener===', // => ('noopener', '')
'noopener==\u000C' // => ('noopener', '')
];
@ -114,20 +116,30 @@ test (t => {
}, 'Tokenizing should read characters until first window feature separator as `value`');
test (t => {
// If tokenizedFeatures contains an entry with the key "noopener"...disown opener
// i.e. `value` should be irrelevant
var featureVariants = [
'noopener=false',
',noopener=0, ',
'foo=bar,noopener=noopener,',
'noopener=true',
'noopener=foo\nbar\t'
'noopener=1',
'noopener=2',
'noopener=12345',
'noopener=1.5',
'noopener=-1',
];
featureVariants.forEach(feature => {
var win = window.open(windowURL, '', feature);
assert_equals(win, null, `"${feature}" should activate feature "noopener"`);
});
}, '"noopener" should be based on name (key), not value');
}, 'Integer values other than 0 should activate the feature');
test (t => {
var featureVariants = [
'noopener=0',
'noopener=0.5',
'noopener=error',
];
featureVariants.forEach(feature => {
var win = window.open(windowURL, '', feature);
assert_not_equals(win, null, `"${feature}" should NOT activate feature "noopener"`);
});
}, 'Integer value of 0 should not activate the feature');
test (t => {
var invalidFeatureVariants = [


@ -1,7 +1,7 @@
// GENERATED CONTENT - DO NOT EDIT
// Content was automatically extracted by Reffy into reffy-reports
// (https://github.com/tidoust/reffy-reports)
// Source: Web Animations (https://w3c.github.io/web-animations/)
// Source: Web Animations (https://drafts.csswg.org/web-animations-1/)
[Exposed=Window]
interface AnimationTimeline {


@ -1,7 +1,7 @@
// GENERATED CONTENT - DO NOT EDIT
// Content was automatically extracted by Reffy into reffy-reports
// (https://github.com/tidoust/reffy-reports)
// Source: Web Share API (https://wicg.github.io/web-share/)
// Source: Web Share API - Level 1 (https://wicg.github.io/web-share/)
partial interface Navigator {
[SecureContext] Promise<void> share(optional ShareData data);


@ -28,15 +28,26 @@ promise_test(async t => {
const stream = await timeout(navigator.mediaDevices.getUserMedia({video: true}), 10000, "getUserMedia timeout");
t.add_cleanup(() => stream.getTracks().forEach(track => track.stop()));
vid.defaultPlaybackRate = 0.4;
vid.playbackRate = 0.4;
vid.preload = "metadata";
vid.srcObject = stream;
vid.onratechange = t.unreached_func('ratechange event must not be fired');
vid.play();
assert_true(!vid.seeking, "A MediaStream is not seekable");
assert_equals(vid.seekable.length, 0, "A MediaStream is not seekable");
assert_equals(vid.defaultPlaybackRate, 1, "playback rate is always 1");
vid.defaultPlaybackRate = 0.5;
assert_equals(vid.defaultPlaybackRate, 1, "Setting defaultPlaybackRate must be ignored");
assert_equals(vid.playbackRate, 1, "playback rate is always 1");
vid.playbackRate = 0.5;
assert_equals(vid.playbackRate, 1, "Setting playbackRate must be ignored");
assert_equals(vid.buffered.length, 0, "A MediaStream cannot be preloaded. Therefore, there is no buffered timeranges");
assert_equals(vid.readyState, vid.HAVE_NOTHING, "readyState is HAVE_NOTHING initially");
assert_equals(vid.duration, NaN, "A MediaStream does not have any duration initially.");
assert_equals(vid.preload, "none", "preload must always be none");
vid.preload = "metadata";
assert_equals(vid.preload, "none", "Setting preload must be ignored");
const haveLoadedData = new Promise(r => vid.addEventListener("loadeddata", r, {once: true}));


@ -44,7 +44,7 @@ class Browser(object):
return NotImplemented
@abstractmethod
def find_webdriver(self):
def find_webdriver(self, channel=None):
"""Find the binary of the WebDriver."""
return NotImplemented
@ -214,7 +214,7 @@ class Firefox(Browser):
return None
return path
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("geckodriver")
def get_version_and_channel(self, binary):
@ -390,7 +390,7 @@ class Fennec(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
raise NotImplementedError
def install_webdriver(self, dest=None, channel=None):
@ -444,7 +444,7 @@ class Chrome(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("chromedriver")
def install_webdriver(self, dest=None, channel=None):
@ -492,7 +492,7 @@ class ChromeAndroid(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("chromedriver")
def install_webdriver(self, dest=None, channel=None):
@ -545,7 +545,7 @@ class Opera(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("operadriver")
def install_webdriver(self, dest=None, channel=None):
@ -588,7 +588,7 @@ class Edge(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("MicrosoftWebDriver")
def install_webdriver(self, dest=None, channel=None):
@ -614,7 +614,7 @@ class InternetExplorer(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
return find_executable("IEDriverServer.exe")
def install_webdriver(self, dest=None, channel=None):
@ -639,8 +639,11 @@ class Safari(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
return find_executable("safaridriver")
def find_webdriver(self, channel=None):
path = None
if channel == "preview":
path = "/Applications/Safari Technology Preview.app/Contents/MacOS"
return find_executable("safaridriver", path)
def install_webdriver(self, dest=None, channel=None):
raise NotImplementedError
@ -703,7 +706,7 @@ class Servo(Browser):
path = find_executable("servo")
return path
def find_webdriver(self):
def find_webdriver(self, channel=None):
return None
def install_webdriver(self, dest=None, channel=None):
@ -727,7 +730,7 @@ class Sauce(Browser):
def find_binary(self, venv_path=None, channel=None):
raise NotImplementedError
def find_webdriver(self):
def find_webdriver(self, channel=None):
raise NotImplementedError
def install_webdriver(self, dest=None, channel=None):
@ -749,7 +752,7 @@ class WebKit(Browser):
def find_binary(self, venv_path=None, channel=None):
return None
def find_webdriver(self):
def find_webdriver(self, channel=None):
return None
def install_webdriver(self, dest=None, channel=None):


@ -6,6 +6,8 @@ import sys
latest_channels = {
'firefox': 'nightly',
'chrome': 'dev',
'safari': 'preview',
'safari_webdriver': 'preview',
'servo': 'nightly'
}


@ -383,7 +383,7 @@ class Safari(BrowserSetup):
def setup_kwargs(self, kwargs):
if kwargs["webdriver_binary"] is None:
webdriver_binary = self.browser.find_webdriver()
webdriver_binary = self.browser.find_webdriver(channel=kwargs["browser_channel"])
if webdriver_binary is None:
raise WptrunError("Unable to locate safaridriver binary")


@ -0,0 +1,85 @@
<!DOCTYPE html>
<html>
<head>
<title>
Test passing SharedArrayBuffer to an AudioWorklet
</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webaudio/resources/audit.js"></script>
</head>
<body>
<script id="layout-test-code">
let audit = Audit.createTaskRunner();
let context = new AudioContext();
let filePath = 'processors/sharedarraybuffer-processor.js';
if (window.SharedArrayBuffer) {
audit.define(
'Test postMessage from AudioWorkletProcessor to AudioWorkletNode',
(task, should) => {
let workletNode =
new AudioWorkletNode(context, 'sharedarraybuffer-processor');
// After it is created, the worklet will send a new
// SharedArrayBuffer to the main thread.
//
// The worklet will then wait to receive a message from the main
// thread.
//
// When it receives the message, it will check whether it is a
// SharedArrayBuffer, and send this information back to the main
// thread.
workletNode.port.onmessage = (event) => {
let data = event.data;
switch (data.state) {
case 'created':
should(
data.sab instanceof SharedArrayBuffer,
'event.data.sab from worklet is an instance of SharedArrayBuffer')
.beTrue();
// Send a SharedArrayBuffer back to the worklet.
let sab = new SharedArrayBuffer(8);
workletNode.port.postMessage(sab);
break;
case 'received message':
should(data.isSab, 'event.data from main thread is an instance of SharedArrayBuffer')
.beTrue();
task.done();
break;
default:
should(false,
`Got unexpected message from worklet: ${data.state}`)
.beTrue();
task.done();
break;
}
};
workletNode.port.onmessageerror = (event) => {
should(false, 'Got messageerror from worklet').beTrue();
task.done();
};
});
} else {
// NOTE(binji): SharedArrayBuffer is only enabled where we have site
// isolation.
audit.define('Skipping test because SharedArrayBuffer is not defined',
(task, should) => {
task.done();
});
}
context.audioWorklet.addModule(filePath).then(() => {
audit.run();
});
</script>
</body>
</html>


@ -0,0 +1,35 @@
/**
* @class SharedArrayBufferProcessor
* @extends AudioWorkletProcessor
*
* This processor class demonstrates passing SharedArrayBuffers to and from
* workers.
*/
class SharedArrayBufferProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.port.onmessage = this.handleMessage.bind(this);
this.port.onmessageerror = this.handleMessageError.bind(this);
let sab = new SharedArrayBuffer(8);
this.port.postMessage({state: 'created', sab});
}
handleMessage(event) {
this.port.postMessage({
state: 'received message',
isSab: event.data instanceof SharedArrayBuffer
});
}
handleMessageError(event) {
this.port.postMessage({
state: 'received messageerror'
});
}
process() {
return true;
}
}
registerProcessor('sharedarraybuffer-processor', SharedArrayBufferProcessor);


@ -1 +0,0 @@
{"files":{".travis.yml":"71e232ec96a9f11083a6ac2e3de7d3482032a4a9ed02c0e1be30b46da47cacef","CHANGELOG.md":"6c8e98f58fc7c4c3b7039027ff521a070b511f2882eb9985e32f118aff4ee4c0","Cargo.toml":"c87fbd92db7e1f7ace5b082a4168161e21e5ac76320ad44e01b7e3ea88aeee6e","LICENSE":"643adba34cf48432ba1bac872fdd5686d129c64e06246399bacf20142820620b","README.md":"3768d87584c808a133df7547996900d7574801f2021b6e6bc8c94cd0040b3cf8","appveyor.yml":"ab80c6004eeccda11d3e10284c7cd1bc8ecc87765204dfbf9c1dc4eb3843b86a","src/lib.rs":"16610a89cc5b9f0682a08507b4aea6b1e50ed6e78bc9a63acb6317e23a84477b"},"package":"d0fd4c0631f06448cc45a6bbb3b710ebb7ff8ccb96a0800c994afe23a70d5df2"}

third_party/rust/atty-0.1.2/.travis.yml (vendored)

@ -1,43 +0,0 @@
sudo: false
language: rust
matrix:
fast_finish: true
include:
- rust: nightly
- rust: beta
- rust: stable
os:
- linux
- osx
script:
- cargo build
- cargo test
cache:
apt: true
directories:
- target/debug/deps
- target/debug/build
addons:
apt:
packages:
- libcurl4-openssl-dev
- libelf-dev
- libdw-dev
- binutils-dev # required for `kcov --verify`
- libbfd-dev # required for `kcov --verify`
after_success: |
[ $TRAVIS_RUST_VERSION = stable ] &&
wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build && cmake .. && make && make install DESTDIR=../tmp && cd ../.. &&
ls target/debug &&
./kcov-master/tmp/usr/local/bin/kcov --verify --coveralls-id=$TRAVIS_JOB_ID --exclude-pattern=/.cargo target/kcov target/debug/atty-* &&
[ $TRAVIS_BRANCH = master ] &&
[ $TRAVIS_PULL_REQUEST = false ] &&
cargo doc --no-deps &&
echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
sudo pip install --user ghp-import &&
/home/travis/.local/bin/ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
env:
global:
secure: acjXoBFG4yFklz/iW4q9PLaMmTgug0c8hOov4uiaXYjDkVGhnEePBozGc8ctKuFv2BVlwBSzvE1neE9dHcCS6il0x+G79sVTekfVN5dERja3UpwrC0/QodJuDmErIUpb6zylupPnUGq5pzZabRPNKyAnsFS5wYhLMSLxGPu4pfYdW0Eu8CEPIgPYsI6o2pfKgNpXbeizdHRLMeZCN4cbEPohO1odc+Z6WJvgKn2xEkpAcfhAuaroqGGxRtmDiJZ/JaBijAKY/O9Q3Xq1GSGOPT5lmwJSp3Fxw5dgmeX6LmN0ZODASdnEoYfoqUDUFzkCON3Sk4a7hugxlkZ7cx1tfqXxMg+0BgYIUdGQNloDJnuusWvXPBFdB2jxMsfcbrCjNsrJ8kjN6uBsW9yy0kqN7a8eOJckwh5fYRWfNta0R+BrveNXWmGp4u4aBq/85jEiHi30XKTzaEUbF0Y3cIONweWeWwBOcAvPBhO63Y07TRRe+SSk1NYm7QHGW9RsHhz89OSbaIXqn+r/o+6DZcw5XaO73DtZ62Kx48NErej9kVqcIJ6HnyvCJ/fJoT7h1ixSRI/WmS30l2S/q33Q2G4C/IZ4ZZRD/1thSltAxeA6OAUnr8ITZyW47CqOmyL1IUptrdAb9OLEedYV/QrOhcg2RJLXyP66xnItOwMp014bEp4=

third_party/rust/atty-0.1.2/CHANGELOG.md (vendored)

@ -1,12 +0,0 @@
# 0.1.2
* windows support (with automated testing)
* automated code coverage
# 0.1.1
* bumped libc dep from `0.1` to `0.2`
# 0.1.0
* initial release

third_party/rust/atty-0.1.2/Cargo.toml (vendored)

@ -1,15 +0,0 @@
[package]
name = "atty"
version = "0.1.2"
authors = ["softprops <d.tangren@gmail.com>"]
description = "A simple interface for querying atty"
documentation = "http://softprops.github.io/atty"
homepage = "https://github.com/softprops/atty"
repository = "https://github.com/softprops/atty"
keywords = ["terminal", "tty"]
license = "MIT"
[dependencies]
libc = "0.2"
winapi = "0.2"
kernel32-sys = "0.2"

third_party/rust/atty-0.1.2/LICENSE (vendored)

@ -1,20 +0,0 @@
Copyright (c) 2015 Doug Tangren
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

third_party/rust/atty-0.1.2/README.md (vendored)

@ -1,20 +0,0 @@
# atty
[![Build Status](https://travis-ci.org/softprops/atty.svg?branch=master)](https://travis-ci.org/softprops/atty) [![Build status](https://ci.appveyor.com/api/projects/status/geggrsnsjsuse8cv?svg=true)](https://ci.appveyor.com/project/softprops/atty) [![Coverage Status](https://coveralls.io/repos/softprops/atty/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/atty?branch=master)
> are you or are you not a tty?
## docs
Find them [here](http://softprops.github.io/atty)
## install
Add the following to your `Cargo.toml`
```toml
[dependencies]
atty = "0.1"
```
Doug Tangren (softprops) 2015

third_party/rust/atty-0.1.2/appveyor.yml (vendored)

@ -1,19 +0,0 @@
environment:
matrix:
- TARGET: nightly-x86_64-pc-windows-msvc
VCVARS: "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\amd64\\vcvars64.bat"
- TARGET: nightly-i686-pc-windows-msvc
VCVARS: "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\bin\\vcvars32.bat"
- TARGET: nightly-x86_64-pc-windows-gnu
- TARGET: nightly-i686-pc-windows-gnu
- TARGET: 1.2.0-x86_64-pc-windows-gnu
install:
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:TARGET}.exe" -FileName "rust-install.exe"
- ps: .\rust-install.exe /VERYSILENT /NORESTART /DIR="C:\rust" | Out-Null
- ps: $env:PATH="$env:PATH;C:\rust\bin"
- call "%VCVARS%" || ver>nul
- rustc -vV
- cargo -vV
build: false
test_script:
- cargo test --verbose

third_party/rust/atty-0.1.2/src/lib.rs (vendored)

@ -1,62 +0,0 @@
//! atty is a simple utility that answers one question
//! > is this a tty?
//!
//! usage is just as simple
//!
//! ```
//! if atty::is() {
//! println!("i'm a tty")
//! }
//! ```
//!
//! ```
//! if atty::isnt() {
//! println!("i'm not a tty")
//! }
//! ```
extern crate libc;
/// returns true if this is a tty
#[cfg(unix)]
pub fn is() -> bool {
let r = unsafe { libc::isatty(libc::STDOUT_FILENO) };
r != 0
}
/// returns true if this is a tty
#[cfg(windows)]
pub fn is() -> bool {
extern crate kernel32;
extern crate winapi;
use std::ptr;
let handle: winapi::HANDLE = unsafe {
kernel32::CreateFileA(b"CONOUT$\0".as_ptr() as *const i8,
winapi::GENERIC_READ | winapi::GENERIC_WRITE,
winapi::FILE_SHARE_WRITE,
ptr::null_mut(),
winapi::OPEN_EXISTING,
0,
ptr::null_mut())
};
if handle == winapi::INVALID_HANDLE_VALUE {
return false;
}
let mut out = 0;
unsafe { kernel32::GetConsoleMode(handle, &mut out) != 0 }
}
/// returns true if this is _not_ a tty
pub fn isnt() -> bool {
!is()
}
#[cfg(test)]
mod tests {
use super::is;
#[test]
fn is_test() {
assert!(is())
}
}


@ -1 +1 @@
{"files":{".cargo_vcs_info.json":"5a666f68ab005317d058d78c58936ebf66086a242f6a4b8415230649bbce768d","Cargo.toml":"04c87832069d5462b4b87c935fa448213e00a804fcf827334a02beda1fd7f971","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"a831abf8d7a1ab73c5d70a9e8517b8af1df492589a2f180698145ac5d46d7102","src/export.rs":"e889c2f45f00c1787e2270a50fc6d9628446d620c3c0d2ac6ba3f031c561197d","src/import.rs":"7a8525aa55ff0c6c266edfb69a351345ab0c36176deeb0fb91901d4a4e6bd9d6","src/lib.rs":"d4ea18ec850054a817c6b91ed52412a2f2f39639628e5918dee688d829d3ed4b","src/spec.rs":"8f442a5d218360681ad3a5b4c4740b7ae227e087eb745df38cca07a88d8484c4","src/util.rs":"1d934eec75d9dee44289f9a9a9e67c96dd6205367430b9bcf9fc66e730bf6eb0"},"package":"cc0956bac41c458cf38340699dbb54c2220c91cdbfa33be19670fe69e0a6ac9b"}
{"files":{".cargo_vcs_info.json":"13d7d8f1c677eb54c2b0005b8e048b79461b91176796088fc70f5d40ffbefd0a","Cargo.toml":"eee9b8c9f05e442ed41ee986b07b443cb89465346dce4aae4f73f13fa7243492","README.md":"17e5ed3a3bd9b898e73c3056711daabe1238fe9682d24d255f8263fae4eb783d","examples/generate_spidermonkey.rs":"a831abf8d7a1ab73c5d70a9e8517b8af1df492589a2f180698145ac5d46d7102","src/export.rs":"56910e257a000cac963b9ac377558767d05076b677e83a7e75e570ecbd4b35f6","src/import.rs":"366bada1b19c608ffe7dc4761f1db1a1dae616f4ed99685e4260a00d5c0125d0","src/lib.rs":"d4ea18ec850054a817c6b91ed52412a2f2f39639628e5918dee688d829d3ed4b","src/spec.rs":"7cfb4705d9cfa72ba0a34c5d5beab7e23ac54d8e9fa125317364535d5aa7496a","src/util.rs":"1d934eec75d9dee44289f9a9a9e67c96dd6205367430b9bcf9fc66e730bf6eb0"},"package":"430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"}


@ -1,5 +1,5 @@
{
"git": {
"sha1": "4c24254cdcfba7a929573f34e5ac12686a86bb60"
"sha1": "da502c023e7c92bff0003109935a8767d9176637"
}
}

third_party/rust/binjs_meta/Cargo.toml (vendored)

@ -12,7 +12,7 @@
[package]
name = "binjs_meta"
version = "0.3.10"
version = "0.4.3"
authors = ["David Teller <D.O.Teller@gmail.com>"]
description = "Part of binjs-ref. Tools for manipulating grammars. You probably do not want to use this crate directly unless you're writing an encoder, decoder or parser generator for binjs."
homepage = "https://binast.github.io/ecmascript-binary-ast/"
@@ -31,7 +31,7 @@ version = "^0.7"
version = "^0.4"
[dependencies.webidl]
version = "^0.6"
version = "^0.8"
[dev-dependencies.clap]
version = "^2"

third_party/rust/binjs_meta/src/export.rs (vendored)

@@ -115,11 +115,10 @@ impl TypeDeanonymizer {
// See also tagged_tuple in write.rs.
if field.is_lazy() {
declaration.with_field(skip_name_map.get(field.name()).unwrap(),
Type::offset().required(),
Laziness::Eager);
Type::offset().required());
}
declaration.with_field(field.name(), field.type_().clone(),
field.laziness());
declaration.with_field_laziness(field.name(), field.type_().clone(),
field.laziness());
}
}
// Copy and deanonymize typedefs
@@ -191,6 +190,8 @@ impl TypeDeanonymizer {
TypeSpec::Boolean |
TypeSpec::Number |
TypeSpec::UnsignedLong |
TypeSpec::PropertyKey |
TypeSpec::IdentifierName |
TypeSpec::String |
TypeSpec::Offset |
TypeSpec::Void => {
@@ -202,7 +203,14 @@
debug!(target: "export_utils", "import_typespec: Attempting to redefine typedef {name}", name = my_name.to_str());
}
}
(None, self.builder.node_name("@@"))
// This is a workaround for typedefs in the webidl that are not truly typedefs.
// See https://github.com/Yoric/ecmascript-binary-ast/pull/1
let name = match *type_spec {
TypeSpec::PropertyKey => self.builder.node_name("PropertyKey"),
TypeSpec::IdentifierName => self.builder.node_name("IdentifierName"),
_ => self.builder.node_name(&format!("@@{:?}", type_spec)),
};
(None, name)
}
TypeSpec::NamedType(ref link) => {
let resolved = spec.get_type_by_name(link)
@@ -238,11 +246,13 @@ impl TypeDeanonymizer {
Some(IsNullable { is_nullable: true, .. }) |
Some(IsNullable { content: Primitive::Interface(_), .. }) => Type::named(&content).required(),
Some(IsNullable { content: Primitive::String, .. }) => Type::string().required(),
Some(IsNullable { content: Primitive::IdentifierName, .. }) => Type::identifier_name().required(),
Some(IsNullable { content: Primitive::PropertyKey, .. }) => Type::property_key().required(),
Some(IsNullable { content: Primitive::Number, .. }) => Type::number().required(),
Some(IsNullable { content: Primitive::UnsignedLong, .. }) => Type::unsigned_long().required(),
Some(IsNullable { content: Primitive::Boolean, .. }) => Type::bool().required(),
Some(IsNullable { content: Primitive::Offset, .. }) => Type::offset().required(),
Some(IsNullable { content: Primitive::Void, .. }) => Type::void().required()
Some(IsNullable { content: Primitive::Void, .. }) => Type::void().required(),
};
debug!(target: "export_utils", "import_typespec aliasing {:?} => {:?}",
my_name, deanonymized);
@@ -375,6 +385,10 @@ impl TypeName {
"_String".to_string(),
TypeSpec::Void =>
"_Void".to_string(),
TypeSpec::IdentifierName =>
"IdentifierName".to_string(),
TypeSpec::PropertyKey =>
"PropertyKey".to_string(),
TypeSpec::TypeSum(ref sum) => {
format!("{}", sum.types()
.iter()
@@ -408,6 +422,10 @@ impl ToWebidl {
"bool".to_string(),
TypeSpec::String =>
"string".to_string(),
TypeSpec::PropertyKey =>
"[PropertyKey] string".to_string(),
TypeSpec::IdentifierName =>
"[IdentifierName] string".to_string(),
TypeSpec::Number =>
"number".to_string(),
TypeSpec::UnsignedLong =>

third_party/rust/binjs_meta/src/import.rs (vendored)

@@ -1,9 +1,11 @@
use spec::{ self, SpecBuilder, TypeSum, Laziness };
use spec::{ self, Laziness, SpecBuilder, TypeSum };
use webidl::ast::*;
pub struct Importer {
builder: SpecBuilder,
/// The interfaces we have traversed so far.
path: Vec<String>,
}
impl Importer {
/// Import an AST into a SpecBuilder.
@@ -62,6 +64,7 @@ impl Importer {
/// ```
pub fn import(ast: &AST) -> SpecBuilder {
let mut importer = Importer {
path: Vec::with_capacity(256),
builder: SpecBuilder::new()
};
importer.import_ast(ast);
@@ -90,9 +93,22 @@ impl Importer {
}
fn import_typedef(&mut self, typedef: &Typedef) {
let name = self.builder.node_name(&typedef.name);
let type_ = self.convert_type(&*typedef.type_);
// The following are, unfortunately, not true typedefs.
// Ignore their definition.
let type_ = match typedef.name.as_ref() {
"Identifier" => spec::TypeSpec::IdentifierName
.required(),
"IdentifierName" => spec::TypeSpec::IdentifierName
.required(),
"PropertyKey" => spec::TypeSpec::PropertyKey
.required(),
_ => self.convert_type(&*typedef.type_)
};
debug!(target: "meta::import", "Importing typedef {type_:?} {name:?}",
type_ = type_,
name = name);
let mut node = self.builder.add_typedef(&name)
.expect("Name already present");
.unwrap_or_else(|| panic!("Error: Name {} is defined more than once in the spec.", name));
assert!(!type_.is_optional());
node.with_spec(type_.spec);
}
@@ -102,31 +118,47 @@ impl Importer {
} else {
panic!("Expected a non-partial interface, got {:?}", interface);
};
if interface.name == "Node" {
// We're not interested in the root interface.
return;
// Handle special, hardcoded, interfaces.
match interface.name.as_ref() {
"Node" => {
// We're not interested in the root interface.
return;
}
"IdentifierName" => {
unimplemented!()
}
_ => {
}
}
if let Some(ref parent) = interface.inherits {
assert_eq!(parent, "Node");
}
self.path.push(interface.name.clone());
// Now handle regular stuff.
let mut fields = Vec::new();
for member in &interface.members {
if let InterfaceMember::Attribute(Attribute::Regular(ref attribute)) = *member {
use webidl::ast::ExtendedAttribute::NoArguments;
use webidl::ast::Other::Identifier;
let name = self.builder.field_name(&attribute.name);
let type_ = self.convert_type(&*attribute.type_);
let mut laziness = Laziness::Eager;
for extended_attribute in &attribute.extended_attributes {
use webidl::ast::ExtendedAttribute::NoArguments;
use webidl::ast::Other::Identifier;
if let &NoArguments(Identifier(ref id)) = extended_attribute.as_ref() {
if &*id == "Lazy" {
laziness = Laziness::Lazy;
let is_lazy = attribute.extended_attributes.iter()
.find(|attribute| {
if let &NoArguments(Identifier(ref id)) = attribute.as_ref() {
if &*id == "Lazy" {
return true;
}
}
}
}
fields.push((name, type_, laziness));
false
})
.is_some();
fields.push((name, type_, if is_lazy { Laziness::Lazy } else { Laziness::Eager }));
} else {
panic!("Expected an attribute, got {:?}", member);
}
@@ -134,17 +166,42 @@ impl Importer {
let name = self.builder.node_name(&interface.name);
let mut node = self.builder.add_interface(&name)
.expect("Name already present");
for (field_name, field_type, field_laziness) in fields.drain(..) {
node.with_field(&field_name, field_type, field_laziness);
for (field_name, field_type, laziness) in fields.drain(..) {
node.with_field_laziness(&field_name, field_type, laziness);
}
for extended_attribute in &interface.extended_attributes {
use webidl::ast::ExtendedAttribute::NoArguments;
use webidl::ast::Other::Identifier;
if let &NoArguments(Identifier(ref id)) = extended_attribute.as_ref() {
if &*id == "Skippable" {
panic!("Encountered deprecated attribute [Skippable]");
}
if &*id == "Scope" {
node.with_scope(true);
}
}
}
self.path.pop();
}
fn convert_type(&mut self, t: &Type) -> spec::Type {
let spec = match t.kind {
TypeKind::Boolean => spec::TypeSpec::Boolean,
TypeKind::Identifier(ref id) => {
let name = self.builder.node_name(id);
spec::TypeSpec::NamedType(name.clone())
// Sadly, some identifiers are not truly `typedef`s.
match name.to_str() {
"IdentifierName" if self.is_at_interface("StaticMemberAssignmentTarget") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("StaticMemberExpression") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ImportSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ExportSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" if self.is_at_interface("ExportLocalSpecifier") => spec::TypeSpec::PropertyKey,
"IdentifierName" => spec::TypeSpec::IdentifierName,
"Identifier" => spec::TypeSpec::IdentifierName,
_ => spec::TypeSpec::NamedType(name.clone())
}
}
TypeKind::DOMString if self.is_at_interface("LiteralPropertyName") => spec::TypeSpec::PropertyKey,
TypeKind::DOMString => spec::TypeSpec::String,
TypeKind::Union(ref types) => {
let mut dest = Vec::with_capacity(types.len());
@@ -174,4 +231,11 @@ impl Importer {
spec.required()
}
}
fn is_at_interface(&self, name: &str) -> bool {
if self.path.len() == 0 {
return false;
}
self.path[0].as_str() == name
}
}

third_party/rust/binjs_meta/src/spec.rs (vendored)

@@ -2,6 +2,8 @@
pub use util::ToStr;
use itertools::Itertools;
use std;
use std::cell::*;
use std::collections::{ HashMap, HashSet };
@@ -9,6 +11,14 @@ use std::fmt::{ Debug, Display };
use std::hash::*;
use std::rc::*;
/// Whether an attribute is eager or lazy.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Laziness {
/// An eager attribute is designed to be parsed immediately.
Eager,
/// A lazy attribute is designed for deferred parsing.
Lazy
}
/// The name of an interface or enum.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
@@ -17,6 +27,12 @@ impl NodeName {
pub fn to_string(&self) -> &String {
self.0.as_ref()
}
pub fn to_str(&self) -> &str {
self.0.as_ref()
}
pub fn to_rc_string(&self) -> &Rc<String> {
&self.0
}
}
impl Debug for NodeName {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
@@ -42,6 +58,9 @@ impl FieldName {
pub fn to_string(&self) -> &String {
self.0.as_ref()
}
pub fn to_rc_string(&self) -> &Rc<String> {
&self.0
}
}
impl Debug for FieldName {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
@@ -100,20 +119,19 @@ impl TypeSum {
}
}
/// Lazy for a field with [lazy] attribute. Eager for others.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Laziness {
Eager,
Lazy,
}
/// Representation of a field in an interface.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Field {
/// The name of the field.
name: FieldName,
/// The type of the field.
type_: Type,
/// Documentation for the field. Ignored for the time being.
documentation: Option<String>,
laziness: Laziness,
laziness: Laziness
}
impl Hash for Field {
fn hash<H>(&self, state: &mut H) where H: Hasher {
@@ -141,12 +159,20 @@ impl Field {
pub fn laziness(&self) -> Laziness {
self.laziness.clone()
}
pub fn with_laziness(mut self, laziness: Laziness) -> Self {
self.laziness = laziness;
self
}
pub fn doc(&self) -> Option<&str> {
match self.documentation {
None => None,
Some(ref s) => Some(&*s)
}
}
pub fn with_doc(mut self, doc: Option<String>) -> Self {
self.documentation = doc;
self
}
}
/// The contents of a type, typically that of a field.
@@ -191,6 +217,16 @@ pub enum TypeSpec {
///
/// For the moment, this spec is used only internally.
Void,
/// A string used to represent something bound in a scope (i.e. a variable, but not a property).
/// At this level, we make no distinction between `Identifier` and `IdentifierName`.
///
/// Actually maps to a subset of `IdentifierName` in webidl.
IdentifierName,
/// A key for a property. For the time being, we make no distinction between variants such
/// as `LiteralPropertyName` and `IdentifierName`-as-property-keys.
PropertyKey,
}
#[derive(Clone, Debug)]
@@ -295,6 +331,8 @@ impl TypeSpec {
TypeSpec::UnsignedLong => Some(IsNullable::non_nullable(Primitive::UnsignedLong)),
TypeSpec::String => Some(IsNullable::non_nullable(Primitive::String)),
TypeSpec::Offset => Some(IsNullable::non_nullable(Primitive::Offset)),
TypeSpec::IdentifierName => Some(IsNullable::non_nullable(Primitive::IdentifierName)),
TypeSpec::PropertyKey => Some(IsNullable::non_nullable(Primitive::PropertyKey)),
TypeSpec::NamedType(ref name) => {
match spec.get_type_by_name(name).unwrap() {
NamedType::Interface(ref interface) =>
@@ -332,6 +370,8 @@ pub enum Primitive {
UnsignedLong,
Offset,
Interface(Rc<Interface>),
IdentifierName,
PropertyKey,
}
#[derive(Clone, Debug, PartialEq)]
@@ -393,6 +433,12 @@ impl Type {
pub fn void() -> TypeSpec {
TypeSpec::Void
}
pub fn identifier_name() -> TypeSpec {
TypeSpec::IdentifierName
}
pub fn property_key() -> TypeSpec {
TypeSpec::PropertyKey
}
/// An `offset` type, holding a number of bytes in the binary file.
pub fn offset() -> TypeSpec {
@@ -474,12 +520,9 @@ impl Obj {
return self
}
let mut fields = self.fields;
fields.push(Field {
name: name.clone(),
type_,
documentation: doc.map(str::to_string),
laziness,
});
fields.push(Field::new(name.clone(), type_)
.with_doc(doc.map(str::to_string))
.with_laziness(laziness));
Obj {
fields
}
@@ -487,12 +530,16 @@ impl Obj {
}
/// Extend a structure with a field.
pub fn with_field(self, name: &FieldName, type_: Type, laziness: Laziness) -> Self {
self.with_field_aux(name, type_, laziness, None)
pub fn with_field(self, name: &FieldName, type_: Type) -> Self {
self.with_field_aux(name, type_, Laziness::Eager, None)
}
pub fn with_field_doc(self, name: &FieldName, type_: Type, laziness: Laziness, doc: &str) -> Self {
self.with_field_aux(name, type_, laziness, Some(doc))
pub fn with_field_doc(self, name: &FieldName, type_: Type, doc: &str) -> Self {
self.with_field_aux(name, type_, Laziness::Eager, Some(doc))
}
pub fn with_field_lazy(self, name: &FieldName, type_: Type) -> Self {
self.with_field_aux(name, type_, Laziness::Lazy, None)
}
}
@@ -529,6 +576,8 @@ pub struct InterfaceDeclaration {
/// The contents of this interface, excluding the contents of parent interfaces.
contents: Obj,
is_scope: bool,
}
impl InterfaceDeclaration {
@@ -536,18 +585,28 @@ impl InterfaceDeclaration {
let _ = self.contents.with_full_field(contents);
self
}
pub fn with_field(&mut self, name: &FieldName, type_: Type, laziness: Laziness) -> &mut Self {
self.with_field_aux(name, type_, laziness, None)
pub fn with_field(&mut self, name: &FieldName, type_: Type) -> &mut Self {
self.with_field_aux(name, type_, None, Laziness::Eager)
}
pub fn with_field_doc(&mut self, name: &FieldName, type_: Type, laziness: Laziness, doc: &str) -> &mut Self {
self.with_field_aux(name, type_, laziness, Some(doc))
pub fn with_field_lazy(&mut self, name: &FieldName, type_: Type) -> &mut Self {
self.with_field_aux(name, type_, None, Laziness::Lazy)
}
fn with_field_aux(&mut self, name: &FieldName, type_: Type, laziness: Laziness, doc: Option<&str>) -> &mut Self {
pub fn with_field_laziness(&mut self, name: &FieldName, type_: Type, laziness: Laziness) -> &mut Self {
self.with_field_aux(name, type_, None, laziness)
}
pub fn with_field_doc(&mut self, name: &FieldName, type_: Type, doc: &str) -> &mut Self {
self.with_field_aux(name, type_, Some(doc), Laziness::Eager)
}
fn with_field_aux(&mut self, name: &FieldName, type_: Type, doc: Option<&str>, laziness: Laziness) -> &mut Self {
let mut contents = Obj::new();
std::mem::swap(&mut self.contents, &mut contents);
self.contents = contents.with_field_aux(name, type_, laziness, doc);
self
}
pub fn with_scope(&mut self, value: bool) -> &mut Self {
self.is_scope = value;
self
}
}
/// A data structure used to progressively construct the `Spec`.
@@ -617,6 +676,7 @@ impl SpecBuilder {
let result = RefCell::new(InterfaceDeclaration {
name: name.clone(),
contents: Obj::new(),
is_scope: false,
});
self.interfaces_by_name.insert(name.clone(), result);
self.interfaces_by_name.get(name)
@@ -672,10 +732,20 @@ impl SpecBuilder {
.map(|(k, v)| (k, Rc::new(RefCell::into_inner(v))))
.collect();
let mut node_names = HashMap::new();
for name in interfaces_by_name.keys().chain(string_enums_by_name.keys()).chain(typedefs_by_name.keys()) {
node_names.insert(name.to_string().clone(), name.clone());
}
let node_names: HashMap<_, _> = interfaces_by_name
.keys()
.chain(string_enums_by_name
.keys())
.chain(typedefs_by_name
.keys())
.map(|name| {
(name.to_string().clone(), name.clone())
})
.collect();
debug!(target: "spec", "Established list of node names: {:?} ({})",
node_names.keys()
.sorted(),
node_names.len());
// 2. Collect all field names.
let mut fields = HashMap::new();
@@ -718,6 +788,10 @@ impl SpecBuilder {
}
}
for name in &used_typenames {
// Built-in types
if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
continue;
}
if typedefs_by_name.contains_key(name) {
continue;
}
@@ -763,7 +837,14 @@ impl SpecBuilder {
debug!(target: "spec", "classify_type => don't put me in an interface");
TypeClassification::Array
},
TypeSpec::Boolean | TypeSpec::Number | TypeSpec::UnsignedLong | TypeSpec::String | TypeSpec::Void | TypeSpec::Offset => {
TypeSpec::Boolean
| TypeSpec::Number
| TypeSpec::String
| TypeSpec::Void
| TypeSpec::Offset
| TypeSpec::UnsignedLong
| TypeSpec::IdentifierName
| TypeSpec::PropertyKey => {
debug!(target: "spec", "classify_type => don't put me in an interface");
TypeClassification::Primitive
}
@@ -778,17 +859,20 @@ impl SpecBuilder {
}
// Start lookup for this name.
cache.insert(name.clone(), None);
let result = if interfaces_by_name.contains_key(name) {
let mut names = HashSet::new();
names.insert(name.clone());
TypeClassification::SumOfInterfaces(names)
} else if string_enums_by_name.contains_key(name) {
TypeClassification::StringEnum
} else {
let type_ = typedefs_by_name.get(name)
.unwrap(); // Completeness checked abover in this method.
classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_.spec(), name)
};
let result =
if name.to_str() == "IdentifierName" || name.to_str() == "Identifier" || name.to_str() == "PropertyKey" {
TypeClassification::Primitive
} else if interfaces_by_name.contains_key(name) {
let mut names = HashSet::new();
names.insert(name.clone());
TypeClassification::SumOfInterfaces(names)
} else if string_enums_by_name.contains_key(name) {
TypeClassification::StringEnum
} else {
let type_ = typedefs_by_name.get(name)
.unwrap_or_else(|| panic!("Type {} not found", name)); // Completeness checked above in this method.
classify_type(typedefs_by_name, string_enums_by_name, interfaces_by_name, cache, type_.spec(), name)
};
debug!(target: "spec", "classify_type {:?} => (inserting in cache) {:?}", name, result);
cache.insert(name.clone(), Some(result.clone()));
result
@@ -902,6 +986,10 @@ impl Interface {
}
None
}
pub fn is_scope(&self) -> bool {
self.declaration.is_scope
}
}
/// Immutable representation of the spec.

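The spec.rs changes above rework how field laziness is declared: `with_field` is now unconditionally eager, and laziness is passed explicitly where it is wanted. A minimal sketch of the new builder surface, assuming the crate's public `spec` module and using hypothetical node and field names (not taken from the BinAST grammar):

extern crate binjs_meta;
use binjs_meta::spec::{ Laziness, SpecBuilder, Type };

fn declare_example_interface() {
    let mut builder = SpecBuilder::new();
    let interface = builder.node_name("ExampleFunction"); // hypothetical node name
    let name_field = builder.field_name("name");          // hypothetical field names
    let body_field = builder.field_name("body");

    let mut decl = builder.add_interface(&interface)
        .expect("Name already present");
    // `with_field` always declares an eager field; laziness is opted into
    // per field through `with_field_laziness`.
    decl.with_field(&name_field, Type::identifier_name().required());
    decl.with_field_laziness(&body_field, Type::named(&interface).required(),
                             Laziness::Lazy);
    // Interfaces that introduce a binding scope use the new flag.
    decl.with_scope(true);
}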
@@ -1 +1 @@
{"files":{".travis.yml":"e684c9479b485343f5b932e8f9de7ac046accfb4c1e3c534e6e0fb9e0c8d919b","Cargo.toml":"a30078c3db5bccf6a567ad9ae78a6258d18b990034eda7e4ce8f4b3041ff2aa9","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d3a2993cd15ac201b30c86fe69f2bb692b386875eace571715007637d7ca7abf","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/lib.rs":"7276279f7008dd633d0bb90cc0ff73de170b89d69644fb21c35728c94e913c4d"},"package":"d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"}
{"files":{".travis.yml":"e684c9479b485343f5b932e8f9de7ac046accfb4c1e3c534e6e0fb9e0c8d919b","Cargo.toml":"3342b785a96c022128627c03d66f701ff8f5fa3b1088f1a6282bbd7fab94d99d","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"49741b792be0800387a30bf6300d5ad4d306e15b63510301e377670489620f40","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/lib.rs":"51809e3f8799d712a740f5bd37b658fbda44a5c7e62bf33a69c255866afa61b1"},"package":"6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"}

third_party/rust/bit-set/Cargo.toml (vendored)

@@ -1,20 +1,33 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "bit-set"
version = "0.4.0"
version = "0.5.0"
authors = ["Alexis Beingessner <a.beingessner@gmail.com>"]
license = "MIT/Apache-2.0"
description = "A set of bits"
repository = "https://github.com/contain-rs/bit-set"
homepage = "https://github.com/contain-rs/bit-set"
documentation = "https://contain-rs.github.io/bit-set/bit_set"
keywords = ["data-structures", "bitset"]
readme = "README.md"
[dev-dependencies]
rand = "0.3"
[dependencies]
bit-vec = "0.4"
keywords = ["data-structures", "bitset"]
license = "MIT/Apache-2.0"
repository = "https://github.com/contain-rs/bit-set"
[dependencies.bit-vec]
version = "0.5.0"
default-features = false
[dev-dependencies.rand]
version = "0.3"
[features]
nightly = []
default = ["std"]
nightly = ["bit-vec/nightly"]
std = ["bit-vec/std"]

third_party/rust/bit-set/README.md (vendored)

@@ -1,3 +1,14 @@
**WARNING: THIS PROJECT IS IN MAINTENANCE MODE, DUE TO INSUFFICIENT MAINTAINER RESOURCES**
It works fine, but will generally no longer be improved.
We are currently only accepting changes which:
* keep this compiling with the latest versions of Rust or its dependencies.
* have minimal review requirements, such as documentation changes (so not totally new APIs).
------
A Set of bits.
Documentation is available at https://contain-rs.github.io/bit-set/bit_set.

third_party/rust/bit-set/src/lib.rs (vendored)

@@ -47,17 +47,23 @@
//! assert!(bv[3]);
//! ```
#![no_std]
#![cfg_attr(all(test, feature = "nightly"), feature(test))]
#[cfg(all(test, feature = "nightly"))] extern crate test;
#[cfg(all(test, feature = "nightly"))] extern crate rand;
extern crate bit_vec;
#[cfg(test)]
#[macro_use]
extern crate std;
use bit_vec::{BitVec, Blocks, BitBlock};
use std::cmp::Ordering;
use std::cmp;
use std::fmt;
use std::hash;
use std::iter::{self, Chain, Enumerate, FromIterator, Repeat, Skip, Take};
use core::cmp::Ordering;
use core::cmp;
use core::fmt;
use core::hash;
use core::iter::{self, Chain, Enumerate, FromIterator, Repeat, Skip, Take};
type MatchWords<'a, B> = Chain<Enumerate<Blocks<'a, B>>, Skip<Take<Enumerate<Repeat<B>>>>>;
@@ -941,6 +947,7 @@ mod tests {
use std::cmp::Ordering::{Equal, Greater, Less};
use super::BitSet;
use bit_vec::BitVec;
use std::vec::Vec;
#[test]
fn test_bit_set_show() {

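The bit-set changes above are purely a no_std port (core imports plus explicit `std`/`nightly` features); the public API is untouched. A tiny smoke test of that API, assuming the default `std` feature:

extern crate bit_set;
use bit_set::BitSet;

fn bit_set_smoke() {
    let mut set: BitSet = BitSet::new();
    set.insert(3);
    set.insert(7);
    // Membership queries and ordered iteration behave as before the port.
    assert!(set.contains(3));
    assert!(!set.contains(4));
    assert_eq!(set.iter().collect::<Vec<_>>(), vec![3, 7]);
}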
@@ -1 +1 @@
{"files":{".travis.yml":"26dbdd3f33aeefa6216804c025626b8e2bef5c05103410faa5e6e93f20331cbe","Cargo.toml":"6376bd862fc4827a77190427180ccf86cda76907bf3bd935601840cd03ab48da","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"2a42423b7acd5af0ee7f47dcc430b267cfe4661ced77131af2d6e97e6a15377a","benches/extern.rs":"30152d15cc55493d06396baf9eebb90c8f32b314f0dc77398ac8a121bd5ff917","crusader.sh":"e656dcb62d5122a64d55f837992e63cfd3beee37cf74c5ab6ff178a3c7ef943e","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/bench.rs":"a24345464fdbc70b5b877d13fa1b9da809ba4917e592d5de69f01b8b1340e8bb","src/lib.rs":"b784632ce3f6a16314d1d759310f297941fb5577192ba48a10ae3c6893dd5e24"},"package":"02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f"}
{"files":{".travis.yml":"26dbdd3f33aeefa6216804c025626b8e2bef5c05103410faa5e6e93f20331cbe","Cargo.toml":"0c1d447fdcff050a2c1f9e3267bdf5b2d3373e080603a5f9127167f31a169b7d","LICENSE-APACHE":"8173d5c29b4f956d532781d2b86e4e30f83e6b7878dce18c919451d6ba707c90","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"c9d3313c3cc0d55496d8c17bf950b96accd751fc67342e3b3dd3ce7756605092","benches/extern.rs":"30152d15cc55493d06396baf9eebb90c8f32b314f0dc77398ac8a121bd5ff917","crusader.sh":"e656dcb62d5122a64d55f837992e63cfd3beee37cf74c5ab6ff178a3c7ef943e","deploy-docs.sh":"7b66111b124c1c7e59cb84cf110d98b5cb783bd35a676e970d9b3035e55f7dfd","src/bench.rs":"a24345464fdbc70b5b877d13fa1b9da809ba4917e592d5de69f01b8b1340e8bb","src/lib.rs":"5162fc2658cce4d388453e73740eb1d74fbb64b0a5d714c8e7bc9a29671bbfa5"},"package":"4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"}

third_party/rust/bit-vec/Cargo.toml (vendored)

@@ -1,17 +1,30 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "bit-vec"
version = "0.4.4"
version = "0.5.0"
authors = ["Alexis Beingessner <a.beingessner@gmail.com>"]
license = "MIT/Apache-2.0"
description = "A vector of bits"
repository = "https://github.com/contain-rs/bit-vec"
homepage = "https://github.com/contain-rs/bit-vec"
documentation = "https://contain-rs.github.io/bit-vec/bit_vec"
keywords = ["data-structures", "bitvec", "bitmask", "bitmap", "bit"]
readme = "README.md"
[dev-dependencies]
rand = "0.3.15"
keywords = ["data-structures", "bitvec", "bitmask", "bitmap", "bit"]
license = "MIT/Apache-2.0"
repository = "https://github.com/contain-rs/bit-vec"
[dev-dependencies.rand]
version = "0.3.15"
[features]
default = ["std"]
nightly = []
std = []

third_party/rust/bit-vec/README.md (vendored)

@@ -1,3 +1,15 @@
**WARNING: THIS PROJECT IS IN MAINTENANCE MODE, DUE TO INSUFFICIENT MAINTAINER RESOURCES**
It works fine, but will generally no longer be improved.
We are currently only accepting changes which:
* keep this compiling with the latest versions of Rust or its dependencies.
* have minimal review requirements, such as documentation changes (so not totally new APIs).
------
A Vec of bits.
Documentation is available at https://contain-rs.github.io/bit-vec/bit_vec.

third_party/rust/bit-vec/src/lib.rs (vendored)

@@ -83,23 +83,38 @@
//! assert_eq!(num_primes, 1_229);
//! ```
#![no_std]
#![cfg_attr(not(feature="std"), feature(alloc))]
#![cfg_attr(all(test, feature = "nightly"), feature(test))]
#[cfg(all(test, feature = "nightly"))] extern crate test;
#[cfg(all(test, feature = "nightly"))] extern crate rand;
use std::cmp::Ordering;
use std::cmp;
use std::fmt;
use std::hash;
use std::iter::{Chain, Enumerate, Repeat, Skip, Take, repeat};
use std::iter::FromIterator;
use std::slice;
use std::{u8, usize};
#[cfg(any(test, feature = "std"))]
#[macro_use]
extern crate std;
#[cfg(feature="std")]
use std::vec::Vec;
#[cfg(not(feature="std"))]
#[macro_use]
extern crate alloc;
#[cfg(not(feature="std"))]
use alloc::Vec;
use core::cmp::Ordering;
use core::cmp;
use core::fmt;
use core::hash;
use core::iter::{Chain, Enumerate, Repeat, Skip, Take, repeat};
use core::iter::FromIterator;
use core::slice;
use core::{u8, usize};
type MutBlocks<'a, B> = slice::IterMut<'a, B>;
type MatchWords<'a, B> = Chain<Enumerate<Blocks<'a, B>>, Skip<Take<Enumerate<Repeat<B>>>>>;
use std::ops::*;
use core::ops::*;
/// Abstracts over a pile of bits (basically unsigned primitives)
pub trait BitBlock:
@@ -154,7 +169,7 @@ bit_block_impl!{
(u16, 16),
(u32, 32),
(u64, 64),
(usize, std::mem::size_of::<usize>() * 8)
(usize, core::mem::size_of::<usize>() * 8)
}
@@ -1091,6 +1106,16 @@ impl<B: BitBlock> BitVec<B> {
pub fn clear(&mut self) {
for w in &mut self.storage { *w = B::zero(); }
}
/// Shrinks the capacity of the underlying storage as much as
/// possible.
///
/// It will drop down as close as possible to the length but the
/// allocator may still inform the underlying storage that there
/// is space for a few more elements/bits.
pub fn shrink_to_fit(&mut self) {
self.storage.shrink_to_fit();
}
}
impl<B: BitBlock> Default for BitVec<B> {
@@ -1308,6 +1333,7 @@ impl<'a, B: BitBlock> ExactSizeIterator for Blocks<'a, B> {}
#[cfg(test)]
mod tests {
use super::{BitVec, Iter};
use std::vec::Vec;
// This is stupid, but I want to differentiate from a "random" 32
const U32_BITS: usize = 32;

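Besides the no_std/`core` plumbing, the bit-vec diff above adds a `shrink_to_fit` method. A short sketch of it in use, assuming the default `std` feature:

extern crate bit_vec;
use bit_vec::BitVec;

fn shrink_example() {
    // Reserve far more capacity than we end up using.
    let mut bv: BitVec = BitVec::with_capacity(1024);
    bv.push(true);
    bv.push(false);
    // The new method releases the unused backing storage.
    bv.shrink_to_fit();
    assert_eq!(bv.len(), 2);
    assert!(bv[0]);
}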
third_party/rust/block-buffer/.cargo-checksum.json (vendored, new file)

@@ -0,0 +1 @@
{"files":{"Cargo.toml":"373908618d7bdf561f84ddc5add92f69dab295c97ab0908d3a4ec428fad23bad","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/lib.rs":"bdf23c8a00fb4d51beabeb6600fe45ebf1be618632db885013b6f60a5666c124","src/paddings.rs":"7a18850dab9dca0a3e6cc49d6a94a9566ea2473628f42f726a69f8e07f95872a"},"package":"a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"}

third_party/rust/block-buffer/Cargo.toml (vendored, new file)

@@ -0,0 +1,27 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "block-buffer"
version = "0.3.3"
authors = ["RustCrypto Developers"]
description = "Fixed size buffer for block processing of data"
documentation = "https://docs.rs/block-buffer"
keywords = ["block", "padding", "pkcs7", "ansix923", "iso7816"]
categories = ["cryptography", "no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/utils"
[dependencies.arrayref]
version = "0.3"
[dependencies.byte-tools]
version = "0.2"

third_party/rust/block-buffer/LICENSE-APACHE (vendored, new file)

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

third_party/rust/block-buffer/LICENSE-MIT (vendored, new file)

@@ -0,0 +1,25 @@
Copyright (c) 2017 Artyom Pavlov
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

third_party/rust/block-buffer/src/lib.rs (vendored, new file)

@@ -0,0 +1,144 @@
#![no_std]
#[macro_use]
extern crate arrayref;
extern crate byte_tools;
use byte_tools::{zero, write_u64_le};
mod paddings;
pub use paddings::*;
macro_rules! impl_buffer {
($name:ident, $len:expr) => {
pub struct $name {
buffer: [u8; $len],
pos: usize,
}
impl Copy for $name {}
impl Clone for $name {
fn clone(&self) -> Self {
*self
}
}
impl Default for $name {
fn default() -> Self {
$name {buffer: [0; $len], pos: 0}
}
}
impl $name {
#[inline]
pub fn input<F: FnMut(&[u8; $len])>(&mut self, mut input: &[u8], mut func: F) {
// If there is already data in the buffer, copy as much as we can
// into it and process the data if the buffer becomes full.
if self.pos != 0 {
let rem = self.remaining();
if input.len() >= rem {
let (l, r) = input.split_at(rem);
input = r;
self.buffer[self.pos..].copy_from_slice(l);
self.pos = 0;
func(&self.buffer);
} else {
let end = self.pos + input.len();
self.buffer[self.pos..end].copy_from_slice(input);
self.pos = end;
return;
}
}
// While we have at least a full buffer size chunks's worth of data,
// process that data without copying it into the buffer
while input.len() >= self.size() {
let (l, r) = input.split_at(self.size());
input = r;
func(array_ref!(l, 0, $len));
}
// Copy any input data into the buffer. At this point in the method,
// the amount of data left in the input vector will be less than
// the buffer size and the buffer will be empty.
self.buffer[..input.len()].copy_from_slice(input);
self.pos = input.len();
}
#[inline]
fn digest_pad<F>(&mut self, up_to: usize, func: &mut F)
where F: FnMut(&[u8; $len])
{
self.buffer[self.pos] = 0x80;
self.pos += 1;
zero(&mut self.buffer[self.pos..]);
if self.remaining() < up_to {
func(&self.buffer);
zero(&mut self.buffer[..self.pos]);
}
}
#[inline]
/// Will pad message with message length in big-endian format
pub fn len_padding<F>(&mut self, data_len: u64, mut func: F)
where F: FnMut(&[u8; $len])
{
self.digest_pad(8, &mut func);
let s = self.size();
write_u64_le(&mut self.buffer[s-8..], data_len);
func(&self.buffer);
self.pos = 0;
}
#[inline]
pub fn len_padding_u128<F>(&mut self, hi: u64, lo: u64, mut func: F)
where F: FnMut(&[u8; $len])
{
self.digest_pad(16, &mut func);
let s = self.size();
write_u64_le(&mut self.buffer[s-16..s-8], hi);
write_u64_le(&mut self.buffer[s-8..], lo);
func(&self.buffer);
self.pos = 0;
}
#[inline]
pub fn pad_with<P: Padding>(&mut self) -> &mut [u8; $len] {
P::pad(&mut self.buffer[..], self.pos);
self.pos = 0;
&mut self.buffer
}
#[inline]
pub fn size(&self) -> usize {
$len
}
#[inline]
pub fn position(&self) -> usize {
self.pos
}
#[inline]
pub fn remaining(&self) -> usize {
self.size() - self.pos
}
}
}
}
impl_buffer!(BlockBuffer128, 16);
impl_buffer!(BlockBuffer256, 32);
impl_buffer!(BlockBuffer512, 64);
impl_buffer!(BlockBuffer1024, 128);
impl_buffer!(BlockBuffer576, 72);
impl_buffer!(BlockBuffer832, 104);
impl_buffer!(BlockBuffer1088, 136);
impl_buffer!(BlockBuffer1152, 144);
impl_buffer!(BlockBuffer1344, 168);

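The new block-buffer crate generates fixed-size buffers that hand complete blocks to a callback, the way a block-based hash implementation consumes data. A rough usage sketch (not part of the patch) against the API shown above:

extern crate block_buffer;
use block_buffer::{BlockBuffer512, ZeroPadding};

fn count_full_blocks(data: &[u8]) -> usize {
    let mut buffer = BlockBuffer512::default();
    let mut blocks = 0;
    // `input` invokes the closure once per completed 64-byte block.
    buffer.input(data, |_block: &[u8; 64]| blocks += 1);
    // Any trailing bytes stay in the buffer until it is padded or refilled.
    assert_eq!(buffer.position(), data.len() % 64);
    // `pad_with` fills the remainder (here with zeros) and resets the position.
    let _last: &mut [u8; 64] = buffer.pad_with::<ZeroPadding>();
    blocks
}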
third_party/rust/block-buffer/src/paddings.rs (vendored, new file)

@@ -0,0 +1,129 @@
use byte_tools::{zero, set};
/// Trait for padding messages divided into blocks
pub trait Padding {
/// Pads `block` filled with data up to `pos`
fn pad(block: &mut [u8], pos: usize);
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
/// Error for indicating failed unpadding process
pub struct UnpadError;
/// Trait for extracting original message from padded medium
pub trait Unpadding {
/// Unpad given `data` by truncating it according to the used padding.
/// In case of malformed padding, returns `UnpadError`
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError>;
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum ZeroPadding{}
impl Padding for ZeroPadding {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
zero(&mut block[pos..])
}
}
impl Unpadding for ZeroPadding {
#[inline]
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
let mut n = data.len() - 1;
while n != 0 {
if data[n] != 0 {
break;
}
n -= 1;
}
Ok(&data[..n+1])
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Pkcs7{}
impl Padding for Pkcs7 {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
let n = block.len() - pos;
set(&mut block[pos..], n as u8);
}
}
impl Unpadding for Pkcs7 {
#[inline]
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
if data.is_empty() { return Err(UnpadError); }
let l = data.len();
let n = data[l-1];
if n == 0 {
return Err(UnpadError)
}
for v in &data[l-n as usize..l-1] {
if *v != n { return Err(UnpadError); }
}
Ok(&data[..l-n as usize])
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum AnsiX923{}
impl Padding for AnsiX923 {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
let n = block.len() - 1;
zero(&mut block[pos..n]);
block[n] = (n - pos) as u8;
}
}
impl Unpadding for AnsiX923 {
#[inline]
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
if data.is_empty() { return Err(UnpadError); }
let l = data.len();
let n = data[l-1] as usize;
if n == 0 {
return Err(UnpadError)
}
for v in &data[l-n..l-1] {
if *v != 0 { return Err(UnpadError); }
}
Ok(&data[..l-n])
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Iso7816{}
impl Padding for Iso7816 {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
let n = block.len() - pos;
block[pos] = 0x80;
for b in block[pos+1..].iter_mut() {
*b = n as u8;
}
}
}
impl Unpadding for Iso7816 {
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
if data.is_empty() { return Err(UnpadError); }
let mut n = data.len() - 1;
while n != 0 {
if data[n] != 0 {
break;
}
n -= 1;
}
if data[n] != 0x80 { return Err(UnpadError); }
Ok(&data[..n])
}
}

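paddings.rs pairs each `Padding::pad` with an `Unpadding::unpad` that recovers the original slice. A small PKCS#7 round trip, a sketch rather than something taken from the crate's own tests:

extern crate block_buffer;
use block_buffer::{Padding, Pkcs7, Unpadding};

fn pkcs7_roundtrip() {
    let mut block = [0u8; 16];
    let msg = b"hello";
    block[..msg.len()].copy_from_slice(msg);
    // PKCS#7 fills the unused tail (11 bytes here) with the value 11.
    Pkcs7::pad(&mut block, msg.len());
    assert_eq!(&block[msg.len()..], &[11u8; 11][..]);
    // Unpadding recovers the original message slice.
    assert_eq!(Pkcs7::unpad(&block).unwrap(), &msg[..]);
}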
third_party/rust/byte-tools/.cargo-checksum.json (vendored, new file)

@@ -0,0 +1 @@
{"files":{"Cargo.toml":"af6af6ea1dfa296af5dc58986d1afb46952328588069ec0b08723db439e9972d","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"52232c2cee3bb7d8cabe47ef367f1bf8bb607c22bdfca0219d6156cb7f446e9d","src/lib.rs":"9c96cffef7458fc7bd9e4e61270b69d539ff3a9225a0319b7996155c25ff96ab","src/read_single.rs":"3ab78b15754c2a7848a1be871ff6ee2a31a099f8f4f89be44ad210cda0dbcc9a","src/read_slice.rs":"b3790f2fd080db97e239c05c63da123ea375fb9b354dc9cacb859ed9c44f552e","src/write_single.rs":"1cee4f2f5d8690e47840ea7017539ead417a26abc0717137442a6d9d2875afe4","src/write_slice.rs":"de90e6b9cfca67125871bee7cef55c63574b1871a6584e51fc00a97e5877fe69"},"package":"560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"}

@@ -11,9 +11,11 @@
# will likely look very different (and much more reasonable)
[package]
name = "lalrpop-intern"
version = "0.15.1"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "Simple string interner used by LALRPOP"
license = "Apache-2.0/MIT"
repository = "https://github.com/lalrpop/lalrpop"
name = "byte-tools"
version = "0.2.0"
authors = ["The Rust-Crypto Project Developers"]
description = "Utility functions for working with bytes"
documentation = "https://docs.rs/byte-tools"
keywords = ["bytes"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/utils"

third_party/rust/byte-tools/LICENSE-APACHE (vendored, new file)

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

26
third_party/rust/byte-tools/LICENSE-MIT vendored Normal file
View file

@@ -0,0 +1,26 @@
Copyright (c) 2006-2009 Graydon Hoare
Copyright (c) 2009-2013 Mozilla Foundation
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

37
third_party/rust/byte-tools/src/lib.rs vendored Normal file
View file

@@ -0,0 +1,37 @@
#![no_std]
use core::ptr;
mod read_single;
mod write_single;
mod read_slice;
mod write_slice;
pub use read_single::*;
pub use write_single::*;
pub use read_slice::*;
pub use write_slice::*;
/// Copy bytes from `src` to `dst`
#[inline]
pub fn copy_memory(src: &[u8], dst: &mut [u8]) {
assert!(dst.len() >= src.len());
unsafe {
let srcp = src.as_ptr();
let dstp = dst.as_mut_ptr();
ptr::copy_nonoverlapping(srcp, dstp, src.len());
}
}
/// Zero all bytes in dst
#[inline]
pub fn zero(dst: &mut [u8]) {
set(dst, 0);
}
/// Sets all bytes in `dst` equal to `value`
#[inline]
pub fn set(dst: &mut [u8], value: u8) {
unsafe {
ptr::write_bytes(dst.as_mut_ptr(), value, dst.len());
}
}
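A minimal usage sketch of the three helpers above (illustrative values, assuming the crate is linked as `byte_tools`; not part of the vendored source):

extern crate byte_tools;

#[test]
fn copy_zero_set_demo() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 8];

    // `copy_memory` asserts dst.len() >= src.len(); the four source
    // bytes land at the front of `dst`.
    byte_tools::copy_memory(&src, &mut dst);
    assert_eq!(&dst[..4], &src[..]);

    // Fill the whole buffer with 0xff, then clear it again.
    byte_tools::set(&mut dst, 0xff);
    assert_eq!(dst, [0xffu8; 8]);
    byte_tools::zero(&mut dst);
    assert_eq!(dst, [0u8; 8]);
}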

38
third_party/rust/byte-tools/src/read_single.rs vendored Normal file
View file

@@ -0,0 +1,38 @@
use core::{mem, ptr};
macro_rules! read_single {
($src:expr, $size:expr, $ty:ty, $which:ident) => ({
assert!($size == mem::size_of::<$ty>());
assert!($size == $src.len());
unsafe {
let mut tmp: $ty = mem::uninitialized();
let p = &mut tmp as *mut _ as *mut u8;
ptr::copy_nonoverlapping($src.as_ptr(), p, $size);
tmp.$which()
}
});
}
/// Read the value of a vector of bytes as a u32 value in little-endian format.
#[inline]
pub fn read_u32_le(src: &[u8]) -> u32 {
read_single!(src, 4, u32, to_le)
}
/// Read the value of a vector of bytes as a u32 value in big-endian format.
#[inline]
pub fn read_u32_be(src: &[u8]) -> u32 {
read_single!(src, 4, u32, to_be)
}
/// Read the value of a vector of bytes as a u64 value in little-endian format.
#[inline]
pub fn read_u64_le(src: &[u8]) -> u64 {
read_single!(src, 8, u64, to_le)
}
/// Read the value of a vector of bytes as a u64 value in big-endian format.
#[inline]
pub fn read_u64_be(src: &[u8]) -> u64 {
read_single!(src, 8, u64, to_be)
}
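A small sketch of the fixed-width readers above (illustrative bytes, assuming the crate is linked as `byte_tools`):

extern crate byte_tools;

#[test]
fn read_single_demo() {
    let bytes = [0x01u8, 0x02, 0x03, 0x04];
    // Big-endian: the first byte is the most significant.
    assert_eq!(byte_tools::read_u32_be(&bytes), 0x0102_0304);
    // Little-endian: the first byte is the least significant.
    assert_eq!(byte_tools::read_u32_le(&bytes), 0x0403_0201);
    // Both functions assert that the slice length matches the integer
    // width (4 bytes here, 8 for the u64 variants).
}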

44
third_party/rust/byte-tools/src/read_slice.rs vendored Normal file
View file

@@ -0,0 +1,44 @@
use core::ptr;
macro_rules! read_slice {
($src:expr, $dst:expr, $size:expr, $which:ident) => ({
assert_eq!($size*$dst.len(), $src.len());
unsafe {
ptr::copy_nonoverlapping(
$src.as_ptr(),
$dst.as_mut_ptr() as *mut u8,
$src.len());
}
for v in $dst.iter_mut() {
*v = v.$which();
}
});
}
/// Read a vector of bytes into a vector of u32s. The values are read in
/// little-endian format.
#[inline]
pub fn read_u32v_le(dst: &mut [u32], src: &[u8]) {
read_slice!(src, dst, 4, to_le);
}
/// Read a vector of bytes into a vector of u32s. The values are read in
/// big-endian format.
#[inline]
pub fn read_u32v_be(dst: &mut [u32], src: &[u8]) {
read_slice!(src, dst, 4, to_be);
}
/// Read a vector of bytes into a vector of u64s. The values are read in
/// little-endian format.
#[inline]
pub fn read_u64v_le(dst: &mut [u64], src: &[u8]) {
read_slice!(src, dst, 8, to_le);
}
/// Read a vector of bytes into a vector of u64s. The values are read in
/// big-endian format.
#[inline]
pub fn read_u64v_be(dst: &mut [u64], src: &[u8]) {
read_slice!(src, dst, 8, to_be);
}
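A sketch of the slice readers above, decoding one byte buffer into two little-endian u32 words (illustrative values, assuming the crate is linked as `byte_tools`):

extern crate byte_tools;

#[test]
fn read_slice_demo() {
    // The macro asserts 4 * dst.len() == src.len().
    let src = [0xddu8, 0xcc, 0xbb, 0xaa, 0x44, 0x33, 0x22, 0x11];
    let mut dst = [0u32; 2];
    byte_tools::read_u32v_le(&mut dst, &src);
    assert_eq!(dst, [0xaabb_ccddu32, 0x1122_3344]);
}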

39
third_party/rust/byte-tools/src/write_single.rs vendored Normal file
View file

@@ -0,0 +1,39 @@
use core::{mem, ptr};
macro_rules! write_single {
($dst:expr, $n:expr, $size:expr, $which:ident) => ({
assert!($size == $dst.len());
unsafe {
let bytes = mem::transmute::<_, [u8; $size]>($n.$which());
ptr::copy_nonoverlapping((&bytes).as_ptr(), $dst.as_mut_ptr(), $size);
}
});
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in little-endian format.
#[inline]
pub fn write_u32_le(dst: &mut [u8], n: u32) {
write_single!(dst, n, 4, to_le);
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in big-endian format.
#[inline]
pub fn write_u32_be(dst: &mut [u8], n: u32) {
write_single!(dst, n, 4, to_be);
}
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in little-endian format.
#[inline]
pub fn write_u64_le(dst: &mut [u8], n: u64) {
write_single!(dst, n, 8, to_le);
}
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in big-endian format.
#[inline]
pub fn write_u64_be(dst: &mut [u8], n: u64) {
write_single!(dst, n, 8, to_be);
}
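A sketch of the fixed-width writers above (illustrative values, assuming the crate is linked as `byte_tools`):

extern crate byte_tools;

#[test]
fn write_single_demo() {
    let mut buf = [0u8; 4];
    byte_tools::write_u32_be(&mut buf, 0xdead_beef);
    assert_eq!(buf, [0xdeu8, 0xad, 0xbe, 0xef]);
    byte_tools::write_u32_le(&mut buf, 0xdead_beef);
    assert_eq!(buf, [0xefu8, 0xbe, 0xad, 0xde]);
}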

46
third_party/rust/byte-tools/src/write_slice.rs vendored Normal file
View file

@@ -0,0 +1,46 @@
use core::{ptr, mem};
macro_rules! write_slice {
($src:expr, $dst:expr, $ty:ty, $size:expr, $which:ident) => ({
assert!($size == mem::size_of::<$ty>());
assert_eq!($dst.len(), $size*$src.len());
unsafe {
ptr::copy_nonoverlapping(
$src.as_ptr() as *const u8,
$dst.as_mut_ptr(),
$dst.len());
let tmp: &mut [$ty] = mem::transmute($dst);
for v in tmp[..$src.len()].iter_mut() {
*v = v.$which();
}
}
});
}
/// Write a vector of u32s into a vector of bytes. The values are written in
/// little-endian format.
#[inline]
pub fn write_u32v_le(dst: &mut [u8], src: &[u32]) {
write_slice!(src, dst, u32, 4, to_le);
}
/// Write a vector of u32s into a vector of bytes. The values are written in
/// big-endian format.
#[inline]
pub fn write_u32v_be(dst: &mut [u8], src: &[u32]) {
write_slice!(src, dst, u32, 4, to_be);
}
/// Write a vector of u64s into a vector of bytes. The values are written in
/// little-endian format.
#[inline]
pub fn write_u64v_le(dst: &mut [u8], src: &[u64]) {
write_slice!(src, dst, u64, 8, to_le);
}
/// Write a vector of u64s into a vector of bytes. The values are written in
/// big-endian format.
#[inline]
pub fn write_u64v_be(dst: &mut [u8], src: &[u64]) {
write_slice!(src, dst, u64, 8, to_be);
}
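A sketch that round-trips two words through the slice writers and readers above (illustrative values, assuming the crate is linked as `byte_tools`):

extern crate byte_tools;

#[test]
fn write_slice_roundtrip_demo() {
    let words = [0x0102_0304u32, 0x0506_0708];
    let mut bytes = [0u8; 8];
    byte_tools::write_u32v_be(&mut bytes, &words);
    assert_eq!(bytes, [1u8, 2, 3, 4, 5, 6, 7, 8]);

    let mut back = [0u32; 2];
    byte_tools::read_u32v_be(&mut back, &bytes);
    assert_eq!(back, words);
}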

1
third_party/rust/digest/.cargo-checksum.json vendored Normal file
View file

@@ -0,0 +1 @@
{"files":{".cargo_vcs_info.json":"5c4d89b9b833bb5681c04817ef4e799012a6252ba90021c6482010c8871b87a6","Cargo.toml":"b3667b1e1a3985dd2c9e7873f6945c2d7163ed7da95569f40c2097285a325ec4","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/dev.rs":"c824f834fa8b8c729024e4ec61138e89c26a56bfb6b50295600dddb5ff8fff62","src/digest.rs":"6710ac33c80e6159a2396839794fc76a61b94ab573516a69486457b3e291c793","src/errors.rs":"cff5bf2350bc109ad4f08caacf6780ff1e7016d9995f0847e84e96a8e31ab9d5","src/lib.rs":"bf4e93ebd066513001f3d6d77024ae8addf4df4fd89f76549fd1b73df386f3e4"},"package":"03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"}

5
third_party/rust/digest/.cargo_vcs_info.json vendored Normal file
View file

@@ -0,0 +1,5 @@
{
"git": {
"sha1": "c02ab3d77605b540fd5dc2ea1a45c184f7d9e7d8"
}
}

32
third_party/rust/digest/Cargo.toml vendored Normal file
View file

@@ -0,0 +1,32 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "digest"
version = "0.7.6"
authors = ["RustCrypto Developers"]
description = "Traits for cryptographic hash functions"
documentation = "https://docs.rs/digest"
keywords = ["digest", "crypto", "hash"]
categories = ["cryptography", "no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/traits"
[package.metadata.docs.rs]
features = ["std"]
[dependencies.generic-array]
version = "0.9"
[features]
dev = []
std = []
[badges.travis-ci]
repository = "RustCrypto/traits"

201
third_party/rust/digest/LICENSE-APACHE vendored Normal file
View file

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/digest/LICENSE-MIT vendored Normal file
View file

@@ -0,0 +1,25 @@
Copyright (c) 2017 Artyom Pavlov
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

171
third_party/rust/digest/src/dev.rs vendored Normal file
View file

@@ -0,0 +1,171 @@
use super::{Digest, Input, VariableOutput, ExtendableOutput, XofReader};
use core::fmt::Debug;
pub struct Test {
pub name: &'static str,
pub input: &'static [u8],
pub output: &'static [u8],
}
#[macro_export]
macro_rules! new_tests {
( $( $name:expr ),* ) => {
[$(
Test {
name: $name,
input: include_bytes!(concat!("data/", $name, ".input.bin")),
output: include_bytes!(concat!("data/", $name, ".output.bin")),
},
)*]
};
( $( $name:expr ),+, ) => (new_tests!($($name),+))
}
pub fn main_test<D: Digest + Debug + Clone>(tests: &[Test]) {
// Test that it works when accepting the message all at once
for t in tests.iter() {
let mut sh = D::default();
sh.input(t.input);
let out = sh.result();
assert_eq!(out[..], t.output[..]);
}
// Test that it works when accepting the message in pieces
for t in tests.iter() {
let mut sh = D::default();
let len = t.input.len();
let mut left = len;
while left > 0 {
let take = (left + 1) / 2;
sh.input(&t.input[len - left..take + len - left]);
left = left - take;
}
let out = sh.result();
assert_eq!(out[..], t.output[..]);
}
}
pub fn variable_test<D>(tests: &[Test])
where D: Input + VariableOutput + Clone + Debug
{
let mut buf = [0u8; 1024];
// Test that it works when accepting the message all at once
for t in tests.iter() {
let mut sh = D::new(t.output.len()).unwrap();
sh.process(t.input);
let out = sh.variable_result(&mut buf[..t.output.len()]).unwrap();
assert_eq!(out[..], t.output[..]);
}
// Test that it works when accepting the message in pieces
for t in tests.iter() {
let mut sh = D::new(t.output.len()).unwrap();
let len = t.input.len();
let mut left = len;
while left > 0 {
let take = (left + 1) / 2;
sh.process(&t.input[len - left..take + len - left]);
left = left - take;
}
let out = sh.variable_result(&mut buf[..t.output.len()]).unwrap();
assert_eq!(out[..], t.output[..]);
}
}
pub fn xof_test<D>(tests: &[Test])
where D: Input + ExtendableOutput + Default + Debug + Clone
{
let mut buf = [0u8; 1024];
// Test that it works when accepting the message all at once
for t in tests.iter() {
let mut sh = D::default();
sh.process(t.input);
let out = &mut buf[..t.output.len()];
sh.xof_result().read(out);
assert_eq!(out[..], t.output[..]);
}
// Test that it works when accepting the message in pieces
for t in tests.iter() {
let mut sh = D::default();
let len = t.input.len();
let mut left = len;
while left > 0 {
let take = (left + 1) / 2;
sh.process(&t.input[len - left..take + len - left]);
left = left - take;
}
let out = &mut buf[..t.output.len()];
sh.xof_result().read(out);
assert_eq!(out[..], t.output[..]);
}
// Test reading from the reader byte by byte
for t in tests.iter() {
let mut sh = D::default();
sh.process(t.input);
let mut reader = sh.xof_result();
let out = &mut buf[..t.output.len()];
for chunk in out.chunks_mut(1) {
reader.read(chunk);
}
assert_eq!(out[..], t.output[..]);
}
}
pub fn one_million_a<D: Digest + Default + Debug + Clone>(expected: &[u8]) {
let mut sh = D::default();
for _ in 0..50000 {
sh.input(&[b'a'; 10]);
}
sh.input(&[b'a'; 500000]);
let out = sh.result();
assert_eq!(out[..], expected[..]);
}
#[macro_export]
macro_rules! bench_digest {
($name:ident, $engine:path, $bs:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
let mut d = <$engine>::default();
let data = [0; $bs];
b.iter(|| {
d.input(&data);
});
b.bytes = $bs;
}
};
($engine:path) => {
extern crate test;
use test::Bencher;
use digest::Digest;
bench_digest!(bench1_16, $engine, 1<<4);
bench_digest!(bench2_64, $engine, 1<<6);
bench_digest!(bench3_256, $engine, 1<<8);
bench_digest!(bench4_1k, $engine, 1<<10);
bench_digest!(bench5_4k, $engine, 1<<12);
bench_digest!(bench6_16k, $engine, 1<<14);
}
}

86
third_party/rust/digest/src/digest.rs vendored Normal file
View file

@@ -0,0 +1,86 @@
use super::{Input, BlockInput, FixedOutput};
use generic_array::GenericArray;
#[cfg(feature = "std")]
use std::io;
type Output<N> = GenericArray<u8, N>;
/// The `Digest` trait specifies an interface common for digest functions.
///
/// It's a convenience wrapper around the `Input`, `FixedOutput`, `BlockInput` and
/// `Default` traits. It also provides additional convenience methods.
pub trait Digest: Input + BlockInput + FixedOutput + Default {
/// Create new hasher instance
fn new() -> Self {
Self::default()
}
/// Digest input data. This method can be called repeatedly
/// for use with streaming messages.
fn input(&mut self, input: &[u8]) {
self.process(input);
}
/// Retrieve the digest result. This method consumes the digest instance.
fn result(self) -> Output<Self::OutputSize> {
self.fixed_result()
}
/// Convenience function to compute the hash of `data`. It handles
/// hasher creation, data feeding and finalization.
///
/// Example:
///
/// ```rust,ignore
/// println!("{:x}", sha2::Sha256::digest(b"Hello world"));
/// ```
#[inline]
fn digest(data: &[u8]) -> Output<Self::OutputSize> {
let mut hasher = Self::default();
hasher.process(data);
hasher.fixed_result()
}
/// Convenience function to compute the hash of a string. It's equivalent to
/// `digest(input_string.as_bytes())`.
#[inline]
fn digest_str(str: &str) -> Output<Self::OutputSize> {
Self::digest(str.as_bytes())
}
/// Convenience function which takes a `std::io::Read` source and computes the
/// value of the digest function `D`, e.g. SHA-2, SHA-3, BLAKE2, etc., reading
/// in 8 KB blocks (the size of the internal buffer below).
///
/// Usage example:
///
/// ```rust,ignore
/// use std::fs;
/// use sha2::{Sha256, Digest};
///
/// let mut file = fs::File::open("Cargo.toml")?;
/// let result = Sha256::digest_reader(&mut file)?;
/// println!("{:x}", result);
/// ```
#[cfg(feature = "std")]
#[inline]
fn digest_reader(source: &mut io::Read)
-> io::Result<Output<Self::OutputSize>>
{
let mut hasher = Self::default();
let mut buf = [0u8; 8 * 1024];
loop {
let len = match source.read(&mut buf) {
Ok(0) => return Ok(hasher.result()),
Ok(len) => len,
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
Err(e) => Err(e)?,
};
hasher.process(&buf[..len]);
}
}
}
impl<D: Input + FixedOutput + BlockInput + Default> Digest for D {}
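The blanket impl above means any hasher providing `Input`, `BlockInput`, `FixedOutput` and `Default` picks up these convenience methods automatically. A short usage sketch, assuming a concrete implementor such as `Sha256` from a `sha2` release built against this digest 0.7 API (not part of this crate):

extern crate digest;
extern crate sha2;

use digest::Digest;
use sha2::Sha256;

#[test]
fn streaming_vs_oneshot_demo() {
    // Streaming: feed the message in pieces, then finalize.
    let mut hasher = Sha256::new();
    hasher.input(b"hello ");
    hasher.input(b"world");
    let streamed = hasher.result();

    // One-shot convenience method from the trait; both paths agree.
    let oneshot = Sha256::digest(b"hello world");
    assert_eq!(streamed, oneshot);
}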

37
third_party/rust/digest/src/errors.rs vendored Normal file
View file

@@ -0,0 +1,37 @@
use core::fmt;
#[cfg(feature = "std")]
use std::error;
/// The error type for variable hasher initialization
#[derive(Clone, Copy, Debug, Default)]
pub struct InvalidOutputSize;
/// The error type for variable hasher result
#[derive(Clone, Copy, Debug, Default)]
pub struct InvalidBufferLength;
impl fmt::Display for InvalidOutputSize {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("invalid output size")
}
}
impl fmt::Display for InvalidBufferLength {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("invalid buffer length")
}
}
#[cfg(feature = "std")]
impl error::Error for InvalidOutputSize {
fn description(&self) -> &str {
"invalid output size"
}
}
#[cfg(feature = "std")]
impl error::Error for InvalidBufferLength {
fn description(&self) -> &str {
"invalid buffer size"
}
}

98
third_party/rust/digest/src/lib.rs vendored Normal file
View file

@@ -0,0 +1,98 @@
//! This crate provides traits for describing the functionality of cryptographic hash
//! functions.
//!
//! By default, std functionality in this crate is disabled (e.g. the method for
//! hashing `Read`ers). To enable it, turn on the `std` feature of this crate in
//! your `Cargo.toml`.
#![cfg_attr(not(feature = "std"), no_std)]
pub extern crate generic_array;
#[cfg(feature = "std")]
use std as core;
use generic_array::{GenericArray, ArrayLength};
mod digest;
mod errors;
#[cfg(feature = "dev")]
pub mod dev;
pub use errors::{InvalidOutputSize, InvalidBufferLength};
pub use digest::Digest;
// `process` is chosen so as not to overlap with the `input` method in the digest trait;
// change it once trait aliases are stabilized
/// Trait for processing input data
pub trait Input {
/// Digest input data. This method can be called repeatedly
/// for use with streaming messages.
fn process(&mut self, input: &[u8]);
}
/// Trait to indicate that a digest function processes data in blocks of size
/// `BlockSize`. The main usage of this trait is for implementing HMAC generically.
pub trait BlockInput {
type BlockSize: ArrayLength<u8>;
}
/// Trait for returning a digest result of fixed size
pub trait FixedOutput {
type OutputSize: ArrayLength<u8>;
/// Retrieve the digest result. This method consumes the digest instance.
fn fixed_result(self) -> GenericArray<u8, Self::OutputSize>;
}
/// The error type for variable digest output
#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct InvalidLength;
/// Trait for returning a digest result of variable size
pub trait VariableOutput: core::marker::Sized {
/// Create a new hasher instance with the given output size. Returns
/// `Err(InvalidLength)` if the hasher cannot work with the given
/// output size. Always returns an error if the output size is zero.
fn new(output_size: usize) -> Result<Self, InvalidLength>;
/// Get output size of the hasher instance provided to the `new` method
fn output_size(&self) -> usize;
/// Retrieve the digest result into the provided buffer. The length of the buffer
/// must equal the output size provided to the `new` method, otherwise
/// `Err(InvalidLength)` will be returned.
fn variable_result(self, buffer: &mut [u8]) -> Result<&[u8], InvalidLength>;
}
/// Trait for describing readers which are used to extract extendable output
/// from the resulting state of a hash function.
pub trait XofReader {
/// Read output into `buffer`. Can be called an unlimited number of times.
fn read(&mut self, buffer: &mut [u8]);
}
/// Trait which describes extendable output (XOF) of hash functions. Using this
/// trait you first obtain a structure which implements `XofReader`, from
/// which you can then read the extendable output.
pub trait ExtendableOutput {
type Reader: XofReader;
/// Finalize hash function and return XOF reader
fn xof_result(self) -> Self::Reader;
}
/// Macro for defining an opaque `Debug` implementation. It will use the following
/// format: "HasherName { ... }". While it's convenient to have it
/// (e.g. for including in other structs), it could be undesirable to leak
/// internal state, which can happen for example through careless logging.
#[macro_export]
macro_rules! impl_opaque_debug {
($state:ty) => {
impl ::core::fmt::Debug for $state {
fn fmt(&self, f: &mut ::core::fmt::Formatter)
-> Result<(), ::core::fmt::Error>
{
write!(f, concat!(stringify!($state), " {{ ... }}"))
}
}
}
}
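To show how the traits above fit together, here is a hedged sketch (not part of the crate) of a toy 4-byte XOR "hash" implementing `Input`, `BlockInput` and `FixedOutput`; together with `Default` it picks up the blanket `Digest` impl from `digest.rs`. It assumes `generic_array` 0.9 and its re-exported `typenum` types:

extern crate digest;
extern crate generic_array;

use digest::{Input, BlockInput, FixedOutput, Digest};
use generic_array::GenericArray;
use generic_array::typenum::U4;

/// Toy hasher: XORs input bytes into a rotating 4-byte state.
/// For illustration only; it is not a cryptographic hash.
#[derive(Default, Clone)]
struct XorHash {
    state: [u8; 4],
    pos: usize,
}

impl Input for XorHash {
    fn process(&mut self, input: &[u8]) {
        for &b in input {
            self.state[self.pos % 4] ^= b;
            self.pos += 1;
        }
    }
}

impl BlockInput for XorHash {
    type BlockSize = U4;
}

impl FixedOutput for XorHash {
    type OutputSize = U4;
    fn fixed_result(self) -> GenericArray<u8, U4> {
        GenericArray::clone_from_slice(&self.state)
    }
}

#[test]
fn xor_hash_demo() {
    // The blanket impl provides `digest`, `input`, `result`, ... for free.
    let out = XorHash::digest(b"abcd");
    assert_eq!(out[..], b"abcd"[..]);
}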

2
third_party/rust/ena/.cargo-checksum.json vendored
View file

@@ -1 +1 @@
{"files":{".travis.yml":"f8e54ea908a294d46381a1bd608da3fcc7fb0a87cb15f546b93b74ee9c97bb2b","Cargo.toml":"63ff1e6e9d93ec6a81fb28f199ccbf9299e177152cd751f568623717e85ed83a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"11d2194be1dc7460ee631a32884516f78d5d95dc6e5efa9115767a8f55f55a06","measurements.txt":"46606bc04662362369479bce5c31b109984c1a3446d7f0566556257af91b86e2","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/cc/mod.rs":"fc486ba406d5761b1bd63621c37981c2b43966d269f8a596595fca36f8b395a4","src/cc/test.rs":"b6805fd4f22b3a3214c9759a674647e8b1dc83118f81c83955949a7414298423","src/constraint/mod.rs":"7df86d708ba692edd5bdb26b1da20720ee5bf51f741985c8193eb54db9365b4b","src/constraint/test.rs":"6666ec1411a61462777c88e7edf73f4bf7c04d4021007cf3340fd7ee22cece95","src/debug.rs":"0c24b9d2302c66e8f3e615c2a6689d88bc1eeac8844ae1f239fd3244c7f2ce6f","src/graph/mod.rs":"3a98ddddb4650744d5462ee442405551272e6c0ff820fd85c26dfec133974671","src/graph/tests.rs":"e2afc7912203e158d37d1f951cb76e6f67eb63890573649b3b2e9ea3afe5ba01","src/lib.rs":"d4584bb7efa3269a328d1ef373fef02e177efb8874f81556a124a58ea18fad87","src/snapshot_vec.rs":"0654cf102f05e98694b74076d5b2fcb7d52cfcbd1771853db22784ea7ad50cb1","src/unify/mod.rs":"0f8a78332c43d6776c2afa93aef174d5d10fb83a5046f0b7081262b754a31da3","src/unify/tests.rs":"9dfc23f77c6fc0565d90b0f74eceeadf666cd9c728aac388b33f138fbc30b50c"},"package":"cabe5a5078ac8c506d3e4430763b1ba9b609b1286913e7d08e581d1c2de9b7e5"}
{"files":{".travis.yml":"8effd1577dc503149f0f829c9291d844ec44d155fd253aa6b644c4ccc25e8bc8","Cargo.toml":"13e445b6bc53bf1ea2379fd2ec33205daa9b1b74d5a41e4dd9ea8cb966185c5a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"4b02d7ebfb188b1f2cbef20ade3082197046ccaa89e49d2bcdef6102d48919e3","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"294aabf6fb846dbe35bba837d70ea9115f20cd808995a318c0fccb05f91d096f","src/snapshot_vec.rs":"abc649bb42dc8592741b02d53ba1ed5f6ad64710b971070872b0c42665d73c93","src/unify/backing_vec.rs":"7d57036ce671169893d069f94454f1c4b95104517ffd62859f180d80cbe490e5","src/unify/mod.rs":"9fc90951778be635fbbf4fba8b3a0a4eb21e2c955660f019377465ac773b9563","src/unify/tests.rs":"b18974faeebdf2c03e82035fe7281bf4db3360ab10ce34b1d3441547836b19f2"},"package":"88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"}

6
third_party/rust/ena/.travis.yml vendored
View file

@@ -1,5 +1,9 @@
language: rust
rust:
- stable
- nightly
script:
- cargo test
- cargo test
- |
[ $TRAVIS_RUST_VERSION != nightly ] ||
cargo test --all-features

39
third_party/rust/ena/Cargo.toml vendored
View file

@@ -1,8 +1,37 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "ena"
description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
license = "MIT/Apache-2.0"
homepage = "https://github.com/nikomatsakis/ena"
repository = "https://github.com/nikomatsakis/ena"
version = "0.5.0"
version = "0.9.3"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
homepage = "https://github.com/nikomatsakis/ena"
readme = "README.md"
keywords = ["unification", "union-find"]
license = "MIT/Apache-2.0"
repository = "https://github.com/nikomatsakis/ena"
[dependencies.dogged]
version = "0.2.0"
optional = true
[dependencies.log]
version = "0.4"
[dependencies.petgraph]
version = "0.4.5"
optional = true
[features]
bench = []
congruence-closure = ["petgraph"]
persistent = ["dogged"]

16
third_party/rust/ena/README.md vendored
View file

@@ -1,15 +1,19 @@
[![Build Status](https://travis-ci.org/nikomatsakis/rayon.svg?branch=master)](https://travis-ci.org/nikomatsakis/ena)
[![Build Status](https://travis-ci.org/nikomatsakis/ena.svg?branch=master)](https://travis-ci.org/nikomatsakis/ena)
An implementation of union-find / congruence-closure in Rust. Forked
from rustc for independent experimentation. My intention is to iterate
and improve this code and gradually bring back changes into rustc
itself, but also to enable other crates.io packages to use the same
code.
An implementation of union-find in Rust; extracted from (and used by)
rustc.
### Name
The name "ena" comes from the Greek word for "one".
### Features
By default, you just get the union-find implementation. You can also
opt-in to the following experimental features:
- `bench`: use to run benchmarks (`cargo bench --features bench`)
### License
Like rustc itself, this code is dual-licensed under the MIT and Apache

23
third_party/rust/ena/measurements.txt vendored
View file

@@ -1,21 +1,6 @@
base
test unify::test::big_array_bench ... bench: 1,416,793 ns/iter (+/- 216,475)
test unify::tests::big_array_bench ... bench: 740,192 ns/iter (+/- 35,823)
test unify::tests::big_array_bench ... bench: 745,031 ns/iter (+/- 240,463)
test unify::tests::big_array_bench ... bench: 762,031 ns/iter (+/- 240,463)
test unify::tests::big_array_bench ... bench: 756,234 ns/iter (+/- 264,710)
assert -> debug_assert
test unify::test::big_array_bench ... bench: 1,420,368 ns/iter (+/- 144,433)
test unify::test::big_array_bench ... bench: 1,414,448 ns/iter (+/- 219,137)
don't copy for redirects
test unify::test::big_array_bench ... bench: 1,349,796 ns/iter (+/- 233,931)
test unify::test::big_array_bench ... bench: 1,367,082 ns/iter (+/- 301,644)
test unify::test::big_array_bench ... bench: 1,358,154 ns/iter (+/- 348,796)
copy less
test unify::test::big_array_bench ... bench: 744,775 ns/iter (+/- 51,865)
test unify::test::big_array_bench ... bench: 750,939 ns/iter (+/- 146,417)
test unify::test::big_array_bench ... bench: 754,104 ns/iter (+/- 121,968)
s/set-value/update-value/
test unify::test::big_array_bench ... bench: 731,531 ns/iter (+/- 125,685)
test unify::test::big_array_bench ... bench: 725,162 ns/iter (+/- 99,013)
test unify::test::big_array_bench ... bench: 735,473 ns/iter (+/- 121,156)

436
third_party/rust/ena/src/cc/mod.rs vendored
View file

@@ -1,436 +0,0 @@
//! An implementation of the Congruence Closure algorithm based on the
//! paper "Fast Decision Procedures Based on Congruence Closure" by Nelson
//! and Oppen, JACM 1980.
use graph::{self, Graph, NodeIndex};
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter;
use unify::{UnifyKey, UnifyValue, InfallibleUnifyValue, UnificationTable, UnionedKeys};
#[cfg(test)]
mod test;
pub struct CongruenceClosure<K: Key> {
map: HashMap<K, Token>,
table: UnificationTable<Token>,
graph: Graph<K, ()>,
}
pub trait Key: Hash + Eq + Clone + Debug {
// If this Key has some efficient way of converting itself into a
// congruence closure `Token`, then it should return `Some(token)`.
// Otherwise, return `None`, in which case the CC will internally
// map the key to a token. Typically, this is used by layers that
// wrap the CC, where inference variables are mapped directly to
// particular tokens.
fn to_token(&self) -> Option<Token> {
None
}
fn key_kind(&self) -> KeyKind;
fn shallow_eq(&self, key: &Self) -> bool;
fn successors(&self) -> Vec<Self>;
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum KeyKind {
Applicative,
Generative,
}
use self::KeyKind::*;
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token {
// this is the index both for the graph and the unification table,
// since for every node there is also a slot in the unification
// table
index: u32,
}
impl Token {
fn new(index: u32) -> Token {
Token { index: index }
}
fn from_node(node: NodeIndex) -> Token {
Token { index: node.0 as u32 }
}
fn node(&self) -> NodeIndex {
NodeIndex(self.index as usize)
}
}
impl UnifyKey for Token {
type Value = KeyKind;
fn index(&self) -> u32 {
self.index
}
fn from_index(i: u32) -> Token {
Token::new(i)
}
fn tag() -> &'static str {
"CongruenceClosure"
}
fn order_roots(a: Self,
&a_value: &KeyKind,
b: Self,
&b_value: &KeyKind)
-> Option<(Self, Self)> {
if a_value == b_value {
None
} else if a_value == Generative {
Some((a, b))
} else {
debug_assert!(b_value == Generative);
Some((b, a))
}
}
}
impl UnifyValue for KeyKind {
fn unify_values(&kind1: &Self, &kind2: &Self) -> Result<Self, (Self, Self)> {
match (kind1, kind2) {
(Generative, _) => Ok(Generative),
(_, Generative) => Ok(Generative),
(Applicative, Applicative) => Ok(Applicative),
}
}
}
impl InfallibleUnifyValue for KeyKind {}
impl<K: Key> CongruenceClosure<K> {
pub fn new() -> CongruenceClosure<K> {
CongruenceClosure {
map: HashMap::new(),
table: UnificationTable::new(),
graph: Graph::new(),
}
}
/// Manually create a new CC token. You don't normally need to do
/// this, as CC tokens are automatically created for each key when
/// we first observe it. However, if you wish to have keys that
/// make use of the `to_token` method to bypass the `key -> token`
/// map, then you can use this function to make a new-token. The
/// callback `key_op` will be invoked to create the key for the
/// fresh token (typically, it will wrap the token in some kind of
/// enum indicating an inference variable).
///
/// **WARNING:** The new key **must** be a leaf (no successor
/// keys) or else things will not work right. This invariant is
/// not currently checked.
pub fn new_token<OP>(&mut self, key_kind: KeyKind, key_op: OP) -> Token
where OP: FnOnce(Token) -> K
{
let token = self.table.new_key(key_kind);
let key = key_op(token);
let node = self.graph.add_node(key);
assert_eq!(token.node(), node);
token
}
/// Return the key for a given token
pub fn key(&self, token: Token) -> &K {
self.graph.node_data(token.node())
}
/// Indicates that `key1` and `key2` are equivalent.
pub fn merge(&mut self, key1: K, key2: K) {
let token1 = self.add(key1);
let token2 = self.add(key2);
self.algorithm().merge(token1, token2);
}
/// Indicates whether `key1` and `key2` are equivalent.
pub fn merged(&mut self, key1: K, key2: K) -> bool {
// Careful: you cannot naively remove the `add` calls
// here. The reason is because of patterns like the test
// `struct_union_no_add`. If we unify X and Y, and then unify
// F(X) and F(Z), we need to be sure to figure out that F(Y)
// == F(Z). This requires a non-trivial deduction step, so
// just checking if the arguments are congruent will fail,
// because `Y == Z` does not hold.
debug!("merged: called({:?}, {:?})", key1, key2);
let token1 = self.add(key1);
let token2 = self.add(key2);
self.algorithm().unioned(token1, token2)
}
/// Returns an iterator over all keys that are known to have been
/// merged with `key`. This is a bit dubious, since the set of
/// merged keys will be dependent on what has been added, and is
/// not the full set of equivalencies that one might imagine. See the
/// test `merged_keys` for an example.
pub fn merged_keys(&mut self, key: K) -> MergedKeys<K> {
let token = self.add(key);
MergedKeys {
graph: &self.graph,
iterator: self.table.unioned_keys(token),
}
}
/// Add a key into the CC table, returning the corresponding
/// token. This is not part of the public API, though it could be
/// if we wanted.
fn add(&mut self, key: K) -> Token {
debug!("add(): key={:?}", key);
let (is_new, token) = self.get_or_add(&key);
debug!("add: key={:?} is_new={:?} token={:?}", key, is_new, token);
// if this node is already in the graph, we are done
if !is_new {
return token;
}
// Otherwise, we want to add the 'successors' also. So, for
// example, if we are adding `Box<Foo>`, the successor would
// be `Foo`. So go ahead and recursively add `Foo` if it
// doesn't already exist.
let successors: Vec<_> = key.successors()
.into_iter()
.map(|s| self.add(s))
.collect();
debug!("add: key={:?} successors={:?}", key, successors);
// Now we have to be a bit careful. It might be that we are
// adding `Box<Foo>`, but `Foo` was already present, and in
// fact equated with `Bar`. That is, maybe we had a graph like:
//
// Box<Bar> -> Bar == Foo
//
// Now we just added `Box<Foo>`, but we need to equate
// `Box<Foo>` and `Box<Bar>`.
for successor in successors {
// get set of predecessors for each successor BEFORE we add the new node;
// this would be `Box<Bar>` in the above example.
let predecessors: Vec<_> = self.algorithm().all_preds(successor);
debug!("add: key={:?} successor={:?} predecessors={:?}",
key,
successor,
predecessors);
// add edge from new node `Box<Foo>` to its successor `Foo`
self.graph.add_edge(token.node(), successor.node(), ());
// Now we have to consider merging the old predecessors,
// like `Box<Bar>`, with this new node `Box<Foo>`.
//
// Note that in other cases it might be that no merge will
// occur. For example, if we were adding `(A1, B1)` to a
// graph like this:
//
// (A, B) -> A == A1
// |
// v
// B
//
// In this case, the predecessor would be `(A, B)`; but we don't
// know that `B == B1`, so we can't merge that with `(A1, B1)`.
for predecessor in predecessors {
self.algorithm().maybe_merge(token, predecessor);
}
}
token
}
/// Gets the token for a key, if any.
fn get(&self, key: &K) -> Option<Token> {
key.to_token()
.or_else(|| self.map.get(key).cloned())
}
/// Gets the token for a key, adding one if none exists. Returns the token
/// and a boolean indicating whether it had to be added.
fn get_or_add(&mut self, key: &K) -> (bool, Token) {
if let Some(token) = self.get(key) {
return (false, token);
}
let token = self.new_token(key.key_kind(), |_| key.clone());
self.map.insert(key.clone(), token);
(true, token)
}
fn algorithm(&mut self) -> Algorithm<K> {
Algorithm {
graph: &self.graph,
table: &mut self.table,
}
}
}
// # Walking merged keys
pub struct MergedKeys<'cc, K: Key + 'cc> {
graph: &'cc Graph<K, ()>,
iterator: UnionedKeys<'cc, Token>,
}
impl<'cc, K: Key> Iterator for MergedKeys<'cc, K> {
type Item = K;
fn next(&mut self) -> Option<Self::Item> {
self.iterator
.next()
.map(|token| self.graph.node_data(token.node()).clone())
}
}
// # The core algorithm
struct Algorithm<'a, K: Key + 'a> {
graph: &'a Graph<K, ()>,
table: &'a mut UnificationTable<Token>,
}
impl<'a, K: Key> Algorithm<'a, K> {
fn merge(&mut self, u: Token, v: Token) {
debug!("merge(): u={:?} v={:?}", u, v);
if self.unioned(u, v) {
return;
}
let u_preds = self.all_preds(u);
let v_preds = self.all_preds(v);
self.union(u, v);
for &p_u in &u_preds {
for &p_v in &v_preds {
self.maybe_merge(p_u, p_v);
}
}
}
fn all_preds(&mut self, u: Token) -> Vec<Token> {
let graph = self.graph;
self.table
.unioned_keys(u)
.flat_map(|k| graph.predecessor_nodes(k.node()))
.map(|i| Token::from_node(i))
.collect()
}
fn maybe_merge(&mut self, p_u: Token, p_v: Token) {
debug!("maybe_merge(): p_u={:?} p_v={:?}",
self.key(p_u),
self.key(p_v));
if !self.unioned(p_u, p_v) && self.shallow_eq(p_u, p_v) && self.congruent(p_u, p_v) {
self.merge(p_u, p_v);
}
}
// Check whether each of the successors are unioned. So if you
// have `Box<X1>` and `Box<X2>`, this is true if `X1 == X2`. (The
// result of this fn is not really meaningful unless the two nodes
// are shallow equal here.)
fn congruent(&mut self, p_u: Token, p_v: Token) -> bool {
debug_assert!(self.shallow_eq(p_u, p_v));
debug!("congruent({:?}, {:?})", self.key(p_u), self.key(p_v));
let succs_u = self.successors(p_u);
let succs_v = self.successors(p_v);
let r = succs_u.zip(succs_v).all(|(s_u, s_v)| {
debug!("congruent: s_u={:?} s_v={:?}", s_u, s_v);
self.unioned(s_u, s_v)
});
debug!("congruent({:?}, {:?}) = {:?}",
self.key(p_u),
self.key(p_v),
r);
r
}
fn key(&self, u: Token) -> &'a K {
self.graph.node_data(u.node())
}
// Compare the local data, not considering successor nodes. So e.g
// `Box<X>` and `Box<Y>` are shallow equal for any `X` and `Y`.
fn shallow_eq(&self, u: Token, v: Token) -> bool {
let r = self.key(u).shallow_eq(self.key(v));
debug!("shallow_eq({:?}, {:?}) = {:?}", self.key(u), self.key(v), r);
r
}
fn token_kind(&self, u: Token) -> KeyKind {
self.graph.node_data(u.node()).key_kind()
}
fn unioned(&mut self, u: Token, v: Token) -> bool {
let r = self.table.unioned(u, v);
debug!("unioned(u={:?}, v={:?}) = {:?}",
self.key(u),
self.key(v),
r);
r
}
fn union(&mut self, u: Token, v: Token) {
debug!("union(u={:?}, v={:?})", self.key(u), self.key(v));
// find the roots of `u` and `v`; if `u` and `v` have been unioned
// with anything generative, these will be generative.
let u = self.table.find(u);
let v = self.table.find(v);
// u and v are now union'd
self.table.union(u, v);
// if both `u` and `v` were generative, we can now propagate
// the constraint that their successors must also be the same
if self.token_kind(u) == Generative && self.token_kind(v) == Generative {
if self.shallow_eq(u, v) {
let mut succs_u = self.successors(u);
let mut succs_v = self.successors(v);
for (succ_u, succ_v) in succs_u.by_ref().zip(succs_v.by_ref()) {
// assume # of succ is equal because types are WF (asserted below)
self.merge(succ_u, succ_v);
}
debug_assert!(succs_u.next().is_none());
debug_assert!(succs_v.next().is_none());
} else {
// error: user asked us to union i32/u32 or Vec<T>/Vec<U>;
// for now just panic.
panic!("inconsistent conclusion: {:?} vs {:?}",
self.key(u),
self.key(v));
}
}
}
fn successors(&self, token: Token) -> iter::Map<graph::AdjacentTargets<'a, K, ()>,
fn(NodeIndex) -> Token> {
self.graph
.successor_nodes(token.node())
.map(Token::from_node)
}
fn predecessors(&self, token: Token) -> iter::Map<graph::AdjacentSources<'a, K, ()>,
fn(NodeIndex) -> Token> {
self.graph
.predecessor_nodes(token.node())
.map(Token::from_node)
}
/// If `token` has been unioned with something generative, returns
/// `Ok(u)` where `u` is the generative token. Otherwise, returns
/// `Err(v)` where `v` is the root of `token`.
fn normalize_to_generative(&mut self, token: Token) -> Result<Token, Token> {
let token = self.table.find(token);
match self.token_kind(token) {
Generative => Ok(token),
Applicative => Err(token),
}
}
}

349
third_party/rust/ena/src/cc/test.rs vendored
View file

@@ -1,349 +0,0 @@
// use debug::Logger;
use cc::{CongruenceClosure, Key, KeyKind, Token};
use self::TypeStruct::*;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
enum TypeStruct {
// e.g., `<T as Iterator>::Item` would be `Assoc(Iterator::Item, vec![T])`
Assoc(&'static str, Vec<Type>),
// skolemized version of in-scope generic, e.g., the `T` when checking `fn foo<T>`
Skolem(u32),
// inference variable (existentially quantified)
Variable(Token),
// a nominal type applied to arguments, e.g. `i32` or `Vec<T>`
Nominal(&'static str, Vec<Type>),
}
type Type = Box<TypeStruct>;
impl Key for Type {
fn to_token(&self) -> Option<Token> {
match **self {
TypeStruct::Variable(t) => Some(t),
_ => None,
}
}
fn key_kind(&self) -> KeyKind {
match **self {
TypeStruct::Assoc(..) |
TypeStruct::Variable(_) |
TypeStruct::Skolem(_) =>
KeyKind::Applicative,
TypeStruct::Nominal(..) =>
KeyKind::Generative,
}
}
fn shallow_eq(&self, key: &Type) -> bool {
match (&**self, &**key) {
(&Assoc(i, _), &Assoc(j, _)) => i == j,
(&Skolem(i), &Skolem(j)) => i == j,
(&Nominal(i, _), &Nominal(j, _)) => i == j,
_ => false,
}
}
fn successors(&self) -> Vec<Self> {
match **self {
Assoc(_, ref s) => s.clone(),
Skolem(_) => vec![],
Variable(_) => vec![],
Nominal(_, ref s) => s.clone(),
}
}
}
fn skolem(x: u32) -> Type {
Box::new(Skolem(x))
}
fn iterator_item(t: Type) -> Type {
Box::new(Assoc("Iterator::Item", vec![t]))
}
fn integer() -> Type {
Box::new(Nominal("integer", vec![]))
}
fn character() -> Type {
Box::new(Nominal("char", vec![]))
}
fn vec(t: Type) -> Type {
Box::new(Nominal("Vec", vec![t]))
}
fn inference_var<'tcx>(cc: &mut CongruenceClosure<Type>) -> Type {
let token = cc.new_token(KeyKind::Applicative,
move |token| Box::new(TypeStruct::Variable(token)));
cc.key(token).clone()
}
#[test]
fn simple_as_it_gets() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
assert!(cc.merged(skolem(0), skolem(0)));
assert!(!cc.merged(skolem(0), skolem(1)));
assert!(cc.merged(skolem(1), skolem(1)));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(0))));
assert!(!cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(1))));
}
#[test]
fn union_vars() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
assert!(cc.merged(skolem(0), skolem(1)));
}
#[test]
fn union_iterator_item_then_test_var() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
assert!(cc.merged(skolem(0), skolem(1)));
}
#[test]
fn union_direct() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.add(iterator_item(skolem(0)));
cc.add(iterator_item(skolem(1)));
cc.add(skolem(0));
cc.add(skolem(1));
cc.merge(skolem(0), skolem(1));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
macro_rules! indirect_test {
($test_name:ident: $a:expr, $b:expr; $c:expr, $d:expr) => {
#[test]
fn $test_name() {
// Variant 1: call `add` explicitly
//
// This caused bugs because nodes were pre-existing.
{
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.add(iterator_item(skolem(0)));
cc.add(iterator_item(skolem(2)));
cc.add(skolem(0));
cc.add(skolem(1));
cc.add(skolem(2));
cc.merge($a, $b);
cc.merge($c, $d);
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
}
// Variant 2: never call `add` explicitly
//
// This is more how we expect library to be used in practice.
{
let mut cc2: CongruenceClosure<Type> = CongruenceClosure::new();
cc2.merge($a, $b);
cc2.merge($c, $d);
assert!(cc2.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
}
}
}
}
// The indirect tests test for the case where we merge V0 and V1, and
// we merged V1 and V2, and we want to use this to conclude that
// Assoc(V0) and Assoc(V2) are merged -- but there is no node created for
// Assoc(V1).
indirect_test! { indirect_test_1: skolem(1), skolem(2); skolem(1), skolem(0) }
indirect_test! { indirect_test_2: skolem(2), skolem(1); skolem(1), skolem(0) }
indirect_test! { indirect_test_3: skolem(1), skolem(2); skolem(0), skolem(1) }
indirect_test! { indirect_test_4: skolem(2), skolem(1); skolem(0), skolem(1) }
// Here we determine that `Assoc(V0) == Assoc(V1)` because `V0==V1`,
// but we never add nodes for `Assoc(_)`.
#[test]
fn merged_no_add() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
// Here we determine that `Assoc(V0) == Assoc(V2)` because `V0==V1==V2`,
// but we never add nodes for `Assoc(_)`.
#[test]
fn merged_no_add_indirect() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
cc.merge(skolem(1), skolem(2));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
}
// Here we determine that `Assoc(V0) == Assoc(V2)` because `V0==V1==V2`,
// but we never add nodes for `Assoc(_)`.
#[test]
fn iterator_item_not_merged() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(iterator_item(skolem(0)), iterator_item(skolem(1)));
assert!(!cc.merged(skolem(0), skolem(1)));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
// Here we show that merging `Assoc(V1) == Assoc(V2)` does NOT imply that
// `V1 == V2`.
#[test]
fn merge_fns_not_inputs() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(iterator_item(skolem(0)), iterator_item(skolem(1)));
assert!(!cc.merged(skolem(0), skolem(1)));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
#[test]
fn inf_var_union() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
let v0 = inference_var(&mut cc);
let v1 = inference_var(&mut cc);
let v2 = inference_var(&mut cc);
let iterator_item_v0 = iterator_item(v0.clone());
let iterator_item_v1 = iterator_item(v1.clone());
let iterator_item_v2 = iterator_item(v2.clone());
cc.merge(v0.clone(), v1.clone());
assert!(cc.map.is_empty()); // inf variables don't take up map slots
assert!(cc.merged(iterator_item_v0.clone(), iterator_item_v1.clone()));
assert!(!cc.merged(iterator_item_v0.clone(), iterator_item_v2.clone()));
cc.merge(iterator_item_v0.clone(), iterator_item_v2.clone());
assert!(cc.merged(iterator_item_v0.clone(), iterator_item_v2.clone()));
assert!(cc.merged(iterator_item_v1.clone(), iterator_item_v2.clone()));
assert_eq!(cc.map.len(), 3); // each iterator_item needs an entry
}
#[test]
fn skolem_union_no_add() {
// This particular pattern of unifications exercises a potentially
// subtle bug:
// - We merge `skolem(0)` and `skolem(1)`
// and then merge `Assoc(skolem(0))` and `Assoc(skolem(2))`.
// - From this we should be able to deduce that `Assoc(skolem(1)) == Assoc(skolem(2))`.
// - However, if we are not careful with accounting for
// predecessors and so forth, this fails. For example, when
// adding `Assoc(skolem(1))`, we have to consider `Assoc(skolem(0))`
// to be a predecessor of `skolem(1)`.
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
assert!(!cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
cc.merge(iterator_item(skolem(0)), iterator_item(skolem(2)));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(2))));
assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
}
#[test]
fn merged_keys() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(skolem(0), skolem(1));
cc.merge(iterator_item(skolem(0)), iterator_item(skolem(2)));
// Here we don't yet see `iterator_item(skolem(1))` because it has no
// corresponding node:
let keys: Vec<Type> = cc.merged_keys(iterator_item(skolem(2))).collect();
assert_eq!(&keys[..], &[iterator_item(skolem(2)), iterator_item(skolem(0))]);
// But of course `merged` returns true (and adds a node):
assert!(cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
// So now we see it:
let keys: Vec<Type> = cc.merged_keys(iterator_item(skolem(2))).collect();
assert_eq!(&keys[..], &[iterator_item(skolem(2)),
iterator_item(skolem(1)),
iterator_item(skolem(0))]);
}
// Here we show that merging `Vec<V1> == Vec<V2>` DOES imply that
// `V1 == V2`.
#[test]
fn merge_vecs() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(vec(skolem(0)), vec(skolem(1)));
assert!(cc.merged(skolem(0), skolem(1)));
assert!(cc.merged(vec(skolem(0)), vec(skolem(1))));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
// Here we show that merging `Vec<V1::Item> == Vec<V2::Item>` does NOT imply that
// `V1 == V2`.
#[test]
fn merge_vecs_of_items() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(vec(iterator_item(skolem(0))),
vec(iterator_item(skolem(1))));
assert!(!cc.merged(skolem(0), skolem(1)));
assert!(!cc.merged(vec(skolem(0)), vec(skolem(1))));
assert!(cc.merged(vec(iterator_item(skolem(0))),
vec(iterator_item(skolem(1)))));
assert!(cc.merged(iterator_item(vec(iterator_item(skolem(0)))),
iterator_item(vec(iterator_item(skolem(1))))));
assert!(cc.merged(iterator_item(iterator_item(vec(iterator_item(skolem(0))))),
iterator_item(iterator_item(vec(iterator_item(skolem(1)))))));
assert!(cc.merged(iterator_item(skolem(0)), iterator_item(skolem(1))));
}
// Here we merge `Vec<Int>::Item` with `Int`, then later merge it with an
// inference variable, and check that the variable is consequently
// concluded to be (indeed) `Int`.
#[test]
fn merge_iterator_item_generative() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(iterator_item(vec(integer())), integer());
let v0 = inference_var(&mut cc);
cc.merge(iterator_item(vec(integer())), v0.clone());
assert!(cc.merged(v0.clone(), integer()));
assert!(cc.merged(vec(iterator_item(vec(integer()))), vec(integer())));
}
#[test]
fn merge_ripple() {
let mut cc: CongruenceClosure<Type> = CongruenceClosure::new();
cc.merge(iterator_item(skolem(1)), vec(skolem(0)));
cc.merge(iterator_item(skolem(2)), vec(integer()));
assert!(!cc.merged(iterator_item(skolem(1)), iterator_item(skolem(2))));
println!("------------------------------");
cc.merge(skolem(0), integer());
println!("------------------------------");
assert!(cc.merged(iterator_item(skolem(1)),
iterator_item(skolem(2))));
assert!(cc.merged(iterator_item(iterator_item(skolem(1))),
iterator_item(iterator_item(skolem(2)))));
}

160
third_party/rust/ena/src/constraint/mod.rs (vendored)

@@ -1,160 +0,0 @@
//! Constraint graph.
#![allow(dead_code)]
use graph::{Graph, NodeIndex};
use std::collections::VecDeque;
use std::u32;
#[cfg(test)]
mod test;
pub trait Lattice {
type Element: Clone + Eq;
fn lub(&self, elem1: &Self::Element, elem2: &Self::Element) -> Option<Self::Element>;
}
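// Illustrative sketch (not part of the vendored module): a minimal
// `Lattice` implementation, assuming a join that is `max` over `u32`,
// capped at a limit so that `lub` can return `None` and exercise
// `PropagationError`. The `CappedMax` name is hypothetical.
struct CappedMax {
    cap: u32,
}

impl Lattice for CappedMax {
    type Element = u32;

    fn lub(&self, elem1: &u32, elem2: &u32) -> Option<u32> {
        // The join of two values is their maximum; joins above `cap` do
        // not exist in this lattice, modelling an unsatisfiable constraint.
        let joined = std::cmp::max(*elem1, *elem2);
        if joined <= self.cap {
            Some(joined)
        } else {
            None
        }
    }
}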
pub struct ConstraintGraph<L: Lattice> {
graph: Graph<(), ()>,
values: Vec<L::Element>,
lattice: L,
}
#[derive(Copy, Clone)]
pub struct Var {
index: u32,
}
impl Var {
pub fn index(&self) -> usize {
self.index as usize
}
fn to_node_index(self) -> NodeIndex {
NodeIndex(self.index as usize)
}
fn from_node_index(ni: NodeIndex) -> Var {
assert!(ni.0 < (u32::MAX as usize));
Var { index: ni.0 as u32 }
}
}
impl<L> ConstraintGraph<L>
where L: Lattice
{
fn new(lattice: L) -> ConstraintGraph<L> {
ConstraintGraph {
graph: Graph::new(),
values: Vec::new(),
lattice: lattice,
}
}
fn new_var(&mut self, initial_value: L::Element) -> Var {
assert_eq!(self.graph.all_nodes().len(), self.values.len());
let node_index = self.graph.add_node(());
self.values.push(initial_value);
Var::from_node_index(node_index)
}
pub fn constrain_var(&mut self, var: Var, value: L::Element) -> Vec<PropagationError<L>> {
let propagation = Propagation::new(&self.lattice, &self.graph, &mut self.values);
propagation.propagate(value, var)
}
pub fn add_edge(&mut self, source: Var, target: Var) -> Vec<PropagationError<L>> {
let source_node = source.to_node_index();
let target_node = target.to_node_index();
if self.graph
.successor_nodes(source_node)
.any(|n| n == target_node) {
return vec![];
}
self.graph.add_edge(source_node, target_node, ());
let value = self.current_value(source);
self.constrain_var(target, value)
}
pub fn current_value(&self, node: Var) -> L::Element {
self.values[node.index()].clone()
}
}
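// Illustrative sketch (not part of the vendored module) of how the graph
// is driven, assuming the hypothetical `CappedMax` lattice above: values
// are joined with `lub` and pushed along edges until a fixed point is
// reached, and failed joins come back as `PropagationError`s.
fn constraint_graph_sketch() {
    let mut graph = ConstraintGraph::new(CappedMax { cap: 10 });
    let a = graph.new_var(0);
    let b = graph.new_var(0);

    // Constrain `a` to at least 3; nothing propagates yet, there are no edges.
    assert!(graph.constrain_var(a, 3).is_empty());

    // Adding the edge `a -> b` immediately propagates `a`'s value into `b`.
    assert!(graph.add_edge(a, b).is_empty());
    assert_eq!(graph.current_value(b), 3);

    // A value whose join with the current one exceeds the cap has no lub,
    // so propagation reports an error instead of updating the variable.
    assert_eq!(graph.constrain_var(a, 11).len(), 1);
}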
// /////////////////////////////////////////////////////////////////////////
struct Propagation<'p, L>
where L: Lattice + 'p,
L::Element: 'p
{
lattice: &'p L,
graph: &'p Graph<(), ()>,
values: &'p mut Vec<L::Element>,
queue: VecDeque<Var>,
errors: Vec<PropagationError<L>>,
}
pub struct PropagationError<L>
where L: Lattice
{
var: Var,
old_value: L::Element,
new_value: L::Element,
}
impl<'p, L> Propagation<'p, L>
where L: Lattice,
L::Element: 'p
{
fn new(lattice: &'p L,
graph: &'p Graph<(), ()>,
values: &'p mut Vec<L::Element>)
-> Propagation<'p, L> {
Propagation {
lattice: lattice,
graph: graph,
values: values,
queue: VecDeque::new(),
errors: Vec::new(),
}
}
fn propagate(mut self, value: L::Element, var: Var) -> Vec<PropagationError<L>> {
self.update_node(value, var);
while let Some(dirty) = self.queue.pop_front() {
let value = self.values[dirty.index()].clone();
for succ_node_index in self.graph.successor_nodes(dirty.to_node_index()) {
let succ_var = Var::from_node_index(succ_node_index);
self.update_node(value.clone(), succ_var);
}
}
self.errors
}
fn update_node(&mut self, value: L::Element, var: Var) {
let cur_value = self.values[var.index()].clone();
match self.lattice.lub(&cur_value, &value) {
Some(new_value) => {
if cur_value != new_value {
// Store the computed lub (not just the incoming value) so that
// information already present in `cur_value` is preserved.
self.values[var.index()] = new_value;
self.queue.push_back(var);
}
}
None => {
// Error. Record for later.
self.errors.push(PropagationError::<L> {
var: var,
old_value: cur_value,
new_value: value,
});
}
}
}
}

Some files were not shown because too many files changed in this diff.