Merge autoland to mozilla-central. a=merge

Csoregi Natalia 2018-06-22 00:49:29 +03:00
Parents: e39954e209 bd7fd86aa9
Commit: ddf211b35e
209 changed files: 34510 additions and 2561 deletions

View file

@ -9,7 +9,7 @@ replace-with = 'vendored-sources'
[source."https://github.com/servo/serde"]
git = "https://github.com/servo/serde"
branch = "deserialize_from_enums7"
branch = "deserialize_from_enums8"
replace-with = "vendored-sources"
[source.vendored-sources]

Cargo.lock (generated), 83 changed lines
View file

@ -34,7 +34,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -92,8 +92,8 @@ dependencies = [
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"tokio-core 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-io 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-uds 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -151,7 +151,7 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -583,8 +583,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -610,8 +610,8 @@ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -674,7 +674,7 @@ version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1018,8 +1018,8 @@ dependencies = [
"petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1597,6 +1597,14 @@ dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "proc-macro2"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "procedural-masquerade"
version = "0.1.1"
@ -1635,6 +1643,14 @@ dependencies = [
"proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand"
version = "0.3.18"
@ -1712,7 +1728,7 @@ name = "ron"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1798,10 +1814,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde"
version = "1.0.58"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
]
[[package]]
@ -1809,17 +1825,17 @@ name = "serde_bytes"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serde_derive"
version = "1.0.58"
source = "git+https://github.com/servo/serde?branch=deserialize_from_enums7#884e8078a9c74314fa4a5a2e6ce4ac67ab8fa415"
version = "1.0.66"
source = "git+https://github.com/servo/serde?branch=deserialize_from_enums8#c4457d804b38b14e699b45c01d1909f93f25ab5e"
dependencies = [
"proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.14.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1873,7 +1889,7 @@ dependencies = [
"new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2009,6 +2025,16 @@ dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synstructure"
version = "0.8.1"
@ -2134,7 +2160,7 @@ name = "toml"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2320,7 +2346,7 @@ dependencies = [
"plane-split 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ron 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2339,9 +2365,9 @@ dependencies = [
"core-graphics 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
"dwrote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"euclid 0.17.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
"serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2639,10 +2665,12 @@ dependencies = [
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"
"checksum proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"
"checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
"checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
"checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd"
"checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
"checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
@ -2661,9 +2689,9 @@ dependencies = [
"checksum scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c79eb2c3ac4bc2507cda80e7f3ac5b88bd8eae4c0914d5663e6a8933994be918"
"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)" = "34e9df8efbe7a2c12ceec1fc8744d56ae3374d8ae325f4a0028949d16433d554"
"checksum serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "e9a2d9a9ac5120e0f768801ca2b58ad6eec929dc9d1d616c162f208869c2ce95"
"checksum serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adb6e51a6b3696b301bc221d785f898b4457c619b51d7ce195a6d20baecb37b3"
"checksum serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)" = "<none>"
"checksum serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)" = "<none>"
"checksum simd 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ed3686dd9418ebcc3a26a0c0ae56deab0681e53fe899af91f5bbcee667ebffb1"
"checksum siphasher 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ffc669b726f2bc9a3bcff66e5e23b56ba6bf70e22a34c3d7b6d0b3450b65b84"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
@ -2676,6 +2704,7 @@ dependencies = [
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
"checksum syn 0.14.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c67da57e61ebc7b7b6fff56bb34440ca3a83db037320b0507af4c10368deda7d"
"checksum synstructure 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "98cad891cd238c98e1f0aec9f7c0f620aa696e4e5f7daba56ac67b5e86a6b049"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"

View file

@ -59,4 +59,4 @@ codegen-units = 1
[patch.crates-io]
libudev-sys = { path = "dom/webauthn/libudev-sys" }
serde_derive = { git = "https://github.com/servo/serde", branch = "deserialize_from_enums7" }
serde_derive = { git = "https://github.com/servo/serde", branch = "deserialize_from_enums8" }

View file

@ -589,7 +589,7 @@ toolbarbutton[constrain-size="true"][cui-areatype="menu-panel"] > .toolbarbutton
#appMenu-tp-label {
-moz-context-properties: fill;
fill: currentColor;
list-style-image: url(chrome://browser/skin/tracking-protection-16.svg#enabled);
list-style-image: url(chrome://browser/skin/tracking-protection.svg);
-moz-box-flex: 1;
}

View file

@ -148,12 +148,12 @@
/* TRACKING PROTECTION ICON */
#tracking-protection-icon {
list-style-image: url(chrome://browser/skin/tracking-protection-16.svg#enabled);
list-style-image: url(chrome://browser/skin/tracking-protection.svg);
margin-inline-end: 0;
}
#tracking-protection-icon[state="loaded-tracking-content"] {
list-style-image: url(chrome://browser/skin/tracking-protection-16.svg#disabled);
list-style-image: url(chrome://browser/skin/tracking-protection-disabled.svg);
}
#tracking-protection-icon[animate] {

View file

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<svg xmlns="http://www.w3.org/2000/svg"
width="16" height="16" viewBox="0 0 16 16">
<style>
g:not(:target) {
display: none;
}
</style>
<defs>
<path id="shape-shield-outer" d="M8,1L2.8,1.9C2.4,1.9,2,2.4,2,2.8C2,4,2,6.1,2.1,7.1c0.3,2.7,0.8,4,1.9,5.6C5.6,14.7,8,15,8,15s2.4-0.3,4-2.4 c1.2-1.5,1.7-2.9,1.9-5.6C14,6.1,14,4,14,2.8c0-0.5-0.4-0.9-0.8-1L8,1L8,1z"/>
<path id="shape-shield-inner" d="M8,2l5,0.8c0,2,0,3.5-0.1,4.1c-0.3,2.7-0.8,3.8-1.7,5.1c-1.1,1.5-2.7,1.9-3.2,2c-0.4-0.1-2.1-0.5-3.2-2 c-1-1.3-1.5-2.4-1.7-5.1C3,6.3,3,4.8,3,2.8L8,2"/>
<path id="shape-shield-detail" d="M8,13c-0.5-0.1-1.6-0.5-2.4-1.5c-0.9-1.2-1.3-2.1-1.5-4.6C4,6.3,4,5.2,4,3.7L8,3 V13z"/>
<mask id="mask-shield-cutout">
<rect width="16" height="16" fill="#000" />
<use href="#shape-shield-outer" fill="#fff"/>
<use href="#shape-shield-inner" fill="#000"/>
<use href="#shape-shield-detail" fill="#fff"/>
</mask>
<mask id="mask-shield-cutout-disabled">
<rect width="16" height="16" fill="#000"/>
<use href="#shape-shield-outer" fill="#fff"/>
<use href="#shape-shield-inner" fill="#000"/>
<use href="#shape-shield-detail" fill="#fff"/>
<line x1="3" y1="15" x2="15" y2="3" stroke="#000" stroke-width="2"/>
</mask>
<line id="strike-through-red" x1="3" y1="14" x2="15" y2="2" stroke="#d92d21" stroke-width="2"/>
</defs>
<g id="enabled">
<use fill="context-fill" fill-opacity="context-fill-opacity" href="#shape-shield-outer" mask="url(#mask-shield-cutout)"/>
</g>
<g id="disabled">
<use fill="context-fill" fill-opacity="context-fill-opacity" href="#shape-shield-outer" mask="url(#mask-shield-cutout-disabled)"/>
<use href="#strike-through-red"/>
</g>
</svg>


View file

@ -0,0 +1,6 @@
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="context-fill" fill-opacity="context-fill-opacity">
<path d="M10.513 11.382A4.221 4.221 0 0 1 8 12.987a4.267 4.267 0 0 1-1.41-.578l-1.436 1.437a6.221 6.221 0 0 0 2.734 1.148l.112.012.112-.012a6.244 6.244 0 0 0 4.012-2.427 9.26 9.26 0 0 0 1.8-5.286c.043-.518.063-1.421.076-2.281l-2 2a7.572 7.572 0 0 1-1.487 4.382zm4.194-10.089a1 1 0 0 0-1.414 0l-.537.537a1.808 1.808 0 0 0-.285-.077L8 .985l-4.473.768A1.845 1.845 0 0 0 2 3.575c0 1.025 0 2.867.08 3.706a10.2 10.2 0 0 0 1.079 4.146l-1.866 1.866a1 1 0 1 0 1.414 1.414l12-12a1 1 0 0 0 0-1.414zM4 7c-.049-.54 0-1.675 0-3.3l4-.687 3.048.523-6.4 6.4A9.517 9.517 0 0 1 4 7z"/>
</svg>


View file

@ -0,0 +1,7 @@
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="context-fill" fill-opacity="context-fill-opacity">
<path d="M8 15.006l-.112-.012a6.244 6.244 0 0 1-4.012-2.427 9.26 9.26 0 0 1-1.8-5.286C2 6.442 2 4.6 2 3.575a1.845 1.845 0 0 1 1.527-1.822L8 .985l4.471.768A1.845 1.845 0 0 1 14 3.576c0 1.023 0 2.866-.08 3.705a9.26 9.26 0 0 1-1.8 5.286 6.244 6.244 0 0 1-4.012 2.427zM4 3.7C4 5.325 3.951 6.46 4 7a7.572 7.572 0 0 0 1.487 4.382A4.223 4.223 0 0 0 8 12.987a4.221 4.221 0 0 0 2.512-1.605A7.572 7.572 0 0 0 12 7c.049-.54 0-1.675 0-3.3l-4-.685z"/>
<path d="M8 4.537l-2.5.428c.009.942.03 1.655.062 2a5.765 5.765 0 0 0 1.13 3.53 2.685 2.685 0 0 0 1.3.943H8z"/>
</svg>


View file

@ -82,7 +82,8 @@
skin/classic/browser/notification-icons/midi.svg (../shared/notification-icons/midi.svg)
skin/classic/browser/notification-icons/webauthn.svg (../shared/notification-icons/webauthn.svg)
skin/classic/browser/tracking-protection-16.svg (../shared/identity-block/tracking-protection-16.svg)
skin/classic/browser/tracking-protection.svg (../shared/identity-block/tracking-protection.svg)
skin/classic/browser/tracking-protection-disabled.svg (../shared/identity-block/tracking-protection-disabled.svg)
skin/classic/browser/panel-icon-arrow-left.svg (../shared/panel-icon-arrow-left.svg)
skin/classic/browser/panel-icon-arrow-right.svg (../shared/panel-icon-arrow-right.svg)
skin/classic/browser/panel-icon-cancel.svg (../shared/panel-icon-cancel.svg)

View file

@ -85,30 +85,33 @@ class FontMeta extends PureComponent {
);
}
renderFontName(name) {
renderFontName(name, family) {
let options = {};
if (Services.prefs.getBoolPref(FONT_HIGHLIGHTER_PREF)) {
return dom.h1(
{
className: "font-name",
onMouseOver: this.onNameMouseOver,
onMouseOut: this.onNameMouseOut,
},
name
);
options = {
onMouseOver: this.onNameMouseOver,
onMouseOut: this.onNameMouseOut,
};
}
return dom.h1({ className: "font-name" }, name);
return dom.div(
options,
dom.div({ className: "font-family-name" }, family),
dom.div({ className: "font-name" }, name)
);
}
render() {
const {
CSSFamilyName,
name,
URI,
} = this.props.font;
return createElement(Fragment,
null,
this.renderFontName(name),
this.renderFontName(name, CSSFamilyName),
this.renderFontOrigin(URI)
);
}

View file

@ -1,7 +1,7 @@
/* vim: set ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/* global getURL */
/* global getURL, getFamilyName */
"use strict";
requestLongerTimeout(2);
@ -38,18 +38,44 @@ async function testBodyFonts(inspector, viewDoc) {
}
async function testDivFonts(inspector, viewDoc) {
await selectNode("div", inspector);
const font = {
const FONTS = [{
selector: "div",
familyName: "bar",
name: "Ostrich Sans Medium",
remote: true,
url: URL_ROOT + "ostrich-regular.ttf",
};
},
{
selector: ".normal-text",
familyName: "barnormal",
name: "Ostrich Sans Medium",
remote: true,
url: URL_ROOT + "ostrich-regular.ttf",
},
{
selector: ".bold-text",
familyName: "bar",
name: "Ostrich Sans Black",
remote: true,
url: URL_ROOT + "ostrich-black.ttf",
}, {
selector: ".black-text",
familyName: "bar",
name: "Ostrich Sans Black",
remote: true,
url: URL_ROOT + "ostrich-black.ttf",
}];
const lis = getUsedFontsEls(viewDoc);
const li = lis[0];
is(lis.length, 1, "Found 1 font on DIV");
is(getName(li), font.name, "The DIV font has the right name");
is(isRemote(li), font.remote, `font remote value correct`);
is(getURL(li), font.url, `font url correct`);
for (let i = 0; i < FONTS.length; i++) {
await selectNode(FONTS[i].selector, inspector);
const lis = getUsedFontsEls(viewDoc);
const li = lis[0];
const font = FONTS[i];
is(lis.length, 1, `Found 1 font on ${FONTS[i].selector}`);
is(getName(li), font.name, "The DIV font has the right name");
is(getFamilyName(li), font.familyName, `font has the right family name`);
is(isRemote(li), font.remote, `font remote value correct`);
is(getURL(li), font.url, `font url correct`);
}
}

View file

@ -152,3 +152,15 @@ function getName(fontEl) {
function getURL(fontEl) {
return fontEl.querySelector(".font-origin").textContent;
}
/**
* Given a font element, return its family name.
*
* @param {DOMNode} fontEl
* The font element.
* @return {String}
* The name of the font family as shown in the UI.
*/
function getFamilyName(fontEl) {
return fontEl.querySelector(".font-family-name").textContent;
}

View file

@ -49,6 +49,9 @@ const fontVariationInstance = exports.fontVariationInstance = {
* A single font.
*/
const font = exports.font = {
// Font family name
CSSFamilyName: PropTypes.string,
// The format of the font
format: PropTypes.string,

View file

@ -28,6 +28,7 @@
grid-template-columns: 1fr auto;
grid-column-gap: 10px;
padding: 10px 20px;
overflow: auto;
}
#font-container .theme-twisty {
@ -76,13 +77,23 @@
color: transparent;
}
.font-name {
margin: 0;
font-size: 1.2em;
.font-name,
.font-family-name {
font-weight: normal;
white-space: nowrap;
}
.font-name {
margin-bottom: 0.6em;
font-size: 1em;
color: var(--grey-50);
}
.font-family-name {
margin-bottom: 0.2em;
font-size: 1.2em;
}
.font-css-code {
direction: ltr;
margin: 0;

View file

@ -213,9 +213,15 @@ public:
RefPtr<MediaDataDecoder::FlushPromise> Flush() override
{
mInputInfos.Clear();
mSeekTarget.reset();
return RemoteDataDecoder::Flush();
RefPtr<RemoteVideoDecoder> self = this;
return RemoteDataDecoder::Flush()->Then(
mTaskQueue,
__func__,
[self](const FlushPromise::ResolveOrRejectValue& aValue) {
self->mInputInfos.Clear();
self->mSeekTarget.reset();
return FlushPromise::CreateAndResolveOrReject(aValue, __func__);
});
}
RefPtr<MediaDataDecoder::DecodePromise> Decode(MediaRawData* aSample) override
@ -238,7 +244,13 @@ public:
void SetSeekThreshold(const TimeUnit& aTime) override
{
mSeekTarget = Some(aTime);
RefPtr<RemoteVideoDecoder> self = this;
nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
"RemoteVideoDecoder::SetSeekThreshold",
[self, aTime]() { self->mSeekTarget = Some(aTime); });
nsresult rv = mTaskQueue->Dispatch(runnable.forget());
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
}
bool IsUsefulData(const RefPtr<MediaData>& aSample) override
@ -258,8 +270,12 @@ private:
const VideoInfo mConfig;
GeckoSurface::GlobalRef mSurface;
AndroidSurfaceTextureHandle mSurfaceHandle;
SimpleMap<InputInfo> mInputInfos;
// Only accessed on reader's task queue.
bool mIsCodecSupportAdaptivePlayback = false;
// Accessed on mTaskQueue, reader's TaskQueue and Java callback thread.
// SimpleMap however is thread-safe, so it's okay to do so.
SimpleMap<InputInfo> mInputInfos;
// Only accessed on the TaskQueue.
Maybe<TimeUnit> mSeekTarget;
};
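
The change above stops Flush() and SetSeekThreshold() from touching mInputInfos and mSeekTarget on the caller's thread; the cleanup is chained onto mTaskQueue and the seek target is written from a dispatched runnable instead. As a rough standard-library analogue of that pattern (this is not the Gecko TaskQueue/MozPromise API; SerialTaskQueue and DecoderSketch are invented names for the sketch):

#include <condition_variable>
#include <cstdint>
#include <deque>
#include <functional>
#include <mutex>
#include <optional>
#include <thread>

// Hypothetical single-threaded executor standing in for mTaskQueue.
class SerialTaskQueue {
public:
  SerialTaskQueue() : mWorker([this] { Run(); }) {}
  ~SerialTaskQueue() {
    Dispatch(nullptr);                 // empty task acts as a shutdown marker
    mWorker.join();
  }
  void Dispatch(std::function<void()> aTask) {
    {
      std::lock_guard<std::mutex> lock(mMutex);
      mTasks.push_back(std::move(aTask));
    }
    mCondVar.notify_one();
  }

private:
  void Run() {
    for (;;) {
      std::function<void()> task;
      {
        std::unique_lock<std::mutex> lock(mMutex);
        mCondVar.wait(lock, [this] { return !mTasks.empty(); });
        task = std::move(mTasks.front());
        mTasks.pop_front();
      }
      if (!task) {
        return;                        // shutdown marker reached
      }
      task();
    }
  }

  std::mutex mMutex;
  std::condition_variable mCondVar;
  std::deque<std::function<void()>> mTasks;
  std::thread mWorker;                 // declared last so the other members exist before the worker starts
};

// Hypothetical decoder-like object whose seek state lives on the queue thread.
class DecoderSketch {
public:
  // Callable from any thread; the member itself is only touched on mQueue.
  void SetSeekThreshold(int64_t aTimeUs) {
    mQueue.Dispatch([this, aTimeUs] { mSeekTarget = aTimeUs; });
  }
  void Flush() {
    mQueue.Dispatch([this] { mSeekTarget.reset(); });
  }

private:
  std::optional<int64_t> mSeekTarget;  // no lock needed: single-thread access
  SerialTaskQueue mQueue;              // destroyed (and drained) before mSeekTarget
};

int main() {
  DecoderSketch decoder;
  decoder.SetSeekThreshold(1000000);   // posted from the "caller" thread
  decoder.Flush();                     // cleanup also runs on the queue thread
}

The point of the sketch is the ownership rule rather than the queue itself: because every write goes through Dispatch(), mSeekTarget needs no lock and no atomics, which is the same property the patch relies on for the real decoder members.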

View file

@ -273,83 +273,8 @@ VRDisplayHost::SubmitFrameInternal(const layers::SurfaceDescriptor &aTexture,
#endif // !defined(MOZ_WIDGET_ANDROID)
AUTO_PROFILER_TRACING("VR", "SubmitFrameAtVRDisplayHost");
mFrameStarted = false;
switch (aTexture.type()) {
#if defined(XP_WIN)
case SurfaceDescriptor::TSurfaceDescriptorD3D10: {
if (!CreateD3DObjects()) {
return;
}
const SurfaceDescriptorD3D10& surf = aTexture.get_SurfaceDescriptorD3D10();
RefPtr<ID3D11Texture2D> dxTexture;
HRESULT hr = mDevice->OpenSharedResource((HANDLE)surf.handle(),
__uuidof(ID3D11Texture2D),
(void**)(ID3D11Texture2D**)getter_AddRefs(dxTexture));
if (FAILED(hr) || !dxTexture) {
NS_WARNING("Failed to open shared texture");
return;
}
// Similar to LockD3DTexture in TextureD3D11.cpp
RefPtr<IDXGIKeyedMutex> mutex;
dxTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
if (mutex) {
HRESULT hr = mutex->AcquireSync(0, 1000);
if (hr == WAIT_TIMEOUT) {
gfxDevCrash(LogReason::D3DLockTimeout) << "D3D lock mutex timeout";
}
else if (hr == WAIT_ABANDONED) {
gfxCriticalNote << "GFX: D3D11 lock mutex abandoned";
}
if (FAILED(hr)) {
NS_WARNING("Failed to lock the texture");
return;
}
}
bool success = SubmitFrame(dxTexture, surf.size(),
aLeftEyeRect, aRightEyeRect);
if (mutex) {
HRESULT hr = mutex->ReleaseSync(0);
if (FAILED(hr)) {
NS_WARNING("Failed to unlock the texture");
}
}
if (!success) {
return;
}
break;
}
#elif defined(XP_MACOSX)
case SurfaceDescriptor::TSurfaceDescriptorMacIOSurface: {
const auto& desc = aTexture.get_SurfaceDescriptorMacIOSurface();
RefPtr<MacIOSurface> surf = MacIOSurface::LookupSurface(desc.surfaceId(),
desc.scaleFactor(),
!desc.isOpaque());
if (!surf) {
NS_WARNING("VRDisplayHost::SubmitFrame failed to get a MacIOSurface");
return;
}
IntSize texSize = gfx::IntSize(surf->GetDevicePixelWidth(),
surf->GetDevicePixelHeight());
if (!SubmitFrame(surf, texSize, aLeftEyeRect, aRightEyeRect)) {
return;
}
break;
}
#elif defined(MOZ_WIDGET_ANDROID)
case SurfaceDescriptor::TSurfaceTextureDescriptor: {
const SurfaceTextureDescriptor& desc = aTexture.get_SurfaceTextureDescriptor();
if (!SubmitFrame(desc, aLeftEyeRect, aRightEyeRect)) {
return;
}
break;
}
#endif
default: {
NS_WARNING("Unsupported SurfaceDescriptor type for VR layer texture");
return;
}
if (!SubmitFrame(aTexture, aFrameId, aLeftEyeRect, aRightEyeRect)) {
return;
}
#if defined(XP_WIN) || defined(XP_MACOSX) || defined(MOZ_WIDGET_ANDROID)
@ -381,12 +306,6 @@ VRDisplayHost::SubmitFrame(VRLayerParent* aLayer,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
#if !defined(MOZ_WIDGET_ANDROID)
if (!mSubmitThread) {
mSubmitThread = new VRThread(NS_LITERAL_CSTRING("VR_SubmitFrame"));
}
#endif // !defined(MOZ_WIDGET_ANDROID)
if ((mDisplayInfo.mGroupMask & aLayer->GetGroup()) == 0) {
// Suppress layers hidden by the group mask
return;
@ -397,12 +316,18 @@ VRDisplayHost::SubmitFrame(VRLayerParent* aLayer,
return;
}
mFrameStarted = false;
RefPtr<Runnable> submit =
NewRunnableMethod<StoreCopyPassByConstLRef<layers::SurfaceDescriptor>, uint64_t,
StoreCopyPassByConstLRef<gfx::Rect>, StoreCopyPassByConstLRef<gfx::Rect>>(
"gfx::VRDisplayHost::SubmitFrameInternal", this, &VRDisplayHost::SubmitFrameInternal,
aTexture, aFrameId, aLeftEyeRect, aRightEyeRect);
#if !defined(MOZ_WIDGET_ANDROID)
if (!mSubmitThread) {
mSubmitThread = new VRThread(NS_LITERAL_CSTRING("VR_SubmitFrame"));
}
mSubmitThread->Start();
mSubmitThread->PostTask(submit.forget());
#else

View file

@ -72,25 +72,13 @@ protected:
explicit VRDisplayHost(VRDeviceType aType);
virtual ~VRDisplayHost();
#if defined(XP_WIN)
// Subclasses should override this SubmitFrame function.
// Returns true if the SubmitFrame call will block as necessary
// to control timing of the next frame and throttle the render loop
// for the needed framerate.
virtual bool SubmitFrame(ID3D11Texture2D* aSource,
const IntSize& aSize,
// This SubmitFrame() must be overridden by children and block until
// the next frame is ready to start and the resources in aTexture can
// safely be released.
virtual bool SubmitFrame(const layers::SurfaceDescriptor& aTexture,
uint64_t aFrameId,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#elif defined(XP_MACOSX)
virtual bool SubmitFrame(MacIOSurface* aMacIOSurface,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#elif defined(MOZ_WIDGET_ANDROID)
virtual bool SubmitFrame(const mozilla::layers::SurfaceTextureDescriptor& aSurface,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#endif
VRDisplayInfo mDisplayInfo;

gfx/vr/VRDisplayLocal.cpp (new file, 132 lines)
View file

@ -0,0 +1,132 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VRDisplayLocal.h"
#include "gfxPrefs.h"
#include "gfxVR.h"
#include "ipc/VRLayerParent.h"
#include "mozilla/layers/TextureHost.h"
#include "mozilla/dom/GamepadBinding.h" // For GamepadMappingType
#include "VRThread.h"
#if defined(XP_WIN)
#include <d3d11.h>
#include "gfxWindowsPlatform.h"
#include "../layers/d3d11/CompositorD3D11.h"
#include "mozilla/gfx/DeviceManagerDx.h"
#include "mozilla/layers/TextureD3D11.h"
#elif defined(XP_MACOSX)
#include "mozilla/gfx/MacIOSurface.h"
#endif
#if defined(MOZ_WIDGET_ANDROID)
#include "mozilla/layers/CompositorThread.h"
#endif // defined(MOZ_WIDGET_ANDROID)
using namespace mozilla;
using namespace mozilla::gfx;
using namespace mozilla::layers;
VRDisplayLocal::VRDisplayLocal(VRDeviceType aType)
: VRDisplayHost(aType)
{
MOZ_COUNT_CTOR_INHERITED(VRDisplayLocal, VRDisplayHost);
}
VRDisplayLocal::~VRDisplayLocal()
{
MOZ_COUNT_DTOR_INHERITED(VRDisplayLocal, VRDisplayHost);
}
bool
VRDisplayLocal::SubmitFrame(const layers::SurfaceDescriptor &aTexture,
uint64_t aFrameId,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
#if !defined(MOZ_WIDGET_ANDROID)
MOZ_ASSERT(mSubmitThread->GetThread() == NS_GetCurrentThread());
#endif // !defined(MOZ_WIDGET_ANDROID)
switch (aTexture.type()) {
#if defined(XP_WIN)
case SurfaceDescriptor::TSurfaceDescriptorD3D10: {
if (!CreateD3DObjects()) {
return false;
}
const SurfaceDescriptorD3D10& surf = aTexture.get_SurfaceDescriptorD3D10();
RefPtr<ID3D11Texture2D> dxTexture;
HRESULT hr = mDevice->OpenSharedResource((HANDLE)surf.handle(),
__uuidof(ID3D11Texture2D),
(void**)(ID3D11Texture2D**)getter_AddRefs(dxTexture));
if (FAILED(hr) || !dxTexture) {
NS_WARNING("Failed to open shared texture");
return false;
}
// Similar to LockD3DTexture in TextureD3D11.cpp
RefPtr<IDXGIKeyedMutex> mutex;
dxTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
if (mutex) {
HRESULT hr = mutex->AcquireSync(0, 1000);
if (hr == WAIT_TIMEOUT) {
gfxDevCrash(LogReason::D3DLockTimeout) << "D3D lock mutex timeout";
}
else if (hr == WAIT_ABANDONED) {
gfxCriticalNote << "GFX: D3D11 lock mutex abandoned";
}
if (FAILED(hr)) {
NS_WARNING("Failed to lock the texture");
return false;
}
}
bool success = SubmitFrame(dxTexture, surf.size(),
aLeftEyeRect, aRightEyeRect);
if (mutex) {
HRESULT hr = mutex->ReleaseSync(0);
if (FAILED(hr)) {
NS_WARNING("Failed to unlock the texture");
}
}
return success;
}
#elif defined(XP_MACOSX)
case SurfaceDescriptor::TSurfaceDescriptorMacIOSurface: {
const auto& desc = aTexture.get_SurfaceDescriptorMacIOSurface();
RefPtr<MacIOSurface> surf = MacIOSurface::LookupSurface(desc.surfaceId(),
desc.scaleFactor(),
!desc.isOpaque());
if (!surf) {
NS_WARNING("VRDisplayHost::SubmitFrame failed to get a MacIOSurface");
return false;
}
IntSize texSize = gfx::IntSize(surf->GetDevicePixelWidth(),
surf->GetDevicePixelHeight());
if (!SubmitFrame(surf, texSize, aLeftEyeRect, aRightEyeRect)) {
return false;
}
return true;
}
#elif defined(MOZ_WIDGET_ANDROID)
case SurfaceDescriptor::TSurfaceTextureDescriptor: {
const SurfaceTextureDescriptor& desc = aTexture.get_SurfaceTextureDescriptor();
if (!SubmitFrame(desc, aLeftEyeRect, aRightEyeRect)) {
return false;
}
return true;
}
#endif
default: {
NS_WARNING("Unsupported SurfaceDescriptor type for VR layer texture");
return false;
}
}
}

gfx/vr/VRDisplayLocal.h (new file, 61 lines)
View file

@ -0,0 +1,61 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GFX_VR_DISPLAY_LOCAL_H
#define GFX_VR_DISPLAY_LOCAL_H
#include "gfxVR.h"
#include "VRDisplayHost.h"
#if defined(XP_WIN)
#include <d3d11_1.h>
#elif defined(XP_MACOSX)
class MacIOSurface;
#endif
namespace mozilla {
namespace gfx {
class VRThread;
class VRDisplayLocal : public VRDisplayHost
{
public:
#if defined(XP_WIN)
// Subclasses should override this SubmitFrame function.
// Returns true if the SubmitFrame call will block as necessary
// to control timing of the next frame and throttle the render loop
// for the needed framerate.
virtual bool SubmitFrame(ID3D11Texture2D* aSource,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#elif defined(XP_MACOSX)
virtual bool SubmitFrame(MacIOSurface* aMacIOSurface,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#elif defined(MOZ_WIDGET_ANDROID)
virtual bool SubmitFrame(const mozilla::layers::SurfaceTextureDescriptor& aSurface,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) = 0;
#endif
protected:
explicit VRDisplayLocal(VRDeviceType aType);
virtual ~VRDisplayLocal();
private:
bool SubmitFrame(const layers::SurfaceDescriptor& aTexture,
uint64_t aFrameId,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) final;
};
} // namespace gfx
} // namespace mozilla
#endif // GFX_VR_DISPLAY_LOCAL_H

View file

@ -231,6 +231,12 @@ VRManager::CheckForInactiveTimeout()
TimeDuration duration = TimeStamp::Now() - mLastActiveTime;
if (duration.ToMilliseconds() > gfxPrefs::VRInactiveTimeout()) {
Shutdown();
// We must not throttle the next enumeration request
// after an idle timeout, as it may result in the
// user needing to refresh the browser to detect
// VR hardware when leaving and returning to a VR
// site.
mLastDisplayEnumerationTime = TimeStamp();
}
}
}
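
The comment added above spells out the reasoning: after an inactivity shutdown, mLastDisplayEnumerationTime is cleared so the next enumeration request is not throttled and the user does not have to reload the page for VR hardware to be re-detected. A minimal sketch of that throttle-and-reset idea, using std::chrono rather than the Gecko TimeStamp class (EnumerationThrottle and its method names are invented):

#include <chrono>
#include <optional>

// Hypothetical helper mirroring the throttle on display enumeration.
class EnumerationThrottle {
  using Clock = std::chrono::steady_clock;

public:
  explicit EnumerationThrottle(std::chrono::milliseconds aCooldown)
    : mCooldown(aCooldown) {}

  // Returns true if a fresh enumeration should run now.
  bool ShouldEnumerate() {
    const auto now = Clock::now();
    if (mLastEnumeration && now - *mLastEnumeration < mCooldown) {
      return false;                    // still inside the cooldown window
    }
    mLastEnumeration = now;
    return true;
  }

  // Counterpart of "mLastDisplayEnumerationTime = TimeStamp();" above:
  // with no remembered time, the next ShouldEnumerate() passes immediately.
  void ResetAfterIdleShutdown() { mLastEnumeration.reset(); }

private:
  std::chrono::milliseconds mCooldown;
  std::optional<Clock::time_point> mLastEnumeration;
};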

View file

@ -235,6 +235,8 @@ struct VRDisplayState
FloatSize_POD mStageSize;
// We can't use a Matrix4x4 here unless we ensure it's a POD type
float mSittingToStandingTransform[16];
uint64_t mLastSubmittedFrameId;
bool mLastSubmittedFrameSuccessful;
uint32_t mPresentingGeneration;
};
@ -274,9 +276,8 @@ enum class VRLayerType : uint16_t {
enum class VRLayerTextureType : uint16_t {
LayerTextureType_None = 0,
LayerTextureType_DirectX = 1,
LayerTextureType_OpenGL = 2,
LayerTextureType_Vulkan = 3
LayerTextureType_D3D10SurfaceDescriptor = 1,
LayerTextureType_MacIOSurface = 2
};
struct VRLayer_2D_Content
@ -291,6 +292,7 @@ struct VRLayer_Stereo_Immersive
void* mTextureHandle;
VRLayerTextureType mTextureType;
uint64_t mFrameId;
uint64_t mInputFrameId;
VRLayerEyeRect mLeftEyeRect;
VRLayerEyeRect mRightEyeRect;
};
@ -315,6 +317,7 @@ struct VRBrowserState
struct VRSystemState
{
uint32_t presentingGeneration;
bool enumerationCompleted;
VRDisplayState displayState;
VRHMDSensorState sensorState;
VRControllerState controllerState[kVRControllerMaxCount];

View file

@ -102,8 +102,6 @@ VRDisplayExternal::GetSensorState()
manager->PullState(&mDisplayInfo.mDisplayState, &mLastSensorState);
// result.CalcViewMatrices(headToEyeTransforms);
mLastSensorState.inputFrameID = mDisplayInfo.mFrameId;
return mLastSensorState;
}
@ -117,7 +115,15 @@ VRDisplayExternal::StartPresentation()
mTelemetry.Clear();
mTelemetry.mPresentationStart = TimeStamp::Now();
// TODO - Implement this
// Indicate that we are ready to start immersive mode
VRBrowserState state;
memset(&state, 0, sizeof(VRBrowserState));
state.layerState[0].type = VRLayerType::LayerType_Stereo_Immersive;
VRManager *vm = VRManager::Get();
VRSystemManagerExternal* manager = vm->GetExternalManager();
manager->PushState(&state);
// TODO - Implement telemetry:
// mTelemetry.mLastDroppedFrameCount = stats.m_nNumReprojectedFrames;
}
@ -128,11 +134,18 @@ VRDisplayExternal::StopPresentation()
if (!mIsPresenting) {
return;
}
mIsPresenting = false;
// TODO - Implement this
// Indicate that we have stopped immersive mode
VRBrowserState state;
memset(&state, 0, sizeof(VRBrowserState));
VRManager *vm = VRManager::Get();
VRSystemManagerExternal* manager = vm->GetExternalManager();
manager->PushState(&state);
// TODO - Implement telemetry:
/*
mIsPresenting = false;
const TimeDuration duration = TimeStamp::Now() - mTelemetry.mPresentationStart;
Telemetry::Accumulate(Telemetry::WEBVR_USERS_VIEW_IN, 2);
Telemetry::Accumulate(Telemetry::WEBVR_TIME_SPENT_VIEWING_IN_OPENVR,
@ -146,47 +159,88 @@ VRDisplayExternal::StopPresentation()
*/
}
bool
VRDisplayExternal::PopulateLayerTexture(const layers::SurfaceDescriptor& aTexture,
VRLayerTextureType* aTextureType,
void** aTextureHandle)
{
switch (aTexture.type()) {
#if defined(XP_WIN)
bool
VRDisplayExternal::SubmitFrame(ID3D11Texture2D* aSource,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
// FINDME! Implement this
return false;
}
case SurfaceDescriptor::TSurfaceDescriptorD3D10: {
const SurfaceDescriptorD3D10& surf = aTexture.get_SurfaceDescriptorD3D10();
*aTextureType = VRLayerTextureType::LayerTextureType_D3D10SurfaceDescriptor;
*aTextureHandle = (void *)surf.handle();
return true;
}
#elif defined(XP_MACOSX)
bool
VRDisplayExternal::SubmitFrame(MacIOSurface* aMacIOSurface,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
const void* ioSurface = aMacIOSurface->GetIOSurfacePtr();
bool result = false;
if (ioSurface == nullptr) {
NS_WARNING("VRDisplayExternal::SubmitFrame() could not get an IOSurface");
} else {
// FINDME! Implement this
}
return result;
}
#elif defined(MOZ_WIDGET_ANDROID)
bool
VRDisplayExternal::SubmitFrame(const layers::SurfaceTextureDescriptor& aSurface,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) {
return false;
}
case SurfaceDescriptor::TSurfaceDescriptorMacIOSurface: {
const auto& desc = aTexture.get_SurfaceDescriptorMacIOSurface();
RefPtr<MacIOSurface> surf = MacIOSurface::LookupSurface(desc.surfaceId(),
desc.scaleFactor(),
!desc.isOpaque());
if (!surf) {
NS_WARNING("VRDisplayHost::SubmitFrame failed to get a MacIOSurface");
return false;
}
*aTextureType = VRLayerTextureType::LayerTextureType_MacIOSurface;
*aTextureHandle = (void *)surf->GetIOSurfacePtr();
return true;
}
#endif
default: {
MOZ_ASSERT(false);
return false;
}
}
}
bool
VRDisplayExternal::SubmitFrame(const layers::SurfaceDescriptor& aTexture,
uint64_t aFrameId,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
VRBrowserState state;
memset(&state, 0, sizeof(VRBrowserState));
state.layerState[0].type = VRLayerType::LayerType_Stereo_Immersive;
VRLayer_Stereo_Immersive& layer = state.layerState[0].layer_stereo_immersive;
if (!PopulateLayerTexture(aTexture, &layer.mTextureType, &layer.mTextureHandle)) {
return false;
}
layer.mFrameId = aFrameId;
layer.mInputFrameId = mDisplayInfo.mLastSensorState[mDisplayInfo.mFrameId % kVRMaxLatencyFrames].inputFrameID;
layer.mLeftEyeRect.x = aLeftEyeRect.x;
layer.mLeftEyeRect.y = aLeftEyeRect.y;
layer.mLeftEyeRect.width = aLeftEyeRect.width;
layer.mLeftEyeRect.height = aLeftEyeRect.height;
layer.mRightEyeRect.x = aRightEyeRect.x;
layer.mRightEyeRect.y = aRightEyeRect.y;
layer.mRightEyeRect.width = aRightEyeRect.width;
layer.mRightEyeRect.height = aRightEyeRect.height;
VRManager *vm = VRManager::Get();
VRSystemManagerExternal* manager = vm->GetExternalManager();
manager->PushState(&state);
VRDisplayState displayState;
memset(&displayState, 0, sizeof(VRDisplayState));
while (displayState.mLastSubmittedFrameId < aFrameId) {
if (manager->PullState(&displayState)) {
if (!displayState.mIsConnected) {
// Service has shut down or hardware has been disconnected
return false;
}
}
#ifdef XP_WIN
Sleep(0);
#else
sleep(0);
#endif
}
return displayState.mLastSubmittedFrameSuccessful;
}
VRControllerExternal::VRControllerExternal(dom::GamepadHand aHand, uint32_t aDisplayID,
uint32_t aNumButtons, uint32_t aNumTriggers,
@ -208,8 +262,9 @@ VRControllerExternal::~VRControllerExternal()
MOZ_COUNT_DTOR_INHERITED(VRControllerExternal, VRControllerHost);
}
VRSystemManagerExternal::VRSystemManagerExternal()
: mExternalShmem(nullptr)
VRSystemManagerExternal::VRSystemManagerExternal(VRExternalShmem* aAPIShmem /* = nullptr*/)
: mExternalShmem(aAPIShmem)
, mSameProcess(aAPIShmem != nullptr)
{
#if defined(XP_MACOSX)
mShmemFD = 0;
@ -323,6 +378,9 @@ VRSystemManagerExternal::CheckForShutdown()
void
VRSystemManagerExternal::CloseShmem()
{
if (mSameProcess) {
return;
}
#if defined(XP_MACOSX)
if (mExternalShmem) {
munmap((void *)mExternalShmem, sizeof(VRExternalShmem));
@ -347,15 +405,19 @@ VRSystemManagerExternal::CloseShmem()
}
/*static*/ already_AddRefed<VRSystemManagerExternal>
VRSystemManagerExternal::Create()
VRSystemManagerExternal::Create(VRExternalShmem* aAPIShmem /* = nullptr*/)
{
MOZ_ASSERT(NS_IsMainThread());
if (!gfxPrefs::VREnabled() || !gfxPrefs::VRExternalEnabled()) {
if (!gfxPrefs::VREnabled()) {
return nullptr;
}
RefPtr<VRSystemManagerExternal> manager = new VRSystemManagerExternal();
if (!gfxPrefs::VRExternalEnabled() && aAPIShmem == nullptr) {
return nullptr;
}
RefPtr<VRSystemManagerExternal> manager = new VRSystemManagerExternal(aAPIShmem);
return manager.forget();
}
@ -397,7 +459,17 @@ VRSystemManagerExternal::Enumerate()
OpenShmem();
if (mExternalShmem) {
VRDisplayState displayState;
PullState(&displayState);
memset(&displayState, 0, sizeof(VRDisplayState));
// We must block until enumeration has completed in order
// to signal that the WebVR promise should be resolved at the
// right time.
while (!PullState(&displayState)) {
#ifdef XP_WIN
Sleep(0);
#else
sleep(0);
#endif
}
if (displayState.mIsConnected) {
mDisplay = new VRDisplayExternal(displayState);
}
@ -488,9 +560,10 @@ VRSystemManagerExternal::RemoveControllers()
mControllerCount = 0;
}
void
bool
VRSystemManagerExternal::PullState(VRDisplayState* aDisplayState, VRHMDSensorState* aSensorState /* = nullptr */)
{
bool success = false;
MOZ_ASSERT(mExternalShmem);
if (mExternalShmem) {
#if defined(MOZ_WIDGET_ANDROID)
@ -501,16 +574,39 @@ VRSystemManagerExternal::PullState(VRDisplayState* aDisplayState, VRHMDSensorSta
}
pthread_mutex_unlock((pthread_mutex_t*)&(mExternalShmem->systemMutex));
mDoShutdown = aDisplayState->shutdown;
success = mExternalShmem->state.enumerationCompleted;
}
#else
VRExternalShmem tmp;
memcpy(&tmp, (void *)mExternalShmem, sizeof(VRExternalShmem));
if (tmp.generationA == tmp.generationB && tmp.generationA != 0 && tmp.generationA != -1) {
if (tmp.generationA == tmp.generationB && tmp.generationA != 0 && tmp.generationA != -1 && tmp.state.enumerationCompleted) {
memcpy(aDisplayState, &tmp.state.displayState, sizeof(VRDisplayState));
if (aSensorState) {
memcpy(aSensorState, &tmp.state.sensorState, sizeof(VRHMDSensorState));
}
success = true;
}
#endif // defined(MOZ_WIDGET_ANDROID)
}
return success;
}
void
VRSystemManagerExternal::PushState(VRBrowserState* aBrowserState)
{
MOZ_ASSERT(aBrowserState);
MOZ_ASSERT(mExternalShmem);
if (mExternalShmem) {
#if defined(MOZ_WIDGET_ANDROID)
if (pthread_mutex_lock((pthread_mutex_t*)&(mExternalShmem->browserMutex)) == 0) {
memcpy((void *)&(mExternalShmem->browserState), aBrowserState, sizeof(VRBrowserState));
pthread_mutex_unlock((pthread_mutex_t*)&(mExternalShmem->browserMutex));
}
#else
mExternalShmem->browserGenerationA++;
memcpy((void *)&(mExternalShmem->browserState), (void *)aBrowserState, sizeof(VRBrowserState));
mExternalShmem->browserGenerationB++;
#endif // defined(MOZ_WIDGET_ANDROID)
}
}
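
Taken together, the PushState()/PullState() changes above implement a small shared-memory protocol: the writer publishes state between two generation counters, the reader only accepts a snapshot when the counters agree and enumerationCompleted is set, and SubmitFrame()/Enumerate() spin on PullState() with Sleep(0)/sleep(0) until the external VR service catches up. Below is a rough, self-contained sketch of that handshake under invented type and field names (it is not the real VRExternalShmem layout, and a production seqlock needs more care with the C++ memory model than shown here):

#include <atomic>
#include <cstdint>
#include <thread>

struct DisplaySnapshot {                      // hypothetical payload
  uint64_t lastSubmittedFrameId = 0;
  bool lastSubmittedFrameSuccessful = false;
  bool enumerationCompleted = false;
  bool connected = false;
};

struct SharedBlock {                          // hypothetical shared memory block
  std::atomic<uint64_t> generationA{0};
  std::atomic<uint64_t> generationB{0};
  DisplaySnapshot state;                      // written only by the VR service
};

// Writer side (VR service process): publish a new snapshot.
void PushState(SharedBlock& aShmem, const DisplaySnapshot& aState) {
  aShmem.generationA.fetch_add(1, std::memory_order_release);
  aShmem.state = aState;
  aShmem.generationB.fetch_add(1, std::memory_order_release);
  // Ordering simplified; a real seqlock needs fences around the payload write.
}

// Reader side (browser): true once a consistent, enumerated snapshot is copied.
bool PullState(const SharedBlock& aShmem, DisplaySnapshot* aOut) {
  // Sample one counter before the copy and the other after it, so any
  // overlapping write (which bumps generationA first) is detected.
  const uint64_t before = aShmem.generationB.load(std::memory_order_acquire);
  const DisplaySnapshot copy = aShmem.state;  // may be torn; validated below
  const uint64_t after = aShmem.generationA.load(std::memory_order_acquire);
  if (before == 0 || before != after || !copy.enumerationCompleted) {
    return false;                             // mid-write or not enumerated yet
  }
  *aOut = copy;
  return true;
}

// Poll until the service acknowledges aFrameId, yielding like Sleep(0)/sleep(0).
bool WaitForFrameAck(const SharedBlock& aShmem, uint64_t aFrameId) {
  DisplaySnapshot snapshot;
  while (snapshot.lastSubmittedFrameId < aFrameId) {
    if (PullState(aShmem, &snapshot) && !snapshot.connected) {
      return false;                           // service gone or HMD disconnected
    }
    std::this_thread::yield();                // stand-in for Sleep(0)/sleep(0)
  }
  return snapshot.lastSubmittedFrameSuccessful;
}

The two-counter trick is what lets the desktop path read the service's state without a cross-process mutex, while the Android path in the patch keeps using pthread mutexes inside the shared block.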

View file

@ -33,24 +33,14 @@ public:
void ZeroSensor() override;
protected:
virtual VRHMDSensorState GetSensorState() override;
virtual void StartPresentation() override;
virtual void StopPresentation() override;
#if defined(XP_WIN)
virtual bool SubmitFrame(ID3D11Texture2D* aSource,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) override;
#elif defined(XP_MACOSX)
virtual bool SubmitFrame(MacIOSurface* aMacIOSurface,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) override;
#elif defined(MOZ_WIDGET_ANDROID)
bool SubmitFrame(const layers::SurfaceTextureDescriptor& aSurface,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) override;
#endif
VRHMDSensorState GetSensorState() override;
void StartPresentation() override;
void StopPresentation() override;
bool SubmitFrame(const layers::SurfaceDescriptor& aTexture,
uint64_t aFrameId,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect) override;
public:
explicit VRDisplayExternal(const VRDisplayState& aDisplayState);
@ -59,6 +49,11 @@ protected:
virtual ~VRDisplayExternal();
void Destroy();
private:
bool PopulateLayerTexture(const layers::SurfaceDescriptor& aTexture,
VRLayerTextureType* aTextureType,
void** aTextureHandle);
VRTelemetry mTelemetry;
bool mIsPresenting;
VRHMDSensorState mLastSensorState;
@ -80,7 +75,7 @@ protected:
class VRSystemManagerExternal : public VRSystemManager
{
public:
static already_AddRefed<VRSystemManagerExternal> Create();
static already_AddRefed<VRSystemManagerExternal> Create(VRExternalShmem* aAPIShmem = nullptr);
virtual void Destroy() override;
virtual void Shutdown() override;
@ -100,10 +95,11 @@ public:
double aDuration,
const VRManagerPromise& aPromise) override;
virtual void StopVibrateHaptic(uint32_t aControllerIdx) override;
void PullState(VRDisplayState* aDisplayState, VRHMDSensorState* aSensorState = nullptr);
bool PullState(VRDisplayState* aDisplayState, VRHMDSensorState* aSensorState = nullptr);
void PushState(VRBrowserState* aBrowserState);
protected:
VRSystemManagerExternal();
explicit VRSystemManagerExternal(VRExternalShmem* aAPIShmem = nullptr);
virtual ~VRSystemManagerExternal();
private:
@ -120,6 +116,7 @@ private:
#endif
volatile VRExternalShmem* mExternalShmem;
bool mSameProcess;
void OpenShmem();
void CloseShmem();

View file

@ -211,13 +211,13 @@ SetFromTanRadians(double left, double right, double bottom, double top)
VRDisplayOSVR::VRDisplayOSVR(OSVR_ClientContext* context,
OSVR_ClientInterface* iface,
OSVR_DisplayConfig* display)
: VRDisplayHost(VRDeviceType::OSVR)
: VRDisplayLocal(VRDeviceType::OSVR)
, m_ctx(context)
, m_iface(iface)
, m_display(display)
{
MOZ_COUNT_CTOR_INHERITED(VRDisplayOSVR, VRDisplayHost);
MOZ_COUNT_CTOR_INHERITED(VRDisplayOSVR, VRDisplayLocal);
VRDisplayState& state = mDisplayInfo.mDisplayState;
state.mIsConnected = true;

View file

@ -14,7 +14,7 @@
#include "mozilla/gfx/2D.h"
#include "mozilla/EnumeratedArray.h"
#include "VRDisplayHost.h"
#include "VRDisplayLocal.h"
#include <osvr/ClientKit/ClientKitC.h>
#include <osvr/ClientKit/DisplayC.h>
@ -26,7 +26,7 @@ namespace mozilla {
namespace gfx {
namespace impl {
class VRDisplayOSVR : public VRDisplayHost
class VRDisplayOSVR : public VRDisplayLocal
{
public:
void ZeroSensor() override;
@ -57,7 +57,7 @@ protected:
virtual ~VRDisplayOSVR()
{
Destroy();
MOZ_COUNT_DTOR_INHERITED(VRDisplayOSVR, VRDisplayHost);
MOZ_COUNT_DTOR_INHERITED(VRDisplayOSVR, VRDisplayLocal);
}
void Destroy();

View file

@ -795,7 +795,7 @@ VROculusSession::UnloadOvrLib()
}
VRDisplayOculus::VRDisplayOculus(VROculusSession* aSession)
: VRDisplayHost(VRDeviceType::Oculus)
: VRDisplayLocal(VRDeviceType::Oculus)
, mSession(aSession)
, mQuadVS(nullptr)
, mQuadPS(nullptr)
@ -806,7 +806,7 @@ VRDisplayOculus::VRDisplayOculus(VROculusSession* aSession)
, mInputLayout(nullptr)
, mEyeHeight(OVR_DEFAULT_EYE_HEIGHT)
{
MOZ_COUNT_CTOR_INHERITED(VRDisplayOculus, VRDisplayHost);
MOZ_COUNT_CTOR_INHERITED(VRDisplayOculus, VRDisplayLocal);
VRDisplayState& state = mDisplayInfo.mDisplayState;
strncpy(state.mDisplayName, "Oculus VR HMD", kVRDisplayNameMaxLen);
state.mIsConnected = true;
@ -852,7 +852,7 @@ VRDisplayOculus::VRDisplayOculus(VROculusSession* aSession)
VRDisplayOculus::~VRDisplayOculus() {
Destroy();
MOZ_COUNT_DTOR_INHERITED(VRDisplayOculus, VRDisplayHost);
MOZ_COUNT_DTOR_INHERITED(VRDisplayOculus, VRDisplayLocal);
}
void

View file

@ -15,7 +15,7 @@
#include "mozilla/EnumeratedArray.h"
#include "gfxVR.h"
#include "VRDisplayHost.h"
#include "VRDisplayLocal.h"
#include "ovr_capi_dynamic.h"
struct ID3D11Device;
@ -90,7 +90,7 @@ private:
void StopRendering();
};
class VRDisplayOculus : public VRDisplayHost
class VRDisplayOculus : public VRDisplayLocal
{
public:
void ZeroSensor() override;

View file

@ -50,13 +50,13 @@ static const uint32_t kNumOpenVRHaptcs = 1;
VRDisplayOpenVR::VRDisplayOpenVR(::vr::IVRSystem *aVRSystem,
::vr::IVRChaperone *aVRChaperone,
::vr::IVRCompositor *aVRCompositor)
: VRDisplayHost(VRDeviceType::OpenVR)
: VRDisplayLocal(VRDeviceType::OpenVR)
, mVRSystem(aVRSystem)
, mVRChaperone(aVRChaperone)
, mVRCompositor(aVRCompositor)
, mIsPresenting(false)
{
MOZ_COUNT_CTOR_INHERITED(VRDisplayOpenVR, VRDisplayHost);
MOZ_COUNT_CTOR_INHERITED(VRDisplayOpenVR, VRDisplayLocal);
VRDisplayState& state = mDisplayInfo.mDisplayState;
@ -98,7 +98,7 @@ VRDisplayOpenVR::VRDisplayOpenVR(::vr::IVRSystem *aVRSystem,
VRDisplayOpenVR::~VRDisplayOpenVR()
{
Destroy();
MOZ_COUNT_DTOR_INHERITED(VRDisplayOpenVR, VRDisplayHost);
MOZ_COUNT_DTOR_INHERITED(VRDisplayOpenVR, VRDisplayLocal);
}
void
@ -350,11 +350,11 @@ VRDisplayOpenVR::StopPresentation()
}
bool
VRDisplayOpenVR::SubmitFrame(void* aTextureHandle,
::vr::ETextureType aTextureType,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
VRDisplayOpenVR::SubmitFrameOpenVRHandle(void* aTextureHandle,
::vr::ETextureType aTextureType,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
MOZ_ASSERT(mSubmitThread->GetThread() == NS_GetCurrentThread());
if (!mIsPresenting) {
@ -400,9 +400,9 @@ VRDisplayOpenVR::SubmitFrame(ID3D11Texture2D* aSource,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect)
{
return SubmitFrame((void *)aSource,
::vr::ETextureType::TextureType_DirectX,
aSize, aLeftEyeRect, aRightEyeRect);
return SubmitFrameOpenVRHandle((void *)aSource,
::vr::ETextureType::TextureType_DirectX,
aSize, aLeftEyeRect, aRightEyeRect);
}
#elif defined(XP_MACOSX)
@ -418,9 +418,9 @@ VRDisplayOpenVR::SubmitFrame(MacIOSurface* aMacIOSurface,
if (ioSurface == nullptr) {
NS_WARNING("VRDisplayOpenVR::SubmitFrame() could not get an IOSurface");
} else {
result = SubmitFrame((void *)ioSurface,
::vr::ETextureType::TextureType_IOSurface,
aSize, aLeftEyeRect, aRightEyeRect);
result = SubmitFrameOpenVRHandle((void *)ioSurface,
::vr::ETextureType::TextureType_IOSurface,
aSize, aLeftEyeRect, aRightEyeRect);
}
return result;
}

View file

@ -17,7 +17,7 @@
#include "openvr.h"
#include "gfxVR.h"
#include "VRDisplayHost.h"
#include "VRDisplayLocal.h"
#if defined(XP_MACOSX)
class MacIOSurface;
@ -28,7 +28,7 @@ class VRThread;
namespace impl {
class VRDisplayOpenVR : public VRDisplayHost
class VRDisplayOpenVR : public VRDisplayLocal
{
public:
void ZeroSensor() override;
@ -70,11 +70,11 @@ protected:
void UpdateStageParameters();
void UpdateEyeParameters(gfx::Matrix4x4* aHeadToEyeTransforms = nullptr);
bool SubmitFrame(void* aTextureHandle,
::vr::ETextureType aTextureType,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect);
bool SubmitFrameOpenVRHandle(void* aTextureHandle,
::vr::ETextureType aTextureType,
const IntSize& aSize,
const gfx::Rect& aLeftEyeRect,
const gfx::Rect& aRightEyeRect);
};
class VRControllerOpenVR : public VRControllerHost

View file

@ -50,11 +50,11 @@ static const uint32_t kNumPuppetAxis = 3;
static const uint32_t kNumPuppetHaptcs = 1;
VRDisplayPuppet::VRDisplayPuppet()
: VRDisplayHost(VRDeviceType::Puppet)
: VRDisplayLocal(VRDeviceType::Puppet)
, mIsPresenting(false)
, mSensorState{}
{
MOZ_COUNT_CTOR_INHERITED(VRDisplayPuppet, VRDisplayHost);
MOZ_COUNT_CTOR_INHERITED(VRDisplayPuppet, VRDisplayLocal);
VRDisplayState& state = mDisplayInfo.mDisplayState;
strncpy(state.mDisplayName, "Puppet HMD", kVRDisplayNameMaxLen);
@ -123,7 +123,7 @@ VRDisplayPuppet::VRDisplayPuppet()
VRDisplayPuppet::~VRDisplayPuppet()
{
MOZ_COUNT_DTOR_INHERITED(VRDisplayPuppet, VRDisplayHost);
MOZ_COUNT_DTOR_INHERITED(VRDisplayPuppet, VRDisplayLocal);
}
void

View file

@ -12,7 +12,7 @@
#include "nsRefPtrHashtable.h"
#include "gfxVR.h"
#include "VRDisplayHost.h"
#include "VRDisplayLocal.h"
#if defined(XP_MACOSX)
class MacIOSurface;
@ -21,7 +21,7 @@ namespace mozilla {
namespace gfx {
namespace impl {
class VRDisplayPuppet : public VRDisplayHost
class VRDisplayPuppet : public VRDisplayLocal
{
public:
void SetDisplayInfo(const VRDisplayInfo& aDisplayInfo);

View file

@ -47,6 +47,7 @@ struct ParamTraits<mozilla::gfx::VRDisplayState>
WriteParam(aMsg, aParam.mIsMounted);
WriteParam(aMsg, aParam.mStageSize.width);
WriteParam(aMsg, aParam.mStageSize.height);
WriteParam(aMsg, aParam.mLastSubmittedFrameId);
WriteParam(aMsg, aParam.mPresentingGeneration);
for (int i = 0; i < 16; i++) {
// TODO - Should probably memcpy the whole array or
@ -73,6 +74,7 @@ struct ParamTraits<mozilla::gfx::VRDisplayState>
!ReadParam(aMsg, aIter, &(aResult->mIsMounted)) ||
!ReadParam(aMsg, aIter, &(aResult->mStageSize.width)) ||
!ReadParam(aMsg, aIter, &(aResult->mStageSize.height)) ||
!ReadParam(aMsg, aIter, &(aResult->mLastSubmittedFrameId)) ||
!ReadParam(aMsg, aIter, &(aResult->mPresentingGeneration))) {
return false;
}

View file

@ -47,6 +47,7 @@ SOURCES += [
'gfxVRExternal.cpp',
'gfxVRPuppet.cpp',
'VRDisplayHost.cpp',
'VRDisplayLocal.cpp',
]
# Build OpenVR on Windows, Linux, and macOS desktop targets

View file

@ -6,8 +6,10 @@
// For edges, the colors are the same. For corners, these
// are the colors of each edge making up the corner.
flat varying vec4 vColor0[2];
flat varying vec4 vColor1[2];
flat varying vec4 vColor00;
flat varying vec4 vColor01;
flat varying vec4 vColor10;
flat varying vec4 vColor11;
// A point + tangent defining the line where the edge
// transition occurs. Used for corners only.
@ -181,8 +183,12 @@ void main(void) {
vPartialWidths = vec4(aWidths / 3.0, aWidths / 2.0);
vPos = aRect.zw * aPosition.xy;
vColor0 = get_colors_for_side(aColor0, style0);
vColor1 = get_colors_for_side(aColor1, style1);
vec4[2] color0 = get_colors_for_side(aColor0, style0);
vColor00 = color0[0];
vColor01 = color0[1];
vec4[2] color1 = get_colors_for_side(aColor1, style1);
vColor10 = color1[0];
vColor11 = color1[1];
vClipCenter_Sign = vec4(outer + clip_sign * aRadii, clip_sign);
vClipRadii = vec4(aRadii, max(aRadii - aWidths, 0.0));
vColorLine = vec4(outer, aWidths.y * -clip_sign.y, aWidths.x * clip_sign.x);
@ -210,7 +216,8 @@ void main(void) {
vec4 evaluate_color_for_style_in_corner(
vec2 clip_relative_pos,
int style,
vec4 color[2],
vec4 color0,
vec4 color1,
vec4 clip_radii,
float mix_factor,
int segment,
@ -234,7 +241,7 @@ vec4 evaluate_color_for_style_in_corner(
);
float d = min(-d_radii_a, d_radii_b);
float alpha = distance_aa(aa_range, d);
return alpha * color[0];
return alpha * color0;
}
case BORDER_STYLE_GROOVE:
case BORDER_STYLE_RIDGE: {
@ -252,21 +259,22 @@ vec4 evaluate_color_for_style_in_corner(
case SEGMENT_BOTTOM_LEFT: swizzled_factor = 1.0 - mix_factor; break;
default: swizzled_factor = 0.0; break;
};
vec4 c0 = mix(color[1], color[0], swizzled_factor);
vec4 c1 = mix(color[0], color[1], swizzled_factor);
vec4 c0 = mix(color1, color0, swizzled_factor);
vec4 c1 = mix(color0, color1, swizzled_factor);
return mix(c0, c1, alpha);
}
default:
break;
}
return color[0];
return color0;
}
vec4 evaluate_color_for_style_in_edge(
vec2 pos,
int style,
vec4 color[2],
vec4 color0,
vec4 color1,
float aa_range,
int edge_axis
) {
@ -284,20 +292,20 @@ vec4 evaluate_color_for_style_in_edge(
}
float d = min(d0, d1);
float alpha = distance_aa(aa_range, d);
return alpha * color[0];
return alpha * color0;
}
case BORDER_STYLE_GROOVE:
case BORDER_STYLE_RIDGE: {
float ref = vEdgeReference[edge_axis] + vPartialWidths[edge_axis+2];
float d = pos[edge_axis] - ref;
float alpha = distance_aa(aa_range, d);
return mix(color[0], color[1], alpha);
return mix(color0, color1, alpha);
}
default:
break;
}
return color[0];
return color0;
}
void main(void) {
@ -352,7 +360,8 @@ void main(void) {
color0 = evaluate_color_for_style_in_corner(
clip_relative_pos,
style.x,
vColor0,
vColor00,
vColor01,
vClipRadii,
mix_factor,
segment,
@ -361,7 +370,8 @@ void main(void) {
color1 = evaluate_color_for_style_in_corner(
clip_relative_pos,
style.y,
vColor1,
vColor10,
vColor11,
vClipRadii,
mix_factor,
segment,
@ -371,14 +381,16 @@ void main(void) {
color0 = evaluate_color_for_style_in_edge(
vPos,
style.x,
vColor0,
vColor00,
vColor01,
aa_range,
edge_axis.x
);
color1 = evaluate_color_for_style_in_edge(
vPos,
style.y,
vColor1,
vColor10,
vColor11,
aa_range,
edge_axis.y
);

Просмотреть файл

@ -59,131 +59,6 @@ RectWithSize fetch_clip_chain_rect(int index) {
return RectWithSize(rect.xy, rect.zw);
}
struct Glyph {
vec2 offset;
};
Glyph fetch_glyph(int specific_prim_address,
int glyph_index) {
// Two glyphs are packed in each texel in the GPU cache.
int glyph_address = specific_prim_address +
VECS_PER_TEXT_RUN +
glyph_index / 2;
vec4 data = fetch_from_resource_cache_1(glyph_address);
// Select XY or ZW based on glyph index.
// We use "!= 0" instead of "== 1" here in order to work around a driver
// bug with equality comparisons on integers.
vec2 glyph = mix(data.xy, data.zw, bvec2(glyph_index % 2 != 0));
return Glyph(glyph);
}
struct PrimitiveInstance {
int prim_address;
int specific_prim_address;
int render_task_index;
int clip_task_index;
int scroll_node_id;
int clip_chain_rect_index;
int z;
int user_data0;
int user_data1;
int user_data2;
};
PrimitiveInstance fetch_prim_instance() {
PrimitiveInstance pi;
pi.prim_address = aData0.x;
pi.specific_prim_address = pi.prim_address + VECS_PER_PRIM_HEADER;
pi.render_task_index = aData0.y % 0x10000;
pi.clip_task_index = aData0.y / 0x10000;
pi.clip_chain_rect_index = aData0.z;
pi.scroll_node_id = aData0.w;
pi.z = aData1.x;
pi.user_data0 = aData1.y;
pi.user_data1 = aData1.z;
pi.user_data2 = aData1.w;
return pi;
}
struct CompositeInstance {
int render_task_index;
int src_task_index;
int backdrop_task_index;
int user_data0;
int user_data1;
float z;
int user_data2;
int user_data3;
};
CompositeInstance fetch_composite_instance() {
CompositeInstance ci;
ci.render_task_index = aData0.x;
ci.src_task_index = aData0.y;
ci.backdrop_task_index = aData0.z;
ci.z = float(aData0.w);
ci.user_data0 = aData1.x;
ci.user_data1 = aData1.y;
ci.user_data2 = aData1.z;
ci.user_data3 = aData1.w;
return ci;
}
struct Primitive {
ClipScrollNode scroll_node;
ClipArea clip_area;
PictureTask task;
RectWithSize local_rect;
RectWithSize local_clip_rect;
int specific_prim_address;
int user_data0;
int user_data1;
int user_data2;
float z;
};
struct PrimitiveGeometry {
RectWithSize local_rect;
RectWithSize local_clip_rect;
};
PrimitiveGeometry fetch_primitive_geometry(int address) {
vec4 geom[2] = fetch_from_resource_cache_2(address);
return PrimitiveGeometry(RectWithSize(geom[0].xy, geom[0].zw),
RectWithSize(geom[1].xy, geom[1].zw));
}
Primitive load_primitive() {
PrimitiveInstance pi = fetch_prim_instance();
Primitive prim;
prim.scroll_node = fetch_clip_scroll_node(pi.scroll_node_id);
prim.clip_area = fetch_clip_area(pi.clip_task_index);
prim.task = fetch_picture_task(pi.render_task_index);
RectWithSize clip_chain_rect = fetch_clip_chain_rect(pi.clip_chain_rect_index);
PrimitiveGeometry geom = fetch_primitive_geometry(pi.prim_address);
prim.local_rect = geom.local_rect;
prim.local_clip_rect = intersect_rects(clip_chain_rect, geom.local_clip_rect);
prim.specific_prim_address = pi.specific_prim_address;
prim.user_data0 = pi.user_data0;
prim.user_data1 = pi.user_data1;
prim.user_data2 = pi.user_data2;
prim.z = float(pi.z);
return prim;
}
struct VertexInfo {
vec2 local_pos;
vec2 screen_pos;
@ -327,52 +202,6 @@ VertexInfo write_transform_vertex(RectWithSize local_segment_rect,
return vi;
}
VertexInfo write_transform_vertex_primitive(Primitive prim) {
return write_transform_vertex(
prim.local_rect,
prim.local_rect,
prim.local_clip_rect,
vec4(1.0),
prim.z,
prim.scroll_node,
prim.task,
true
);
}
struct GlyphResource {
vec4 uv_rect;
float layer;
vec2 offset;
float scale;
};
GlyphResource fetch_glyph_resource(int address) {
vec4 data[2] = fetch_from_resource_cache_2(address);
return GlyphResource(data[0], data[1].x, data[1].yz, data[1].w);
}
struct TextRun {
vec4 color;
vec4 bg_color;
vec2 offset;
};
TextRun fetch_text_run(int address) {
vec4 data[3] = fetch_from_resource_cache_3(address);
return TextRun(data[0], data[1], data[2].xy);
}
struct Image {
vec4 stretch_size_and_tile_spacing; // Size of the actual image and amount of space between
// tiled instances of this image.
};
Image fetch_image(int address) {
vec4 data = fetch_from_resource_cache_1(address);
return Image(data);
}
void write_clip(vec2 global_pos, ClipArea area) {
vec2 uv = global_pos +
area.common_data.task_rect.p0 -

Просмотреть файл

@ -34,9 +34,27 @@ vec3 bilerp(vec3 a, vec3 b, vec3 c, vec3 d, float s, float t) {
return mix(x, y, s);
}
struct SplitCompositeInstance {
int render_task_index;
int src_task_index;
int polygons_address;
float z;
};
SplitCompositeInstance fetch_composite_instance() {
SplitCompositeInstance ci;
ci.render_task_index = aData0.x;
ci.src_task_index = aData0.y;
ci.polygons_address = aData0.z;
ci.z = float(aData0.w);
return ci;
}
void main(void) {
CompositeInstance ci = fetch_composite_instance();
SplitGeometry geometry = fetch_split_geometry(ci.user_data0);
SplitCompositeInstance ci = fetch_composite_instance();
SplitGeometry geometry = fetch_split_geometry(ci.polygons_address);
PictureTask src_task = fetch_picture_task(ci.src_task_index);
PictureTask dest_task = fetch_picture_task(ci.render_task_index);

Просмотреть файл

@ -15,50 +15,178 @@ varying vec4 vUvClip;
#ifdef WR_VERTEX_SHADER
struct Glyph {
vec2 offset;
};
Glyph fetch_glyph(int specific_prim_address,
int glyph_index) {
// Two glyphs are packed in each texel in the GPU cache.
int glyph_address = specific_prim_address +
VECS_PER_TEXT_RUN +
glyph_index / 2;
vec4 data = fetch_from_resource_cache_1(glyph_address);
// Select XY or ZW based on glyph index.
// We use "!= 0" instead of "== 1" here in order to work around a driver
// bug with equality comparisons on integers.
vec2 glyph = mix(data.xy, data.zw, bvec2(glyph_index % 2 != 0));
return Glyph(glyph);
}
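The comments above spell out the GPU-cache layout this shader relies on: each vec4 texel carries two vec2 glyph offsets, the lookup picks .xy or .zw by the parity of the glyph index, and the parity test is written as `!= 0` rather than `== 1` to sidestep a driver bug with integer equality. A CPU-side sketch of the same packing and selection (the `Texel` type and function names below are illustrative, not part of the patch):

```rust
// Illustrative only: mirrors the two-glyphs-per-texel layout that
// fetch_glyph() assumes on the GPU side.
#[derive(Clone, Copy, Default, Debug, PartialEq)]
struct Texel { x: f32, y: f32, z: f32, w: f32 }

// Pack glyph offsets two per texel: even index -> xy, odd index -> zw.
fn pack_glyph_offsets(offsets: &[(f32, f32)]) -> Vec<Texel> {
    let mut texels = vec![Texel::default(); (offsets.len() + 1) / 2];
    for (i, &(ox, oy)) in offsets.iter().enumerate() {
        let t = &mut texels[i / 2];
        if i % 2 != 0 { t.z = ox; t.w = oy; } else { t.x = ox; t.y = oy; }
    }
    texels
}

// Mirror of the shader-side selection by index parity.
fn fetch_glyph_offset(texels: &[Texel], glyph_index: usize) -> (f32, f32) {
    let t = texels[glyph_index / 2];
    if glyph_index % 2 != 0 { (t.z, t.w) } else { (t.x, t.y) }
}

fn main() {
    let texels = pack_glyph_offsets(&[(1.0, 2.0), (3.0, 4.0), (5.0, 6.0)]);
    assert_eq!(fetch_glyph_offset(&texels, 1), (3.0, 4.0));
    assert_eq!(fetch_glyph_offset(&texels, 2), (5.0, 6.0));
}
```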
struct GlyphResource {
vec4 uv_rect;
float layer;
vec2 offset;
float scale;
};
GlyphResource fetch_glyph_resource(int address) {
vec4 data[2] = fetch_from_resource_cache_2(address);
return GlyphResource(data[0], data[1].x, data[1].yz, data[1].w);
}
struct TextRun {
vec4 color;
vec4 bg_color;
vec2 offset;
};
TextRun fetch_text_run(int address) {
vec4 data[3] = fetch_from_resource_cache_3(address);
return TextRun(data[0], data[1], data[2].xy);
}
struct PrimitiveInstance {
int prim_address;
int specific_prim_address;
int render_task_index;
int clip_task_index;
int scroll_node_id;
int clip_chain_rect_index;
int z;
int user_data0;
int user_data1;
int user_data2;
};
PrimitiveInstance fetch_prim_instance() {
PrimitiveInstance pi;
pi.prim_address = aData0.x;
pi.specific_prim_address = pi.prim_address + VECS_PER_PRIM_HEADER;
pi.render_task_index = aData0.y % 0x10000;
pi.clip_task_index = aData0.y / 0x10000;
pi.clip_chain_rect_index = aData0.z;
pi.scroll_node_id = aData0.w;
pi.z = aData1.x;
pi.user_data0 = aData1.y;
pi.user_data1 = aData1.z;
pi.user_data2 = aData1.w;
return pi;
}
struct Primitive {
ClipScrollNode scroll_node;
ClipArea clip_area;
PictureTask task;
RectWithSize local_rect;
RectWithSize local_clip_rect;
int specific_prim_address;
int user_data0;
int user_data1;
int user_data2;
float z;
};
struct PrimitiveGeometry {
RectWithSize local_rect;
RectWithSize local_clip_rect;
};
PrimitiveGeometry fetch_primitive_geometry(int address) {
vec4 geom[2] = fetch_from_resource_cache_2(address);
return PrimitiveGeometry(RectWithSize(geom[0].xy, geom[0].zw),
RectWithSize(geom[1].xy, geom[1].zw));
}
Primitive load_primitive() {
PrimitiveInstance pi = fetch_prim_instance();
Primitive prim;
prim.scroll_node = fetch_clip_scroll_node(pi.scroll_node_id);
prim.clip_area = fetch_clip_area(pi.clip_task_index);
prim.task = fetch_picture_task(pi.render_task_index);
RectWithSize clip_chain_rect = fetch_clip_chain_rect(pi.clip_chain_rect_index);
PrimitiveGeometry geom = fetch_primitive_geometry(pi.prim_address);
prim.local_rect = geom.local_rect;
prim.local_clip_rect = intersect_rects(clip_chain_rect, geom.local_clip_rect);
prim.specific_prim_address = pi.specific_prim_address;
prim.user_data0 = pi.user_data0;
prim.user_data1 = pi.user_data1;
prim.user_data2 = pi.user_data2;
prim.z = float(pi.z);
return prim;
}
VertexInfo write_text_vertex(vec2 clamped_local_pos,
RectWithSize local_clip_rect,
float z,
ClipScrollNode scroll_node,
PictureTask task,
vec2 text_offset,
RectWithSize snap_rect,
vec2 snap_bias) {
// Ensure the transform does not contain a subpixel translation to ensure
// that glyph snapping is stable for equivalent glyph subpixel positions.
#if defined(WR_FEATURE_GLYPH_TRANSFORM)
bool remove_subpx_offset = true;
#else
bool remove_subpx_offset = scroll_node.is_axis_aligned;
#endif
if (remove_subpx_offset) {
scroll_node.transform[3].xy = floor(scroll_node.transform[3].xy + 0.5);
}
// Transform the current vertex to world space.
vec4 world_pos = scroll_node.transform * vec4(clamped_local_pos, 0.0, 1.0);
// Convert the world positions to device pixel space.
vec2 device_pos = world_pos.xy / world_pos.w * uDevicePixelRatio;
float device_scale = uDevicePixelRatio / world_pos.w;
vec2 device_pos = world_pos.xy * device_scale;
// Apply offsets for the render task to get correct screen location.
vec2 final_pos = device_pos -
task.content_origin +
task.common_data.task_rect.p0;
#ifdef WR_FEATURE_GLYPH_TRANSFORM
// For transformed subpixels, we just need to align the glyph origin to a device pixel.
final_pos += floor(snap_rect.p0 + snap_bias) - snap_rect.p0;
#if defined(WR_FEATURE_GLYPH_TRANSFORM)
bool remove_subpx_offset = true;
#else
// Compute the snapping offset only if the scroll node transform is axis-aligned.
if (scroll_node.is_axis_aligned) {
bool remove_subpx_offset = scroll_node.is_axis_aligned;
#endif
if (remove_subpx_offset) {
// Ensure the transformed text offset does not contain a subpixel translation
// such that glyph snapping is stable for equivalent glyph subpixel positions.
vec2 world_text_offset = mat2(scroll_node.transform) * text_offset;
vec2 device_text_pos = (scroll_node.transform[3].xy + world_text_offset) * device_scale;
final_pos += floor(device_text_pos + 0.5) - device_text_pos;
#ifdef WR_FEATURE_GLYPH_TRANSFORM
// For transformed subpixels, we just need to align the glyph origin to a device pixel.
// The transformed text offset has already been snapped, so remove it from the glyph
// origin when snapping the glyph.
vec2 snap_offset = snap_rect.p0 - world_text_offset * device_scale;
final_pos += floor(snap_offset + snap_bias) - snap_offset;
#else
// The transformed text offset has already been snapped, so remove it from the transform
// when snapping the glyph.
mat4 snap_transform = scroll_node.transform;
snap_transform[3].xy = -world_text_offset;
final_pos += compute_snap_offset(
clamped_local_pos,
scroll_node.transform,
snap_transform,
snap_rect,
snap_bias
);
}
#endif
}
gl_Position = uTransform * vec4(final_pos, z, 1.0);
@ -154,6 +282,7 @@ void main(void) {
prim.z,
prim.scroll_node,
prim.task,
text.offset,
glyph_rect,
snap_bias);
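The rewritten write_text_vertex above keeps glyph snapping stable by stripping any subpixel translation from the transformed text offset: the device-space text position is rounded to the nearest whole pixel and the resulting correction is folded into final_pos. A minimal CPU-side sketch of that snap-offset step, assuming positions are already in device pixels (plain functions here, not the real shader types):

```rust
// Sketch of the snapping used above: round a device-space position to the
// nearest whole pixel and return the correction to add so the glyph sits
// on a pixel boundary (final_pos += floor(p + 0.5) - p).
fn snap_offset(device_pos: (f32, f32)) -> (f32, f32) {
    let snapped = ((device_pos.0 + 0.5).floor(), (device_pos.1 + 0.5).floor());
    (snapped.0 - device_pos.0, snapped.1 - device_pos.1)
}

fn main() {
    // 10.3px snaps down to 10.0 (correction -0.3); 4.5px rounds up to 5.0 (+0.5).
    let (dx, dy) = snap_offset((10.3, 4.5));
    assert!((dx + 0.3).abs() < 1e-5);
    assert!((dy - 0.5).abs() < 1e-5);
}
```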

Просмотреть файл

@ -12,8 +12,8 @@ use euclid::{TypedTransform3D, vec3};
use glyph_rasterizer::GlyphFormat;
use gpu_cache::{GpuCache, GpuCacheHandle, GpuCacheAddress};
use gpu_types::{BrushFlags, BrushInstance, ClipChainRectIndex};
use gpu_types::{ClipMaskInstance, ClipScrollNodeIndex, CompositePrimitiveInstance};
use gpu_types::{PrimitiveInstance, RasterizationSpace, SimplePrimitiveInstance, ZBufferId};
use gpu_types::{ClipMaskInstance, ClipScrollNodeIndex, SplitCompositeInstance};
use gpu_types::{PrimitiveInstance, RasterizationSpace, GlyphInstance, ZBufferId};
use gpu_types::ZBufferIdGenerator;
use internal_types::{FastHashMap, SavedTargetIndex, SourceTexture};
use picture::{PictureCompositeMode, PicturePrimitive, PictureSurface};
@ -520,17 +520,13 @@ impl AlphaBatchBuilder {
.expect("BUG: unexpected surface in splitting")
.resolve_render_task_id();
let source_task_address = render_tasks.get_task_address(source_task_id);
let gpu_address = gpu_handle.as_int(gpu_cache);
let gpu_address = gpu_cache.get_address(&gpu_handle);
let instance = CompositePrimitiveInstance::new(
let instance = SplitCompositeInstance::new(
task_address,
source_task_address,
RenderTaskAddress(0),
gpu_address,
0,
z_generator.next(),
0,
0,
);
batch.push(PrimitiveInstance::from(instance));
@ -641,14 +637,6 @@ impl AlphaBatchBuilder {
let clip_task_address = prim_metadata
.clip_task_id
.map_or(OPAQUE_TASK_ADDRESS, |id| render_tasks.get_task_address(id));
let base_instance = SimplePrimitiveInstance::new(
prim_cache_address,
task_address,
clip_task_address,
clip_chain_rect_index,
scroll_id,
z,
);
let specified_blend_mode = ctx.prim_store.get_blend_mode(prim_metadata);
@ -1169,6 +1157,14 @@ impl AlphaBatchBuilder {
let key = BatchKey::new(kind, blend_mode, textures);
let batch = batch_list.get_suitable_batch(key, &task_relative_bounding_rect);
let base_instance = GlyphInstance::new(
prim_cache_address,
task_address,
clip_task_address,
clip_chain_rect_index,
scroll_id,
z,
);
for glyph in glyphs {
batch.push(base_instance.build(
@ -1471,9 +1467,10 @@ impl BrushPrimitive {
)
}
BorderSource::Border { ref handle, .. } => {
let rt_handle = handle
.as_ref()
.expect("bug: render task handle not allocated");
let rt_handle = match *handle {
Some(ref handle) => handle,
None => return None,
};
let rt_cache_entry = resource_cache
.get_cached_render_task(rt_handle);
resource_cache.get_texture_cache_item(&rt_cache_entry.handle)

Просмотреть файл

@ -604,7 +604,7 @@ impl BorderRenderTaskInfo {
widths: &BorderWidths,
scale: LayoutToDeviceScale,
brush_segments: &mut Vec<BrushSegment>,
) -> Self {
) -> Option<Self> {
let mut border_segments = Vec::new();
let dp_width_top = (widths.top * scale.0).ceil();
@ -684,6 +684,10 @@ impl BorderRenderTaskInfo {
dp_size_tl.height.max(dp_size_tr.height) + inner_height + dp_size_bl.height.max(dp_size_br.height),
);
if size.width == 0.0 || size.height == 0.0 {
return None;
}
add_edge_segment(
LayoutRect::from_floats(
rect.origin.x,
@ -860,10 +864,10 @@ impl BorderRenderTaskInfo {
brush_segments,
);
BorderRenderTaskInfo {
Some(BorderRenderTaskInfo {
border_segments,
size: size.to_i32(),
}
})
}
pub fn build_instances(&self, border: &NormalBorder) -> Vec<BorderInstance> {

Просмотреть файл

@ -143,7 +143,7 @@ pub struct PrimitiveInstance {
data: [i32; 8],
}
pub struct SimplePrimitiveInstance {
pub struct GlyphInstance {
pub specific_prim_address: GpuCacheAddress,
pub task_address: RenderTaskAddress,
pub clip_task_address: RenderTaskAddress,
@ -152,7 +152,7 @@ pub struct SimplePrimitiveInstance {
pub z: ZBufferId,
}
impl SimplePrimitiveInstance {
impl GlyphInstance {
pub fn new(
specific_prim_address: GpuCacheAddress,
task_address: RenderTaskAddress,
@ -161,7 +161,7 @@ impl SimplePrimitiveInstance {
scroll_id: ClipScrollNodeIndex,
z: ZBufferId,
) -> Self {
SimplePrimitiveInstance {
GlyphInstance {
specific_prim_address,
task_address,
clip_task_address,
@ -187,53 +187,41 @@ impl SimplePrimitiveInstance {
}
}
pub struct CompositePrimitiveInstance {
pub struct SplitCompositeInstance {
pub task_address: RenderTaskAddress,
pub src_task_address: RenderTaskAddress,
pub backdrop_task_address: RenderTaskAddress,
pub data0: i32,
pub data1: i32,
pub polygons_address: GpuCacheAddress,
pub z: ZBufferId,
pub data2: i32,
pub data3: i32,
}
impl CompositePrimitiveInstance {
impl SplitCompositeInstance {
pub fn new(
task_address: RenderTaskAddress,
src_task_address: RenderTaskAddress,
backdrop_task_address: RenderTaskAddress,
data0: i32,
data1: i32,
polygons_address: GpuCacheAddress,
z: ZBufferId,
data2: i32,
data3: i32,
) -> Self {
CompositePrimitiveInstance {
SplitCompositeInstance {
task_address,
src_task_address,
backdrop_task_address,
data0,
data1,
polygons_address,
z,
data2,
data3,
}
}
}
impl From<CompositePrimitiveInstance> for PrimitiveInstance {
fn from(instance: CompositePrimitiveInstance) -> Self {
impl From<SplitCompositeInstance> for PrimitiveInstance {
fn from(instance: SplitCompositeInstance) -> Self {
PrimitiveInstance {
data: [
instance.task_address.0 as i32,
instance.src_task_address.0 as i32,
instance.backdrop_task_address.0 as i32,
instance.polygons_address.as_int(),
instance.z.0,
instance.data0,
instance.data1,
instance.data2,
instance.data3,
0,
0,
0,
0,
],
}
}

Просмотреть файл

@ -820,8 +820,7 @@ impl TextRunPrimitiveCpu {
// be much simpler...
let mut gpu_block = [0.0; 4];
for (i, src) in src_glyphs.enumerate() {
let layout_offset = src.point + self.offset;
let world_offset = font.transform.transform(&layout_offset);
let world_offset = font.transform.transform(&src.point);
let device_offset = device_pixel_scale.transform_point(&world_offset);
let key = GlyphKey::new(src.index, device_offset, subpx_dir);
self.glyph_keys.push(key);
@ -1424,39 +1423,39 @@ impl PrimitiveStore {
if needs_update {
cache_key.scale = scale_au;
*task_info = Some(BorderRenderTaskInfo::new(
*task_info = BorderRenderTaskInfo::new(
&metadata.local_rect,
border,
widths,
scale,
&mut new_segments,
));
);
}
let task_info = task_info.as_ref().unwrap();
*handle = task_info.as_ref().map(|task_info| {
frame_state.resource_cache.request_render_task(
RenderTaskCacheKey {
size: DeviceIntSize::zero(),
kind: RenderTaskCacheKeyKind::Border(cache_key.clone()),
},
frame_state.gpu_cache,
frame_state.render_tasks,
None,
false, // todo
|render_tasks| {
let task = RenderTask::new_border(
task_info.size,
task_info.build_instances(border),
);
*handle = Some(frame_state.resource_cache.request_render_task(
RenderTaskCacheKey {
size: DeviceIntSize::zero(),
kind: RenderTaskCacheKeyKind::Border(cache_key.clone()),
},
frame_state.gpu_cache,
frame_state.render_tasks,
None,
false, // todo
|render_tasks| {
let task = RenderTask::new_border(
task_info.size,
task_info.build_instances(border),
);
let task_id = render_tasks.add(task);
let task_id = render_tasks.add(task);
pic_state.tasks.push(task_id);
pic_state.tasks.push(task_id);
task_id
}
));
task_id
}
)
});
if needs_update {
brush.segment_desc = Some(BrushSegmentDescriptor {

Просмотреть файл

@ -762,6 +762,7 @@ impl RenderBackend {
&mut frame_counter,
&mut profile_counters,
ops,
true,
);
}
},
@ -962,6 +963,7 @@ impl RenderBackend {
frame_counter,
profile_counters,
DocumentOps::nop(),
false,
)
}
}
@ -976,6 +978,7 @@ impl RenderBackend {
frame_counter: &mut u32,
profile_counters: &mut BackendProfileCounters,
initial_op: DocumentOps,
has_built_scene: bool,
) {
let mut op = initial_op;
@ -1073,7 +1076,7 @@ impl RenderBackend {
&mut self.resource_cache,
&mut self.gpu_cache,
&mut profile_counters.resources,
op.build,
op.build || has_built_scene,
);
debug!("generated frame for document {:?} with {} passes",

Просмотреть файл

@ -525,7 +525,7 @@ impl ResourceCache {
&mut self,
image_key: ImageKey,
descriptor: ImageDescriptor,
mut data: ImageData,
data: ImageData,
dirty_rect: Option<DeviceUintRect>,
) {
let max_texture_size = self.max_texture_size();
@ -539,11 +539,11 @@ impl ResourceCache {
tiling = Some(DEFAULT_TILE_SIZE);
}
if let ImageData::Blob(ref mut blob) = data {
if let ImageData::Blob(ref blob) = data {
self.blob_image_renderer
.as_mut()
.unwrap()
.update(image_key, Arc::clone(&blob), dirty_rect);
.update(image_key, Arc::clone(blob), dirty_rect);
}
*image = ImageResource {
@ -645,33 +645,33 @@ impl ResourceCache {
// - The image is a blob.
// - The blob hasn't already been requested this frame.
if self.pending_image_requests.insert(request) && template.data.is_blob() {
if let Some(ref mut renderer) = self.blob_image_renderer {
let (offset, size) = match template.tiling {
Some(tile_size) => {
let tile_offset = request.tile.unwrap();
let actual_size = compute_tile_size(
&template.descriptor,
tile_size,
tile_offset,
);
let offset = DevicePoint::new(
tile_offset.x as f32 * tile_size as f32,
tile_offset.y as f32 * tile_size as f32,
);
let (offset, size) = match template.tiling {
Some(tile_size) => {
let tile_offset = request.tile.unwrap();
let actual_size = compute_tile_size(
&template.descriptor,
tile_size,
tile_offset,
);
if let Some(dirty) = dirty_rect {
if intersect_for_tile(dirty, actual_size, tile_size, tile_offset).is_none() {
// don't bother requesting unchanged tiles
self.pending_image_requests.remove(&request);
return
}
if let Some(dirty) = dirty_rect {
if intersect_for_tile(dirty, actual_size, tile_size, tile_offset).is_none() {
// don't bother requesting unchanged tiles
self.pending_image_requests.remove(&request);
return
}
(offset, actual_size)
}
None => (DevicePoint::zero(), template.descriptor.size),
};
let offset = DevicePoint::new(
tile_offset.x as f32 * tile_size as f32,
tile_offset.y as f32 * tile_size as f32,
);
(offset, actual_size)
}
None => (DevicePoint::zero(), template.descriptor.size),
};
if let Some(ref mut renderer) = self.blob_image_renderer {
renderer.request(
&self.resources,
request.into(),
@ -926,7 +926,6 @@ impl ResourceCache {
for request in self.pending_image_requests.drain() {
let image_template = self.resources.image_templates.get_mut(request.key).unwrap();
debug_assert!(image_template.data.uses_texture_cache());
let mut dirty_rect = image_template.dirty_rect;
let image_data = match image_template.data {
ImageData::Raw(..) | ImageData::External(..) => {
@ -962,16 +961,23 @@ impl ResourceCache {
}
};
let descriptor = if let Some(tile) = request.tile {
let entry = self.cached_images.get_mut(&request).as_mut().unwrap();
let mut descriptor = image_template.descriptor.clone();
//TODO: erasing the dirty rectangle here is incorrect for tiled images,
// since other tile requests may follow that depend on it
let mut local_dirty_rect = image_template.dirty_rect.take();
if let Some(tile) = request.tile {
let tile_size = image_template.tiling.unwrap();
let image_descriptor = &image_template.descriptor;
let clipped_tile_size = compute_tile_size(&descriptor, tile_size, tile);
let clipped_tile_size = compute_tile_size(image_descriptor, tile_size, tile);
if let Some(dirty) = dirty_rect {
dirty_rect = intersect_for_tile(dirty, clipped_tile_size, tile_size, tile);
if dirty_rect.is_none() {
continue
if let Some(ref mut rect) = local_dirty_rect {
match intersect_for_tile(*rect, clipped_tile_size, tile_size, tile) {
Some(intersection) => *rect = intersection,
None => {
// if re-uploaded, the dirty rect is ignored anyway
debug_assert!(self.texture_cache.needs_upload(&entry.texture_cache_handle))
}
}
}
@ -979,27 +985,17 @@ impl ResourceCache {
// already broken up into tiles. This affects the way we compute the stride
// and offset.
let tiled_on_cpu = image_template.data.is_blob();
let (stride, offset) = if tiled_on_cpu {
(image_descriptor.stride, 0)
} else {
let bpp = image_descriptor.format.bytes_per_pixel();
let stride = image_descriptor.compute_stride();
let offset = image_descriptor.offset +
if !tiled_on_cpu {
let bpp = descriptor.format.bytes_per_pixel();
let stride = descriptor.compute_stride();
descriptor.stride = Some(stride);
descriptor.offset +=
tile.y as u32 * tile_size as u32 * stride +
tile.x as u32 * tile_size as u32 * bpp;
(Some(stride), offset)
};
ImageDescriptor {
size: clipped_tile_size,
stride,
offset,
..*image_descriptor
}
} else {
image_template.descriptor.clone()
};
descriptor.size = clipped_tile_size;
}
let filter = match request.rendering {
ImageRendering::Pixelated => {
@ -1027,19 +1023,18 @@ impl ResourceCache {
}
};
let entry = self.cached_images.get_mut(&request).as_mut().unwrap();
//Note: at this point, the dirty rectangle is local to the descriptor space
self.texture_cache.update(
&mut entry.texture_cache_handle,
descriptor,
filter,
Some(image_data),
[0.0; 3],
dirty_rect,
local_dirty_rect,
gpu_cache,
None,
UvRectKind::Rect,
);
image_template.dirty_rect = None;
}
}
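For tiles of images that are not already broken up on the CPU, the hunks above keep the full image's stride and simply advance the descriptor offset to the tile's first byte. A small sketch of that addressing arithmetic (the names are illustrative, not the real ImageDescriptor fields):

```rust
// Illustrative tile addressing: the tile's first byte within a contiguous
// image buffer is the base offset plus whole tile rows (stride-sized) plus
// the tile's horizontal offset in bytes.
fn tile_byte_offset(
    base_offset: u32,
    tile_x: u32,
    tile_y: u32,
    tile_size: u32,
    stride: u32,           // bytes per row of the full image
    bytes_per_pixel: u32,
) -> u32 {
    base_offset
        + tile_y * tile_size * stride
        + tile_x * tile_size * bytes_per_pixel
}

fn main() {
    // 512x512 RGBA image (stride = 512 * 4 = 2048 bytes), 128px tiles:
    // tile (1, 2) starts 2 * 128 rows down and 1 * 128 pixels across.
    assert_eq!(
        tile_byte_offset(0, 1, 2, 128, 2048, 4),
        2 * 128 * 2048 + 1 * 128 * 4
    );
}
```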

Просмотреть файл

@ -6,7 +6,7 @@ use api::{BorderRadius, ClipMode, LayoutPoint, LayoutPointAu, LayoutRect, Layout
use app_units::Au;
use prim_store::EdgeAaSegmentMask;
use std::{cmp, usize};
use util::extract_inner_rect_safe;
use util::{extract_inner_rect_safe, RectHelpers};
bitflags! {
pub struct ItemFlags: u8 {
@ -210,63 +210,71 @@ impl SegmentBuilder {
inner_rect: LayoutRect,
inner_clip_mode: Option<ClipMode>,
) {
debug_assert!(outer_rect.contains_rect(&inner_rect));
if inner_rect.is_well_formed_and_nonempty() {
debug_assert!(outer_rect.contains_rect(&inner_rect));
let p0 = outer_rect.origin;
let p1 = inner_rect.origin;
let p2 = inner_rect.bottom_right();
let p3 = outer_rect.bottom_right();
let p0 = outer_rect.origin;
let p1 = inner_rect.origin;
let p2 = inner_rect.bottom_right();
let p3 = outer_rect.bottom_right();
let segments = &[
LayoutRect::new(
LayoutPoint::new(p0.x, p0.y),
LayoutSize::new(p1.x - p0.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p0.y),
LayoutSize::new(p3.x - p2.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p2.y),
LayoutSize::new(p3.x - p2.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p0.x, p2.y),
LayoutSize::new(p1.x - p0.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p1.x, p0.y),
LayoutSize::new(p2.x - p1.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p1.y),
LayoutSize::new(p3.x - p2.x, p2.y - p1.y),
),
LayoutRect::new(
LayoutPoint::new(p1.x, p2.y),
LayoutSize::new(p2.x - p1.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p0.x, p1.y),
LayoutSize::new(p1.x - p0.x, p2.y - p1.y),
),
];
let segments = &[
LayoutRect::new(
LayoutPoint::new(p0.x, p0.y),
LayoutSize::new(p1.x - p0.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p0.y),
LayoutSize::new(p3.x - p2.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p2.y),
LayoutSize::new(p3.x - p2.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p0.x, p2.y),
LayoutSize::new(p1.x - p0.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p1.x, p0.y),
LayoutSize::new(p2.x - p1.x, p1.y - p0.y),
),
LayoutRect::new(
LayoutPoint::new(p2.x, p1.y),
LayoutSize::new(p3.x - p2.x, p2.y - p1.y),
),
LayoutRect::new(
LayoutPoint::new(p1.x, p2.y),
LayoutSize::new(p2.x - p1.x, p3.y - p2.y),
),
LayoutRect::new(
LayoutPoint::new(p0.x, p1.y),
LayoutSize::new(p1.x - p0.x, p2.y - p1.y),
),
];
for segment in segments {
for segment in segments {
self.items.push(Item::new(
*segment,
None,
true
));
}
if inner_clip_mode.is_some() {
self.items.push(Item::new(
inner_rect,
inner_clip_mode,
false,
));
}
} else {
self.items.push(Item::new(
*segment,
outer_rect,
None,
true
));
}
if inner_clip_mode.is_some() {
self.items.push(Item::new(
inner_rect,
inner_clip_mode,
false,
));
}
}
// Push some kind of clipping region into the segment builder.

Просмотреть файл

@ -380,6 +380,16 @@ impl TextureCache {
}
}
// Returns true if the image needs to be uploaded to the
// texture cache (either never uploaded, or has been
// evicted on a previous frame).
pub fn needs_upload(&self, handle: &TextureCacheHandle) -> bool {
match handle.entry {
Some(ref handle) => self.entries.get_opt(handle).is_none(),
None => true,
}
}
pub fn max_texture_size(&self) -> u32 {
self.max_texture_size
}
@ -1211,7 +1221,7 @@ impl TextureUpdate {
layer_index: i32,
dirty_rect: Option<DeviceUintRect>,
) -> TextureUpdate {
let data_src = match data {
let source = match data {
ImageData::Blob(..) => {
panic!("The vector image should have been rasterized.");
}
@ -1236,25 +1246,33 @@ impl TextureUpdate {
let update_op = match dirty_rect {
Some(dirty) => {
// the dirty rectangle doesn't have to be within the area but has to intersect it, at least
let stride = descriptor.compute_stride();
let offset = descriptor.offset + dirty.origin.y * stride + dirty.origin.x * descriptor.format.bytes_per_pixel();
let origin =
DeviceUintPoint::new(origin.x + dirty.origin.x, origin.y + dirty.origin.y);
TextureUpdateOp::Update {
rect: DeviceUintRect::new(origin, dirty.size),
source: data_src,
rect: DeviceUintRect::new(
DeviceUintPoint::new(origin.x + dirty.origin.x, origin.y + dirty.origin.y),
DeviceUintSize::new(
dirty.size.width.min(size.width - dirty.origin.x),
dirty.size.height.min(size.height - dirty.origin.y),
),
),
source,
stride: Some(stride),
offset,
layer_index,
}
}
None => TextureUpdateOp::Update {
rect: DeviceUintRect::new(origin, size),
source: data_src,
stride: descriptor.stride,
offset: descriptor.offset,
layer_index,
},
None => {
TextureUpdateOp::Update {
rect: DeviceUintRect::new(origin, size),
source,
stride: descriptor.stride,
offset: descriptor.offset,
layer_index,
}
}
};
TextureUpdate {

Просмотреть файл

@ -18,8 +18,8 @@ bitflags = "1.0"
byteorder = "1.2.1"
ipc-channel = {version = "0.10.0", optional = true}
euclid = { version = "0.17", features = ["serde"] }
serde = { version = "=1.0.58", features = ["rc"] }
serde_derive = { version = "=1.0.58", features = ["deserialize_in_place"] }
serde = { version = "=1.0.66", features = ["rc"] }
serde_derive = { version = "=1.0.66", features = ["deserialize_in_place"] }
serde_bytes = "0.10"
time = "0.1"

Просмотреть файл

@ -186,7 +186,7 @@ pub trait BlobImageRenderer: Send {
fn request(
&mut self,
services: &BlobImageResources,
resources: &BlobImageResources,
key: BlobImageRequest,
descriptor: &BlobImageDescriptor,
dirty_rect: Option<DeviceUintRect>,

Просмотреть файл

@ -1 +1 @@
dd30fbb21c876b252b805b607bd04f3bab1fd228
cf98ad4d63729c678a7575eb9bce36794da5e270

Просмотреть файл

@ -26,7 +26,7 @@ fn deserialize_blob(blob: &[u8]) -> Result<ColorU, ()> {
};
}
// perform floor((x * a) / 255. + 0.5) see "Three wrongs make a right" for deriviation
// perform floor((x * a) / 255. + 0.5) see "Three wrongs make a right" for derivation
fn premul(x: u8, a: u8) -> u8 {
let t = (x as u32) * (a as u32) + 128;
((t + (t >> 8)) >> 8) as u8
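The premul helper above replaces floor((x * a) / 255 + 0.5) with an add-and-shift approximation; since both inputs are bytes, the equivalence can be checked exhaustively. A quick verification sketch (the premul_exact name is introduced here only for comparison):

```rust
// Exhaustive check that the add-and-shift trick in premul() matches the
// exact rounded division floor((x * a) / 255 + 0.5) for every byte pair.
fn premul(x: u8, a: u8) -> u8 {
    let t = (x as u32) * (a as u32) + 128;
    ((t + (t >> 8)) >> 8) as u8
}

fn premul_exact(x: u8, a: u8) -> u8 {
    // floor(n / 255 + 0.5) == (n + 127) / 255 in integer arithmetic,
    // because no multiple of 255 lies strictly between n + 127 and n + 127.5.
    (((x as u32) * (a as u32) + 127) / 255) as u8
}

fn main() {
    for x in 0..=255u8 {
        for a in 0..=255u8 {
            assert_eq!(premul(x, a), premul_exact(x, a));
        }
    }
    println!("premul matches floor((x * a) / 255 + 0.5) for all inputs");
}
```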
@ -55,8 +55,9 @@ fn render_blob(
};
let mut dirty_rect = dirty_rect.unwrap_or(DeviceUintRect::new(
DeviceUintPoint::new(0, 0),
DeviceUintSize::new(descriptor.size.width, descriptor.size.height)));
DeviceUintPoint::origin(),
descriptor.size,
));
if let Some((tile_size, tile)) = tile {
dirty_rect = intersect_for_tile(dirty_rect, size2(tile_size as u32, tile_size as u32),

Просмотреть файл

@ -0,0 +1,113 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
XPCOMUtils.defineLazyModuleGetters(this, {
AndroidLog: "resource://gre/modules/AndroidLog.jsm",
EventDispatcher: "resource://gre/modules/Messaging.jsm",
});
const LOGTAG = "Experiments";
const EXPERIMENTS_CONFIGURATION = "https://firefox.settings.services.mozilla.com/v1/buckets/fennec/collections/experiments/records";
const Experiments = Services.wm.getMostRecentWindow("navigator:browser").Experiments;
document.addEventListener("DOMContentLoaded", initList);
function log(msg) {
AndroidLog.d(LOGTAG, msg);
}
function initList() {
const list = document.getElementById("list");
list.addEventListener("click", toggleOverride);
Promise.all([promiseEnabledExperiments(), promiseExperimentsConfiguration()]).then(values => {
const enabledExperiments = values[0];
const serverConfiguration = values[1];
serverConfiguration.data.forEach(function(experiment) {
try {
let item = document.createElement("li");
item.textContent = experiment.name;
item.setAttribute("name", experiment.name);
item.setAttribute("isEnabled", enabledExperiments.includes(experiment.name));
list.appendChild(item);
} catch (e) {
log(`Error while setting experiments list: ${e.error}`);
}
});
});
}
function toggleOverride(experiment) {
const item = experiment.originalTarget;
const name = item.getAttribute("name");
const isEnabled = item.getAttribute("isEnabled") === "true";
log(`toggleOverride: ${name}`);
Experiments.setOverride(name, !isEnabled);
item.setAttribute("isEnabled", !isEnabled);
}
/**
* Get the list of locally enabled experiments.
*/
function promiseEnabledExperiments() {
log("Getting the locally enabled experiments");
return EventDispatcher.instance.sendRequestForResult({
type: "Experiments:GetActive"
}).then(experiments => {
log("List of locally enabled experiments ready");
return experiments;
});
}
/**
* Fetch the list of experiments from server configuration.
*/
function promiseExperimentsConfiguration() {
log("Fetching server experiments");
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
try {
xhr.open("GET", EXPERIMENTS_CONFIGURATION, true);
} catch (e) {
reject(`Error opening request: ${e}`);
return;
}
xhr.onerror = function(e) {
reject(`Error making request: ${e.error}`);
};
xhr.onload = function(event) {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
try {
resolve(JSON.parse(xhr.responseText));
} catch (e) {
const errorMessage = `Error while parsing request: ${e}`;
log(errorMessage);
reject(errorMessage);
}
} else {
const errorMessage = `Request to ${EXPERIMENTS_CONFIGURATION} returned status ${xhr.status}`;
log(errorMessage);
reject(errorMessage);
}
}
log("Finished fetching server experiments");
};
xhr.send(null);
});
}

Просмотреть файл

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Switchboard Experiments</title>
<meta name="viewport" content="width=device-width; user-scalable=0" />
<link rel="stylesheet" href="chrome://browser/skin/aboutBase.css" type="text/css"/>
<link rel="stylesheet" href="chrome://browser/skin/aboutExperiments.css" type="text/css"/>
<script type="application/javascript" src="chrome://browser/content/aboutExperiments.js"></script>
</head>
<body>
<ul id="list"/>
</body>
</html>

Просмотреть файл

@ -5628,7 +5628,7 @@ var IdentityHandler = {
return this.IDENTITY_MODE_IDENTIFIED;
}
let whitelist = /^about:(about|accounts|addons|buildconfig|cache|config|crashes|devices|downloads|fennec|firefox|feedback|home|license|logins|logo|memory|mozilla|networking|privatebrowsing|rights|serviceworkers|support|telemetry|webrtc)($|\?)/i;
let whitelist = /^about:(about|accounts|addons|buildconfig|cache|config|crashes|devices|downloads|experiments|fennec|firefox|feedback|home|license|logins|logo|memory|mozilla|networking|privatebrowsing|rights|serviceworkers|support|telemetry|webrtc)($|\?)/i;
if (uri.schemeIs("about") && whitelist.test(uri.spec)) {
return this.IDENTITY_MODE_CHROMEUI;
}

Просмотреть файл

@ -45,6 +45,8 @@ chrome.jar:
content/RemoteDebugger.js (content/RemoteDebugger.js)
content/aboutAccounts.xhtml (content/aboutAccounts.xhtml)
content/aboutAccounts.js (content/aboutAccounts.js)
content/aboutExperiments.xhtml (content/aboutExperiments.xhtml)
content/aboutExperiments.js (content/aboutExperiments.js)
content/aboutLogins.xhtml (content/aboutLogins.xhtml)
content/aboutLogins.js (content/aboutLogins.js)
#ifndef RELEASE_OR_BETA

Просмотреть файл

@ -67,6 +67,11 @@ var modules = {
uri: "chrome://browser/content/aboutAccounts.xhtml",
privileged: true
},
experiments: {
uri: "chrome://browser/content/aboutExperiments.xhtml",
privileged: true,
hide: true
},
};
function AboutRedirector() {}

Просмотреть файл

@ -14,6 +14,7 @@ contract @mozilla.org/network/protocol/about;1?what=privatebrowsing {322ba47e-70
contract @mozilla.org/network/protocol/about;1?what=blocked {322ba47e-7047-4f71-aebf-cb7d69325cd9}
contract @mozilla.org/network/protocol/about;1?what=accounts {322ba47e-7047-4f71-aebf-cb7d69325cd9}
contract @mozilla.org/network/protocol/about;1?what=logins {322ba47e-7047-4f71-aebf-cb7d69325cd9}
contract @mozilla.org/network/protocol/about;1?what=experiments {322ba47e-7047-4f71-aebf-cb7d69325cd9}
# DirectoryProvider.js
component {ef0f7a87-c1ee-45a8-8d67-26f586e46a4b} DirectoryProvider.js

Просмотреть файл

@ -1 +0,0 @@
These files are managed in the android-sync repo. Do not modify directly, or your changes will be lost.

Просмотреть файл

@ -1 +0,0 @@
These files are managed in the android-sync repo. Do not modify directly, or your changes will be lost.

Просмотреть файл

@ -0,0 +1,19 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
ul {
list-style: none;
margin: 0;
padding: 0;
}
li {
padding: 25px;
margin-bottom: 5px;
}
li[isEnabled="true"] {
background-color: #c5e1a5;
}
li[isEnabled="false"] {
background-color: #ef9a9a;
}

Просмотреть файл

@ -12,6 +12,7 @@ chrome.jar:
skin/aboutAddons.css (aboutAddons.css)
skin/aboutBase.css (aboutBase.css)
skin/aboutDownloads.css (aboutDownloads.css)
skin/aboutExperiments.css (aboutExperiments.css)
skin/aboutMemory.css (aboutMemory.css)
skin/aboutPrivateBrowsing.css (aboutPrivateBrowsing.css)
skin/aboutReader.css (aboutReader.css)

Просмотреть файл

@ -1 +0,0 @@
These files are managed in the android-sync repo. Do not modify directly, or your changes will be lost.

Просмотреть файл

@ -1518,6 +1518,13 @@ nsHostResolver::CompleteLookup(nsHostRecord* rec, nsresult status, AddrInfo* aNe
if (NS_SUCCEEDED(status)) {
rec->mTRRSuccess++;
if (rec->mTRRSuccess == 1) {
// Store the duration on first successful TRR response. We
// don't know that there will be a second response nor can we
// tell which of the two has useful data, especially in
// MODE_SHADOW where the actual results are discarded.
rec->mTrrDuration = TimeStamp::Now() - rec->mTrrStart;
}
}
if (TRROutstanding()) {
rec->mFirstTRRresult = status;
@ -1586,11 +1593,6 @@ nsHostResolver::CompleteLookup(nsHostRecord* rec, nsresult status, AddrInfo* aNe
// continue
}
if (NS_SUCCEEDED(status) && (rec->mTRRSuccess == 1)) {
// store the duration on first (used) TRR response
rec->mTrrDuration = TimeStamp::Now() - rec->mTrrStart;
}
} else { // native resolve completed
if (rec->usingAnyThread) {
mActiveAnyThreadCount--;

Просмотреть файл

@ -1165,4 +1165,4 @@ static const TransportSecurityPreload kPublicKeyPinningPreloadList[] = {
static const int32_t kUnknownId = -1;
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1537820694460000);
static const PRTime kPreloadPKPinsExpirationTime = INT64_C(1538042710546000);

Просмотреть файл

@ -8,7 +8,7 @@
/*****************************************************************************/
#include <stdint.h>
const PRTime gPreloadListExpirationTime = INT64_C(1540238249613000);
const PRTime gPreloadListExpirationTime = INT64_C(1540461823890000);
%%
0-1.party, 1
0.me.uk, 1
@ -14504,6 +14504,7 @@ glasschmuck-millefiori.de, 1
glavsudexpertiza.ru, 1
glazedmag.fr, 1
glcastlekings.co.uk, 1
gle, 1
gleanview.com, 1
glenavy.tk, 1
glenberviegolfclub.com, 1

Просмотреть файл

@ -27,7 +27,7 @@ UNPACK_CMD="tar jxf"
CLOSED_TREE=false
DONTBUILD=false
APPROVAL=false
COMMIT_AUTHOR='ffxbld@mozilla.com'
COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>'
REPODIR=''
APP_DIR=''
APP_ID=''

Просмотреть файл

@ -8,7 +8,7 @@ use mozprofile::preferences::Pref;
// a Testing :: Marionette peer before you make any changes to this file.
lazy_static! {
pub static ref DEFAULT: [(&'static str, Pref); 53] = [
pub static ref DEFAULT: [(&'static str, Pref); 54] = [
// Make sure Shield doesn't hit the network.
("app.normandy.api_url", Pref::new("")),
@ -121,6 +121,9 @@ lazy_static! {
// Show chrome errors and warnings in the error console
("javascript.options.showInConsole", Pref::new(true)),
// Disable download and usage of OpenH264, and Widevine plugins
("media.gmp-manager.updateEnabled", Pref::new(false)),
// Do not prompt with long usernames or passwords in URLs
// TODO: Remove once minimum supported Firefox release is 61.
("network.http.phishy-userpass-length", Pref::new(255)),

Просмотреть файл

@ -105,6 +105,9 @@ class GeckoInstance(object):
# Disable recommended automation prefs in CI
"marionette.prefs.recommended": False,
# Disable download and usage of OpenH264, and Widevine plugins
"media.gmp-manager.updateEnabled": False,
"media.volume_scale": "0.01",
# Do not prompt for temporary redirects

Просмотреть файл

@ -14,7 +14,7 @@ try:
except IOError:
description = None
PACKAGE_VERSION = '1.15'
PACKAGE_VERSION = '1.16.0'
deps = ['mozinfo >= 0.7',
'mozfile >= 1.0',

Просмотреть файл

@ -12,7 +12,7 @@ deps = ['httplib2 == 0.9.2',
'mozfile == 1.2',
'mozhttpd == 0.7',
'mozinfo == 0.10',
'mozinstall == 1.15',
'mozinstall == 1.16',
'mozprocess == 0.26',
'mozprofile == 1.1.0',
'mozrunner == 7.0.1',

1
third_party/rust/proc-macro2-0.3.6/.cargo-checksum.json поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"6ed5d7b9bf8805abd76f9e2a9be99b98e2cb70d9b97980b8aa09b6082d26a94d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"e99fedcb4b410c626fe1a3ab722c8b4f98baed2c64c2dff28c4eb62da354f2e2","src/stable.rs":"fd8d86f7542d211030056a7cdcc58b86131180d54f461910a4a067269eee9d4a","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"a8229931093cd6b39f759c60ef097e59bc43c98f1b0e5eea06ecc8d5d0879853"},"package":"49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"}

32
third_party/rust/proc-macro2-0.3.6/.travis.yml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,32 @@
language: rust
sudo: false
matrix:
include:
- rust: 1.15.0
- rust: stable
- rust: beta
- rust: nightly
before_script:
- pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
script:
- cargo test
- cargo build --features nightly
- cargo build --no-default-features
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps
after_success:
- travis-cargo --only nightly doc-upload
script:
- cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
env:
global:
- TRAVIS_CARGO_NIGHTLY_FEATURE=""
- secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8="
notifications:
email:
on_success: never

33
third_party/rust/proc-macro2-0.3.6/Cargo.toml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,33 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "proc-macro2"
version = "0.3.6"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
homepage = "https://github.com/alexcrichton/proc-macro2"
documentation = "https://docs.rs/proc-macro2"
readme = "README.md"
keywords = ["macros"]
license = "MIT/Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[lib]
doctest = false
[dependencies.unicode-xid]
version = "0.1"
[features]
default = ["proc-macro"]
nightly = ["proc-macro"]
proc-macro = []

201
third_party/rust/proc-macro2-0.3.6/LICENSE-APACHE поставляемый Normal file
Просмотреть файл

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/proc-macro2-0.3.6/LICENSE-MIT поставляемый Normal file
Просмотреть файл

@ -0,0 +1,25 @@
Copyright (c) 2014 Alex Crichton
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

98
third_party/rust/proc-macro2-0.3.6/README.md vendored Normal file

@ -0,0 +1,98 @@
# proc-macro2
[![Build Status](https://api.travis-ci.org/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.org/alexcrichton/proc-macro2)
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
A small shim over the `proc_macro` crate in the compiler intended to multiplex
the current stable interface (as of 2017-07-05) and the [upcoming richer
interface][upcoming].
[upcoming]: https://github.com/rust-lang/rust/pull/40939
The upcoming support has features like:
* Span information on tokens
* No need to go in/out through strings
* Structured input/output
The hope is that libraries ported to `proc_macro2` will be trivial to port to
the real `proc_macro` crate once the support on nightly is stabilized.
## Usage
This crate by default compiles on the stable version of the compiler. It only
uses the stable surface area of the `proc_macro` crate upstream in the compiler
itself. Usage is done via:
```toml
[dependencies]
proc-macro2 = "0.3"
```
followed by
```rust
extern crate proc_macro;
extern crate proc_macro2;
#[proc_macro_derive(MyDerive)]
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: proc_macro2::TokenStream = input.into();
let output: proc_macro2::TokenStream = {
/* transform input */
};
output.into()
}
```
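The `/* transform input */` placeholder above is where a real derive would build its output. As a minimal sketch of what that can look like using only the `proc_macro2` types (the `Marker` trait and `Foo` type below are made-up names, not part of this crate), one could assemble the tokens for `impl Marker for Foo { }` by hand:

```rust
extern crate proc_macro2;

use proc_macro2::{Delimiter, Group, Span, Term, TokenStream, TokenTree};

// Build the token stream `impl Marker for Foo { }` token by token.
// `Marker` and `Foo` are placeholder names for this sketch.
fn marker_impl() -> TokenStream {
    let tokens = vec![
        TokenTree::Term(Term::new("impl", Span::call_site())),
        TokenTree::Term(Term::new("Marker", Span::call_site())),
        TokenTree::Term(Term::new("for", Span::call_site())),
        TokenTree::Term(Term::new("Foo", Span::call_site())),
        // An empty `{ }` block: a brace-delimited group around an empty stream.
        TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::empty())),
    ];
    tokens.into_iter().collect()
}
```

Collecting from a `Vec<TokenTree>` works because `TokenStream` implements `FromIterator<TokenTree>`, and when the `proc-macro` feature is enabled the result converts back into a `proc_macro::TokenStream` with `.into()`.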
If you'd like, you can enable the `nightly` feature in this crate. This will
cause it to compile against the **unstable and nightly-only** features of the
`proc_macro` crate, which in turn requires a nightly compiler. It should,
however, help preserve span information coming in from the compiler itself.
You can enable this feature via:
```toml
[dependencies]
proc-macro2 = { version = "0.3", features = ["nightly"] }
```
## Unstable Features
`proc-macro2` supports exporting some methods from `proc_macro` which are
currently highly unstable, and may not be stabilized in the first pass of
`proc_macro` stabilizations. These features are not exported by default. Minor
versions of `proc-macro2` may make breaking changes to them at any time.
To enable these features, the `procmacro2_semver_exempt` config flag must be
passed to rustc.
```
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
```
Note that this must not only be done for your crate, but for any crate that
depends on your crate. This infectious nature is intentional, as it serves as a
reminder that you are outside of the normal semver guarantees.
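Downstream code then typically guards its own use of those APIs behind the same cfg. A small sketch of that pattern (the `describe` helper is illustrative, not an API of this crate) using the semver-exempt `Span::start`/`Span::end` methods:

```rust
extern crate proc_macro2;

use proc_macro2::Span;

// Only compiled when `--cfg procmacro2_semver_exempt` is passed via RUSTFLAGS,
// since `Span::start`/`Span::end` are semver-exempt methods.
#[cfg(procmacro2_semver_exempt)]
fn describe(span: Span) -> String {
    let start = span.start();
    let end = span.end();
    format!(
        "{}:{} .. {}:{}",
        start.line, start.column, end.line, end.column
    )
}

// Fallback used on builds without the flag.
#[cfg(not(procmacro2_semver_exempt))]
fn describe(_span: Span) -> String {
    String::from("<span info unavailable>")
}
```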
# License
This project is licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in proc-macro2 by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.

590
third_party/rust/proc-macro2-0.3.6/src/lib.rs vendored Normal file

@ -0,0 +1,590 @@
//! A "shim crate" intended to multiplex the [`proc_macro`] API on to stable
//! Rust.
//!
//! Procedural macros in Rust operate over the upstream
//! [`proc_macro::TokenStream`][ts] type. This type currently is quite
//! conservative and exposes no internal implementation details. Nightly
//! compilers, however, contain a much richer interface. This richer interface
//! allows fine-grained inspection of the token stream which avoids
//! stringification/re-lexing and also preserves span information.
//!
//! The upcoming APIs added to [`proc_macro`] upstream are the foundation for
//! productive procedural macros in the ecosystem. To help prepare the ecosystem
//! for using them this crate serves to both compile on stable and nightly and
//! mirrors the API-to-be. The intention is that procedural macros which switch
//! to use this crate will be trivially able to switch to the upstream
//! `proc_macro` crate once its API stabilizes.
//!
//! In the meantime this crate also has a `nightly` Cargo feature which
//! enables it to reimplement itself with the unstable API of [`proc_macro`].
//! This'll allow immediate usage of the beneficial upstream API, particularly
//! around preserving span information.
//!
//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
// Proc-macro2 types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.6")]
#![cfg_attr(feature = "nightly", feature(proc_macro))]
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
#[cfg(not(feature = "nightly"))]
extern crate unicode_xid;
use std::fmt;
use std::iter::FromIterator;
use std::marker;
use std::rc::Rc;
use std::str::FromStr;
#[macro_use]
#[cfg(not(feature = "nightly"))]
mod strnom;
#[path = "stable.rs"]
#[cfg(not(feature = "nightly"))]
mod imp;
#[path = "unstable.rs"]
#[cfg(feature = "nightly")]
mod imp;
#[derive(Clone)]
pub struct TokenStream {
inner: imp::TokenStream,
_marker: marker::PhantomData<Rc<()>>,
}
pub struct LexError {
inner: imp::LexError,
_marker: marker::PhantomData<Rc<()>>,
}
impl TokenStream {
fn _new(inner: imp::TokenStream) -> TokenStream {
TokenStream {
inner: inner,
_marker: marker::PhantomData,
}
}
pub fn empty() -> TokenStream {
TokenStream::_new(imp::TokenStream::empty())
}
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
}
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
let e = src.parse().map_err(|e| LexError {
inner: e,
_marker: marker::PhantomData,
})?;
Ok(TokenStream::_new(e))
}
}
#[cfg(feature = "proc-macro")]
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream::_new(inner.into())
}
}
#[cfg(feature = "proc-macro")]
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.inner.into()
}
}
impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
TokenStream::_new(streams.into_iter().collect())
}
}
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
impl fmt::Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
// Returned by reference, so we can't easily wrap it.
#[cfg(procmacro2_semver_exempt)]
pub use imp::FileName;
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(imp::SourceFile);
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
self.0.path()
}
pub fn is_real(&self) -> bool {
self.0.is_real()
}
}
#[cfg(procmacro2_semver_exempt)]
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.0.path()
}
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[cfg(procmacro2_semver_exempt)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
pub struct Span {
inner: imp::Span,
_marker: marker::PhantomData<Rc<()>>,
}
impl Span {
fn _new(inner: imp::Span) -> Span {
Span {
inner: inner,
_marker: marker::PhantomData,
}
}
pub fn call_site() -> Span {
Span::_new(imp::Span::call_site())
}
#[cfg(procmacro2_semver_exempt)]
pub fn def_site() -> Span {
Span::_new(imp::Span::def_site())
}
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
#[cfg(procmacro2_semver_exempt)]
pub fn resolved_at(&self, other: Span) -> Span {
Span::_new(self.inner.resolved_at(other.inner))
}
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
#[cfg(procmacro2_semver_exempt)]
pub fn located_at(&self, other: Span) -> Span {
Span::_new(self.inner.located_at(other.inner))
}
/// This method is only available when the `"nightly"` feature is enabled.
#[cfg(all(feature = "nightly", feature = "proc-macro"))]
pub fn unstable(self) -> proc_macro::Span {
self.inner.unstable()
}
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
SourceFile(self.inner.source_file())
}
#[cfg(procmacro2_semver_exempt)]
pub fn start(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.start();
LineColumn {
line: line,
column: column,
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn end(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.end();
LineColumn {
line: line,
column: column,
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn join(&self, other: Span) -> Option<Span> {
self.inner.join(other.inner).map(Span::_new)
}
#[cfg(procmacro2_semver_exempt)]
pub fn eq(&self, other: &Span) -> bool {
self.inner.eq(&other.inner)
}
}
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
#[derive(Clone, Debug)]
pub enum TokenTree {
Group(Group),
Term(Term),
Op(Op),
Literal(Literal),
}
impl TokenTree {
pub fn span(&self) -> Span {
match *self {
TokenTree::Group(ref t) => t.span(),
TokenTree::Term(ref t) => t.span(),
TokenTree::Op(ref t) => t.span(),
TokenTree::Literal(ref t) => t.span(),
}
}
pub fn set_span(&mut self, span: Span) {
match *self {
TokenTree::Group(ref mut t) => t.set_span(span),
TokenTree::Term(ref mut t) => t.set_span(span),
TokenTree::Op(ref mut t) => t.set_span(span),
TokenTree::Literal(ref mut t) => t.set_span(span),
}
}
}
impl From<Group> for TokenTree {
fn from(g: Group) -> TokenTree {
TokenTree::Group(g)
}
}
impl From<Term> for TokenTree {
fn from(g: Term) -> TokenTree {
TokenTree::Term(g)
}
}
impl From<Op> for TokenTree {
fn from(g: Op) -> TokenTree {
TokenTree::Op(g)
}
}
impl From<Literal> for TokenTree {
fn from(g: Literal) -> TokenTree {
TokenTree::Literal(g)
}
}
impl fmt::Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
TokenTree::Group(ref t) => t.fmt(f),
TokenTree::Term(ref t) => t.fmt(f),
TokenTree::Op(ref t) => t.fmt(f),
TokenTree::Literal(ref t) => t.fmt(f),
}
}
}
#[derive(Clone, Debug)]
pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: Span,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Delimiter {
Parenthesis,
Brace,
Bracket,
None,
}
impl Group {
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
Group {
delimiter: delimiter,
stream: stream,
span: Span::call_site(),
}
}
pub fn delimiter(&self) -> Delimiter {
self.delimiter
}
pub fn stream(&self) -> TokenStream {
self.stream.clone()
}
pub fn span(&self) -> Span {
self.span
}
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
}
impl fmt::Display for Group {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.stream.fmt(f)
}
}
#[derive(Copy, Clone, Debug)]
pub struct Op {
op: char,
spacing: Spacing,
span: Span,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
Alone,
Joint,
}
impl Op {
pub fn new(op: char, spacing: Spacing) -> Op {
Op {
op: op,
spacing: spacing,
span: Span::call_site(),
}
}
pub fn op(&self) -> char {
self.op
}
pub fn spacing(&self) -> Spacing {
self.spacing
}
pub fn span(&self) -> Span {
self.span
}
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
}
impl fmt::Display for Op {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.op.fmt(f)
}
}
#[derive(Copy, Clone)]
pub struct Term {
inner: imp::Term,
_marker: marker::PhantomData<Rc<()>>,
}
impl Term {
fn _new(inner: imp::Term) -> Term {
Term {
inner: inner,
_marker: marker::PhantomData,
}
}
pub fn new(string: &str, span: Span) -> Term {
Term::_new(imp::Term::new(string, span.inner))
}
pub fn as_str(&self) -> &str {
self.inner.as_str()
}
pub fn span(&self) -> Span {
Span::_new(self.inner.span())
}
pub fn set_span(&mut self, span: Span) {
self.inner.set_span(span.inner);
}
}
impl fmt::Display for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_str().fmt(f)
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
#[derive(Clone)]
pub struct Literal {
inner: imp::Literal,
_marker: marker::PhantomData<Rc<()>>,
}
macro_rules! int_literals {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
Literal::_new(imp::Literal::$name(n))
}
)*)
}
impl Literal {
fn _new(inner: imp::Literal) -> Literal {
Literal {
inner: inner,
_marker: marker::PhantomData,
}
}
int_literals! {
u8_suffixed => u8,
u16_suffixed => u16,
u32_suffixed => u32,
u64_suffixed => u64,
usize_suffixed => usize,
i8_suffixed => i8,
i16_suffixed => i16,
i32_suffixed => i32,
i64_suffixed => i64,
isize_suffixed => isize,
u8_unsuffixed => u8,
u16_unsuffixed => u16,
u32_unsuffixed => u32,
u64_unsuffixed => u64,
usize_unsuffixed => usize,
i8_unsuffixed => i8,
i16_unsuffixed => i16,
i32_unsuffixed => i32,
i64_unsuffixed => i64,
isize_unsuffixed => isize,
}
pub fn f64_unsuffixed(f: f64) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f64_unsuffixed(f))
}
pub fn f64_suffixed(f: f64) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f64_suffixed(f))
}
pub fn f32_unsuffixed(f: f32) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f32_unsuffixed(f))
}
pub fn f32_suffixed(f: f32) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f32_suffixed(f))
}
pub fn string(string: &str) -> Literal {
Literal::_new(imp::Literal::string(string))
}
pub fn character(ch: char) -> Literal {
Literal::_new(imp::Literal::character(ch))
}
pub fn byte_string(s: &[u8]) -> Literal {
Literal::_new(imp::Literal::byte_string(s))
}
pub fn span(&self) -> Span {
Span::_new(self.inner.span())
}
pub fn set_span(&mut self, span: Span) {
self.inner.set_span(span.inner);
}
}
impl fmt::Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
pub mod token_stream {
use std::fmt;
use std::marker;
use std::rc::Rc;
pub use TokenStream;
use TokenTree;
use imp;
pub struct IntoIter {
inner: imp::TokenTreeIter,
_marker: marker::PhantomData<Rc<()>>,
}
impl Iterator for IntoIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
self.inner.next()
}
}
impl fmt::Debug for IntoIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = IntoIter;
fn into_iter(self) -> IntoIter {
IntoIter {
inner: self.inner.into_iter(),
_marker: marker::PhantomData,
}
}
}
}

1265
third_party/rust/proc-macro2-0.3.6/src/stable.rs vendored Normal file

Diff not shown because of its large size.

391
third_party/rust/proc-macro2-0.3.6/src/strnom.rs vendored Normal file

@ -0,0 +1,391 @@
//! Adapted from [`nom`](https://github.com/Geal/nom).
use std::str::{Bytes, CharIndices, Chars};
use unicode_xid::UnicodeXID;
use imp::LexError;
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
pub rest: &'a str,
#[cfg(procmacro2_semver_exempt)]
pub off: u32,
}
impl<'a> Cursor<'a> {
#[cfg(not(procmacro2_semver_exempt))]
pub fn advance(&self, amt: usize) -> Cursor<'a> {
Cursor {
rest: &self.rest[amt..],
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn advance(&self, amt: usize) -> Cursor<'a> {
Cursor {
rest: &self.rest[amt..],
off: self.off + (amt as u32),
}
}
pub fn find(&self, p: char) -> Option<usize> {
self.rest.find(p)
}
pub fn starts_with(&self, s: &str) -> bool {
self.rest.starts_with(s)
}
pub fn is_empty(&self) -> bool {
self.rest.is_empty()
}
pub fn len(&self) -> usize {
self.rest.len()
}
pub fn as_bytes(&self) -> &'a [u8] {
self.rest.as_bytes()
}
pub fn bytes(&self) -> Bytes<'a> {
self.rest.bytes()
}
pub fn chars(&self) -> Chars<'a> {
self.rest.chars()
}
pub fn char_indices(&self) -> CharIndices<'a> {
self.rest.char_indices()
}
}
pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
pub fn whitespace(input: Cursor) -> PResult<()> {
if input.is_empty() {
return Err(LexError);
}
let bytes = input.as_bytes();
let mut i = 0;
while i < bytes.len() {
let s = input.advance(i);
if bytes[i] == b'/' {
if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////"))
&& !s.starts_with("//!")
{
if let Some(len) = s.find('\n') {
i += len + 1;
continue;
}
break;
} else if s.starts_with("/**/") {
i += 4;
continue;
} else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***"))
&& !s.starts_with("/*!")
{
let (_, com) = block_comment(s)?;
i += com.len();
continue;
}
}
match bytes[i] {
b' ' | 0x09...0x0d => {
i += 1;
continue;
}
b if b <= 0x7f => {}
_ => {
let ch = s.chars().next().unwrap();
if is_whitespace(ch) {
i += ch.len_utf8();
continue;
}
}
}
return if i > 0 { Ok((s, ())) } else { Err(LexError) };
}
Ok((input.advance(input.len()), ()))
}
pub fn block_comment(input: Cursor) -> PResult<&str> {
if !input.starts_with("/*") {
return Err(LexError);
}
let mut depth = 0;
let bytes = input.as_bytes();
let mut i = 0;
let upper = bytes.len() - 1;
while i < upper {
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
depth += 1;
i += 1; // eat '*'
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
depth -= 1;
if depth == 0 {
return Ok((input.advance(i + 2), &input.rest[..i + 2]));
}
i += 1; // eat '/'
}
i += 1;
}
Err(LexError)
}
pub fn skip_whitespace(input: Cursor) -> Cursor {
match whitespace(input) {
Ok((rest, _)) => rest,
Err(LexError) => input,
}
}
fn is_whitespace(ch: char) -> bool {
// Rust treats left-to-right mark and right-to-left mark as whitespace
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}
pub fn word_break(input: Cursor) -> PResult<()> {
match input.chars().next() {
Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
Some(_) | None => Ok((input, ())),
}
}
macro_rules! named {
($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
$submac!(i, $($args)*)
}
};
}
macro_rules! alt {
($i:expr, $e:ident | $($rest:tt)*) => {
alt!($i, call!($e) | $($rest)*)
};
($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
match $subrule!($i, $($args)*) {
res @ Ok(_) => res,
_ => alt!($i, $($rest)*)
}
};
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
match $subrule!($i, $($args)*) {
Ok((i, o)) => Ok((i, $gen(o))),
Err(LexError) => alt!($i, $($rest)*)
}
};
($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
alt!($i, call!($e) => { $gen } | $($rest)*)
};
($i:expr, $e:ident => { $gen:expr }) => {
alt!($i, call!($e) => { $gen })
};
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
match $subrule!($i, $($args)*) {
Ok((i, o)) => Ok((i, $gen(o))),
Err(LexError) => Err(LexError),
}
};
($i:expr, $e:ident) => {
alt!($i, call!($e))
};
($i:expr, $subrule:ident!( $($args:tt)*)) => {
$subrule!($i, $($args)*)
};
}
macro_rules! do_parse {
($i:expr, ( $($rest:expr),* )) => {
Ok(($i, ( $($rest),* )))
};
($i:expr, $e:ident >> $($rest:tt)*) => {
do_parse!($i, call!($e) >> $($rest)*)
};
($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, _)) => do_parse!(i, $($rest)*),
}
};
($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
do_parse!($i, $field: call!($e) >> $($rest)*)
};
($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => {
let $field = o;
do_parse!(i, $($rest)*)
},
}
};
}
macro_rules! peek {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Ok((_, o)) => Ok(($i, o)),
Err(LexError) => Err(LexError),
}
};
}
macro_rules! call {
($i:expr, $fun:expr $(, $args:expr)*) => {
$fun($i $(, $args)*)
};
}
macro_rules! option {
($i:expr, $f:expr) => {
match $f($i) {
Ok((i, o)) => Ok((i, Some(o))),
Err(LexError) => Ok(($i, None)),
}
};
}
macro_rules! take_until_newline_or_eof {
($i:expr,) => {{
if $i.len() == 0 {
Ok(($i, ""))
} else {
match $i.find('\n') {
Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
}
}
}};
}
macro_rules! tuple {
($i:expr, $($rest:tt)*) => {
tuple_parser!($i, (), $($rest)*)
};
}
/// Do not use directly. Use `tuple!`.
macro_rules! tuple_parser {
($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
};
($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
}
};
($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
}
};
($i:expr, ($($parsed:tt),*), $e:ident) => {
tuple_parser!($i, ($($parsed),*), call!($e))
};
($i:expr, (), $submac:ident!( $($args:tt)* )) => {
$submac!($i, $($args)*)
};
($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => Ok((i, ($($parsed),*, o)))
}
};
($i:expr, ($($parsed:expr),*)) => {
Ok(($i, ($($parsed),*)))
};
}
macro_rules! not {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Ok((_, _)) => Err(LexError),
Err(LexError) => Ok(($i, ())),
}
};
}
macro_rules! tag {
($i:expr, $tag:expr) => {
if $i.starts_with($tag) {
Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
} else {
Err(LexError)
}
};
}
macro_rules! punct {
($i:expr, $punct:expr) => {
$crate::strnom::punct($i, $punct)
};
}
/// Do not use directly. Use `punct!`.
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
let input = skip_whitespace(input);
if input.starts_with(token) {
Ok((input.advance(token.len()), token))
} else {
Err(LexError)
}
}
macro_rules! preceded {
($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
Ok((remaining, (_, o))) => Ok((remaining, o)),
Err(LexError) => Err(LexError),
}
};
($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
preceded!($i, $submac!($($args)*), call!($g))
};
}
macro_rules! delimited {
($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
Err(LexError) => Err(LexError),
Ok((i1, (_, o, _))) => Ok((i1, o))
}
};
}
macro_rules! map {
($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => Ok((i, call!(o, $g)))
}
};
($i:expr, $f:expr, $g:expr) => {
map!($i, call!($f), $g)
};
}

399
third_party/rust/proc-macro2-0.3.6/src/unstable.rs vendored Normal file

@ -0,0 +1,399 @@
#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
use std::fmt;
use std::iter;
use std::str::FromStr;
use proc_macro;
use {Delimiter, Group, Op, Spacing, TokenTree};
#[derive(Clone)]
pub struct TokenStream(proc_macro::TokenStream);
pub struct LexError(proc_macro::LexError);
impl TokenStream {
pub fn empty() -> TokenStream {
TokenStream(proc_macro::TokenStream::empty())
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
Ok(TokenStream(src.parse().map_err(LexError)?))
}
}
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream(inner)
}
}
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.0
}
}
impl From<TokenTree> for TokenStream {
fn from(token: TokenTree) -> TokenStream {
let tt: proc_macro::TokenTree = match token {
TokenTree::Group(tt) => {
let delim = match tt.delimiter() {
Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
Delimiter::Bracket => proc_macro::Delimiter::Bracket,
Delimiter::Brace => proc_macro::Delimiter::Brace,
Delimiter::None => proc_macro::Delimiter::None,
};
let span = tt.span();
let mut group = proc_macro::Group::new(delim, tt.stream.inner.0);
group.set_span(span.inner.0);
group.into()
}
TokenTree::Op(tt) => {
let spacing = match tt.spacing() {
Spacing::Joint => proc_macro::Spacing::Joint,
Spacing::Alone => proc_macro::Spacing::Alone,
};
let mut op = proc_macro::Op::new(tt.op(), spacing);
op.set_span(tt.span().inner.0);
op.into()
}
TokenTree::Term(tt) => tt.inner.term.into(),
TokenTree::Literal(tt) => tt.inner.lit.into(),
};
TokenStream(tt.into())
}
}
impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
let streams = streams.into_iter().map(TokenStream::from)
.flat_map(|t| t.0);
TokenStream(streams.collect::<proc_macro::TokenStream>())
}
}
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
pub struct TokenTreeIter(proc_macro::token_stream::IntoIter);
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
fn into_iter(self) -> TokenTreeIter {
TokenTreeIter(self.0.into_iter())
}
}
impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
let token = self.0.next()?;
Some(match token {
proc_macro::TokenTree::Group(tt) => {
let delim = match tt.delimiter() {
proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
proc_macro::Delimiter::Bracket => Delimiter::Bracket,
proc_macro::Delimiter::Brace => Delimiter::Brace,
proc_macro::Delimiter::None => Delimiter::None,
};
let stream = ::TokenStream::_new(TokenStream(tt.stream()));
let mut g = Group::new(delim, stream);
g.set_span(::Span::_new(Span(tt.span())));
g.into()
}
proc_macro::TokenTree::Op(tt) => {
let spacing = match tt.spacing() {
proc_macro::Spacing::Joint => Spacing::Joint,
proc_macro::Spacing::Alone => Spacing::Alone,
};
let mut o = Op::new(tt.op(), spacing);
o.set_span(::Span::_new(Span(tt.span())));
o.into()
}
proc_macro::TokenTree::Term(s) => {
::Term::_new(Term {
term: s,
}).into()
}
proc_macro::TokenTree::Literal(l) => {
::Literal::_new(Literal {
lit: l,
}).into()
}
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl fmt::Debug for TokenTreeIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("TokenTreeIter").finish()
}
}
#[derive(Clone, PartialEq, Eq)]
pub struct FileName(String);
impl fmt::Display for FileName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
// NOTE: We have to generate our own filename object here because we can't wrap
// the one provided by proc_macro.
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(proc_macro::SourceFile, FileName);
impl SourceFile {
fn new(sf: proc_macro::SourceFile) -> Self {
let filename = FileName(sf.path().to_string());
SourceFile(sf, filename)
}
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
&self.1
}
pub fn is_real(&self) -> bool {
self.0.is_real()
}
}
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.path()
}
}
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
pub struct Span(proc_macro::Span);
impl From<proc_macro::Span> for ::Span {
fn from(proc_span: proc_macro::Span) -> ::Span {
::Span::_new(Span(proc_span))
}
}
impl Span {
pub fn call_site() -> Span {
Span(proc_macro::Span::call_site())
}
pub fn def_site() -> Span {
Span(proc_macro::Span::def_site())
}
pub fn resolved_at(&self, other: Span) -> Span {
Span(self.0.resolved_at(other.0))
}
pub fn located_at(&self, other: Span) -> Span {
Span(self.0.located_at(other.0))
}
pub fn unstable(self) -> proc_macro::Span {
self.0
}
pub fn source_file(&self) -> SourceFile {
SourceFile::new(self.0.source_file())
}
pub fn start(&self) -> LineColumn {
let proc_macro::LineColumn { line, column } = self.0.start();
LineColumn { line, column }
}
pub fn end(&self) -> LineColumn {
let proc_macro::LineColumn { line, column } = self.0.end();
LineColumn { line, column }
}
pub fn join(&self, other: Span) -> Option<Span> {
self.0.join(other.0).map(Span)
}
pub fn eq(&self, other: &Span) -> bool {
self.0.eq(&other.0)
}
}
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[derive(Copy, Clone)]
pub struct Term {
term: proc_macro::Term,
}
impl Term {
pub fn new(string: &str, span: Span) -> Term {
Term {
term: proc_macro::Term::new(string, span.0),
}
}
pub fn as_str(&self) -> &str {
self.term.as_str()
}
pub fn span(&self) -> Span {
Span(self.term.span())
}
pub fn set_span(&mut self, span: Span) {
self.term.set_span(span.0);
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.term.fmt(f)
}
}
#[derive(Clone)]
pub struct Literal {
lit: proc_macro::Literal,
}
macro_rules! suffixed_numbers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
Literal::_new(proc_macro::Literal::$name(n))
}
)*)
}
macro_rules! unsuffixed_integers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
Literal::_new(proc_macro::Literal::$name(n))
}
)*)
}
impl Literal {
fn _new(lit: proc_macro::Literal) -> Literal {
Literal {
lit,
}
}
suffixed_numbers! {
u8_suffixed => u8,
u16_suffixed => u16,
u32_suffixed => u32,
u64_suffixed => u64,
usize_suffixed => usize,
i8_suffixed => i8,
i16_suffixed => i16,
i32_suffixed => i32,
i64_suffixed => i64,
isize_suffixed => isize,
f32_suffixed => f32,
f64_suffixed => f64,
}
unsuffixed_integers! {
u8_unsuffixed => u8,
u16_unsuffixed => u16,
u32_unsuffixed => u32,
u64_unsuffixed => u64,
usize_unsuffixed => usize,
i8_unsuffixed => i8,
i16_unsuffixed => i16,
i32_unsuffixed => i32,
i64_unsuffixed => i64,
isize_unsuffixed => isize,
}
pub fn f32_unsuffixed(f: f32) -> Literal {
Literal::_new(proc_macro::Literal::f32_unsuffixed(f))
}
pub fn f64_unsuffixed(f: f64) -> Literal {
Literal::_new(proc_macro::Literal::f64_unsuffixed(f))
}
pub fn string(t: &str) -> Literal {
Literal::_new(proc_macro::Literal::string(t))
}
pub fn character(t: char) -> Literal {
Literal::_new(proc_macro::Literal::character(t))
}
pub fn byte_string(bytes: &[u8]) -> Literal {
Literal::_new(proc_macro::Literal::byte_string(bytes))
}
pub fn span(&self) -> Span {
Span(self.lit.span())
}
pub fn set_span(&mut self, span: Span) {
self.lit.set_span(span.0);
}
}
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.lit.fmt(f)
}
}
impl fmt::Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.lit.fmt(f)
}
}

304
third_party/rust/proc-macro2-0.3.6/tests/test.rs vendored Normal file

@ -0,0 +1,304 @@
extern crate proc_macro2;
use std::str::{self, FromStr};
use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
#[test]
fn terms() {
assert_eq!(Term::new("String", Span::call_site()).as_str(), "String");
assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn");
assert_eq!(Term::new("_", Span::call_site()).as_str(), "_");
}
#[test]
fn raw_terms() {
assert_eq!(Term::new("r#String", Span::call_site()).as_str(), "r#String");
assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn");
assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_");
}
#[test]
fn lifetimes() {
assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a");
assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static");
assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_");
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn term_empty() {
Term::new("", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn term_number() {
Term::new("255", Span::call_site());
}
#[test]
#[should_panic(expected = "\"a#\" is not a valid Term")]
fn term_invalid() {
Term::new("a#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn raw_term_empty() {
Term::new("r#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn raw_term_number() {
Term::new("r#255", Span::call_site());
}
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Term")]
fn raw_term_invalid() {
Term::new("r#a#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn lifetime_empty() {
Term::new("'", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn lifetime_number() {
Term::new("'255", Span::call_site());
}
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
fn lifetime_invalid() {
Term::new("'a#", Span::call_site());
}
#[test]
fn literals() {
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}
#[test]
fn roundtrip() {
fn roundtrip(p: &str) {
println!("parse: {}", p);
let s = p.parse::<TokenStream>().unwrap().to_string();
println!("first: {}", s);
let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
assert_eq!(s, s2);
}
roundtrip("a");
roundtrip("<<");
roundtrip("<<=");
roundtrip(
"
1
1.0
1f32
2f64
1usize
4isize
4e10
1_000
1_0i32
8u8
9
0
0xffffffffffffffffffffffffffffffff
",
);
roundtrip("'a");
roundtrip("'static");
roundtrip("'\\u{10__FFFF}'");
roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
#[test]
fn fail() {
fn fail(p: &str) {
if p.parse::<TokenStream>().is_ok() {
panic!("should have failed to parse: {}", p);
}
}
fail("1x");
fail("1u80");
fail("1f320");
fail("' static");
fail("'mut");
fail("r#1");
fail("r#_");
}
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
use proc_macro2::TokenTree;
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
let ts = p.parse::<TokenStream>().unwrap();
check_spans_internal(ts, &mut lines);
}
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
for i in ts {
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
*lines = rest;
let start = i.span().start();
assert_eq!(start.line, sline, "sline did not match for {}", i);
assert_eq!(start.column, scol, "scol did not match for {}", i);
let end = i.span().end();
assert_eq!(end.line, eline, "eline did not match for {}", i);
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
match i {
TokenTree::Group(ref g) => {
check_spans_internal(g.stream().clone(), lines);
}
_ => {}
}
}
}
}
check_spans(
"\
/// This is a document comment
testing 123
{
testing 234
}",
&[
(1, 0, 1, 30), // #
(1, 0, 1, 30), // [ ... ]
(1, 0, 1, 30), // doc
(1, 0, 1, 30), // =
(1, 0, 1, 30), // "This is..."
(2, 0, 2, 7), // testing
(2, 8, 2, 11), // 123
(3, 0, 5, 1), // { ... }
(4, 2, 4, 9), // testing
(4, 10, 4, 13), // 234
],
);
}
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
#[test]
fn default_span() {
let start = Span::call_site().start();
assert_eq!(start.line, 1);
assert_eq!(start.column, 0);
let end = Span::call_site().end();
assert_eq!(end.line, 1);
assert_eq!(end.column, 0);
let source_file = Span::call_site().source_file();
assert_eq!(source_file.path().to_string(), "<unspecified>");
assert!(!source_file.is_real());
}
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
let source1 = "aaa\nbbb"
.parse::<TokenStream>()
.unwrap()
.into_iter()
.collect::<Vec<_>>();
let source2 = "ccc\nddd"
.parse::<TokenStream>()
.unwrap()
.into_iter()
.collect::<Vec<_>>();
assert!(source1[0].span().source_file() != source2[0].span().source_file());
assert_eq!(
source1[0].span().source_file(),
source1[1].span().source_file()
);
let joined1 = source1[0].span().join(source1[1].span());
let joined2 = source1[0].span().join(source2[0].span());
assert!(joined1.is_some());
assert!(joined2.is_none());
let start = joined1.unwrap().start();
let end = joined1.unwrap().end();
assert_eq!(start.line, 1);
assert_eq!(start.column, 0);
assert_eq!(end.line, 2);
assert_eq!(end.column, 3);
assert_eq!(
joined1.unwrap().source_file(),
source1[0].span().source_file()
);
}
#[test]
fn no_panic() {
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}
#[test]
fn tricky_doc_comment() {
let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
match tokens[0] {
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
_ => panic!("wrong token {:?}", tokens[0]),
}
let mut tokens = match tokens[1] {
proc_macro2::TokenTree::Group(ref tt) => {
assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
tt.stream().into_iter()
}
_ => panic!("wrong token {:?}", tokens[0]),
};
match tokens.next().unwrap() {
proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Literal(ref tt) => {
assert_eq!(tt.to_string(), "\" doc\"");
}
t => panic!("wrong token {:?}", t),
}
assert!(tokens.next().is_none());
let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
#[test]
fn raw_identifier() {
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
match tts.next().unwrap() {
TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
wrong => panic!("wrong token {:?}", wrong),
}
assert!(tts.next().is_none());
}


@ -1 +1 @@
{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"6ed5d7b9bf8805abd76f9e2a9be99b98e2cb70d9b97980b8aa09b6082d26a94d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"e99fedcb4b410c626fe1a3ab722c8b4f98baed2c64c2dff28c4eb62da354f2e2","src/stable.rs":"fd8d86f7542d211030056a7cdcc58b86131180d54f461910a4a067269eee9d4a","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"a8229931093cd6b39f759c60ef097e59bc43c98f1b0e5eea06ecc8d5d0879853"},"package":"49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"}
{"files":{".travis.yml":"a7e89030a6a25e881fd6ccc0f065e2990eb408e26a9cbdcdf7a73b201285ab0f","Cargo.toml":"a07d0acb6f3035bbd30180e3493f8ad05f5e8ceed8970e00ee3e3ce4c3427a0a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"261fb7bbe050bbff8a8e33da68926b44cd1bbd2b1e8b655d19ae681b8fff3c6e","src/lib.rs":"8fa6ba7df93c3ee57163f406f73fb5efa2b41709b7d27d25a46629038cabf339","src/stable.rs":"c325eadc1f0a78c55117589e6bacb72dd295ccd02cb3e2dea13e1381ad2e972e","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"69ce792a9d8a9caeb43f598923f34c986628f3416355e118f256263afe870e13","tests/test.rs":"40486961d171ea6312cf46c63834738e2bec07fee9badb677ffa28073cc09e8d"},"package":"effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"}

19
third_party/rust/proc-macro2/.travis.yml vendored

@ -7,25 +7,20 @@ matrix:
- rust: stable
- rust: beta
- rust: nightly
before_script:
- pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
script:
- cargo test
- cargo build --features nightly
- cargo build --no-default-features
- cargo test --features nightly
- cargo test --no-default-features
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps
after_success:
- travis-cargo --only nightly doc-upload
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --features nightly
- cargo update -Z minimal-versions && cargo build
before_script:
- set -o errexit
script:
- cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
env:
global:
- TRAVIS_CARGO_NIGHTLY_FEATURE=""
- secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8="
notifications:
email:

4
third_party/rust/proc-macro2/Cargo.toml vendored

@ -12,7 +12,7 @@
[package]
name = "proc-macro2"
version = "0.3.6"
version = "0.4.6"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
homepage = "https://github.com/alexcrichton/proc-macro2"
@ -21,6 +21,8 @@ readme = "README.md"
keywords = ["macros"]
license = "MIT/Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
[lib]
doctest = false

6
third_party/rust/proc-macro2/README.md vendored

@ -17,7 +17,7 @@ The upcoming support has features like:
* Structured input/output
The hope is that libraries ported to `proc_macro2` will be trivial to port to
the real `proc_macro` crate once the support on nightly is stabilize.
the real `proc_macro` crate once the support on nightly is stabilized.
## Usage
@ -27,7 +27,7 @@ itself. Usage is done via:
```toml
[dependencies]
proc-macro2 = "0.3"
proc-macro2 = "0.4"
```
followed by
@ -57,7 +57,7 @@ You can enable this feature via:
```toml
[dependencies]
proc-macro2 = { version = "0.3", features = ["nightly"] }
proc-macro2 = { version = "0.4", features = ["nightly"] }
```

566
third_party/rust/proc-macro2/src/lib.rs vendored

@ -20,42 +20,68 @@
//! This'll allow immediate usage of the beneficial upstream API, particularly
//! around preserving span information.
//!
//! # Unstable Features
//!
//! `proc-macro2` supports exporting some methods from `proc_macro` which are
//! currently highly unstable, and may not be stabilized in the first pass of
//! `proc_macro` stabilizations. These features are not exported by default.
//! Minor versions of `proc-macro2` may make breaking changes to them at any
//! time.
//!
//! To enable these features, the `procmacro2_semver_exempt` config flag must be
//! passed to rustc.
//!
//! ```sh
//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
//! ```
//!
//! Note that this must not only be done for your crate, but for any crate that
//! depends on your crate. This infectious nature is intentional, as it serves
//! as a reminder that you are outside of the normal semver guarantees.
//!
//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
// Proc-macro2 types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.6")]
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.6")]
#![cfg_attr(feature = "nightly", feature(proc_macro))]
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
#[cfg(not(feature = "nightly"))]
extern crate unicode_xid;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::marker;
use std::rc::Rc;
use std::str::FromStr;
#[macro_use]
#[cfg(not(feature = "nightly"))]
mod strnom;
mod stable;
#[path = "stable.rs"]
#[cfg(not(feature = "nightly"))]
mod imp;
use stable as imp;
#[path = "unstable.rs"]
#[cfg(feature = "nightly")]
mod imp;
/// An abstract stream of tokens, or more concretely a sequence of token trees.
///
/// This type provides interfaces for iterating over token trees and for
/// collecting token trees into one stream.
///
/// Token stream is both the input and output of `#[proc_macro]`,
/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions.
#[derive(Clone)]
pub struct TokenStream {
inner: imp::TokenStream,
_marker: marker::PhantomData<Rc<()>>,
}
/// Error returned from `TokenStream::from_str`.
pub struct LexError {
inner: imp::LexError,
_marker: marker::PhantomData<Rc<()>>,
@ -69,15 +95,37 @@ impl TokenStream {
}
}
pub fn empty() -> TokenStream {
TokenStream::_new(imp::TokenStream::empty())
fn _new_stable(inner: stable::TokenStream) -> TokenStream {
TokenStream {
inner: inner.into(),
_marker: marker::PhantomData,
}
}
/// Returns an empty `TokenStream` containing no token trees.
pub fn new() -> TokenStream {
TokenStream::_new(imp::TokenStream::new())
}
#[deprecated(since = "0.4.4", note = "please use TokenStream::new")]
pub fn empty() -> TokenStream {
TokenStream::new()
}
/// Checks if this `TokenStream` is empty.
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
}
/// Attempts to break the string into tokens and parse those tokens into a token
/// stream.
///
/// May fail for a number of reasons, for example, if the string contains
/// unbalanced delimiters or characters not existing in the language.
///
/// NOTE: Some errors may cause panics instead of returning `LexError`. We
/// reserve the right to change these errors into `LexError`s later.
impl FromStr for TokenStream {
type Err = LexError;
@ -104,18 +152,30 @@ impl From<TokenStream> for proc_macro::TokenStream {
}
}
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
self.inner.extend(streams)
}
}
/// Collects a number of token trees into a single stream.
impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
TokenStream::_new(streams.into_iter().collect())
}
}
/// Prints the token stream as a string that is supposed to be losslessly
/// convertible back into the same token stream (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
/// Prints the token stream in a form convenient for debugging.
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
@ -132,17 +192,34 @@ impl fmt::Debug for LexError {
#[cfg(procmacro2_semver_exempt)]
pub use imp::FileName;
/// The source file of a given `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(imp::SourceFile);
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
/// Get the path to this source file as a string.
/// Get the path to this source file.
///
/// ### Note
///
/// If the code span associated with this `SourceFile` was generated by an
/// external macro, this may not be an actual path on the filesystem. Use
/// [`is_real`] to check.
///
/// Also note that even if `is_real` returns `true`, if
/// `--remap-path-prefix` was passed on the command line, the path as given
/// may not actually be valid.
///
/// [`is_real`]: #method.is_real
pub fn path(&self) -> &FileName {
self.0.path()
}
/// Returns `true` if this source file is a real source file, and not
/// generated by an external macro's expansion.
pub fn is_real(&self) -> bool {
self.0.is_real()
}
@ -162,12 +239,20 @@ impl fmt::Debug for SourceFile {
}
}
/// A line-column pair representing the start or end of a `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub struct LineColumn {
/// The 1-indexed line in the source file on which the span starts or ends
/// (inclusive).
pub line: usize,
/// The 0-indexed column (in UTF-8 characters) in the source file on which
/// the span starts or ends (inclusive).
pub column: usize,
}
/// A region of source code, along with macro expansion information.
#[derive(Copy, Clone)]
pub struct Span {
inner: imp::Span,
@ -182,10 +267,25 @@ impl Span {
}
}
fn _new_stable(inner: stable::Span) -> Span {
Span {
inner: inner.into(),
_marker: marker::PhantomData,
}
}
/// The span of the invocation of the current procedural macro.
///
/// Identifiers created with this span will be resolved as if they were
/// written directly at the macro call location (call-site hygiene) and
/// other code at the macro call site will be able to refer to them as well.
pub fn call_site() -> Span {
Span::_new(imp::Span::call_site())
}
/// A span that resolves at the macro definition site.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn def_site() -> Span {
Span::_new(imp::Span::def_site())
@ -193,6 +293,8 @@ impl Span {
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn resolved_at(&self, other: Span) -> Span {
Span::_new(self.inner.resolved_at(other.inner))
@ -200,6 +302,8 @@ impl Span {
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn located_at(&self, other: Span) -> Span {
Span::_new(self.inner.located_at(other.inner))
@ -211,11 +315,17 @@ impl Span {
self.inner.unstable()
}
/// The original source file into which this span points.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
SourceFile(self.inner.source_file())
}
/// Get the starting line/column in the source file for this span.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn start(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.start();
@ -225,6 +335,9 @@ impl Span {
}
}
/// Get the ending line/column in the source file for this span.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn end(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.end();
@ -234,46 +347,67 @@ impl Span {
}
}
/// Create a new span encompassing `self` and `other`.
///
/// Returns `None` if `self` and `other` are from different files.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn join(&self, other: Span) -> Option<Span> {
self.inner.join(other.inner).map(Span::_new)
}
/// Compares two spans to see if they're equal.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn eq(&self, other: &Span) -> bool {
self.inner.eq(&other.inner)
}
}
/// Prints a span in a form convenient for debugging.
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
#[derive(Clone, Debug)]
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[derive(Clone)]
pub enum TokenTree {
/// A token stream surrounded by bracket delimiters.
Group(Group),
Term(Term),
Op(Op),
/// An identifier.
Ident(Ident),
/// A single punctuation character (`+`, `,`, `$`, etc.).
Punct(Punct),
/// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
Literal(Literal),
}
impl TokenTree {
/// Returns the span of this tree, delegating to the `span` method of
/// the contained token or a delimited stream.
pub fn span(&self) -> Span {
match *self {
TokenTree::Group(ref t) => t.span(),
TokenTree::Term(ref t) => t.span(),
TokenTree::Op(ref t) => t.span(),
TokenTree::Ident(ref t) => t.span(),
TokenTree::Punct(ref t) => t.span(),
TokenTree::Literal(ref t) => t.span(),
}
}
/// Configures the span for *only this token*.
///
/// Note that if this token is a `Group` then this method will not configure
/// the span of each of the internal tokens; it will simply delegate to
/// the `set_span` method of each variant.
pub fn set_span(&mut self, span: Span) {
match *self {
TokenTree::Group(ref mut t) => t.set_span(span),
TokenTree::Term(ref mut t) => t.set_span(span),
TokenTree::Op(ref mut t) => t.set_span(span),
TokenTree::Ident(ref mut t) => t.set_span(span),
TokenTree::Punct(ref mut t) => t.set_span(span),
TokenTree::Literal(ref mut t) => t.set_span(span),
}
}
@ -285,15 +419,15 @@ impl From<Group> for TokenTree {
}
}
impl From<Term> for TokenTree {
fn from(g: Term) -> TokenTree {
TokenTree::Term(g)
impl From<Ident> for TokenTree {
fn from(g: Ident) -> TokenTree {
TokenTree::Ident(g)
}
}
impl From<Op> for TokenTree {
fn from(g: Op) -> TokenTree {
TokenTree::Op(g)
impl From<Punct> for TokenTree {
fn from(g: Punct) -> TokenTree {
TokenTree::Punct(g)
}
}
@ -303,33 +437,77 @@ impl From<Literal> for TokenTree {
}
}
/// Prints the token tree as a string that is supposed to be losslessly
/// convertible back into the same token tree (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
impl fmt::Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
TokenTree::Group(ref t) => t.fmt(f),
TokenTree::Term(ref t) => t.fmt(f),
TokenTree::Op(ref t) => t.fmt(f),
TokenTree::Ident(ref t) => t.fmt(f),
TokenTree::Punct(ref t) => t.fmt(f),
TokenTree::Literal(ref t) => t.fmt(f),
}
}
}
#[derive(Clone, Debug)]
/// Prints the token tree in a form convenient for debugging.
impl fmt::Debug for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Each of these has the name in the struct type in the derived debug,
// so don't bother with an extra layer of indirection
match *self {
TokenTree::Group(ref t) => t.fmt(f),
TokenTree::Ident(ref t) => {
let mut debug = f.debug_struct("Ident");
debug.field("sym", &format_args!("{}", t));
#[cfg(any(feature = "nightly", procmacro2_semver_exempt))]
debug.field("span", &t.span());
debug.finish()
}
TokenTree::Punct(ref t) => t.fmt(f),
TokenTree::Literal(ref t) => t.fmt(f),
}
}
}
/// A delimited token stream.
///
/// A `Group` internally contains a `TokenStream` which is surrounded by
/// `Delimiter`s.
#[derive(Clone)]
pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: Span,
}
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Delimiter {
/// `( ... )`
Parenthesis,
/// `{ ... }`
Brace,
/// `[ ... ]`
Bracket,
/// `Ø ... Ø`
///
/// An implicit delimiter, that may, for example, appear around tokens
/// coming from a "macro variable" `$var`. It is important to preserve
/// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
/// Implicit delimiters may not survive roundtrip of a token stream through
/// a string.
None,
}
impl Group {
/// Creates a new `Group` with the given delimiter and token stream.
///
/// This constructor will set the span for this group to
/// `Span::call_site()`. To change the span you can use the `set_span`
/// method below.
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
Group {
delimiter: delimiter,
@ -338,125 +516,386 @@ impl Group {
}
}
/// Returns the delimiter of this `Group`
pub fn delimiter(&self) -> Delimiter {
self.delimiter
}
/// Returns the `TokenStream` of tokens that are delimited in this `Group`.
///
/// Note that the returned token stream does not include the delimiter
/// returned above.
pub fn stream(&self) -> TokenStream {
self.stream.clone()
}
/// Returns the span for the delimiters of this token stream, spanning the
/// entire `Group`.
pub fn span(&self) -> Span {
self.span
}
/// Configures the span for this `Group`'s delimiters, but not its internal
/// tokens.
///
/// This method will **not** set the span of all the internal tokens spanned
/// by this group, but rather it will only set the span of the delimiter
/// tokens at the level of the `Group`.
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
}
/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
impl fmt::Display for Group {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.stream.fmt(f)
let (left, right) = match self.delimiter {
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Brace => ("{", "}"),
Delimiter::Bracket => ("[", "]"),
Delimiter::None => ("", ""),
};
f.write_str(left)?;
self.stream.fmt(f)?;
f.write_str(right)?;
Ok(())
}
}
#[derive(Copy, Clone, Debug)]
pub struct Op {
impl fmt::Debug for Group {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Group");
debug.field("delimiter", &self.delimiter);
debug.field("stream", &self.stream);
#[cfg(procmacro2_semver_exempt)]
debug.field("span", &self.span);
debug.finish()
}
}
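// A minimal usage sketch (not part of the vendored source): a Group pairs a
// Delimiter with an inner TokenStream, and Display adds the delimiters back.
extern crate proc_macro2;
use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let inner: TokenStream = "1 + 2".parse().unwrap();
    let group = Group::new(Delimiter::Bracket, inner);
    assert_eq!(group.delimiter(), Delimiter::Bracket);
    println!("{}", group); // the inner stream printed between `[` and `]`
}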
/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
///
/// Multicharacter operators like `+=` are represented as two instances of
/// `Punct` with different forms of `Spacing` returned.
#[derive(Clone)]
pub struct Punct {
op: char,
spacing: Spacing,
span: Span,
}
/// Whether a `Punct` is followed immediately by another `Punct` or followed by
/// another token or whitespace.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
/// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
Alone,
/// E.g. `+` is `Joint` in `+=` or `'#`.
///
/// Additionally, single quote `'` can join with identifiers to form
/// lifetimes `'ident`.
Joint,
}
impl Op {
pub fn new(op: char, spacing: Spacing) -> Op {
Op {
impl Punct {
/// Creates a new `Punct` from the given character and spacing.
///
/// The `ch` argument must be a valid punctuation character permitted by the
/// language, otherwise the function will panic.
///
/// The returned `Punct` will have the default span of `Span::call_site()`
/// which can be further configured with the `set_span` method below.
pub fn new(op: char, spacing: Spacing) -> Punct {
Punct {
op: op,
spacing: spacing,
span: Span::call_site(),
}
}
pub fn op(&self) -> char {
/// Returns the value of this punctuation character as `char`.
pub fn as_char(&self) -> char {
self.op
}
/// Returns the spacing of this punctuation character, indicating whether
/// it's immediately followed by another `Punct` in the token stream, so
/// they can potentially be combined into a multicharacter operator
/// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
/// so the operator has certainly ended.
pub fn spacing(&self) -> Spacing {
self.spacing
}
/// Returns the span for this punctuation character.
pub fn span(&self) -> Span {
self.span
}
/// Configure the span for this punctuation character.
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
}
impl fmt::Display for Op {
/// Prints the punctuation character as a string that should be losslessly
/// convertible back into the same character.
impl fmt::Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.op.fmt(f)
}
}
#[derive(Copy, Clone)]
pub struct Term {
inner: imp::Term,
impl fmt::Debug for Punct {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Punct");
debug.field("op", &self.op);
debug.field("spacing", &self.spacing);
#[cfg(procmacro2_semver_exempt)]
debug.field("span", &self.span);
debug.finish()
}
}
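// A minimal usage sketch (not part of the vendored source): a multi-character
// operator such as `+=` is modelled as two Puncts, the first one Joint.
extern crate proc_macro2;
use proc_macro2::{Punct, Spacing};

fn main() {
    let plus = Punct::new('+', Spacing::Joint);
    let eq = Punct::new('=', Spacing::Alone);
    assert_eq!(plus.as_char(), '+');
    assert_eq!(plus.spacing(), Spacing::Joint);
    assert_eq!(eq.spacing(), Spacing::Alone);
}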
/// A word of Rust code, which may be a keyword or legal variable name.
///
/// An identifier consists of at least one Unicode code point, the first of
/// which has the XID_Start property and the rest of which have the XID_Continue
/// property.
///
/// - The empty string is not an identifier. Use `Option<Ident>`.
/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
///
/// An identifier constructed with `Ident::new` is permitted to be a Rust
/// keyword, though parsing one through its [`Synom`] implementation rejects
/// Rust keywords. Use `call!(Ident::parse_any)` when parsing to match the
/// behaviour of `Ident::new`.
///
/// [`Synom`]: https://docs.rs/syn/0.14/syn/synom/trait.Synom.html
///
/// # Examples
///
/// A new ident can be created from a string using the `Ident::new` function.
/// A span must be provided explicitly which governs the name resolution
/// behavior of the resulting identifier.
///
/// ```rust
/// extern crate proc_macro2;
///
/// use proc_macro2::{Ident, Span};
///
/// fn main() {
/// let call_ident = Ident::new("calligraphy", Span::call_site());
///
/// println!("{}", call_ident);
/// }
/// ```
///
/// An ident can be interpolated into a token stream using the `quote!` macro.
///
/// ```rust
/// #[macro_use]
/// extern crate quote;
///
/// extern crate proc_macro2;
///
/// use proc_macro2::{Ident, Span};
///
/// fn main() {
/// let ident = Ident::new("demo", Span::call_site());
///
/// // Create a variable binding whose name is this ident.
/// let expanded = quote! { let #ident = 10; };
///
/// // Create a variable binding with a slightly different name.
/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
/// let expanded = quote! { let #temp_ident = 10; };
/// }
/// ```
///
/// A string representation of the ident is available through the `to_string()`
/// method.
///
/// ```rust
/// # extern crate proc_macro2;
/// #
/// # use proc_macro2::{Ident, Span};
/// #
/// # let ident = Ident::new("another_identifier", Span::call_site());
/// #
/// // Examine the ident as a string.
/// let ident_string = ident.to_string();
/// if ident_string.len() > 60 {
/// println!("Very long identifier: {}", ident_string)
/// }
/// ```
#[derive(Clone)]
pub struct Ident {
inner: imp::Ident,
_marker: marker::PhantomData<Rc<()>>,
}
impl Term {
fn _new(inner: imp::Term) -> Term {
Term {
impl Ident {
fn _new(inner: imp::Ident) -> Ident {
Ident {
inner: inner,
_marker: marker::PhantomData,
}
}
pub fn new(string: &str, span: Span) -> Term {
Term::_new(imp::Term::new(string, span.inner))
/// Creates a new `Ident` with the given `string` as well as the specified
/// `span`.
///
/// The `string` argument must be a valid identifier permitted by the
/// language, otherwise the function will panic.
///
/// Note that `span`, currently in rustc, configures the hygiene information
/// for this identifier.
///
/// As of this time `Span::call_site()` explicitly opts-in to "call-site"
/// hygiene meaning that identifiers created with this span will be resolved
/// as if they were written directly at the location of the macro call, and
/// other code at the macro call site will be able to refer to them as well.
///
    /// Later spans like `Span::def_site()` will allow opting in to
/// "definition-site" hygiene meaning that identifiers created with this
/// span will be resolved at the location of the macro definition and other
/// code at the macro call site will not be able to refer to them.
///
/// Due to the current importance of hygiene this constructor, unlike other
/// tokens, requires a `Span` to be specified at construction.
///
/// # Panics
///
/// Panics if the input string is neither a keyword nor a legal variable
/// name.
pub fn new(string: &str, span: Span) -> Ident {
Ident::_new(imp::Ident::new(string, span.inner))
}
pub fn as_str(&self) -> &str {
self.inner.as_str()
/// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn new_raw(string: &str, span: Span) -> Ident {
Ident::_new_raw(string, span)
}
fn _new_raw(string: &str, span: Span) -> Ident {
Ident::_new(imp::Ident::new_raw(string, span.inner))
}
/// Returns the span of this `Ident`.
pub fn span(&self) -> Span {
Span::_new(self.inner.span())
}
/// Configures the span of this `Ident`, possibly changing its hygiene
/// context.
pub fn set_span(&mut self, span: Span) {
self.inner.set_span(span.inner);
}
}
impl fmt::Display for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_str().fmt(f)
impl PartialEq for Ident {
fn eq(&self, other: &Ident) -> bool {
self.to_string() == other.to_string()
}
}
impl fmt::Debug for Term {
impl<T> PartialEq<T> for Ident
where
T: ?Sized + AsRef<str>,
{
fn eq(&self, other: &T) -> bool {
self.to_string() == other.as_ref()
}
}
impl Eq for Ident {}
impl PartialOrd for Ident {
fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Ident {
fn cmp(&self, other: &Ident) -> Ordering {
self.to_string().cmp(&other.to_string())
}
}
impl Hash for Ident {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.to_string().hash(hasher)
}
}
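// A minimal usage sketch (not part of the vendored source): the comparison and
// hashing impls above compare Idents by their string form, so an Ident can be
// matched against a string literal or used as a set/map key.
extern crate proc_macro2;
use std::collections::HashSet;
use proc_macro2::{Ident, Span};

fn main() {
    let ident = Ident::new("serde", Span::call_site());
    assert!(ident == "serde");

    let mut seen = HashSet::new();
    seen.insert(Ident::new("serde", Span::call_site()));
    assert!(seen.contains(&Ident::new("serde", Span::call_site())));
}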
/// Prints the identifier as a string that should be losslessly convertible back
/// into the same identifier.
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
/// byte character (`b'a'`), an integer or floating point number with or without
/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
///
/// Boolean literals like `true` and `false` do not belong here, they are
/// `Ident`s.
#[derive(Clone)]
pub struct Literal {
inner: imp::Literal,
_marker: marker::PhantomData<Rc<()>>,
}
macro_rules! int_literals {
macro_rules! suffixed_int_literals {
($($name:ident => $kind:ident,)*) => ($(
/// Creates a new suffixed integer literal with the specified value.
///
/// This function will create an integer like `1u32` where the integer
/// value specified is the first part of the token and the integral is
/// also suffixed at the end. Literals created from negative numbers may
        /// not survive roundtrips through `TokenStream` or strings and may be
/// broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
/// span by default, which can be configured with the `set_span` method
/// below.
pub fn $name(n: $kind) -> Literal {
Literal::_new(imp::Literal::$name(n))
}
)*)
}
macro_rules! unsuffixed_int_literals {
($($name:ident => $kind:ident,)*) => ($(
/// Creates a new unsuffixed integer literal with the specified value.
///
/// This function will create an integer like `1` where the integer
/// value specified is the first part of the token. No suffix is
/// specified on this token, meaning that invocations like
/// `Literal::i8_unsuffixed(1)` are equivalent to
/// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
        /// may not survive roundtrips through `TokenStream` or strings and may
/// be broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
/// span by default, which can be configured with the `set_span` method
/// below.
pub fn $name(n: $kind) -> Literal {
Literal::_new(imp::Literal::$name(n))
}
@ -471,7 +910,14 @@ impl Literal {
}
}
int_literals! {
fn _new_stable(inner: stable::Literal) -> Literal {
Literal {
inner: inner.into(),
_marker: marker::PhantomData,
}
}
suffixed_int_literals! {
u8_suffixed => u8,
u16_suffixed => u16,
u32_suffixed => u32,
@ -482,7 +928,9 @@ impl Literal {
i32_suffixed => i32,
i64_suffixed => i64,
isize_suffixed => isize,
}
unsuffixed_int_literals! {
u8_unsuffixed => u8,
u16_unsuffixed => u16,
u32_unsuffixed => u32,
@ -505,6 +953,19 @@ impl Literal {
Literal::_new(imp::Literal::f64_suffixed(f))
}
/// Creates a new unsuffixed floating-point literal.
///
/// This constructor is similar to those like `Literal::i8_unsuffixed` where
/// the float's value is emitted directly into the token but no suffix is
/// used, so it may be inferred to be a `f64` later in the compiler.
    /// Literals created from negative numbers may not survive roundtrips through
/// `TokenStream` or strings and may be broken into two tokens (`-` and
/// positive literal).
///
/// # Panics
///
    /// This function requires that the specified float is finite; for example,
    /// if it is infinity or NaN this function will panic.
pub fn f32_unsuffixed(f: f32) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f32_unsuffixed(f))
@ -548,15 +1009,20 @@ impl fmt::Display for Literal {
}
}
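// A minimal usage sketch (not part of the vendored source): the suffixed
// constructors embed the type in the token, the unsuffixed ones do not.
extern crate proc_macro2;
use proc_macro2::Literal;

fn main() {
    assert_eq!(Literal::u32_suffixed(1).to_string(), "1u32");
    assert_eq!(Literal::u32_unsuffixed(1).to_string(), "1");
    println!("{}", Literal::f32_unsuffixed(2.5)); // prints `2.5`, no suffix
}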
/// Public implementation details for the `TokenStream` type, such as iterators.
pub mod token_stream {
use std::fmt;
use std::marker;
use std::rc::Rc;
use imp;
pub use TokenStream;
use TokenTree;
use imp;
/// An iterator over `TokenStream`'s `TokenTree`s.
///
/// The iteration is "shallow", e.g. the iterator doesn't recurse into
/// delimited groups, and returns whole groups as token trees.
pub struct IntoIter {
inner: imp::TokenTreeIter,
_marker: marker::PhantomData<Rc<()>>,

303  third_party/rust/proc-macro2/src/stable.rs (vendored)

@ -1,22 +1,20 @@
#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
use std::borrow::Borrow;
#[cfg(procmacro2_semver_exempt)]
use std::cell::RefCell;
#[cfg(procmacro2_semver_exempt)]
use std::cmp;
use std::collections::HashMap;
use std::fmt;
use std::iter;
use std::rc::Rc;
use std::str::FromStr;
use std::vec;
use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
use unicode_xid::UnicodeXID;
use {Delimiter, Group, Op, Spacing, TokenTree};
use {Delimiter, Group, Punct, Spacing, TokenTree};
#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct TokenStream {
inner: Vec<TokenTree>,
}
@ -25,7 +23,7 @@ pub struct TokenStream {
pub struct LexError;
impl TokenStream {
pub fn empty() -> TokenStream {
pub fn new() -> TokenStream {
TokenStream { inner: Vec::new() }
}
@ -65,7 +63,7 @@ impl FromStr for TokenStream {
if skip_whitespace(input).len() != 0 {
Err(LexError)
} else {
Ok(output.inner)
Ok(output)
}
}
Err(LexError) => Err(LexError),
@ -89,15 +87,15 @@ impl fmt::Display for TokenStream {
Delimiter::Bracket => ("[", "]"),
Delimiter::None => ("", ""),
};
if tt.stream().inner.inner.len() == 0 {
if tt.stream().into_iter().next().is_none() {
write!(f, "{} {}", start, end)?
} else {
write!(f, "{} {} {}", start, tt.stream(), end)?
}
}
TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
TokenTree::Op(ref tt) => {
write!(f, "{}", tt.op())?;
TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
TokenTree::Punct(ref tt) => {
write!(f, "{}", tt.as_char())?;
match tt.spacing() {
Spacing::Alone => {}
Spacing::Joint => joint = true,
@ -111,6 +109,13 @@ impl fmt::Display for TokenStream {
}
}
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("TokenStream ")?;
f.debug_list().entries(self.clone()).finish()
}
}
#[cfg(feature = "proc-macro")]
impl From<::proc_macro::TokenStream> for TokenStream {
fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
@ -149,6 +154,12 @@ impl iter::FromIterator<TokenTree> for TokenStream {
}
}
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
self.inner.extend(streams);
}
}
pub type TokenTreeIter = vec::IntoIter<TokenTree>;
impl IntoIterator for TokenStream {
@ -160,24 +171,25 @@ impl IntoIterator for TokenStream {
}
}
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FileName(String);
#[cfg(procmacro2_semver_exempt)]
#[allow(dead_code)]
pub fn file_name(s: String) -> FileName {
FileName(s)
}
impl fmt::Display for FileName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile {
name: FileName,
}
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
@ -190,14 +202,12 @@ impl SourceFile {
}
}
#[cfg(procmacro2_semver_exempt)]
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.path()
}
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceFile")
@ -207,7 +217,6 @@ impl fmt::Debug for SourceFile {
}
}
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct LineColumn {
pub line: usize,
@ -314,7 +323,7 @@ impl Codemap {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Span {
#[cfg(procmacro2_semver_exempt)]
lo: u32,
@ -393,30 +402,40 @@ impl Span {
}
}
#[derive(Copy, Clone)]
pub struct Term {
intern: usize,
span: Span,
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
#[cfg(procmacro2_semver_exempt)]
return write!(f, "bytes({}..{})", self.lo, self.hi);
#[cfg(not(procmacro2_semver_exempt))]
write!(f, "Span")
}
}
thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
#[derive(Clone)]
pub struct Ident {
sym: String,
span: Span,
raw: bool,
}
impl Term {
pub fn new(string: &str, span: Span) -> Term {
impl Ident {
fn _new(string: &str, raw: bool, span: Span) -> Ident {
validate_term(string);
Term {
intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
Ident {
sym: string.to_owned(),
span: span,
raw: raw,
}
}
pub fn as_str(&self) -> &str {
SYMBOLS.with(|interner| {
let interner = interner.borrow();
let s = interner.get(self.intern);
unsafe { &*(s as *const str) }
})
pub fn new(string: &str, span: Span) -> Ident {
Ident::_new(string, false, span)
}
pub fn new_raw(string: &str, span: Span) -> Ident {
Ident::_new(string, true, span)
}
pub fn span(&self) -> Span {
@ -428,87 +447,84 @@ impl Term {
}
}
fn validate_term(string: &str) {
let validate = if string.starts_with('\'') {
&string[1..]
} else if string.starts_with("r#") {
&string[2..]
} else {
string
};
#[inline]
fn is_ident_start(c: char) -> bool {
('a' <= c && c <= 'z')
|| ('A' <= c && c <= 'Z')
|| c == '_'
|| (c > '\x7f' && UnicodeXID::is_xid_start(c))
}
#[inline]
fn is_ident_continue(c: char) -> bool {
('a' <= c && c <= 'z')
|| ('A' <= c && c <= 'Z')
|| c == '_'
|| ('0' <= c && c <= '9')
|| (c > '\x7f' && UnicodeXID::is_xid_continue(c))
}
fn validate_term(string: &str) {
let validate = string;
if validate.is_empty() {
panic!("Term is not allowed to be empty; use Option<Term>");
panic!("Ident is not allowed to be empty; use Option<Ident>");
}
if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
panic!("Term cannot be a number; use Literal instead");
panic!("Ident cannot be a number; use Literal instead");
}
fn xid_ok(string: &str) -> bool {
fn ident_ok(string: &str) -> bool {
let mut chars = string.chars();
let first = chars.next().unwrap();
if !(UnicodeXID::is_xid_start(first) || first == '_') {
if !is_ident_start(first) {
return false;
}
for ch in chars {
if !UnicodeXID::is_xid_continue(ch) {
if !is_ident_continue(ch) {
return false;
}
}
true
}
if !xid_ok(validate) {
panic!("{:?} is not a valid Term", string);
if !ident_ok(validate) {
panic!("{:?} is not a valid Ident", string);
}
}
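// A standalone sketch (not part of the vendored source) of the same XID-based
// check performed above, written directly against the unicode-xid crate.
extern crate unicode_xid;
use unicode_xid::UnicodeXID;

fn is_valid_ident(s: &str) -> bool {
    let mut chars = s.chars();
    match chars.next() {
        Some(first) if first == '_' || UnicodeXID::is_xid_start(first) => {}
        _ => return false,
    }
    chars.all(|c| UnicodeXID::is_xid_continue(c))
}

fn main() {
    assert!(is_valid_ident("proc_macro"));
    assert!(!is_valid_ident("r#"));  // `#` is not XID_Continue
    assert!(!is_valid_ident("255")); // digits are not XID_Start
}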
impl fmt::Debug for Term {
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Term").field(&self.as_str()).finish()
}
}
struct Interner {
string_to_index: HashMap<MyRc, usize>,
index_to_string: Vec<Rc<String>>,
}
#[derive(Hash, Eq, PartialEq)]
struct MyRc(Rc<String>);
impl Borrow<str> for MyRc {
fn borrow(&self) -> &str {
&self.0
}
}
impl Interner {
fn new() -> Interner {
Interner {
string_to_index: HashMap::new(),
index_to_string: Vec::new(),
if self.raw {
"r#".fmt(f)?;
}
}
fn intern(&mut self, s: &str) -> usize {
if let Some(&idx) = self.string_to_index.get(s) {
return idx;
}
let s = Rc::new(s.to_string());
self.index_to_string.push(s.clone());
self.string_to_index
.insert(MyRc(s), self.index_to_string.len() - 1);
self.index_to_string.len() - 1
}
fn get(&self, idx: usize) -> &str {
&self.index_to_string[idx]
self.sym.fmt(f)
}
}
#[derive(Clone, Debug)]
impl fmt::Debug for Ident {
// Ident(proc_macro), Ident(r#union)
#[cfg(not(procmacro2_semver_exempt))]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut debug = f.debug_tuple("Ident");
debug.field(&format_args!("{}", self));
debug.finish()
}
// Ident {
// sym: proc_macro,
// span: bytes(128..138)
// }
#[cfg(procmacro2_semver_exempt)]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut debug = f.debug_struct("Ident");
debug.field("sym", &format_args!("{}", self));
debug.field("span", &self.span);
debug.finish()
}
}
#[derive(Clone)]
pub struct Literal {
text: String,
span: Span,
@ -584,7 +600,8 @@ impl Literal {
}
pub fn string(t: &str) -> Literal {
let mut s = t.chars()
let mut s = t
.chars()
.flat_map(|c| c.escape_default())
.collect::<String>();
s.push('"');
@ -629,17 +646,27 @@ impl fmt::Display for Literal {
}
}
fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
impl fmt::Debug for Literal {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Literal");
debug.field("lit", &format_args!("{}", self.text));
#[cfg(procmacro2_semver_exempt)]
debug.field("span", &self.span);
debug.finish()
}
}
fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
let mut trees = Vec::new();
loop {
let input_no_ws = skip_whitespace(input);
if input_no_ws.rest.len() == 0 {
break
break;
}
if let Ok((a, tokens)) = doc_comment(input_no_ws) {
input = a;
trees.extend(tokens);
continue
continue;
}
let (a, tt) = match token_tree(input_no_ws) {
@ -649,7 +676,7 @@ fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
trees.push(tt);
input = a;
}
Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
Ok((input, TokenStream { inner: trees }))
}
#[cfg(not(procmacro2_semver_exempt))]
@ -658,7 +685,7 @@ fn spanned<'a, T>(
f: fn(Cursor<'a>) -> PResult<'a, T>,
) -> PResult<'a, (T, ::Span)> {
let (a, b) = f(skip_whitespace(input))?;
Ok((a, ((b, ::Span::_new(Span { })))))
Ok((a, ((b, ::Span::_new_stable(Span {})))))
}
#[cfg(procmacro2_semver_exempt)]
@ -670,7 +697,7 @@ fn spanned<'a, T>(
let lo = input.off;
let (a, b) = f(input)?;
let hi = a.off;
let span = ::Span::_new(Span { lo: lo, hi: hi });
let span = ::Span::_new_stable(Span { lo: lo, hi: hi });
Ok((a, (b, span)))
}
@ -683,11 +710,11 @@ fn token_tree(input: Cursor) -> PResult<TokenTree> {
named!(token_kind -> TokenTree, alt!(
map!(group, TokenTree::Group)
|
map!(literal, TokenTree::Literal) // must be before symbol
map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
|
symbol
map!(op, TokenTree::Punct)
|
map!(op, TokenTree::Op)
symbol_leading_ws
));
named!(group -> Group, alt!(
@ -695,73 +722,61 @@ named!(group -> Group, alt!(
punct!("("),
token_stream,
punct!(")")
) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
) => { |ts| Group::new(Delimiter::Parenthesis, ::TokenStream::_new_stable(ts)) }
|
delimited!(
punct!("["),
token_stream,
punct!("]")
) => { |ts| Group::new(Delimiter::Bracket, ts) }
) => { |ts| Group::new(Delimiter::Bracket, ::TokenStream::_new_stable(ts)) }
|
delimited!(
punct!("{"),
token_stream,
punct!("}")
) => { |ts| Group::new(Delimiter::Brace, ts) }
) => { |ts| Group::new(Delimiter::Brace, ::TokenStream::_new_stable(ts)) }
));
fn symbol(mut input: Cursor) -> PResult<TokenTree> {
input = skip_whitespace(input);
fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
symbol(skip_whitespace(input))
}
fn symbol(input: Cursor) -> PResult<TokenTree> {
let mut chars = input.char_indices();
let lifetime = input.starts_with("'");
if lifetime {
chars.next();
}
let raw = !lifetime && input.starts_with("r#");
let raw = input.starts_with("r#");
if raw {
chars.next();
chars.next();
}
match chars.next() {
Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
Some((_, ch)) if is_ident_start(ch) => {}
_ => return Err(LexError),
}
let mut end = input.len();
for (i, ch) in chars {
if !UnicodeXID::is_xid_continue(ch) {
if !is_ident_continue(ch) {
end = i;
break;
}
}
let a = &input.rest[..end];
if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) {
if a == "r#_" {
Err(LexError)
} else if a == "_" {
Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
} else {
Ok((
input.advance(end),
::Term::new(a, ::Span::call_site()).into(),
))
let ident = if raw {
::Ident::_new_raw(&a[2..], ::Span::call_site())
} else {
::Ident::new(a, ::Span::call_site())
};
Ok((input.advance(end), ident.into()))
}
}
// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
static KEYWORDS: &'static [&'static str] = &[
"abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
"else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
"macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
"ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
"type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
];
fn literal(input: Cursor) -> PResult<::Literal> {
fn literal(input: Cursor) -> PResult<Literal> {
let input_no_ws = skip_whitespace(input);
match literal_nocapture(input_no_ws) {
@ -769,10 +784,7 @@ fn literal(input: Cursor) -> PResult<::Literal> {
let start = input.len() - input_no_ws.len();
let len = input_no_ws.len() - a.len();
let end = start + len;
Ok((
a,
::Literal::_new(Literal::_new(input.rest[start..end].to_string())),
))
Ok((a, Literal::_new(input.rest[start..end].to_string())))
}
Err(LexError) => Err(LexError),
}
@ -1131,7 +1143,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
fn int(input: Cursor) -> PResult<()> {
let (rest, ()) = digits(input)?;
for suffix in &[
"isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128"
"isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128",
] {
if rest.starts_with(suffix) {
return word_break(rest.advance(suffix.len()));
@ -1183,21 +1195,30 @@ fn digits(mut input: Cursor) -> PResult<()> {
}
}
fn op(input: Cursor) -> PResult<Op> {
fn op(input: Cursor) -> PResult<Punct> {
let input = skip_whitespace(input);
match op_char(input) {
Ok((rest, '\'')) => {
symbol(rest)?;
Ok((rest, Punct::new('\'', Spacing::Joint)))
}
Ok((rest, ch)) => {
let kind = match op_char(rest) {
Ok(_) => Spacing::Joint,
Err(LexError) => Spacing::Alone,
};
Ok((rest, Op::new(ch, kind)))
Ok((rest, Punct::new(ch, kind)))
}
Err(LexError) => Err(LexError),
}
}
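// A minimal sketch (not part of the vendored source): through the public
// parser, `+=` comes back as two Puncts, the first marked Joint by the
// one-character lookahead above.
extern crate proc_macro2;
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    let stream: TokenStream = "+=".parse().unwrap();
    let spacings: Vec<Spacing> = stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Punct(p) => p.spacing(),
            other => panic!("unexpected token {:?}", other),
        })
        .collect();
    assert_eq!(spacings, [Spacing::Joint, Spacing::Alone]);
}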
fn op_char(input: Cursor) -> PResult<char> {
if input.starts_with("//") || input.starts_with("/*") {
// Do not accept `/` of a comment as an op.
return Err(LexError);
}
let mut chars = input.chars();
let first = match chars.next() {
Some(ch) => ch,
@ -1205,7 +1226,7 @@ fn op_char(input: Cursor) -> PResult<char> {
return Err(LexError);
}
};
let recognized = "~!@#$%^&*-=+|;:,<.>/?";
let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
if recognized.contains(first) {
Ok((input.advance(first.len_utf8()), first))
} else {
@ -1216,13 +1237,13 @@ fn op_char(input: Cursor) -> PResult<char> {
fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
let mut trees = Vec::new();
let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
if inner {
trees.push(Op::new('!', Spacing::Alone).into());
trees.push(Punct::new('!', Spacing::Alone).into());
}
let mut stream = vec![
TokenTree::Term(::Term::new("doc", span)),
TokenTree::Op(Op::new('=', Spacing::Alone)),
TokenTree::Ident(::Ident::new("doc", span)),
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
TokenTree::Literal(::Literal::string(comment)),
];
for tt in stream.iter_mut() {

8  third_party/rust/proc-macro2/src/strnom.rs (vendored)

@ -4,7 +4,7 @@ use std::str::{Bytes, CharIndices, Chars};
use unicode_xid::UnicodeXID;
use imp::LexError;
use stable::LexError;
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
@ -73,7 +73,8 @@ pub fn whitespace(input: Cursor) -> PResult<()> {
while i < bytes.len() {
let s = input.advance(i);
if bytes[i] == b'/' {
if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////"))
if s.starts_with("//")
&& (!s.starts_with("///") || s.starts_with("////"))
&& !s.starts_with("//!")
{
if let Some(len) = s.find('\n') {
@ -84,7 +85,8 @@ pub fn whitespace(input: Cursor) -> PResult<()> {
} else if s.starts_with("/**/") {
i += 4;
continue;
} else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***"))
} else if s.starts_with("/*")
&& (!s.starts_with("/**") || s.starts_with("/***"))
&& !s.starts_with("/*!")
{
let (_, com) = block_comment(s)?;

521  third_party/rust/proc-macro2/src/unstable.rs (vendored)

@ -2,24 +2,64 @@
use std::fmt;
use std::iter;
use std::panic;
use std::str::FromStr;
use proc_macro;
use stable;
use {Delimiter, Group, Op, Spacing, TokenTree};
use {Delimiter, Group, Punct, Spacing, TokenTree};
#[derive(Clone)]
pub struct TokenStream(proc_macro::TokenStream);
pub enum TokenStream {
Nightly(proc_macro::TokenStream),
Stable(stable::TokenStream),
}
pub struct LexError(proc_macro::LexError);
pub enum LexError {
Nightly(proc_macro::LexError),
Stable(stable::LexError),
}
fn nightly_works() -> bool {
use std::sync::atomic::*;
static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
match WORKS.load(Ordering::SeqCst) {
1 => return false,
2 => return true,
_ => {}
}
let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
WORKS.store(works as usize + 1, Ordering::SeqCst);
works
}
fn mismatch() -> ! {
panic!("stable/nightly mismatch")
}
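// A generic sketch (not part of the vendored source) of the probe-once,
// cache-the-answer pattern nightly_works() uses; probe_call() is a
// hypothetical stand-in for proc_macro::Span::call_site(), which panics when
// the compiler-provided implementation is unavailable.
use std::panic;
use std::sync::atomic::{AtomicUsize, Ordering};

static STATE: AtomicUsize = AtomicUsize::new(0); // 0 = unknown, 1 = no, 2 = yes

fn probe_call() {
    // Stand-in for an API call that panics when the feature is missing.
}

fn feature_available() -> bool {
    match STATE.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }
    let works = panic::catch_unwind(|| probe_call()).is_ok();
    STATE.store(works as usize + 1, Ordering::SeqCst);
    works
}

fn main() {
    assert!(feature_available()); // cached after the first probe
}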
impl TokenStream {
pub fn empty() -> TokenStream {
TokenStream(proc_macro::TokenStream::empty())
pub fn new() -> TokenStream {
if nightly_works() {
TokenStream::Nightly(proc_macro::TokenStream::new())
} else {
TokenStream::Stable(stable::TokenStream::new())
}
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
match self {
TokenStream::Nightly(tts) => tts.is_empty(),
TokenStream::Stable(tts) => tts.is_empty(),
}
}
fn unwrap_nightly(self) -> proc_macro::TokenStream {
match self {
TokenStream::Nightly(s) => s,
TokenStream::Stable(_) => mismatch(),
}
}
}
@ -27,30 +67,49 @@ impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
Ok(TokenStream(src.parse().map_err(LexError)?))
if nightly_works() {
Ok(TokenStream::Nightly(src.parse()?))
} else {
Ok(TokenStream::Stable(src.parse()?))
}
}
}
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
match self {
TokenStream::Nightly(tts) => tts.fmt(f),
TokenStream::Stable(tts) => tts.fmt(f),
}
}
}
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream(inner)
TokenStream::Nightly(inner)
}
}
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.0
match inner {
TokenStream::Nightly(inner) => inner,
TokenStream::Stable(inner) => inner.to_string().parse().unwrap(),
}
}
}
impl From<stable::TokenStream> for TokenStream {
fn from(inner: stable::TokenStream) -> TokenStream {
TokenStream::Stable(inner)
}
}
impl From<TokenTree> for TokenStream {
fn from(token: TokenTree) -> TokenStream {
if !nightly_works() {
return TokenStream::Stable(token.into());
}
let tt: proc_macro::TokenTree = match token {
TokenTree::Group(tt) => {
let delim = match tt.delimiter() {
@ -60,54 +119,110 @@ impl From<TokenTree> for TokenStream {
Delimiter::None => proc_macro::Delimiter::None,
};
let span = tt.span();
let mut group = proc_macro::Group::new(delim, tt.stream.inner.0);
group.set_span(span.inner.0);
let mut group = proc_macro::Group::new(delim, tt.stream.inner.unwrap_nightly());
group.set_span(span.inner.unwrap_nightly());
group.into()
}
TokenTree::Op(tt) => {
TokenTree::Punct(tt) => {
let spacing = match tt.spacing() {
Spacing::Joint => proc_macro::Spacing::Joint,
Spacing::Alone => proc_macro::Spacing::Alone,
};
let mut op = proc_macro::Op::new(tt.op(), spacing);
op.set_span(tt.span().inner.0);
let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
op.set_span(tt.span().inner.unwrap_nightly());
op.into()
}
TokenTree::Term(tt) => tt.inner.term.into(),
TokenTree::Literal(tt) => tt.inner.lit.into(),
TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
};
TokenStream(tt.into())
TokenStream::Nightly(tt.into())
}
}
impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
let streams = streams.into_iter().map(TokenStream::from)
.flat_map(|t| t.0);
TokenStream(streams.collect::<proc_macro::TokenStream>())
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
if nightly_works() {
let trees = trees
.into_iter()
.map(TokenStream::from)
.flat_map(|t| match t {
TokenStream::Nightly(s) => s,
TokenStream::Stable(_) => mismatch(),
});
TokenStream::Nightly(trees.collect())
} else {
TokenStream::Stable(trees.into_iter().collect())
}
}
}
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
match self {
TokenStream::Nightly(tts) => {
*tts = tts
.clone()
.into_iter()
.chain(
streams
.into_iter()
.map(TokenStream::from)
.flat_map(|t| match t {
TokenStream::Nightly(tts) => tts.into_iter(),
_ => panic!(),
}),
)
.collect();
}
TokenStream::Stable(tts) => tts.extend(streams),
}
}
}
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
match self {
TokenStream::Nightly(tts) => tts.fmt(f),
TokenStream::Stable(tts) => tts.fmt(f),
}
}
}
impl From<proc_macro::LexError> for LexError {
fn from(e: proc_macro::LexError) -> LexError {
LexError::Nightly(e)
}
}
impl From<stable::LexError> for LexError {
fn from(e: stable::LexError) -> LexError {
LexError::Stable(e)
}
}
impl fmt::Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
match self {
LexError::Nightly(e) => e.fmt(f),
LexError::Stable(e) => e.fmt(f),
}
}
}
pub struct TokenTreeIter(proc_macro::token_stream::IntoIter);
pub enum TokenTreeIter {
Nightly(proc_macro::token_stream::IntoIter),
Stable(stable::TokenTreeIter),
}
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
fn into_iter(self) -> TokenTreeIter {
TokenTreeIter(self.0.into_iter())
match self {
TokenStream::Nightly(tts) => TokenTreeIter::Nightly(tts.into_iter()),
TokenStream::Stable(tts) => TokenTreeIter::Stable(tts.into_iter()),
}
}
}
@ -115,7 +230,10 @@ impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
let token = self.0.next()?;
let token = match self {
TokenTreeIter::Nightly(iter) => iter.next()?,
TokenTreeIter::Stable(iter) => return iter.next(),
};
Some(match token {
proc_macro::TokenTree::Group(tt) => {
let delim = match tt.delimiter() {
@ -124,35 +242,30 @@ impl Iterator for TokenTreeIter {
proc_macro::Delimiter::Brace => Delimiter::Brace,
proc_macro::Delimiter::None => Delimiter::None,
};
let stream = ::TokenStream::_new(TokenStream(tt.stream()));
let stream = ::TokenStream::_new(TokenStream::Nightly(tt.stream()));
let mut g = Group::new(delim, stream);
g.set_span(::Span::_new(Span(tt.span())));
g.set_span(::Span::_new(Span::Nightly(tt.span())));
g.into()
}
proc_macro::TokenTree::Op(tt) => {
proc_macro::TokenTree::Punct(tt) => {
let spacing = match tt.spacing() {
proc_macro::Spacing::Joint => Spacing::Joint,
proc_macro::Spacing::Alone => Spacing::Alone,
};
let mut o = Op::new(tt.op(), spacing);
o.set_span(::Span::_new(Span(tt.span())));
let mut o = Punct::new(tt.as_char(), spacing);
o.set_span(::Span::_new(Span::Nightly(tt.span())));
o.into()
}
proc_macro::TokenTree::Term(s) => {
::Term::_new(Term {
term: s,
}).into()
}
proc_macro::TokenTree::Literal(l) => {
::Literal::_new(Literal {
lit: l,
}).into()
}
proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Nightly(s)).into(),
proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Nightly(l)).into(),
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
match self {
TokenTreeIter::Nightly(tts) => tts.size_hint(),
TokenTreeIter::Stable(tts) => tts.size_hint(),
}
}
}
@ -162,33 +275,35 @@ impl fmt::Debug for TokenTreeIter {
}
}
#[derive(Clone, PartialEq, Eq)]
pub struct FileName(String);
impl fmt::Display for FileName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
pub use stable::FileName;
// NOTE: We have to generate our own filename object here because we can't wrap
// the one provided by proc_macro.
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(proc_macro::SourceFile, FileName);
pub enum SourceFile {
Nightly(proc_macro::SourceFile, FileName),
Stable(stable::SourceFile),
}
impl SourceFile {
fn new(sf: proc_macro::SourceFile) -> Self {
let filename = FileName(sf.path().to_string());
SourceFile(sf, filename)
fn nightly(sf: proc_macro::SourceFile) -> Self {
let filename = stable::file_name(sf.path().to_string());
SourceFile::Nightly(sf, filename)
}
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
&self.1
match self {
SourceFile::Nightly(_, f) => f,
SourceFile::Stable(a) => a.path(),
}
}
pub fn is_real(&self) -> bool {
self.0.is_real()
match self {
SourceFile::Nightly(a, _) => a.is_real(),
SourceFile::Stable(a) => a.is_real(),
}
}
}
@ -200,7 +315,10 @@ impl AsRef<FileName> for SourceFile {
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
match self {
SourceFile::Nightly(a, _) => a.fmt(f),
SourceFile::Stable(a) => a.fmt(f),
}
}
}
@ -210,104 +328,210 @@ pub struct LineColumn {
}
#[derive(Copy, Clone)]
pub struct Span(proc_macro::Span);
impl From<proc_macro::Span> for ::Span {
fn from(proc_span: proc_macro::Span) -> ::Span {
::Span::_new(Span(proc_span))
}
pub enum Span {
Nightly(proc_macro::Span),
Stable(stable::Span),
}
impl Span {
pub fn call_site() -> Span {
Span(proc_macro::Span::call_site())
if nightly_works() {
Span::Nightly(proc_macro::Span::call_site())
} else {
Span::Stable(stable::Span::call_site())
}
}
pub fn def_site() -> Span {
Span(proc_macro::Span::def_site())
if nightly_works() {
Span::Nightly(proc_macro::Span::def_site())
} else {
Span::Stable(stable::Span::def_site())
}
}
pub fn resolved_at(&self, other: Span) -> Span {
Span(self.0.resolved_at(other.0))
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.resolved_at(b)),
(Span::Stable(a), Span::Stable(b)) => Span::Stable(a.resolved_at(b)),
_ => mismatch(),
}
}
pub fn located_at(&self, other: Span) -> Span {
Span(self.0.located_at(other.0))
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.located_at(b)),
(Span::Stable(a), Span::Stable(b)) => Span::Stable(a.located_at(b)),
_ => mismatch(),
}
}
pub fn unstable(self) -> proc_macro::Span {
self.0
match self {
Span::Nightly(s) => s,
Span::Stable(_) => mismatch(),
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
SourceFile::new(self.0.source_file())
match self {
Span::Nightly(s) => SourceFile::nightly(s.source_file()),
Span::Stable(s) => SourceFile::Stable(s.source_file()),
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn start(&self) -> LineColumn {
let proc_macro::LineColumn { line, column } = self.0.start();
LineColumn { line, column }
match self {
Span::Nightly(s) => {
let proc_macro::LineColumn { line, column } = s.start();
LineColumn { line, column }
}
Span::Stable(s) => {
let stable::LineColumn { line, column } = s.start();
LineColumn { line, column }
}
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn end(&self) -> LineColumn {
let proc_macro::LineColumn { line, column } = self.0.end();
LineColumn { line, column }
match self {
Span::Nightly(s) => {
let proc_macro::LineColumn { line, column } = s.end();
LineColumn { line, column }
}
Span::Stable(s) => {
let stable::LineColumn { line, column } = s.end();
LineColumn { line, column }
}
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn join(&self, other: Span) -> Option<Span> {
self.0.join(other.0).map(Span)
let ret = match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.join(b)?),
(Span::Stable(a), Span::Stable(b)) => Span::Stable(a.join(b)?),
_ => return None,
};
Some(ret)
}
pub fn eq(&self, other: &Span) -> bool {
self.0.eq(&other.0)
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => a.eq(b),
(Span::Stable(a), Span::Stable(b)) => a.eq(b),
_ => false,
}
}
fn unwrap_nightly(self) -> proc_macro::Span {
match self {
Span::Nightly(s) => s,
Span::Stable(_) => mismatch(),
}
}
}
impl From<proc_macro::Span> for ::Span {
fn from(proc_span: proc_macro::Span) -> ::Span {
::Span::_new(Span::Nightly(proc_span))
}
}
impl From<stable::Span> for Span {
fn from(inner: stable::Span) -> Span {
Span::Stable(inner)
}
}
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[derive(Copy, Clone)]
pub struct Term {
term: proc_macro::Term,
}
impl Term {
pub fn new(string: &str, span: Span) -> Term {
Term {
term: proc_macro::Term::new(string, span.0),
match self {
Span::Nightly(s) => s.fmt(f),
Span::Stable(s) => s.fmt(f),
}
}
pub fn as_str(&self) -> &str {
self.term.as_str()
}
pub fn span(&self) -> Span {
Span(self.term.span())
}
pub fn set_span(&mut self, span: Span) {
self.term.set_span(span.0);
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.term.fmt(f)
}
}
#[derive(Clone)]
pub struct Literal {
lit: proc_macro::Literal,
pub enum Ident {
Nightly(proc_macro::Ident),
Stable(stable::Ident),
}
impl Ident {
pub fn new(string: &str, span: Span) -> Ident {
match span {
Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new(string, s)),
Span::Stable(s) => Ident::Stable(stable::Ident::new(string, s)),
}
}
pub fn new_raw(string: &str, span: Span) -> Ident {
match span {
Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new_raw(string, s)),
Span::Stable(s) => Ident::Stable(stable::Ident::new_raw(string, s)),
}
}
pub fn span(&self) -> Span {
match self {
Ident::Nightly(t) => Span::Nightly(t.span()),
Ident::Stable(t) => Span::Stable(t.span()),
}
}
pub fn set_span(&mut self, span: Span) {
match (self, span) {
(Ident::Nightly(t), Span::Nightly(s)) => t.set_span(s),
(Ident::Stable(t), Span::Stable(s)) => t.set_span(s),
_ => mismatch(),
}
}
fn unwrap_nightly(self) -> proc_macro::Ident {
match self {
Ident::Nightly(s) => s,
Ident::Stable(_) => mismatch(),
}
}
}
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Ident::Nightly(t) => t.fmt(f),
Ident::Stable(t) => t.fmt(f),
}
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Ident::Nightly(t) => t.fmt(f),
Ident::Stable(t) => t.fmt(f),
}
}
}
#[derive(Clone)]
pub enum Literal {
Nightly(proc_macro::Literal),
Stable(stable::Literal),
}
macro_rules! suffixed_numbers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
Literal::_new(proc_macro::Literal::$name(n))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::$name(n))
} else {
Literal::Stable(stable::Literal::$name(n))
}
}
)*)
}
@ -315,18 +539,16 @@ macro_rules! suffixed_numbers {
macro_rules! unsuffixed_integers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
Literal::_new(proc_macro::Literal::$name(n))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::$name(n))
} else {
Literal::Stable(stable::Literal::$name(n))
}
}
)*)
}
impl Literal {
fn _new(lit: proc_macro::Literal) -> Literal {
Literal {
lit,
}
}
suffixed_numbers! {
u8_suffixed => u8,
u16_suffixed => u16,
@ -357,43 +579,88 @@ impl Literal {
}
pub fn f32_unsuffixed(f: f32) -> Literal {
Literal::_new(proc_macro::Literal::f32_unsuffixed(f))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::f32_unsuffixed(f))
} else {
Literal::Stable(stable::Literal::f32_unsuffixed(f))
}
}
pub fn f64_unsuffixed(f: f64) -> Literal {
Literal::_new(proc_macro::Literal::f64_unsuffixed(f))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::f64_unsuffixed(f))
} else {
Literal::Stable(stable::Literal::f64_unsuffixed(f))
}
}
pub fn string(t: &str) -> Literal {
Literal::_new(proc_macro::Literal::string(t))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::string(t))
} else {
Literal::Stable(stable::Literal::string(t))
}
}
pub fn character(t: char) -> Literal {
Literal::_new(proc_macro::Literal::character(t))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::character(t))
} else {
Literal::Stable(stable::Literal::character(t))
}
}
pub fn byte_string(bytes: &[u8]) -> Literal {
Literal::_new(proc_macro::Literal::byte_string(bytes))
if nightly_works() {
Literal::Nightly(proc_macro::Literal::byte_string(bytes))
} else {
Literal::Stable(stable::Literal::byte_string(bytes))
}
}
pub fn span(&self) -> Span {
Span(self.lit.span())
match self {
Literal::Nightly(lit) => Span::Nightly(lit.span()),
Literal::Stable(lit) => Span::Stable(lit.span()),
}
}
pub fn set_span(&mut self, span: Span) {
self.lit.set_span(span.0);
match (self, span) {
(Literal::Nightly(lit), Span::Nightly(s)) => lit.set_span(s),
(Literal::Stable(lit), Span::Stable(s)) => lit.set_span(s),
_ => mismatch(),
}
}
fn unwrap_nightly(self) -> proc_macro::Literal {
match self {
Literal::Nightly(s) => s,
Literal::Stable(_) => mismatch(),
}
}
}
impl From<stable::Literal> for Literal {
fn from(s: stable::Literal) -> Literal {
Literal::Stable(s)
}
}
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.lit.fmt(f)
match self {
Literal::Nightly(t) => t.fmt(f),
Literal::Stable(t) => t.fmt(f),
}
}
}
impl fmt::Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.lit.fmt(f)
match self {
Literal::Nightly(t) => t.fmt(f),
Literal::Stable(t) => t.fmt(f),
}
}
}

157  third_party/rust/proc-macro2/tests/test.rs (vendored)

@ -2,81 +2,81 @@ extern crate proc_macro2;
use std::str::{self, FromStr};
use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
#[test]
fn terms() {
assert_eq!(Term::new("String", Span::call_site()).as_str(), "String");
assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn");
assert_eq!(Term::new("_", Span::call_site()).as_str(), "_");
assert_eq!(
Ident::new("String", Span::call_site()).to_string(),
"String"
);
assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_terms() {
assert_eq!(Term::new("r#String", Span::call_site()).as_str(), "r#String");
assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn");
assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_");
assert_eq!(
Ident::new_raw("String", Span::call_site()).to_string(),
"r#String"
);
assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}
#[test]
fn lifetimes() {
assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a");
assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static");
assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_");
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn term_empty() {
Term::new("", Span::call_site());
Ident::new("", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn term_number() {
Term::new("255", Span::call_site());
Ident::new("255", Span::call_site());
}
#[test]
#[should_panic(expected = "\"a#\" is not a valid Term")]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn term_invalid() {
Term::new("a#", Span::call_site());
Ident::new("a#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
#[should_panic(expected = "not a valid Ident")]
fn raw_term_empty() {
Term::new("r#", Span::call_site());
Ident::new("r#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
#[should_panic(expected = "not a valid Ident")]
fn raw_term_number() {
Term::new("r#255", Span::call_site());
Ident::new("r#255", Span::call_site());
}
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Term")]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_term_invalid() {
Term::new("r#a#", Span::call_site());
Ident::new("r#a#", Span::call_site());
}
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
Term::new("'", Span::call_site());
Ident::new("'", Span::call_site());
}
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
Term::new("'255", Span::call_site());
Ident::new("'255", Span::call_site());
}
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
Term::new("'a#", Span::call_site());
Ident::new("'a#", Span::call_site());
}
#[test]
@ -116,6 +116,7 @@ fn roundtrip() {
",
);
roundtrip("'a");
roundtrip("'_");
roundtrip("'static");
roundtrip("'\\u{10__FFFF}'");
roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
@ -124,15 +125,14 @@ fn roundtrip() {
#[test]
fn fail() {
fn fail(p: &str) {
if p.parse::<TokenStream>().is_ok() {
panic!("should have failed to parse: {}", p);
if let Ok(s) = p.parse::<TokenStream>() {
panic!("should have failed to parse: {}\n{:#?}", p, s);
}
}
fail("1x");
fail("1u80");
fail("1f320");
fail("' static");
fail("'mut");
fail("r#1");
fail("r#_");
}
@ -261,7 +261,7 @@ fn tricky_doc_comment() {
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
match tokens[0] {
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
_ => panic!("wrong token {:?}", tokens[0]),
}
let mut tokens = match tokens[1] {
@ -273,11 +273,11 @@ fn tricky_doc_comment() {
};
match tokens.next().unwrap() {
proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
@ -293,12 +293,91 @@ fn tricky_doc_comment() {
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
#[test]
fn op_before_comment() {
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
match tts.next().unwrap() {
TokenTree::Punct(tt) => {
assert_eq!(tt.as_char(), '~');
assert_eq!(tt.spacing(), Spacing::Alone);
}
wrong => panic!("wrong token {:?}", wrong),
}
}
#[test]
fn raw_identifier() {
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
match tts.next().unwrap() {
TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
wrong => panic!("wrong token {:?}", wrong),
}
assert!(tts.next().is_none());
}
#[test]
fn test_debug_ident() {
let ident = Ident::new("proc_macro", Span::call_site());
#[cfg(not(procmacro2_semver_exempt))]
let expected = "Ident(proc_macro)";
#[cfg(procmacro2_semver_exempt)]
let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
assert_eq!(expected, format!("{:?}", ident));
}
#[test]
#[cfg(not(feature = "nightly"))]
fn test_debug_tokenstream() {
let tts = TokenStream::from_str("[a + 1]").unwrap();
#[cfg(not(procmacro2_semver_exempt))]
let expected = "\
TokenStream [
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
sym: a
},
Punct {
op: '+',
spacing: Alone
},
Literal {
lit: 1
}
]
}
]\
";
#[cfg(procmacro2_semver_exempt)]
let expected = "\
TokenStream [
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
sym: a,
span: bytes(2..3)
},
Punct {
op: '+',
spacing: Alone,
span: bytes(4..5)
},
Literal {
lit: 1,
span: bytes(6..7)
}
],
span: bytes(1..8)
}
]\
";
assert_eq!(expected, format!("{:#?}", tts));
}

1  third_party/rust/quote-0.5.2/.cargo-checksum.json (vendored, new file)

@ -0,0 +1 @@
{"files":{"Cargo.toml":"8078663280ca2bbda17459a3c2629b84aee2b9904a83f83b87f1bf60e096692f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"bdb5b5375e8cd37b75b4e0269b8fa9fb22776df9762c1df11ec88eb4cd2dc097","src/lib.rs":"7f72accca88679bc49cc8aedf7d96c922288e66a3d63bf1d06f5da262f02a4ad","src/to_tokens.rs":"6eb18c100701d1f9556cd21b1f2faca3316e85029345274dcfe7691e7ffa254d","src/tokens.rs":"a4939fc092d6466d5a2e75474886152e880586b12e057c0d7bf7b3f22428b2de","tests/test.rs":"35bac59a637a8dc3919df51bfa0957b6f964f408cc63c7a81a3e759ab8557f55"},"package":"9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"}

30  third_party/rust/quote-0.5.2/Cargo.toml (vendored, new file)

@ -0,0 +1,30 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "quote"
version = "0.5.2"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
description = "Quasi-quoting macro quote!(...)"
documentation = "https://docs.rs/quote/"
readme = "README.md"
keywords = ["syn"]
license = "MIT/Apache-2.0"
repository = "https://github.com/dtolnay/quote"
[dependencies.proc-macro2]
version = "0.3"
default-features = false
[features]
default = ["proc-macro"]
proc-macro = ["proc-macro2/proc-macro"]

201  third_party/rust/quote-0.5.2/LICENSE-APACHE (vendored, new file)

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/quote-0.5.2/LICENSE-MIT vendored Normal file

@ -0,0 +1,25 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

135
third_party/rust/quote-0.5.2/README.md vendored Normal file

@ -0,0 +1,135 @@
Rust Quasi-Quoting
==================
[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
This crate provides the [`quote!`] macro for turning Rust syntax tree data
structures into tokens of source code.
[`quote!`]: https://docs.rs/quote/0.5/quote/macro.quote.html
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
Rust code to determine how to manipulate those tokens, and produce a stream of
tokens to hand back to the compiler to compile into the caller's crate.
Quasi-quoting is a solution to one piece of that -- producing tokens to return
to the compiler.
The idea of quasi-quoting is that we write *code* that we treat as *data*.
Within the `quote!` macro, we can write what looks like code to our text editor
or IDE. We get all the benefits of the editor's brace matching, syntax
highlighting, indentation, and maybe autocompletion. But rather than compiling
that as code into the current crate, we can treat it as data, pass it around,
mutate it, and eventually hand it back to the compiler as tokens to compile into
the macro caller's crate.
This crate is motivated by the procedural macro use case, but is a
general-purpose Rust quasi-quoting library and is not specific to procedural
macros.
*Version requirement: Quote supports any compiler version back to Rust's very
first support for procedural macros in Rust 1.15.0.*
```toml
[dependencies]
quote = "0.5"
```
```rust
#[macro_use]
extern crate quote;
```
## Syntax
The quote crate provides a [`quote!`] macro within which you can write Rust code
that gets packaged into a [`quote::Tokens`] and can be treated as data. You
should think of `Tokens` as representing a fragment of Rust source code. Call
`to_string()` on a `Tokens` to get back the fragment of source code as a string,
or call `into()` to stream them as a `TokenStream` back to the compiler in a
procedural macro.
[`quote::Tokens`]: https://docs.rs/quote/0.5/quote/struct.Tokens.html
Within the `quote!` macro, interpolation is done with `#var`. Any type
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
most Rust primitive types as well as most of the syntax tree types from [`syn`].
[`quote::ToTokens`]: https://docs.rs/quote/0.5/quote/trait.ToTokens.html
[`syn`]: https://github.com/dtolnay/syn
```rust
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
```
## Repetition
Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This
iterates through the elements of any variable interpolated within the repetition
and inserts a copy of the repetition body for each one. The variables in an
interpolation may be anything that implements `IntoIterator`, including `Vec` or
a pre-existing iterator.
- `#(#var)*` — no separators
- `#(#var),*` — the character before the asterisk is used as a separator
- `#( struct #var; )*` — the repetition can contain other things
- `#( #k => println!("{}", #v), )*` — even multiple interpolations
Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
does not produce a trailing comma. This matches the behavior of delimiters in
`macro_rules!`.
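
A minimal sketch of the separator form, assuming suffixed integer literals and
the space-separated rendering used by the crate's own tests:

```rust
#[macro_use]
extern crate quote;

fn main() {
    // Each element of `values` is interpolated once per repetition, and the
    // token written before the `*` (here a comma) is used as the separator.
    let values = vec![1u32, 2u32, 3u32];
    let tokens = quote! { #(#values),* };

    // Suffixed integers render as `1u32`, `2u32`, ... in a space-separated
    // token string, and no trailing comma is produced.
    assert_eq!("1u32 , 2u32 , 3u32", tokens.to_string());
}
```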
## Hygiene
Any interpolated tokens preserve the `Span` information provided by their
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
are spanned with [`Span::def_site()`].
[`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site
A different span can be provided explicitly through the [`quote_spanned!`]
macro.
[`quote_spanned!`]: https://docs.rs/quote/0.5/quote/macro.quote_spanned.html
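
For example, a sketch along the lines of the `quote_spanned!` documentation in
`src/lib.rs` (the span here is just `call_site()` so the snippet stands alone):

```rust
#[macro_use]
extern crate quote;
extern crate proc_macro2;

use proc_macro2::Span;

fn main() {
    // In a real procedural macro this span would come from the user's input;
    // `call_site()` stands in so the sketch compiles on its own.
    let span = Span::call_site();
    let init = 0usize;

    // Tokens created inside `quote_spanned!` carry `span` instead of the
    // default span used by plain `quote!`.
    let tokens = quote_spanned! {span=>
        Box::into_raw(Box::new(#init))
    };
    println!("{}", tokens);
}
```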
### Recursion limit
The `quote!` macro relies on deep recursion so some large invocations may fail
with "recursion limit reached" when you compile. If it fails, bump up the
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
higher limit may be necessary for especially large invocations. You don't need
this unless the compiler tells you that you need it.
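
As a minimal sketch, the attribute sits at the crate root (the top of
`main.rs` or `lib.rs`):

```rust
// Raise the macro recursion limit for unusually large `quote!` invocations;
// "128" is only an example value and can be increased further if needed.
#![recursion_limit = "128"]

#[macro_use]
extern crate quote;

fn main() {
    // A placeholder body; a real use would quote a very large fragment here.
    let tokens = quote! {};
    assert_eq!("", tokens.to_string());
}
```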
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

511
third_party/rust/quote-0.5.2/src/lib.rs vendored Normal file

@ -0,0 +1,511 @@
//! This crate provides the [`quote!`] macro for turning Rust syntax tree data
//! structures into tokens of source code.
//!
//! [`quote!`]: macro.quote.html
//!
//! Procedural macros in Rust receive a stream of tokens as input, execute
//! arbitrary Rust code to determine how to manipulate those tokens, and produce
//! a stream of tokens to hand back to the compiler to compile into the caller's
//! crate. Quasi-quoting is a solution to one piece of that -- producing tokens
//! to return to the compiler.
//!
//! The idea of quasi-quoting is that we write *code* that we treat as *data*.
//! Within the `quote!` macro, we can write what looks like code to our text
//! editor or IDE. We get all the benefits of the editor's brace matching,
//! syntax highlighting, indentation, and maybe autocompletion. But rather than
//! compiling that as code into the current crate, we can treat it as data, pass
//! it around, mutate it, and eventually hand it back to the compiler as tokens
//! to compile into the macro caller's crate.
//!
//! This crate is motivated by the procedural macro use case, but is a
//! general-purpose Rust quasi-quoting library and is not specific to procedural
//! macros.
//!
//! *Version requirement: Quote supports any compiler version back to Rust's
//! very first support for procedural macros in Rust 1.15.0.*
//!
//! ```toml
//! [dependencies]
//! quote = "0.5"
//! ```
//!
//! ```
//! #[macro_use]
//! extern crate quote;
//! #
//! # fn main() {}
//! ```
//!
//! # Example
//!
//! The following quasi-quoted block of code is something you might find in [a]
//! procedural macro having to do with data structure serialization. The `#var`
//! syntax performs interpolation of runtime variables into the quoted tokens.
//! Check out the documentation of the [`quote!`] macro for more detail about
//! the syntax. See also the [`quote_spanned!`] macro which is important for
//! implementing hygienic procedural macros.
//!
//! [a]: https://serde.rs/
//! [`quote_spanned!`]: macro.quote_spanned.html
//!
//! ```
//! # #[macro_use]
//! # extern crate quote;
//! #
//! # fn main() {
//! # let generics = "";
//! # let where_clause = "";
//! # let field_ty = "";
//! # let item_ty = "";
//! # let path = "";
//! # let value = "";
//! #
//! let tokens = quote! {
//! struct SerializeWith #generics #where_clause {
//! value: &'a #field_ty,
//! phantom: ::std::marker::PhantomData<#item_ty>,
//! }
//!
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
//! fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
//! where S: serde::Serializer
//! {
//! #path(self.value, s)
//! }
//! }
//!
//! SerializeWith {
//! value: #value,
//! phantom: ::std::marker::PhantomData::<#item_ty>,
//! }
//! };
//! #
//! # }
//! ```
//!
//! ## Recursion limit
//!
//! The `quote!` macro relies on deep recursion so some large invocations may
//! fail with "recursion limit reached" when you compile. If it fails, bump up
//! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
//! An even higher limit may be necessary for especially large invocations.
// Quote types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/quote/0.5.2")]
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
extern crate proc_macro2;
mod tokens;
pub use tokens::Tokens;
mod to_tokens;
pub use to_tokens::ToTokens;
// Not public API.
#[doc(hidden)]
pub mod __rt {
// Not public API.
pub use proc_macro2::*;
// Not public API.
pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) {
let s: TokenStream = s.parse().expect("invalid token stream");
tokens.append_all(s.into_iter().map(|mut t| {
t.set_span(span);
t
}));
}
}
/// The whole point.
///
/// Performs variable interpolation against the input and produces it as
/// [`Tokens`]. For returning tokens to the compiler in a procedural macro, use
/// `into()` to build a `TokenStream`.
///
/// [`Tokens`]: struct.Tokens.html
///
/// # Interpolation
///
/// Variable interpolation is done with `#var` (similar to `$var` in
/// `macro_rules!` macros). This grabs the `var` variable that is currently in
/// scope and inserts it in that location in the output tokens. The variable
/// must implement the [`ToTokens`] trait.
///
/// [`ToTokens`]: trait.ToTokens.html
///
/// Repetition is done using `#(...)*` or `#(...),*` again similar to
/// `macro_rules!`. This iterates through the elements of any variable
/// interpolated within the repetition and inserts a copy of the repetition body
/// for each one. The variables in an interpolation may be anything that
/// implements `IntoIterator`, including `Vec` or a pre-existing iterator.
///
/// - `#(#var)*` — no separators
/// - `#(#var),*` — the character before the asterisk is used as a separator
/// - `#( struct #var; )*` — the repetition can contain other tokens
/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
///
/// # Hygiene
///
/// Any interpolated tokens preserve the `Span` information provided by their
/// `ToTokens` implementation. Tokens that originate within the `quote!`
/// invocation are spanned with [`Span::call_site()`].
///
/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.call_site
///
/// A different span can be provided through the [`quote_spanned!`] macro.
///
/// [`quote_spanned!`]: macro.quote_spanned.html
///
/// # Example
///
/// ```
/// # #[cfg(feature = "proc-macro")]
/// extern crate proc_macro;
/// # #[cfg(not(feature = "proc-macro"))]
/// # extern crate proc_macro2 as proc_macro;
///
/// #[macro_use]
/// extern crate quote;
///
/// use proc_macro::TokenStream;
///
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// #[proc_macro_derive(HeapSize)]
/// # };
/// pub fn derive_heap_size(input: TokenStream) -> TokenStream {
/// // Parse the input and figure out what implementation to generate...
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// let name = /* ... */;
/// let expr = /* ... */;
/// # };
/// #
/// # let name = 0;
/// # let expr = 0;
///
/// let expanded = quote! {
/// // The generated impl.
/// impl ::heapsize::HeapSize for #name {
/// fn heap_size_of_children(&self) -> usize {
/// #expr
/// }
/// }
/// };
///
/// // Hand the output tokens back to the compiler.
/// expanded.into()
/// }
/// #
/// # fn main() {}
/// ```
#[macro_export]
macro_rules! quote {
($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*));
}
/// Same as `quote!`, but applies a given span to all tokens originating within
/// the macro invocation.
///
/// # Syntax
///
/// A span expression of type [`Span`], followed by `=>`, followed by the tokens
/// to quote. The span expression should be brief -- use a variable for anything
/// more than a few characters. There should be no space before the `=>` token.
///
/// [`Span`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html
///
/// ```
/// # #[macro_use]
/// # extern crate quote;
/// # extern crate proc_macro2;
/// #
/// # use proc_macro2::Span;
/// #
/// # fn main() {
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// let span = /* ... */;
/// # };
/// # let span = Span::call_site();
/// # let init = 0;
///
/// // On one line, use parentheses.
/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init)));
///
/// // On multiple lines, place the span at the top and use braces.
/// let tokens = quote_spanned! {span=>
/// Box::into_raw(Box::new(#init))
/// };
/// # }
/// ```
///
/// The lack of space before the `=>` should look jarring to Rust programmers
/// and this is intentional. The formatting is designed to be visibly
/// off-balance and draw the eye a particular way, due to the span expression
/// being evaluated in the context of the procedural macro and the remaining
/// tokens being evaluated in the generated code.
///
/// # Hygiene
///
/// Any interpolated tokens preserve the `Span` information provided by their
/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!`
/// invocation are spanned with the given span argument.
///
/// # Example
///
/// The following procedural macro code uses `quote_spanned!` to assert that a
/// particular Rust type implements the [`Sync`] trait so that references can be
/// safely shared between threads.
///
/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
///
/// ```
/// # #[macro_use]
/// # extern crate quote;
/// # extern crate proc_macro2;
/// #
/// # use quote::{Tokens, ToTokens};
/// # use proc_macro2::Span;
/// #
/// # struct Type;
/// #
/// # impl Type {
/// # fn span(&self) -> Span {
/// # Span::call_site()
/// # }
/// # }
/// #
/// # impl ToTokens for Type {
/// # fn to_tokens(&self, _tokens: &mut Tokens) {}
/// # }
/// #
/// # fn main() {
/// # let ty = Type;
/// # let call_site = Span::call_site();
/// #
/// let ty_span = ty.span();
/// let assert_sync = quote_spanned! {ty_span=>
/// struct _AssertSync where #ty: Sync;
/// };
/// # }
/// ```
///
/// If the assertion fails, the user will see an error like the following. The
/// input span of their type is highlighted in the error.
///
/// ```text
/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied
/// --> src/main.rs:10:21
/// |
/// 10 | static ref PTR: *const () = &();
/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely
/// ```
///
/// In this example it is important for the where-clause to be spanned with the
/// line/column information of the user's input type so that error messages are
/// placed appropriately by the compiler. But it is also incredibly important
/// that `Sync` resolves at the macro definition site and not the macro call
/// site. If we resolve `Sync` at the same span that the user's type is going to
/// be resolved, then they could bypass our check by defining their own trait
/// named `Sync` that is implemented for their type.
#[macro_export]
macro_rules! quote_spanned {
($span:expr=> $($tt:tt)*) => {
{
let mut _s = $crate::Tokens::new();
let _span = $span;
quote_each_token!(_s _span $($tt)*);
_s
}
};
}
// Extract the names of all #metavariables and pass them to the $finish macro.
//
// in: pounded_var_names!(then () a #b c #( #d )* #e)
// out: then!(() b d e)
#[macro_export]
#[doc(hidden)]
macro_rules! pounded_var_names {
($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => {
pounded_var_names!($finish ($($found)* $first) $($rest)*)
};
($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($rest)*)
};
($finish:ident ($($found:ident)*)) => {
$finish!(() $($found)*)
};
}
// in: nested_tuples_pat!(() a b c d e)
// out: ((((a b) c) d) e)
//
// in: nested_tuples_pat!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! nested_tuples_pat {
(()) => {
&()
};
(() $first:ident $($rest:ident)*) => {
nested_tuples_pat!(($first) $($rest)*)
};
(($pat:pat) $first:ident $($rest:ident)*) => {
nested_tuples_pat!((($pat, $first)) $($rest)*)
};
(($done:pat)) => {
$done
};
}
// in: multi_zip_expr!(() a b c d e)
// out: a.into_iter().zip(b).zip(c).zip(d).zip(e)
//
// in: multi_zip_iter!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! multi_zip_expr {
(()) => {
&[]
};
(() $single:ident) => {
$single
};
(() $first:ident $($rest:ident)*) => {
multi_zip_expr!(($first.into_iter()) $($rest)*)
};
(($zips:expr) $first:ident $($rest:ident)*) => {
multi_zip_expr!(($zips.zip($first)) $($rest)*)
};
(($done:expr)) => {
$done
};
}
#[macro_export]
#[doc(hidden)]
macro_rules! quote_each_token {
($tokens:ident $span:ident) => {};
($tokens:ident $span:ident # ! $($rest:tt)*) => {
quote_each_token!($tokens $span #);
quote_each_token!($tokens $span !);
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # ( $($inner:tt)* ) * $($rest:tt)*) => {
for pounded_var_names!(nested_tuples_pat () $($inner)*)
in pounded_var_names!(multi_zip_expr () $($inner)*) {
quote_each_token!($tokens $span $($inner)*);
}
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => {
for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*))
in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() {
if _i > 0 {
quote_each_token!($tokens $span $sep);
}
quote_each_token!($tokens $span $($inner)*);
}
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
quote_each_token!($tokens $span #);
$tokens.append({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($inner)*).into(),
);
g.set_span($span);
g
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
$crate::ToTokens::to_tokens(&$first, &mut $tokens);
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
$tokens.append({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Parenthesis,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
$tokens.append({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
$tokens.append({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Brace,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
// TODO: this seems slow... special case some `:tt` arguments?
$crate::__rt::parse(&mut $tokens, $span, stringify!($first));
quote_each_token!($tokens $span $($rest)*);
};
}

179
third_party/rust/quote-0.5.2/src/to_tokens.rs vendored Normal file

@ -0,0 +1,179 @@
use super::Tokens;
use std::borrow::Cow;
use proc_macro2::{Group, Literal, Op, Span, Term, TokenStream, TokenTree};
/// Types that can be interpolated inside a [`quote!`] invocation.
///
/// [`quote!`]: macro.quote.html
pub trait ToTokens {
/// Write `self` to the given `Tokens`.
///
/// Example implementation for a struct representing Rust paths like
/// `std::cmp::PartialEq`:
///
/// ```
/// extern crate quote;
/// use quote::{Tokens, ToTokens};
///
/// extern crate proc_macro2;
/// use proc_macro2::{TokenTree, Spacing, Span, Op};
///
/// pub struct Path {
/// pub global: bool,
/// pub segments: Vec<PathSegment>,
/// }
///
/// impl ToTokens for Path {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// for (i, segment) in self.segments.iter().enumerate() {
/// if i > 0 || self.global {
/// // Double colon `::`
/// tokens.append(Op::new(':', Spacing::Joint));
/// tokens.append(Op::new(':', Spacing::Alone));
/// }
/// segment.to_tokens(tokens);
/// }
/// }
/// }
/// #
/// # pub struct PathSegment;
/// #
/// # impl ToTokens for PathSegment {
/// # fn to_tokens(&self, tokens: &mut Tokens) {
/// # unimplemented!()
/// # }
/// # }
/// #
/// # fn main() {}
/// ```
fn to_tokens(&self, tokens: &mut Tokens);
/// Convert `self` directly into a `Tokens` object.
///
/// This method is implicitly implemented using `to_tokens`, and acts as a
/// convenience method for consumers of the `ToTokens` trait.
fn into_tokens(self) -> Tokens
where
Self: Sized,
{
let mut tokens = Tokens::new();
self.to_tokens(&mut tokens);
tokens
}
}
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ToTokens> ToTokens for Option<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Some(ref t) = *self {
t.to_tokens(tokens);
}
}
}
impl ToTokens for str {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(Literal::string(self));
}
}
impl ToTokens for String {
fn to_tokens(&self, tokens: &mut Tokens) {
self.as_str().to_tokens(tokens);
}
}
macro_rules! primitive {
($($t:ident => $name:ident)*) => ($(
impl ToTokens for $t {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(Literal::$name(*self));
}
}
)*)
}
primitive! {
i8 => i8_suffixed
i16 => i16_suffixed
i32 => i32_suffixed
i64 => i64_suffixed
isize => isize_suffixed
u8 => u8_suffixed
u16 => u16_suffixed
u32 => u32_suffixed
u64 => u64_suffixed
usize => usize_suffixed
f32 => f32_suffixed
f64 => f64_suffixed
}
impl ToTokens for char {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(Literal::character(*self));
}
}
impl ToTokens for bool {
fn to_tokens(&self, tokens: &mut Tokens) {
let word = if *self { "true" } else { "false" };
tokens.append(Term::new(word, Span::call_site()));
}
}
impl ToTokens for Group {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.clone());
}
}
impl ToTokens for Term {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.clone());
}
}
impl ToTokens for Op {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.clone());
}
}
impl ToTokens for Literal {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.clone());
}
}
impl ToTokens for TokenTree {
fn to_tokens(&self, dst: &mut Tokens) {
dst.append(self.clone());
}
}
impl ToTokens for TokenStream {
fn to_tokens(&self, dst: &mut Tokens) {
dst.append_all(self.clone().into_iter());
}
}


289
third_party/rust/quote-0.5.2/tests/test.rs vendored Normal file

@ -0,0 +1,289 @@
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
use std::borrow::Cow;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
use proc_macro2::{Span, Term};
struct X;
impl quote::ToTokens for X {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
tokens.append(Term::new("X", Span::call_site()));
}
}
#[test]
fn test_quote_impl() {
let tokens = quote! {
impl<'a, T: ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens)
}
}
};
let expected = concat!(
"impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
"fn to_tokens ( & self , tokens : & mut Tokens ) { ",
"( * * self ) . to_tokens ( tokens ) ",
"} ",
"}"
);
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_substitution() {
let x = X;
let tokens = quote!(#x <#x> (#x) [#x] {#x});
let expected = "X < X > ( X ) [ X ] { X }";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_iter() {
let primes = &[X, X, X, X];
assert_eq!("X X X X", quote!(#(#primes)*).to_string());
assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string());
assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string());
}
#[test]
fn test_advanced() {
let generics = quote!( <'a, T> );
let where_clause = quote!( where T: Serialize );
let field_ty = quote!(String);
let item_ty = quote!(Cow<'a, str>);
let path = quote!(SomeTrait::serialize_with);
let value = quote!(self.x);
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: ::serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
let expected = concat!(
"struct SerializeWith < 'a , T > where T : Serialize { ",
"value : & 'a String , ",
"phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
"} ",
"impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
"fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ",
"where S : :: serde :: Serializer ",
"{ ",
"SomeTrait :: serialize_with ( self . value , s ) ",
"} ",
"} ",
"SerializeWith { ",
"value : self . x , ",
"phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
"}"
);
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_integer() {
let ii8 = -1i8;
let ii16 = -1i16;
let ii32 = -1i32;
let ii64 = -1i64;
let iisize = -1isize;
let uu8 = 1u8;
let uu16 = 1u16;
let uu32 = 1u32;
let uu64 = 1u64;
let uusize = 1usize;
let tokens = quote! {
#ii8 #ii16 #ii32 #ii64 #iisize
#uu8 #uu16 #uu32 #uu64 #uusize
};
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_floating() {
let e32 = 2.345f32;
let e64 = 2.345f64;
let tokens = quote! {
#e32
#e64
};
let expected = concat!("2.345f32 2.345f64");
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_char() {
let zero = '\0';
let pound = '#';
let quote = '"';
let apost = '\'';
let newline = '\n';
let heart = '\u{2764}';
let tokens = quote! {
#zero #pound #quote #apost #newline #heart
};
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_str() {
let s = "\0 a 'b \" c";
let tokens = quote!(#s);
let expected = "\"\\u{0} a \\'b \\\" c\"";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_string() {
let s = "\0 a 'b \" c".to_string();
let tokens = quote!(#s);
let expected = "\"\\u{0} a \\'b \\\" c\"";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_ident() {
let foo = Term::new("Foo", Span::call_site());
let bar = Term::new(&format!("Bar{}", 7), Span::call_site());
let tokens = quote!(struct #foo; enum #bar {});
let expected = "struct Foo ; enum Bar7 { }";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_duplicate() {
let ch = 'x';
let tokens = quote!(#ch #ch);
let expected = "'x' 'x'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_fancy_repetition() {
let foo = vec!["a", "b"];
let bar = vec![true, false];
let tokens = quote! {
#(#foo: #bar),*
};
let expected = r#""a" : true , "b" : false"#;
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_nested_fancy_repetition() {
let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
let tokens = quote! {
#(
#(#nested)*
),*
};
let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_empty_repetition() {
let tokens = quote!(#(a b)* #(c d),*);
assert_eq!("", tokens.to_string());
}
#[test]
fn test_variable_name_conflict() {
// The implementation of `#(...),*` uses the variable `_i` but it should be
// fine, if a little confusing when debugging.
let _i = vec!['a', 'b'];
let tokens = quote! { #(#_i),* };
let expected = "'a' , 'b'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_empty_quote() {
let tokens = quote!();
assert_eq!("", tokens.to_string());
}
#[test]
fn test_box_str() {
let b = "str".to_owned().into_boxed_str();
let tokens = quote! { #b };
assert_eq!("\"str\"", tokens.to_string());
}
#[test]
fn test_cow() {
let owned: Cow<Term> = Cow::Owned(Term::new("owned", Span::call_site()));
let ident = Term::new("borrowed", Span::call_site());
let borrowed = Cow::Borrowed(&ident);
let tokens = quote! { #owned #borrowed };
assert_eq!("owned borrowed", tokens.to_string());
}
#[test]
fn test_closure() {
fn field_i(i: usize) -> Term {
Term::new(&format!("__field{}", i), Span::call_site())
}
let fields = (0usize..3)
.map(field_i as fn(_) -> _)
.map(|var| quote! { #var });
let tokens = quote! { #(#fields)* };
assert_eq!("__field0 __field1 __field2", tokens.to_string());
}
#[test]
fn test_append_tokens() {
let mut a = quote!(a);
let b = quote!(b);
a.append_all(b);
assert_eq!("a b", a.to_string());
}

2
third_party/rust/quote/.cargo-checksum.json vendored

@ -1 +1 @@
{"files":{"Cargo.toml":"8078663280ca2bbda17459a3c2629b84aee2b9904a83f83b87f1bf60e096692f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"bdb5b5375e8cd37b75b4e0269b8fa9fb22776df9762c1df11ec88eb4cd2dc097","src/lib.rs":"7f72accca88679bc49cc8aedf7d96c922288e66a3d63bf1d06f5da262f02a4ad","src/to_tokens.rs":"6eb18c100701d1f9556cd21b1f2faca3316e85029345274dcfe7691e7ffa254d","src/tokens.rs":"a4939fc092d6466d5a2e75474886152e880586b12e057c0d7bf7b3f22428b2de","tests/test.rs":"35bac59a637a8dc3919df51bfa0957b6f964f408cc63c7a81a3e759ab8557f55"},"package":"9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"}
{"files":{"Cargo.toml":"33e512b1a2fd40b4d0b5af4ac16ad4f163e0383ba2f4abcd7a7e575e2af3442c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"61dc7827fb2e29185f0d73594db326bfdbec8393ca7a48429b259711d42e80f9","src/ext.rs":"2e2f71fca8c8580eeed138da42d93dc21fc48d7a8da973ae6d3b616da6a3b0e3","src/lib.rs":"0dedf7935a0203324804cecdf6350245caa24dbdaaf9e168b7ab90b0883ec0c4","src/to_tokens.rs":"10dc32fbe69798408ee1f49ec25770b90eeb6b069552f50cd4e03228b8e85847","tests/test.rs":"90fe0e9a704e628339fe9298f0cb8307e94ebadfe28fffd7b2fc2d94203bc342"},"package":"e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"}

4
third_party/rust/quote/Cargo.toml vendored

@ -12,7 +12,7 @@
[package]
name = "quote"
version = "0.5.2"
version = "0.6.3"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
description = "Quasi-quoting macro quote!(...)"
@ -22,7 +22,7 @@ keywords = ["syn"]
license = "MIT/Apache-2.0"
repository = "https://github.com/dtolnay/quote"
[dependencies.proc-macro2]
version = "0.3"
version = "0.4.4"
default-features = false
[features]

38
third_party/rust/quote/README.md vendored

@ -8,7 +8,7 @@ Rust Quasi-Quoting
This crate provides the [`quote!`] macro for turning Rust syntax tree data
structures into tokens of source code.
[`quote!`]: https://docs.rs/quote/0.5/quote/macro.quote.html
[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
Rust code to determine how to manipulate those tokens, and produce a stream of
@ -31,9 +31,11 @@ macros.
*Version requirement: Quote supports any compiler version back to Rust's very
first support for procedural macros in Rust 1.15.0.*
[*Release notes*](https://github.com/dtolnay/quote/releases)
```toml
[dependencies]
quote = "0.5"
quote = "0.6"
```
```rust
@ -44,19 +46,18 @@ extern crate quote;
## Syntax
The quote crate provides a [`quote!`] macro within which you can write Rust code
that gets packaged into a [`quote::Tokens`] and can be treated as data. You
should think of `Tokens` as representing a fragment of Rust source code. Call
`to_string()` on a `Tokens` to get back the fragment of source code as a string,
or call `into()` to stream them as a `TokenStream` back to the compiler in a
procedural macro.
that gets packaged into a [`TokenStream`] and can be treated as data. You should
think of `TokenStream` as representing a fragment of Rust source code. This type
can be returned directly back to the compiler by a procedural macro to get
compiled into the caller's crate.
[`quote::Tokens`]: https://docs.rs/quote/0.5/quote/struct.Tokens.html
[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
Within the `quote!` macro, interpolation is done with `#var`. Any type
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
most Rust primitive types as well as most of the syntax tree types from [`syn`].
[`quote::ToTokens`]: https://docs.rs/quote/0.5/quote/trait.ToTokens.html
[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
[`syn`]: https://github.com/dtolnay/syn
```rust
@ -68,7 +69,8 @@ let tokens = quote! {
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: serde::Serializer
where
S: serde::Serializer,
{
#path(self.value, s)
}
@ -102,14 +104,24 @@ does not produce a trailing comma. This matches the behavior of delimiters in
Any interpolated tokens preserve the `Span` information provided by their
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
are spanned with [`Span::def_site()`].
are spanned with [`Span::call_site()`].
[`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site
[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
A different span can be provided explicitly through the [`quote_spanned!`]
macro.
[`quote_spanned!`]: https://docs.rs/quote/0.5/quote/macro.quote_spanned.html
[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
### Limitations
- A non-repeating variable may not be interpolated inside of a repeating block
([#7]).
- The same variable may not be interpolated more than once inside of a repeating
block ([#8]).
[#7]: https://github.com/dtolnay/quote/issues/7
[#8]: https://github.com/dtolnay/quote/issues/8
### Recursion limit
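
A rough sketch of the 0.6-style usage this README describes, assuming quote 0.6
with proc-macro2 0.4 (the `HeapSize` names follow the crate's own documentation
example):

```rust
#[macro_use]
extern crate quote;
extern crate proc_macro2;

use proc_macro2::TokenStream;

fn main() {
    // `quote!` now yields a proc_macro2::TokenStream instead of quote::Tokens.
    // Inside a procedural macro the result would be converted with `.into()`
    // and returned straight to the compiler.
    let name = quote!(Foo);
    let expanded: TokenStream = quote! {
        impl ::heapsize::HeapSize for #name {
            fn heap_size_of_children(&self) -> usize { 0 }
        }
    };
    println!("{}", expanded);
}
```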

Some files were not shown because too many files have changed.