From ba9ddd45c515a12905090c8117383ac2a38e929c Mon Sep 17 00:00:00 2001 From: Narcis Beleuzu Date: Fri, 6 Nov 2020 03:32:33 +0200 Subject: [PATCH] Backed out 3 changesets (bug 1675639) for Bpgo bustages. CLOSED TREE Backed out changeset 9a7b394ca845 (bug 1675639) Backed out changeset 270207ea42c1 (bug 1675639) Backed out changeset 3d38566d80c6 (bug 1675639) --- Cargo.lock | 26 +- servo/components/style/custom_properties.rs | 19 +- .../rust/autocfg-0.1.6/.cargo-checksum.json | 1 - third_party/rust/autocfg-0.1.6/Cargo.lock | 6 - third_party/rust/autocfg-0.1.6/Cargo.toml | 24 - third_party/rust/autocfg-0.1.6/LICENSE-APACHE | 201 -- third_party/rust/autocfg-0.1.6/LICENSE-MIT | 25 - third_party/rust/autocfg-0.1.6/README.md | 81 - .../rust/autocfg-0.1.6/examples/integers.rs | 9 - .../rust/autocfg-0.1.6/examples/paths.rs | 22 - .../rust/autocfg-0.1.6/examples/traits.rs | 26 - .../rust/autocfg-0.1.6/examples/versions.rs | 9 - third_party/rust/autocfg-0.1.6/src/error.rs | 69 - third_party/rust/autocfg-0.1.6/src/lib.rs | 328 --- third_party/rust/autocfg-0.1.6/src/tests.rs | 99 - third_party/rust/autocfg-0.1.6/src/version.rs | 60 - third_party/rust/autocfg/.cargo-checksum.json | 2 +- third_party/rust/autocfg/Cargo.lock | 2 +- third_party/rust/autocfg/Cargo.toml | 5 +- third_party/rust/autocfg/README.md | 25 +- third_party/rust/autocfg/src/lib.rs | 116 +- third_party/rust/autocfg/src/tests.rs | 122 +- third_party/rust/autocfg/tests/rustflags.rs | 19 - .../rust/indexmap/.cargo-checksum.json | 2 +- third_party/rust/indexmap/Cargo.toml | 31 +- third_party/rust/indexmap/README.rst | 158 +- third_party/rust/indexmap/benches/bench.rs | 216 +- .../rust/indexmap/benches/faststring.rs | 56 +- third_party/rust/indexmap/build.rs | 8 - third_party/rust/indexmap/src/equivalent.rs | 8 +- third_party/rust/indexmap/src/lib.rs | 162 +- third_party/rust/indexmap/src/macros.rs | 30 +- third_party/rust/indexmap/src/map.rs | 1991 +++++++++++------ third_party/rust/indexmap/src/map/core.rs | 410 ---- third_party/rust/indexmap/src/map/core/raw.rs | 335 --- third_party/rust/indexmap/src/mutable_keys.rs | 38 +- third_party/rust/indexmap/src/rayon/map.rs | 205 +- third_party/rust/indexmap/src/rayon/mod.rs | 34 +- third_party/rust/indexmap/src/rayon/set.rs | 281 +-- third_party/rust/indexmap/src/serde.rs | 126 +- third_party/rust/indexmap/src/set.rs | 713 ++---- third_party/rust/indexmap/src/util.rs | 42 +- .../rust/indexmap/tests/equivalent_trait.rs | 22 +- .../rust/indexmap/tests/macros_full_path.rs | 19 - third_party/rust/indexmap/tests/quick.rs | 231 +- third_party/rust/indexmap/tests/serde.rs | 59 + third_party/rust/indexmap/tests/tests.rs | 16 +- 47 files changed, 2263 insertions(+), 4226 deletions(-) delete mode 100644 third_party/rust/autocfg-0.1.6/.cargo-checksum.json delete mode 100644 third_party/rust/autocfg-0.1.6/Cargo.lock delete mode 100644 third_party/rust/autocfg-0.1.6/Cargo.toml delete mode 100644 third_party/rust/autocfg-0.1.6/LICENSE-APACHE delete mode 100644 third_party/rust/autocfg-0.1.6/LICENSE-MIT delete mode 100644 third_party/rust/autocfg-0.1.6/README.md delete mode 100644 third_party/rust/autocfg-0.1.6/examples/integers.rs delete mode 100644 third_party/rust/autocfg-0.1.6/examples/paths.rs delete mode 100644 third_party/rust/autocfg-0.1.6/examples/traits.rs delete mode 100644 third_party/rust/autocfg-0.1.6/examples/versions.rs delete mode 100644 third_party/rust/autocfg-0.1.6/src/error.rs delete mode 100644 third_party/rust/autocfg-0.1.6/src/lib.rs delete mode 100644 
third_party/rust/autocfg-0.1.6/src/tests.rs
 delete mode 100644 third_party/rust/autocfg-0.1.6/src/version.rs
 delete mode 100644 third_party/rust/autocfg/tests/rustflags.rs
 delete mode 100644 third_party/rust/indexmap/build.rs
 delete mode 100644 third_party/rust/indexmap/src/map/core.rs
 delete mode 100644 third_party/rust/indexmap/src/map/core/raw.rs
 delete mode 100644 third_party/rust/indexmap/tests/macros_full_path.rs
 create mode 100644 third_party/rust/indexmap/tests/serde.rs

diff --git a/Cargo.lock b/Cargo.lock
index 7d04698747c3..fad2e3e704b2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -210,12 +210,6 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"
 
-[[package]]
-name = "autocfg"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
-
 [[package]]
 name = "baldrdash"
 version = "0.1.0"
@@ -874,7 +868,7 @@ version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
  "cfg-if 0.1.10",
  "crossbeam-utils 0.7.0",
  "lazy_static",
@@ -907,7 +901,7 @@ version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
  "cfg-if 0.1.10",
  "lazy_static",
 ]
@@ -2383,13 +2377,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.6.0"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2"
-dependencies = [
- "autocfg 1.0.1",
- "hashbrown",
-]
+checksum = "a4d6d89e0948bf10c08b9ecc8ac5b83f07f857ebe2c0cbe38de15b4e4f510356"
 
 [[package]]
 name = "inflate"
@@ -3455,7 +3445,7 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
  "num-integer",
  "num-traits",
 ]
@@ -3477,7 +3467,7 @@ version = "0.1.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
  "num-traits",
 ]
 
@@ -3487,7 +3477,7 @@ version = "0.1.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
  "num-integer",
  "num-traits",
 ]
@@ -3508,7 +3498,7 @@ version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
 dependencies = [
- "autocfg 0.1.6",
+ "autocfg",
 ]
 
 [[package]]
diff --git a/servo/components/style/custom_properties.rs b/servo/components/style/custom_properties.rs
index 4a0bf5bed7aa..23e25b5a155b 100644
--- a/servo/components/style/custom_properties.rs
+++ b/servo/components/style/custom_properties.rs
@@ -247,11 +247,8 @@ impl VariableValue {
             .collect::<Vec<_>>()
             .into_boxed_slice();
 
-        let mut css = css.into_owned();
-        css.shrink_to_fit();
-
         Ok(Arc::new(VariableValue {
-            css,
+            css: css.into_owned(),
             first_token_type,
             last_token_type,
             references: custom_property_references,
@@ -271,11 +268,9 @@ impl VariableValue {
             unit: CowRcStr::from("px"),
         };
         let token_type = token.serialization_type();
-        let mut css = token.to_css_string();
-        css.shrink_to_fit();
 
         VariableValue {
-            css,
+            css: token.to_css_string(),
             first_token_type: token_type,
             last_token_type: token_type,
             references: Default::default(),
@@ -582,7 +577,7 @@ impl<'a> CustomPropertiesBuilder<'a> {
         let value = if !has_references && unparsed_value.references_environment {
             let result = substitute_references_in_value(unparsed_value, &map, &self.device);
             match result {
-                Ok(new_value) => new_value,
+                Ok(new_value) => Arc::new(new_value),
                 Err(..) => {
                     // Don't touch the map, this has the same effect as
                     // making it compute to the inherited one.
@@ -662,7 +657,6 @@ impl<'a> CustomPropertiesBuilder<'a> {
             let inherited = self.inherited.as_ref().map(|m| &***m);
             substitute_all(&mut map, inherited, self.device);
         }
-        map.shrink_to_fit();
         Some(Arc::new(map))
     }
 }
@@ -853,7 +847,7 @@ fn substitute_all(
         let result = substitute_references_in_value(&value, &context.map, &context.device);
         match result {
             Ok(computed_value) => {
-                context.map.insert(name, computed_value);
+                context.map.insert(name, Arc::new(computed_value));
             },
             Err(..) => {
                 // This is invalid, reset it to the unset (inherited) value.
@@ -895,7 +889,7 @@ fn substitute_references_in_value<'i>(
     value: &'i VariableValue,
     custom_properties: &CustomPropertiesMap,
     device: &Device,
-) -> Result<Arc<ComputedValue>, ParseError<'i>> {
+) -> Result<ComputedValue, ParseError<'i>> {
     debug_assert!(!value.references.is_empty() || value.references_environment);
 
     let mut input = ParserInput::new(&value.css);
@@ -912,8 +906,7 @@ fn substitute_references_in_value<'i>(
     )?;
 
     computed_value.push_from(&input, position, last_token_type)?;
-    computed_value.css.shrink_to_fit();
-    Ok(Arc::new(computed_value))
+    Ok(computed_value)
 }
 
 /// Replace `var()` functions in an arbitrary bit of input.
diff --git a/third_party/rust/autocfg-0.1.6/.cargo-checksum.json b/third_party/rust/autocfg-0.1.6/.cargo-checksum.json deleted file mode 100644 index e7bd53b6e17f..000000000000 --- a/third_party/rust/autocfg-0.1.6/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.lock":"461e5e87b13d7faf25813b08b5003060c39d8af0953f30d5b80ae0926c888022","Cargo.toml":"1eded5c9954b3bb92bb2c7403e026198e66a2a42199db06fc9cafddc8d1fd677","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"27995d58ad5c1145c1a8cd86244ce844886958a35eb2b78c6b772748669999ac","README.md":"ba9a1621483e0b9f017f07c282d00d5cf3a2d8660cca6df6b14941319d748953","examples/integers.rs":"589ff4271566dfa322becddf3e2c7b592e6e0bc97b02892ce75619b7e452e930","examples/paths.rs":"1b30e466b824ce8df7ad0a55334424131d9d2573d6cf9f7d5d50c09c8901d526","examples/traits.rs":"cbee6a3e1f7db60b02ae25b714926517144a77cb492021f492774cf0e1865a9e","examples/versions.rs":"38535e6d9f5bfae0de474a3db79a40e8f5da8ba9334c5ff4c363de9bc99d4d12","src/error.rs":"12de7dafea4a35d1dc2f0fa79bfa038386bbbea72bf083979f4ddf227999eeda","src/lib.rs":"411d8dbc48ab0f67cb10243f1e16b235407818c96556c838182e4004da995dff","src/tests.rs":"0b1353344e832553d328c47f1639ced877b5dff70fd2024d84130bd1c33eee07","src/version.rs":"175727d5f02f2fe2271ddc9b041db2a5b9c6fe0f95afd17c73a4d982612764a3"},"package":"b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"} \ No newline at end of file diff --git a/third_party/rust/autocfg-0.1.6/Cargo.lock b/third_party/rust/autocfg-0.1.6/Cargo.lock deleted file mode 100644 index 4f899ca29289..000000000000 --- a/third_party/rust/autocfg-0.1.6/Cargo.lock +++ /dev/null @@ -1,6 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "autocfg" -version = "0.1.6" - diff --git a/third_party/rust/autocfg-0.1.6/Cargo.toml b/third_party/rust/autocfg-0.1.6/Cargo.toml deleted file mode 100644 index 4453db3d9817..000000000000 --- a/third_party/rust/autocfg-0.1.6/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "autocfg" -version = "0.1.6" -authors = ["Josh Stone "] -description = "Automatic cfg for Rust compiler features" -readme = "README.md" -keywords = ["rustc", "build", "autoconf"] -categories = ["development-tools::build-utils"] -license = "Apache-2.0/MIT" -repository = "https://github.com/cuviper/autocfg" - -[dependencies] diff --git a/third_party/rust/autocfg-0.1.6/LICENSE-APACHE b/third_party/rust/autocfg-0.1.6/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e80..000000000000 --- a/third_party/rust/autocfg-0.1.6/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/third_party/rust/autocfg-0.1.6/LICENSE-MIT b/third_party/rust/autocfg-0.1.6/LICENSE-MIT deleted file mode 100644 index 44fbc4d8b90d..000000000000 --- a/third_party/rust/autocfg-0.1.6/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2018 Josh Stone - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/third_party/rust/autocfg-0.1.6/README.md b/third_party/rust/autocfg-0.1.6/README.md deleted file mode 100644 index 40004a88e805..000000000000 --- a/third_party/rust/autocfg-0.1.6/README.md +++ /dev/null @@ -1,81 +0,0 @@ -autocfg -======= - -[![autocfg crate](https://img.shields.io/crates/v/autocfg.svg)](https://crates.io/crates/autocfg) -[![autocfg documentation](https://docs.rs/autocfg/badge.svg)](https://docs.rs/autocfg) -![minimum rustc 1.0](https://img.shields.io/badge/rustc-1.0+-red.svg) -[![Travis Status](https://travis-ci.org/cuviper/autocfg.svg?branch=master)](https://travis-ci.org/cuviper/autocfg) - -A Rust library for build scripts to automatically configure code based on -compiler support. Code snippets are dynamically tested to see if the `rustc` -will accept them, rather than hard-coding specific version support. - - -## Usage - -Add this to your `Cargo.toml`: - -```toml -[build-dependencies] -autocfg = "0.1" -``` - -Then use it in your `build.rs` script to detect compiler features. For -example, to test for 128-bit integer support, it might look like: - -```rust -extern crate autocfg; - -fn main() { - let ac = autocfg::new(); - ac.emit_has_type("i128"); - - // (optional) We don't need to rerun for anything external. - autocfg::rerun_path(file!()); -} -``` - -If the type test succeeds, this will write a `cargo:rustc-cfg=has_i128` line -for Cargo, which translates to Rust arguments `--cfg has_i128`. Then in the -rest of your Rust code, you can add `#[cfg(has_i128)]` conditions on code that -should only be used when the compiler supports it. - - -## Release Notes - -- 0.1.6 (2019-08-19) - - Add `probe`/`emit_sysroot_crate`, by @leo60228 - -- 0.1.5 (2019-07-16) - - Mask some warnings from newer rustc. - -- 0.1.4 (2019-05-22) - - Relax `std`/`no_std` probing to a warning instead of an error. - - Improve `rustc` bootstrap compatibility. - -- 0.1.3 (2019-05-21) - - Auto-detects if `#![no_std]` is needed for the `$TARGET` - -- 0.1.2 (2019-01-16) - - Add `rerun_env(ENV)` to print `cargo:rerun-if-env-changed=ENV` - - Add `rerun_path(PATH)` to print `cargo:rerun-if-changed=PATH` - - -## Minimum Rust version policy - -This crate's minimum supported `rustc` version is `1.0.0`. 
Compatibility is -its entire reason for existence, so this crate will be extremely conservative -about raising this requirement. If this is ever deemed necessary, it will be -treated as a major breaking change for semver purposes. - - -## License - -This project is licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or - http://opensource.org/licenses/MIT) - -at your option. diff --git a/third_party/rust/autocfg-0.1.6/examples/integers.rs b/third_party/rust/autocfg-0.1.6/examples/integers.rs deleted file mode 100644 index 23d4cba6ee3e..000000000000 --- a/third_party/rust/autocfg-0.1.6/examples/integers.rs +++ /dev/null @@ -1,9 +0,0 @@ -extern crate autocfg; - -fn main() { - // Normally, cargo will set `OUT_DIR` for build scripts. - let ac = autocfg::AutoCfg::with_dir("target").unwrap(); - for i in 3..8 { - ac.emit_has_type(&format!("i{}", 1 << i)); - } -} diff --git a/third_party/rust/autocfg-0.1.6/examples/paths.rs b/third_party/rust/autocfg-0.1.6/examples/paths.rs deleted file mode 100644 index b7a6ca7a25f7..000000000000 --- a/third_party/rust/autocfg-0.1.6/examples/paths.rs +++ /dev/null @@ -1,22 +0,0 @@ -extern crate autocfg; - -fn main() { - // Normally, cargo will set `OUT_DIR` for build scripts. - let ac = autocfg::AutoCfg::with_dir("target").unwrap(); - - // since ancient times... - ac.emit_has_path("std::vec::Vec"); - ac.emit_path_cfg("std::vec::Vec", "has_vec"); - - // rustc 1.10.0 - ac.emit_has_path("std::panic::PanicInfo"); - ac.emit_path_cfg("std::panic::PanicInfo", "has_panic_info"); - - // rustc 1.20.0 - ac.emit_has_path("std::mem::ManuallyDrop"); - ac.emit_path_cfg("std::mem::ManuallyDrop", "has_manually_drop"); - - // rustc 1.25.0 - ac.emit_has_path("std::ptr::NonNull"); - ac.emit_path_cfg("std::ptr::NonNull", "has_non_null"); -} diff --git a/third_party/rust/autocfg-0.1.6/examples/traits.rs b/third_party/rust/autocfg-0.1.6/examples/traits.rs deleted file mode 100644 index c1ca00385cd4..000000000000 --- a/third_party/rust/autocfg-0.1.6/examples/traits.rs +++ /dev/null @@ -1,26 +0,0 @@ -extern crate autocfg; - -fn main() { - // Normally, cargo will set `OUT_DIR` for build scripts. - let ac = autocfg::AutoCfg::with_dir("target").unwrap(); - - // since ancient times... - ac.emit_has_trait("std::ops::Add"); - ac.emit_trait_cfg("std::ops::Add", "has_ops"); - - // trait parameters have to be provided - ac.emit_has_trait("std::borrow::Borrow"); - ac.emit_trait_cfg("std::borrow::Borrow", "has_borrow"); - - // rustc 1.8.0 - ac.emit_has_trait("std::ops::AddAssign"); - ac.emit_trait_cfg("std::ops::AddAssign", "has_assign_ops"); - - // rustc 1.12.0 - ac.emit_has_trait("std::iter::Sum"); - ac.emit_trait_cfg("std::iter::Sum", "has_sum"); - - // rustc 1.28.0 - ac.emit_has_trait("std::alloc::GlobalAlloc"); - ac.emit_trait_cfg("std::alloc::GlobalAlloc", "has_global_alloc"); -} diff --git a/third_party/rust/autocfg-0.1.6/examples/versions.rs b/third_party/rust/autocfg-0.1.6/examples/versions.rs deleted file mode 100644 index 992919b7c642..000000000000 --- a/third_party/rust/autocfg-0.1.6/examples/versions.rs +++ /dev/null @@ -1,9 +0,0 @@ -extern crate autocfg; - -fn main() { - // Normally, cargo will set `OUT_DIR` for build scripts. 
- let ac = autocfg::AutoCfg::with_dir("target").unwrap(); - for i in 0..100 { - ac.emit_rustc_version(1, i); - } -} diff --git a/third_party/rust/autocfg-0.1.6/src/error.rs b/third_party/rust/autocfg-0.1.6/src/error.rs deleted file mode 100644 index 4624835451b0..000000000000 --- a/third_party/rust/autocfg-0.1.6/src/error.rs +++ /dev/null @@ -1,69 +0,0 @@ -use std::error; -use std::fmt; -use std::io; -use std::num; -use std::str; - -/// A common error type for the `autocfg` crate. -#[derive(Debug)] -pub struct Error { - kind: ErrorKind, -} - -impl error::Error for Error { - fn description(&self) -> &str { - "AutoCfg error" - } - - fn cause(&self) -> Option<&error::Error> { - match self.kind { - ErrorKind::Io(ref e) => Some(e), - ErrorKind::Num(ref e) => Some(e), - ErrorKind::Utf8(ref e) => Some(e), - ErrorKind::Other(_) => None, - } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match self.kind { - ErrorKind::Io(ref e) => e.fmt(f), - ErrorKind::Num(ref e) => e.fmt(f), - ErrorKind::Utf8(ref e) => e.fmt(f), - ErrorKind::Other(s) => s.fmt(f), - } - } -} - -#[derive(Debug)] -enum ErrorKind { - Io(io::Error), - Num(num::ParseIntError), - Utf8(str::Utf8Error), - Other(&'static str), -} - -pub fn from_io(e: io::Error) -> Error { - Error { - kind: ErrorKind::Io(e), - } -} - -pub fn from_num(e: num::ParseIntError) -> Error { - Error { - kind: ErrorKind::Num(e), - } -} - -pub fn from_utf8(e: str::Utf8Error) -> Error { - Error { - kind: ErrorKind::Utf8(e), - } -} - -pub fn from_str(s: &'static str) -> Error { - Error { - kind: ErrorKind::Other(s), - } -} diff --git a/third_party/rust/autocfg-0.1.6/src/lib.rs b/third_party/rust/autocfg-0.1.6/src/lib.rs deleted file mode 100644 index 96cceedbfb64..000000000000 --- a/third_party/rust/autocfg-0.1.6/src/lib.rs +++ /dev/null @@ -1,328 +0,0 @@ -//! A Rust library for build scripts to automatically configure code based on -//! compiler support. Code snippets are dynamically tested to see if the `rustc` -//! will accept them, rather than hard-coding specific version support. -//! -//! -//! ## Usage -//! -//! Add this to your `Cargo.toml`: -//! -//! ```toml -//! [build-dependencies] -//! autocfg = "0.1" -//! ``` -//! -//! Then use it in your `build.rs` script to detect compiler features. For -//! example, to test for 128-bit integer support, it might look like: -//! -//! ```rust -//! extern crate autocfg; -//! -//! fn main() { -//! # // Normally, cargo will set `OUT_DIR` for build scripts. -//! # std::env::set_var("OUT_DIR", "target"); -//! let ac = autocfg::new(); -//! ac.emit_has_type("i128"); -//! -//! // (optional) We don't need to rerun for anything external. -//! autocfg::rerun_path(file!()); -//! } -//! ``` -//! -//! If the type test succeeds, this will write a `cargo:rustc-cfg=has_i128` line -//! for Cargo, which translates to Rust arguments `--cfg has_i128`. Then in the -//! rest of your Rust code, you can add `#[cfg(has_i128)]` conditions on code that -//! should only be used when the compiler supports it. 
- -#![deny(missing_debug_implementations)] -#![deny(missing_docs)] -// allow future warnings that can't be fixed while keeping 1.0 compatibility -#![allow(unknown_lints)] -#![allow(bare_trait_objects)] -#![allow(ellipsis_inclusive_range_patterns)] - -use std::env; -use std::ffi::OsString; -use std::fs; -use std::io::{stderr, Write}; -use std::path::PathBuf; -use std::process::{Command, Stdio}; -#[allow(deprecated)] -use std::sync::atomic::ATOMIC_USIZE_INIT; -use std::sync::atomic::{AtomicUsize, Ordering}; - -mod error; -pub use error::Error; - -mod version; -use version::Version; - -#[cfg(test)] -mod tests; - -/// Helper to detect compiler features for `cfg` output in build scripts. -#[derive(Clone, Debug)] -pub struct AutoCfg { - out_dir: PathBuf, - rustc: PathBuf, - rustc_version: Version, - target: Option, - no_std: bool, -} - -/// Writes a config flag for rustc on standard out. -/// -/// This looks like: `cargo:rustc-cfg=CFG` -/// -/// Cargo will use this in arguments to rustc, like `--cfg CFG`. -pub fn emit(cfg: &str) { - println!("cargo:rustc-cfg={}", cfg); -} - -/// Writes a line telling Cargo to rerun the build script if `path` changes. -/// -/// This looks like: `cargo:rerun-if-changed=PATH` -/// -/// This requires at least cargo 0.7.0, corresponding to rustc 1.6.0. Earlier -/// versions of cargo will simply ignore the directive. -pub fn rerun_path(path: &str) { - println!("cargo:rerun-if-changed={}", path); -} - -/// Writes a line telling Cargo to rerun the build script if the environment -/// variable `var` changes. -/// -/// This looks like: `cargo:rerun-if-env-changed=VAR` -/// -/// This requires at least cargo 0.21.0, corresponding to rustc 1.20.0. Earlier -/// versions of cargo will simply ignore the directive. -pub fn rerun_env(var: &str) { - println!("cargo:rerun-if-env-changed={}", var); -} - -/// Create a new `AutoCfg` instance. -/// -/// # Panics -/// -/// Panics if `AutoCfg::new()` returns an error. -pub fn new() -> AutoCfg { - AutoCfg::new().unwrap() -} - -impl AutoCfg { - /// Create a new `AutoCfg` instance. - /// - /// # Common errors - /// - /// - `rustc` can't be executed, from `RUSTC` or in the `PATH`. - /// - The version output from `rustc` can't be parsed. - /// - `OUT_DIR` is not set in the environment, or is not a writable directory. - /// - pub fn new() -> Result { - match env::var_os("OUT_DIR") { - Some(d) => Self::with_dir(d), - None => Err(error::from_str("no OUT_DIR specified!")), - } - } - - /// Create a new `AutoCfg` instance with the specified output directory. - /// - /// # Common errors - /// - /// - `rustc` can't be executed, from `RUSTC` or in the `PATH`. - /// - The version output from `rustc` can't be parsed. - /// - `dir` is not a writable directory. - /// - pub fn with_dir>(dir: T) -> Result { - let rustc = env::var_os("RUSTC").unwrap_or_else(|| "rustc".into()); - let rustc: PathBuf = rustc.into(); - let rustc_version = try!(Version::from_rustc(&rustc)); - - // Sanity check the output directory - let dir = dir.into(); - let meta = try!(fs::metadata(&dir).map_err(error::from_io)); - if !meta.is_dir() || meta.permissions().readonly() { - return Err(error::from_str("output path is not a writable directory")); - } - - let mut ac = AutoCfg { - out_dir: dir, - rustc: rustc, - rustc_version: rustc_version, - target: env::var_os("TARGET"), - no_std: false, - }; - - // Sanity check with and without `std`. - if !ac.probe("").unwrap_or(false) { - ac.no_std = true; - if !ac.probe("").unwrap_or(false) { - // Neither worked, so assume nothing... 
- ac.no_std = false; - let warning = b"warning: autocfg could not probe for `std`\n"; - stderr().write_all(warning).ok(); - } - } - Ok(ac) - } - - /// Test whether the current `rustc` reports a version greater than - /// or equal to "`major`.`minor`". - pub fn probe_rustc_version(&self, major: usize, minor: usize) -> bool { - self.rustc_version >= Version::new(major, minor, 0) - } - - /// Sets a `cfg` value of the form `rustc_major_minor`, like `rustc_1_29`, - /// if the current `rustc` is at least that version. - pub fn emit_rustc_version(&self, major: usize, minor: usize) { - if self.probe_rustc_version(major, minor) { - emit(&format!("rustc_{}_{}", major, minor)); - } - } - - fn probe>(&self, code: T) -> Result { - #[allow(deprecated)] - static ID: AtomicUsize = ATOMIC_USIZE_INIT; - - let id = ID.fetch_add(1, Ordering::Relaxed); - let mut command = Command::new(&self.rustc); - command - .arg("--crate-name") - .arg(format!("probe{}", id)) - .arg("--crate-type=lib") - .arg("--out-dir") - .arg(&self.out_dir) - .arg("--emit=llvm-ir"); - - if let Some(target) = self.target.as_ref() { - command.arg("--target").arg(target); - } - - command.arg("-").stdin(Stdio::piped()); - let mut child = try!(command.spawn().map_err(error::from_io)); - let mut stdin = child.stdin.take().expect("rustc stdin"); - - if self.no_std { - try!(stdin.write_all(b"#![no_std]\n").map_err(error::from_io)); - } - try!(stdin.write_all(code.as_ref()).map_err(error::from_io)); - drop(stdin); - - let status = try!(child.wait().map_err(error::from_io)); - Ok(status.success()) - } - - /// Tests whether the given sysroot crate can be used. - /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// extern crate CRATE as probe; - /// ``` - pub fn probe_sysroot_crate(&self, name: &str) -> bool { - self.probe(format!("extern crate {} as probe;", name)) // `as _` wasn't stabilized until Rust 1.33 - .unwrap_or(false) - } - - /// Emits a config value `has_CRATE` if `probe_sysroot_crate` returns true. - pub fn emit_sysroot_crate(&self, name: &str) { - if self.probe_sysroot_crate(name) { - emit(&format!("has_{}", mangle(name))); - } - } - - /// Tests whether the given path can be used. - /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// pub use PATH; - /// ``` - pub fn probe_path(&self, path: &str) -> bool { - self.probe(format!("pub use {};", path)).unwrap_or(false) - } - - /// Emits a config value `has_PATH` if `probe_path` returns true. - /// - /// Any non-identifier characters in the `path` will be replaced with - /// `_` in the generated config value. - pub fn emit_has_path(&self, path: &str) { - if self.probe_path(path) { - emit(&format!("has_{}", mangle(path))); - } - } - - /// Emits the given `cfg` value if `probe_path` returns true. - pub fn emit_path_cfg(&self, path: &str, cfg: &str) { - if self.probe_path(path) { - emit(cfg); - } - } - - /// Tests whether the given trait can be used. - /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// pub trait Probe: TRAIT + Sized {} - /// ``` - pub fn probe_trait(&self, name: &str) -> bool { - self.probe(format!("pub trait Probe: {} + Sized {{}}", name)) - .unwrap_or(false) - } - - /// Emits a config value `has_TRAIT` if `probe_trait` returns true. - /// - /// Any non-identifier characters in the trait `name` will be replaced with - /// `_` in the generated config value. 
- pub fn emit_has_trait(&self, name: &str) { - if self.probe_trait(name) { - emit(&format!("has_{}", mangle(name))); - } - } - - /// Emits the given `cfg` value if `probe_trait` returns true. - pub fn emit_trait_cfg(&self, name: &str, cfg: &str) { - if self.probe_trait(name) { - emit(cfg); - } - } - - /// Tests whether the given type can be used. - /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// pub type Probe = TYPE; - /// ``` - pub fn probe_type(&self, name: &str) -> bool { - self.probe(format!("pub type Probe = {};", name)) - .unwrap_or(false) - } - - /// Emits a config value `has_TYPE` if `probe_type` returns true. - /// - /// Any non-identifier characters in the type `name` will be replaced with - /// `_` in the generated config value. - pub fn emit_has_type(&self, name: &str) { - if self.probe_type(name) { - emit(&format!("has_{}", mangle(name))); - } - } - - /// Emits the given `cfg` value if `probe_type` returns true. - pub fn emit_type_cfg(&self, name: &str, cfg: &str) { - if self.probe_type(name) { - emit(cfg); - } - } -} - -fn mangle(s: &str) -> String { - s.chars() - .map(|c| match c { - 'A'...'Z' | 'a'...'z' | '0'...'9' => c, - _ => '_', - }) - .collect() -} diff --git a/third_party/rust/autocfg-0.1.6/src/tests.rs b/third_party/rust/autocfg-0.1.6/src/tests.rs deleted file mode 100644 index 304d989bcb40..000000000000 --- a/third_party/rust/autocfg-0.1.6/src/tests.rs +++ /dev/null @@ -1,99 +0,0 @@ -use super::AutoCfg; - -impl AutoCfg { - fn core_std(&self, path: &str) -> String { - let krate = if self.no_std { "core" } else { "std" }; - format!("{}::{}", krate, path) - } -} - -#[test] -fn autocfg_version() { - let ac = AutoCfg::with_dir("target").unwrap(); - println!("version: {:?}", ac.rustc_version); - assert!(ac.probe_rustc_version(1, 0)); -} - -#[test] -fn version_cmp() { - use super::version::Version; - let v123 = Version::new(1, 2, 3); - - assert!(Version::new(1, 0, 0) < v123); - assert!(Version::new(1, 2, 2) < v123); - assert!(Version::new(1, 2, 3) == v123); - assert!(Version::new(1, 2, 4) > v123); - assert!(Version::new(1, 10, 0) > v123); - assert!(Version::new(2, 0, 0) > v123); -} - -#[test] -fn probe_add() { - let ac = AutoCfg::with_dir("target").unwrap(); - let add = ac.core_std("ops::Add"); - let add_rhs = ac.core_std("ops::Add"); - let add_rhs_output = ac.core_std("ops::Add"); - assert!(ac.probe_path(&add)); - assert!(ac.probe_trait(&add)); - assert!(ac.probe_trait(&add_rhs)); - assert!(ac.probe_trait(&add_rhs_output)); - assert!(ac.probe_type(&add_rhs_output)); -} - -#[test] -fn probe_as_ref() { - let ac = AutoCfg::with_dir("target").unwrap(); - let as_ref = ac.core_std("convert::AsRef"); - let as_ref_str = ac.core_std("convert::AsRef"); - assert!(ac.probe_path(&as_ref)); - assert!(ac.probe_trait(&as_ref_str)); - assert!(ac.probe_type(&as_ref_str)); -} - -#[test] -fn probe_i128() { - let ac = AutoCfg::with_dir("target").unwrap(); - let missing = !ac.probe_rustc_version(1, 26); - let i128_path = ac.core_std("i128"); - assert!(missing ^ ac.probe_path(&i128_path)); - assert!(missing ^ ac.probe_type("i128")); -} - -#[test] -fn probe_sum() { - let ac = AutoCfg::with_dir("target").unwrap(); - let missing = !ac.probe_rustc_version(1, 12); - let sum = ac.core_std("iter::Sum"); - let sum_i32 = ac.core_std("iter::Sum"); - assert!(missing ^ ac.probe_path(&sum)); - assert!(missing ^ ac.probe_trait(&sum)); - assert!(missing ^ ac.probe_trait(&sum_i32)); - assert!(missing ^ ac.probe_type(&sum_i32)); -} - -#[test] -fn probe_std() 
{ - let ac = AutoCfg::with_dir("target").unwrap(); - assert_eq!(ac.probe_sysroot_crate("std"), !ac.no_std); -} - -#[test] -fn probe_alloc() { - let ac = AutoCfg::with_dir("target").unwrap(); - let missing = !ac.probe_rustc_version(1, 36); - assert!(missing ^ ac.probe_sysroot_crate("alloc")); -} - -#[test] -fn probe_bad_sysroot_crate() { - let ac = AutoCfg::with_dir("target").unwrap(); - assert!(!ac.probe_sysroot_crate("doesnt_exist")); -} - -#[test] -fn probe_no_std() { - let ac = AutoCfg::with_dir("target").unwrap(); - assert!(ac.probe_type("i32")); - assert!(ac.probe_type("[i32]")); - assert_eq!(ac.probe_type("Vec"), !ac.no_std); -} diff --git a/third_party/rust/autocfg-0.1.6/src/version.rs b/third_party/rust/autocfg-0.1.6/src/version.rs deleted file mode 100644 index 378c21e61e0c..000000000000 --- a/third_party/rust/autocfg-0.1.6/src/version.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::path::Path; -use std::process::Command; -use std::str; - -use super::{error, Error}; - -/// A version structure for making relative comparisons. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct Version { - major: usize, - minor: usize, - patch: usize, -} - -impl Version { - /// Creates a `Version` instance for a specific `major.minor.patch` version. - pub fn new(major: usize, minor: usize, patch: usize) -> Self { - Version { - major: major, - minor: minor, - patch: patch, - } - } - - pub fn from_rustc(rustc: &Path) -> Result { - // Get rustc's verbose version - let output = try!(Command::new(rustc) - .args(&["--version", "--verbose"]) - .output() - .map_err(error::from_io)); - if !output.status.success() { - return Err(error::from_str("could not execute rustc")); - } - let output = try!(str::from_utf8(&output.stdout).map_err(error::from_utf8)); - - // Find the release line in the verbose version output. - let release = match output.lines().find(|line| line.starts_with("release: ")) { - Some(line) => &line["release: ".len()..], - None => return Err(error::from_str("could not find rustc release")), - }; - - // Strip off any extra channel info, e.g. "-beta.N", "-nightly" - let version = match release.find('-') { - Some(i) => &release[..i], - None => release, - }; - - // Split the version into semver components. 
- let mut iter = version.splitn(3, '.'); - let major = try!(iter.next().ok_or(error::from_str("missing major version"))); - let minor = try!(iter.next().ok_or(error::from_str("missing minor version"))); - let patch = try!(iter.next().ok_or(error::from_str("missing patch version"))); - - Ok(Version::new( - try!(major.parse().map_err(error::from_num)), - try!(minor.parse().map_err(error::from_num)), - try!(patch.parse().map_err(error::from_num)), - )) - } -} diff --git a/third_party/rust/autocfg/.cargo-checksum.json b/third_party/rust/autocfg/.cargo-checksum.json index c8e262a0132f..e7bd53b6e17f 100644 --- a/third_party/rust/autocfg/.cargo-checksum.json +++ b/third_party/rust/autocfg/.cargo-checksum.json @@ -1 +1 @@ -{"files":{"Cargo.lock":"d658acfaa27a2b30de98cf004d4d3f4ec0f1757b136610289cbbd1c847ae2e6c","Cargo.toml":"e2176be78c2989884eba4a20a58a672277b3a8a99a72b0ba7347f48eb827ae0e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"27995d58ad5c1145c1a8cd86244ce844886958a35eb2b78c6b772748669999ac","README.md":"2d8b6f07819ad7adfab1d153216bfdcde154ffd4a870d98794125c910b0f4593","examples/integers.rs":"589ff4271566dfa322becddf3e2c7b592e6e0bc97b02892ce75619b7e452e930","examples/paths.rs":"1b30e466b824ce8df7ad0a55334424131d9d2573d6cf9f7d5d50c09c8901d526","examples/traits.rs":"cbee6a3e1f7db60b02ae25b714926517144a77cb492021f492774cf0e1865a9e","examples/versions.rs":"38535e6d9f5bfae0de474a3db79a40e8f5da8ba9334c5ff4c363de9bc99d4d12","src/error.rs":"12de7dafea4a35d1dc2f0fa79bfa038386bbbea72bf083979f4ddf227999eeda","src/lib.rs":"9b450d90730624807979045ea7ff48374355314cd894345e1b9651485ba1b2ff","src/tests.rs":"a902fbd42b0f0b81a2830f2368fab733041b02fcb902c8e2520d07b3bff10713","src/version.rs":"175727d5f02f2fe2271ddc9b041db2a5b9c6fe0f95afd17c73a4d982612764a3","tests/rustflags.rs":"441fb0c6606e243c31d3817a5ae2240b65fcae0ea8ab583f80f8f6d6c267e614"},"package":"cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"} \ No newline at end of file +{"files":{"Cargo.lock":"461e5e87b13d7faf25813b08b5003060c39d8af0953f30d5b80ae0926c888022","Cargo.toml":"1eded5c9954b3bb92bb2c7403e026198e66a2a42199db06fc9cafddc8d1fd677","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"27995d58ad5c1145c1a8cd86244ce844886958a35eb2b78c6b772748669999ac","README.md":"ba9a1621483e0b9f017f07c282d00d5cf3a2d8660cca6df6b14941319d748953","examples/integers.rs":"589ff4271566dfa322becddf3e2c7b592e6e0bc97b02892ce75619b7e452e930","examples/paths.rs":"1b30e466b824ce8df7ad0a55334424131d9d2573d6cf9f7d5d50c09c8901d526","examples/traits.rs":"cbee6a3e1f7db60b02ae25b714926517144a77cb492021f492774cf0e1865a9e","examples/versions.rs":"38535e6d9f5bfae0de474a3db79a40e8f5da8ba9334c5ff4c363de9bc99d4d12","src/error.rs":"12de7dafea4a35d1dc2f0fa79bfa038386bbbea72bf083979f4ddf227999eeda","src/lib.rs":"411d8dbc48ab0f67cb10243f1e16b235407818c96556c838182e4004da995dff","src/tests.rs":"0b1353344e832553d328c47f1639ced877b5dff70fd2024d84130bd1c33eee07","src/version.rs":"175727d5f02f2fe2271ddc9b041db2a5b9c6fe0f95afd17c73a4d982612764a3"},"package":"b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"} \ No newline at end of file diff --git a/third_party/rust/autocfg/Cargo.lock b/third_party/rust/autocfg/Cargo.lock index a2d13436985b..4f899ca29289 100644 --- a/third_party/rust/autocfg/Cargo.lock +++ b/third_party/rust/autocfg/Cargo.lock @@ -2,5 +2,5 @@ # It is not intended for manual editing. 
[[package]] name = "autocfg" -version = "1.0.1" +version = "0.1.6" diff --git a/third_party/rust/autocfg/Cargo.toml b/third_party/rust/autocfg/Cargo.toml index 7adf7956c978..4453db3d9817 100644 --- a/third_party/rust/autocfg/Cargo.toml +++ b/third_party/rust/autocfg/Cargo.toml @@ -12,14 +12,13 @@ [package] name = "autocfg" -version = "1.0.1" +version = "0.1.6" authors = ["Josh Stone "] -exclude = ["/.github/**", "/bors.toml"] description = "Automatic cfg for Rust compiler features" readme = "README.md" keywords = ["rustc", "build", "autoconf"] categories = ["development-tools::build-utils"] -license = "Apache-2.0 OR MIT" +license = "Apache-2.0/MIT" repository = "https://github.com/cuviper/autocfg" [dependencies] diff --git a/third_party/rust/autocfg/README.md b/third_party/rust/autocfg/README.md index 3788161d7ff3..40004a88e805 100644 --- a/third_party/rust/autocfg/README.md +++ b/third_party/rust/autocfg/README.md @@ -4,7 +4,7 @@ autocfg [![autocfg crate](https://img.shields.io/crates/v/autocfg.svg)](https://crates.io/crates/autocfg) [![autocfg documentation](https://docs.rs/autocfg/badge.svg)](https://docs.rs/autocfg) ![minimum rustc 1.0](https://img.shields.io/badge/rustc-1.0+-red.svg) -![build status](https://github.com/cuviper/autocfg/workflows/master/badge.svg) +[![Travis Status](https://travis-ci.org/cuviper/autocfg.svg?branch=master)](https://travis-ci.org/cuviper/autocfg) A Rust library for build scripts to automatically configure code based on compiler support. Code snippets are dynamically tested to see if the `rustc` @@ -17,7 +17,7 @@ Add this to your `Cargo.toml`: ```toml [build-dependencies] -autocfg = "1" +autocfg = "0.1" ``` Then use it in your `build.rs` script to detect compiler features. For @@ -31,7 +31,7 @@ fn main() { ac.emit_has_type("i128"); // (optional) We don't need to rerun for anything external. - autocfg::rerun_path("build.rs"); + autocfg::rerun_path(file!()); } ``` @@ -43,19 +43,8 @@ should only be used when the compiler supports it. ## Release Notes -- 1.0.1 (2020-08-20) - - Apply `RUSTFLAGS` for more `--target` scenarios, by @adamreichold. - -- 1.0.0 (2020-01-08) - - 🎉 Release 1.0! 🎉 (no breaking changes) - - Add `probe_expression` and `emit_expression_cfg` to test arbitrary expressions. - - Add `probe_constant` and `emit_constant_cfg` to test arbitrary constant expressions. - -- 0.1.7 (2019-10-20) - - Apply `RUSTFLAGS` when probing `$TARGET != $HOST`, mainly for sysroot, by @roblabla. - - 0.1.6 (2019-08-19) - - Add `probe`/`emit_sysroot_crate`, by @leo60228. + - Add `probe`/`emit_sysroot_crate`, by @leo60228 - 0.1.5 (2019-07-16) - Mask some warnings from newer rustc. @@ -65,11 +54,11 @@ should only be used when the compiler supports it. - Improve `rustc` bootstrap compatibility. - 0.1.3 (2019-05-21) - - Auto-detects if `#![no_std]` is needed for the `$TARGET`. + - Auto-detects if `#![no_std]` is needed for the `$TARGET` - 0.1.2 (2019-01-16) - - Add `rerun_env(ENV)` to print `cargo:rerun-if-env-changed=ENV`. - - Add `rerun_path(PATH)` to print `cargo:rerun-if-changed=PATH`. + - Add `rerun_env(ENV)` to print `cargo:rerun-if-env-changed=ENV` + - Add `rerun_path(PATH)` to print `cargo:rerun-if-changed=PATH` ## Minimum Rust version policy diff --git a/third_party/rust/autocfg/src/lib.rs b/third_party/rust/autocfg/src/lib.rs index de50135bbdb3..96cceedbfb64 100644 --- a/third_party/rust/autocfg/src/lib.rs +++ b/third_party/rust/autocfg/src/lib.rs @@ -9,7 +9,7 @@ //! //! ```toml //! [build-dependencies] -//! autocfg = "1" +//! autocfg = "0.1" //! ``` //! //! 
Then use it in your `build.rs` script to detect compiler features. For @@ -25,7 +25,7 @@ //! ac.emit_has_type("i128"); //! //! // (optional) We don't need to rerun for anything external. -//! autocfg::rerun_path("build.rs"); +//! autocfg::rerun_path(file!()); //! } //! ``` //! @@ -33,14 +33,6 @@ //! for Cargo, which translates to Rust arguments `--cfg has_i128`. Then in the //! rest of your Rust code, you can add `#[cfg(has_i128)]` conditions on code that //! should only be used when the compiler supports it. -//! -//! ## Caution -//! -//! Many of the probing methods of `AutoCfg` document the particular template they -//! use, **subject to change**. The inputs are not validated to make sure they are -//! semantically correct for their expected use, so it's _possible_ to escape and -//! inject something unintended. However, such abuse is unsupported and will not -//! be considered when making changes to the templates. #![deny(missing_debug_implementations)] #![deny(missing_docs)] @@ -49,16 +41,6 @@ #![allow(bare_trait_objects)] #![allow(ellipsis_inclusive_range_patterns)] -/// Local macro to avoid `std::try!`, deprecated in Rust 1.39. -macro_rules! try { - ($result:expr) => { - match $result { - Ok(value) => value, - Err(error) => return Err(error), - } - }; -} - use std::env; use std::ffi::OsString; use std::fs; @@ -86,7 +68,6 @@ pub struct AutoCfg { rustc_version: Version, target: Option, no_std: bool, - rustflags: Option>, } /// Writes a config flag for rustc on standard out. @@ -157,8 +138,6 @@ impl AutoCfg { let rustc: PathBuf = rustc.into(); let rustc_version = try!(Version::from_rustc(&rustc)); - let target = env::var_os("TARGET"); - // Sanity check the output directory let dir = dir.into(); let meta = try!(fs::metadata(&dir).map_err(error::from_io)); @@ -166,37 +145,12 @@ impl AutoCfg { return Err(error::from_str("output path is not a writable directory")); } - // Cargo only applies RUSTFLAGS for building TARGET artifact in - // cross-compilation environment. Sadly, we don't have a way to detect - // when we're building HOST artifact in a cross-compilation environment, - // so for now we only apply RUSTFLAGS when cross-compiling an artifact. - // - // See https://github.com/cuviper/autocfg/pull/10#issuecomment-527575030. - let rustflags = if target != env::var_os("HOST") - || dir_contains_target(&target, &dir, env::var_os("CARGO_TARGET_DIR")) - { - env::var("RUSTFLAGS").ok().map(|rustflags| { - // This is meant to match how cargo handles the RUSTFLAG environment - // variable. - // See https://github.com/rust-lang/cargo/blob/69aea5b6f69add7c51cca939a79644080c0b0ba0/src/cargo/core/compiler/build_context/target_info.rs#L434-L441 - rustflags - .split(' ') - .map(str::trim) - .filter(|s| !s.is_empty()) - .map(str::to_string) - .collect::>() - }) - } else { - None - }; - let mut ac = AutoCfg { out_dir: dir, rustc: rustc, rustc_version: rustc_version, - target: target, + target: env::var_os("TARGET"), no_std: false, - rustflags: rustflags, }; // Sanity check with and without `std`. @@ -240,10 +194,6 @@ impl AutoCfg { .arg(&self.out_dir) .arg("--emit=llvm-ir"); - if let &Some(ref rustflags) = &self.rustflags { - command.args(rustflags); - } - if let Some(target) = self.target.as_ref() { command.arg("--target").arg(target); } @@ -366,44 +316,6 @@ impl AutoCfg { emit(cfg); } } - - /// Tests whether the given expression can be used. 
- /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// pub fn probe() { let _ = EXPR; } - /// ``` - pub fn probe_expression(&self, expr: &str) -> bool { - self.probe(format!("pub fn probe() {{ let _ = {}; }}", expr)) - .unwrap_or(false) - } - - /// Emits the given `cfg` value if `probe_expression` returns true. - pub fn emit_expression_cfg(&self, expr: &str, cfg: &str) { - if self.probe_expression(expr) { - emit(cfg); - } - } - - /// Tests whether the given constant expression can be used. - /// - /// The test code is subject to change, but currently looks like: - /// - /// ```ignore - /// pub const PROBE: () = ((), EXPR).0; - /// ``` - pub fn probe_constant(&self, expr: &str) -> bool { - self.probe(format!("pub const PROBE: () = ((), {}).0;", expr)) - .unwrap_or(false) - } - - /// Emits the given `cfg` value if `probe_constant` returns true. - pub fn emit_constant_cfg(&self, expr: &str, cfg: &str) { - if self.probe_constant(expr) { - emit(cfg); - } - } } fn mangle(s: &str) -> String { @@ -414,25 +326,3 @@ fn mangle(s: &str) -> String { }) .collect() } - -fn dir_contains_target( - target: &Option, - dir: &PathBuf, - cargo_target_dir: Option, -) -> bool { - target - .as_ref() - .and_then(|target| { - dir.to_str().and_then(|dir| { - let mut cargo_target_dir = cargo_target_dir - .map(PathBuf::from) - .unwrap_or_else(|| PathBuf::from("target")); - cargo_target_dir.push(target); - - cargo_target_dir - .to_str() - .map(|cargo_target_dir| dir.contains(&cargo_target_dir)) - }) - }) - .unwrap_or(false) -} diff --git a/third_party/rust/autocfg/src/tests.rs b/third_party/rust/autocfg/src/tests.rs index 4c674622b8e5..304d989bcb40 100644 --- a/third_party/rust/autocfg/src/tests.rs +++ b/third_party/rust/autocfg/src/tests.rs @@ -1,31 +1,15 @@ use super::AutoCfg; -use std::env; impl AutoCfg { fn core_std(&self, path: &str) -> String { let krate = if self.no_std { "core" } else { "std" }; format!("{}::{}", krate, path) } - - fn assert_std(&self, probe_result: bool) { - assert_eq!(!self.no_std, probe_result); - } - - fn assert_min(&self, major: usize, minor: usize, probe_result: bool) { - assert_eq!(self.probe_rustc_version(major, minor), probe_result); - } - - fn for_test() -> Result { - match env::var_os("TESTS_TARGET_DIR") { - Some(d) => Self::with_dir(d), - None => Self::with_dir("target"), - } - } } #[test] fn autocfg_version() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); println!("version: {:?}", ac.rustc_version); assert!(ac.probe_rustc_version(1, 0)); } @@ -45,125 +29,71 @@ fn version_cmp() { #[test] fn probe_add() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); let add = ac.core_std("ops::Add"); - let add_rhs = add.clone() + ""; - let add_rhs_output = add.clone() + ""; - let dyn_add_rhs_output = "dyn ".to_string() + &*add_rhs_output; + let add_rhs = ac.core_std("ops::Add"); + let add_rhs_output = ac.core_std("ops::Add"); assert!(ac.probe_path(&add)); assert!(ac.probe_trait(&add)); assert!(ac.probe_trait(&add_rhs)); assert!(ac.probe_trait(&add_rhs_output)); - ac.assert_min(1, 27, ac.probe_type(&dyn_add_rhs_output)); + assert!(ac.probe_type(&add_rhs_output)); } #[test] fn probe_as_ref() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); let as_ref = ac.core_std("convert::AsRef"); - let as_ref_str = as_ref.clone() + ""; - let dyn_as_ref_str = "dyn ".to_string() + &*as_ref_str; + let as_ref_str = 
ac.core_std("convert::AsRef"); assert!(ac.probe_path(&as_ref)); assert!(ac.probe_trait(&as_ref_str)); assert!(ac.probe_type(&as_ref_str)); - ac.assert_min(1, 27, ac.probe_type(&dyn_as_ref_str)); } #[test] fn probe_i128() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); + let missing = !ac.probe_rustc_version(1, 26); let i128_path = ac.core_std("i128"); - ac.assert_min(1, 26, ac.probe_path(&i128_path)); - ac.assert_min(1, 26, ac.probe_type("i128")); + assert!(missing ^ ac.probe_path(&i128_path)); + assert!(missing ^ ac.probe_type("i128")); } #[test] fn probe_sum() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); + let missing = !ac.probe_rustc_version(1, 12); let sum = ac.core_std("iter::Sum"); - let sum_i32 = sum.clone() + ""; - let dyn_sum_i32 = "dyn ".to_string() + &*sum_i32; - ac.assert_min(1, 12, ac.probe_path(&sum)); - ac.assert_min(1, 12, ac.probe_trait(&sum)); - ac.assert_min(1, 12, ac.probe_trait(&sum_i32)); - ac.assert_min(1, 12, ac.probe_type(&sum_i32)); - ac.assert_min(1, 27, ac.probe_type(&dyn_sum_i32)); + let sum_i32 = ac.core_std("iter::Sum"); + assert!(missing ^ ac.probe_path(&sum)); + assert!(missing ^ ac.probe_trait(&sum)); + assert!(missing ^ ac.probe_trait(&sum_i32)); + assert!(missing ^ ac.probe_type(&sum_i32)); } #[test] fn probe_std() { - let ac = AutoCfg::for_test().unwrap(); - ac.assert_std(ac.probe_sysroot_crate("std")); + let ac = AutoCfg::with_dir("target").unwrap(); + assert_eq!(ac.probe_sysroot_crate("std"), !ac.no_std); } #[test] fn probe_alloc() { - let ac = AutoCfg::for_test().unwrap(); - ac.assert_min(1, 36, ac.probe_sysroot_crate("alloc")); + let ac = AutoCfg::with_dir("target").unwrap(); + let missing = !ac.probe_rustc_version(1, 36); + assert!(missing ^ ac.probe_sysroot_crate("alloc")); } #[test] fn probe_bad_sysroot_crate() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); assert!(!ac.probe_sysroot_crate("doesnt_exist")); } #[test] fn probe_no_std() { - let ac = AutoCfg::for_test().unwrap(); + let ac = AutoCfg::with_dir("target").unwrap(); assert!(ac.probe_type("i32")); assert!(ac.probe_type("[i32]")); - ac.assert_std(ac.probe_type("Vec")); -} - -#[test] -fn probe_expression() { - let ac = AutoCfg::for_test().unwrap(); - assert!(ac.probe_expression(r#""test".trim_left()"#)); - ac.assert_min(1, 30, ac.probe_expression(r#""test".trim_start()"#)); - ac.assert_std(ac.probe_expression("[1, 2, 3].to_vec()")); -} - -#[test] -fn probe_constant() { - let ac = AutoCfg::for_test().unwrap(); - assert!(ac.probe_constant("1 + 2 + 3")); - ac.assert_min(1, 33, ac.probe_constant("{ let x = 1 + 2 + 3; x * x }")); - ac.assert_min(1, 39, ac.probe_constant(r#""test".len()"#)); -} - -#[test] -fn dir_does_not_contain_target() { - assert!(!super::dir_contains_target( - &Some("x86_64-unknown-linux-gnu".into()), - &"/project/target/debug/build/project-ea75983148559682/out".into(), - None, - )); -} - -#[test] -fn dir_does_contain_target() { - assert!(super::dir_contains_target( - &Some("x86_64-unknown-linux-gnu".into()), - &"/project/target/x86_64-unknown-linux-gnu/debug/build/project-0147aca016480b9d/out".into(), - None, - )); -} - -#[test] -fn dir_does_not_contain_target_with_custom_target_dir() { - assert!(!super::dir_contains_target( - &Some("x86_64-unknown-linux-gnu".into()), - &"/project/custom/debug/build/project-ea75983148559682/out".into(), - Some("custom".into()), - )); -} - -#[test] -fn dir_does_contain_target_with_custom_target_dir() { - 
assert!(super::dir_contains_target( - &Some("x86_64-unknown-linux-gnu".into()), - &"/project/custom/x86_64-unknown-linux-gnu/debug/build/project-0147aca016480b9d/out".into(), - Some("custom".into()), - )); + assert_eq!(ac.probe_type("Vec"), !ac.no_std); } diff --git a/third_party/rust/autocfg/tests/rustflags.rs b/third_party/rust/autocfg/tests/rustflags.rs deleted file mode 100644 index 119376c409a8..000000000000 --- a/third_party/rust/autocfg/tests/rustflags.rs +++ /dev/null @@ -1,19 +0,0 @@ -extern crate autocfg; - -use std::env; - -/// Tests that autocfg uses the RUSTFLAGS environment variable when running -/// rustc. -#[test] -fn test_with_sysroot() { - // Use the same path as this test binary. - let dir = env::current_exe().unwrap().parent().unwrap().to_path_buf(); - env::set_var("RUSTFLAGS", &format!("-L {}", dir.display())); - env::set_var("OUT_DIR", &format!("{}", dir.display())); - - // Ensure HOST != TARGET. - env::set_var("HOST", "lol"); - - let ac = autocfg::AutoCfg::new().unwrap(); - assert!(ac.probe_sysroot_crate("autocfg")); -} diff --git a/third_party/rust/indexmap/.cargo-checksum.json b/third_party/rust/indexmap/.cargo-checksum.json index 7cf443999b8a..61a4c65901a8 100644 --- a/third_party/rust/indexmap/.cargo-checksum.json +++ b/third_party/rust/indexmap/.cargo-checksum.json @@ -1 +1 @@ -{"files":{"Cargo.toml":"b2e03e7a524d10bb253021375ccc06a3d2a060d2c7c045a1a60d8bb3739b46a9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"ecc269ef87fd38a1d98e30bfac9ba964a9dbd9315c3770fed98d4d7cb5882055","README.rst":"6751ed067142c9445ffb9c201457a40bb73af76aa3c7e525fb415508abec74b1","benches/bench.rs":"dda3c7e3cd68b8f5c74dbd60189e89f0e43339cdc25dae7b1e5a9d25b1fb80a9","benches/faststring.rs":"2472b9d0d52031988cd31401357908fd850d15f1efa9f9761aa6b6779aad1889","build.rs":"558b4d0b9e9b3a44f7e1a2b69f7a7567ea721cd45cb54f4e458e850bf702f35c","src/equivalent.rs":"2e6ae24ef09a09b917f4e2b0f6288f901878e42f5080f61b1bd1afdcc90aba87","src/lib.rs":"4c4fa0d4882387d2c1681fa29a333354230de157101783bea2af6af04951d973","src/macros.rs":"cb2e9742b7e2bcadc4eb356a4ca4eb3317d1a80e293f37a3cd5a0155c5347c50","src/map.rs":"21c0c04acfb25d526da41b447b2e572d14e52778b84910514869d1cd2dce267c","src/map/core.rs":"9d9df231fbc02e3539429451c6f8cfd142c97d6e47b0c1f58672f75e3806d578","src/map/core/raw.rs":"9842320404357ec15e536240a9ab32d969e61f31508d3c485c0c76077618ef63","src/mutable_keys.rs":"99fdec7c0182902ba19524f342c220b51475bcd41c44c2cb2c3f62eacb029335","src/rayon/map.rs":"cdbe6cad8e5753aca3f07b621d2bb3b480f1ae3e10697568b8c16aeb865de65b","src/rayon/mod.rs":"bf2b0fc074f20135a6734db600b04828e88dd263b3e361606be4efe8e916eafc","src/rayon/set.rs":"789efd10d41f03fa73268bae8afdc3b6582767420218a4b28f3c83c528ef726c","src/serde.rs":"2f7ce29e63e92ddbe14d3ad0b7e83bdc9662b226565ec0aa707d5cd92cb1e263","src/set.rs":"a88c5e902e8c67d014c7c1680a7e4b9156af67d2ac58e7dcf96698b742abc88b","src/util.rs":"c415261cff9f610d7331192feba0839cb05e04d3d375a5fa2f8190a29661994e","tests/equivalent_trait.rs":"efe9393069e3cfc893d2c9c0343679979578e437fdb98a10baefeced027ba310","tests/macros_full_path.rs":"c33c86d7341581fdd08e2e6375a4afca507fa603540c54a3b9e51c4cd011cd71","tests/quick.rs":"6efe8c6dfa5bdd466b3f5c491d9dfbf57fd730301849a710abff9358968218c5","tests/tests.rs":"f6dbeeb0e2950402b0e66ac52bf74c9e4197d3c5d9c0dde64a7998a2ef74d327"},"package":"55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2"} \ No newline at end of file 
+{"files":{"Cargo.toml":"eeb5f5f88978e950820356ca434718da8ed9b40bf2745b03a400ac5a8b9fa57d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"ecc269ef87fd38a1d98e30bfac9ba964a9dbd9315c3770fed98d4d7cb5882055","README.rst":"cdcaa79d0e2a2d4980604827b6b043a464a9949fa2bfd033f65a5d6ea4a844de","benches/bench.rs":"e34466bc3c56d3f0bb2ccf37a0588dbec51bb1048729f3b5f2ef41e36900460f","benches/faststring.rs":"c490c03dc5f3d686fcb17c92d4dd79428ca9eff78f9a2c1cab60f83c48140827","src/equivalent.rs":"4d07c0ae8c8ff405fdbb45e6c891158d3fdcfedd47001e4cec090c79b5c56564","src/lib.rs":"31cde3e6244107cea11be6584dd803eaa441fb9fb762f410c85975e5a286a071","src/macros.rs":"472c9ec707495e6de60b4e67c1b25f2201bb962fa6672fae32addde2eb4df376","src/map.rs":"7530ca07f39ba3d7101c129c729af58457fa0e7f41e9ae6ed662c070d2a058a3","src/mutable_keys.rs":"2bf26fb36ad0ccd3c40b0f2cc4e5b8429e6627207f50fca07110a5011880a9dc","src/rayon/map.rs":"9fc361acad0c65bdee35b826915fc595708728382fa8995254a2adf6277d5979","src/rayon/mod.rs":"d0657d28aaf5f5df8b6904ed4da6987565d98f92f686f30fb0b0af2eac94fdac","src/rayon/set.rs":"cea5e1724bc8449b0bbacbb18c5ae60a2d41cfc5cfd496e89e1c17f77c6e7a49","src/serde.rs":"96850a3adc93ad890573de596cfe1edd70a71e0ad1a0d55963c40f12f49e63d0","src/set.rs":"311b415b5e310f32036075d03b3e9bf7783cb86e9b0747be19dc02e292953326","src/util.rs":"331f80b48387878caa01ab9cfd43927ea0c15129c6beb755d6b40abc6ada900f","tests/equivalent_trait.rs":"f48ef255e4bc6bc85ed11fd9bee4cc53759efb182e448d315f8d12af1f80b05d","tests/quick.rs":"025e9e4355c4ce76daf0366d5bde2e32bf90fe2d27831a7b7617a6d0e5974529","tests/serde.rs":"48f2a2184c819ffaa5c234ccea9c3bea1c58edf8ad9ada1476eedc179438d07d","tests/tests.rs":"c916ae9c5d08c042b7c3a0447ef3db5a1b9d37b3122fddace4235296a623725b"},"package":"a4d6d89e0948bf10c08b9ecc8ac5b83f07f857ebe2c0cbe38de15b4e4f510356"} \ No newline at end of file diff --git a/third_party/rust/indexmap/Cargo.toml b/third_party/rust/indexmap/Cargo.toml index 96876f030e34..f3b7264e09c2 100644 --- a/third_party/rust/indexmap/Cargo.toml +++ b/third_party/rust/indexmap/Cargo.toml @@ -11,15 +11,13 @@ # will likely look very different (and much more reasonable) [package] -edition = "2018" name = "indexmap" -version = "1.6.0" +version = "1.1.0" authors = ["bluss", "Josh Stone "] -build = "build.rs" description = "A hash table with consistent order and fast iteration.\n\nThe indexmap is a hash table where the iteration order of the key-value\npairs is independent of the hash values of the keys. It has the usual\nhash table functionality, it preserves insertion order except after\nremovals, and it allows lookup of its elements by either hash table key\nor numerical index. 
A corresponding hash set type is also provided.\n\nThis crate was initially published under the name ordermap, but it was renamed to\nindexmap.\n" documentation = "https://docs.rs/indexmap/" -keywords = ["hashmap", "no_std"] -categories = ["data-structures", "no-std"] +keywords = ["hashmap"] +categories = ["data-structures"] license = "Apache-2.0/MIT" repository = "https://github.com/bluss/indexmap" [package.metadata.docs.rs] @@ -27,17 +25,11 @@ features = ["serde-1", "rayon"] [package.metadata.release] no-dev-version = true -tag-name = "{{version}}" [profile.bench] debug = true [lib] bench = false -[dependencies.hashbrown] -version = "0.9.0" -features = ["raw"] -default-features = false - [dependencies.rayon] version = "1.0" optional = true @@ -45,31 +37,26 @@ optional = true [dependencies.serde] version = "1.0" optional = true -default-features = false [dev-dependencies.fnv] version = "1.0" -[dev-dependencies.fxhash] -version = "0.2.1" - [dev-dependencies.itertools] -version = "0.9" +version = "0.8" [dev-dependencies.lazy_static] version = "1.3" [dev-dependencies.quickcheck] -version = "0.9" +version = "0.8" default-features = false [dev-dependencies.rand] -version = "0.7" -features = ["small_rng"] -[build-dependencies.autocfg] -version = "1" +version = "0.6" + +[dev-dependencies.serde_test] +version = "1.0.99" [features] serde-1 = ["serde"] -std = [] test_debug = [] test_low_transition_point = [] diff --git a/third_party/rust/indexmap/README.rst b/third_party/rust/indexmap/README.rst index 2c489850df41..27e6fc58bed7 100644 --- a/third_party/rust/indexmap/README.rst +++ b/third_party/rust/indexmap/README.rst @@ -12,10 +12,10 @@ indexmap .. |docs| image:: https://docs.rs/indexmap/badge.svg .. _docs: https://docs.rs/indexmap -.. |rustc| image:: https://img.shields.io/badge/rust-1.32%2B-orange.svg -.. _rustc: https://img.shields.io/badge/rust-1.32%2B-orange.svg +.. |rustc| image:: https://img.shields.io/badge/rust-1.18%2B-orange.svg +.. _rustc: https://img.shields.io/badge/rust-1.18%2B-orange.svg -A pure-Rust hash table which preserves (in a limited sense) insertion order. +A safe, pure-Rust hash table which preserves insertion order. This crate implements compact map and set data-structures, where the iteration order of the keys is independent from their hash or @@ -38,7 +38,16 @@ was indexmap, a hash table that has following properties: - Fast to iterate. - Indexed in compact space. - Preserves insertion order **as long** as you don't call ``.remove()``. -- Uses hashbrown for the inner table, just like Rust's libstd ``HashMap`` does. +- Uses robin hood hashing just like Rust's libstd ``HashMap`` used to do + (before std switched to hashbrown). + + - It's the usual backwards shift deletion, but only on the index vector, so + it's cheaper because it's moving less memory around. + +Does not implement (Yet) +------------------------ + +- ``.reserve()`` exists but does not have a complete implementation Performance ----------- @@ -46,14 +55,15 @@ Performance ``IndexMap`` derives a couple of performance facts directly from how it is constructed, which is roughly: - A raw hash table of key-value indices, and a vector of key-value pairs. + Two vectors, the first, sparse, with hashes and key-value indices, and the + second, dense, the key-value pairs. - Iteration is very fast since it is on the dense key-values. -- Removal is fast since it moves memory areas only in the table, - and uses a single swap in the vector. 
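Illustrative aside (not part of the changeset): the restored README text above describes the same two-vector layout that the 1.1.0 `OrderMapCore` in the src/map.rs hunk further down actually uses. A minimal sketch of that shape, with simplified field types:

    // Simplified sketch of the restored 1.1.0 layout; see `OrderMapCore`
    // later in this diff for the real definition.
    struct Bucket<K, V> {
        hash: usize, // cached hash of `key`
        key: K,
        value: V,
    }

    struct Core<K, V> {
        mask: usize,                // raw capacity - 1; bucket = hash & mask
        indices: Box<[u64]>,        // sparse: entry index, plus the low 32 bits of
                                    // the hash while indices still fit in 32 bits
        entries: Vec<Bucket<K, V>>, // dense: key-value pairs in insertion order
    }

    fn main() {
        // Removal shifts positions only in `indices` (backwards-shift deletion)
        // and does a single swap in the dense `entries` vector.
        let core: Core<&str, i32> = Core {
            mask: 7,                                   // 8 buckets
            indices: vec![0u64; 8].into_boxed_slice(), // all empty in this sketch
            entries: Vec::new(),
        };
        assert_eq!(core.entries.len(), 0);
    }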
-- Lookup is fast-ish because the initial 7-bit hash lookup uses SIMD, and indices are - densely stored. Lookup also is slow-ish since the actual key-value pairs are stored - separately. (Visible when cpu caches size is limiting.) +- Removal is fast since it moves memory areas only in the first vector, + and uses a single swap in the second vector. +- Lookup is fast-ish because the hashes and indices are densely stored. + Lookup also is slow-ish since hashes and key-value pairs are stored in + separate places. (Visible when cpu caches size is limiting.) - In practice, ``IndexMap`` has been tested out as the hashmap in rustc in PR45282_ and the performance was roughly on par across the whole workload. @@ -62,105 +72,43 @@ which is roughly: .. _PR45282: https://github.com/rust-lang/rust/pull/45282 +Interesting Features +-------------------- + +- Insertion order is preserved (``.swap_remove()`` perturbs the order, like the method name says). +- Implements ``.pop() -> Option<(K, V)>`` in O(1) time. +- ``IndexMap::new()`` is empty and uses no allocation until you insert something. +- Lookup key-value pairs by index and vice versa. +- No ``unsafe``. +- Supports ``IndexMut``. + + +Where to go from here? +---------------------- + +- Ideas and PRs for how to implement insertion-order preserving remove (for example tombstones) + are welcome. The plan is to split the crate into two hash table implementations + a) the current compact index space version and b) the full insertion order version. + + +Ideas that we already did +------------------------- + +- It can be an *indexable* ordered map in the current fashion + (This was implemented in 0.2.0, for potential use as a graph datastructure). + +- Idea for more cache efficient lookup (This was implemented in 0.1.2). + + Current ``indices: Vec``. ``Pos`` is interpreted as ``(u32, u32)`` more + or less when ``.raw_capacity()`` fits in 32 bits. ``Pos`` then stores both the lower + half of the hash and the entry index. + This means that the hash values in ``Bucket`` don't need to be accessed + while scanning for an entry. + Recent Changes ============== -- 1.6.0 - - - **MSRV**: Rust 1.36 or later is now required. - - - The ``hashbrown`` dependency has been updated to version 0.9. - -- 1.5.2 - - - The new "std" feature will force the use of ``std`` for users that explicitly - want the default ``S = RandomState``, bypassing the autodetection added in 1.3.0, - by @cuviper in PR 145_. - -.. _145: https://github.com/bluss/indexmap/pull/145 - -- 1.5.1 - - - Values can now be indexed by their ``usize`` position by @cuviper in PR 132_. - - - Some of the generic bounds have been relaxed to match ``std`` by @cuviper in PR 141_. - - - ``drain`` now accepts any ``R: RangeBounds`` by @cuviper in PR 142_. - -.. _132: https://github.com/bluss/indexmap/pull/132 -.. _141: https://github.com/bluss/indexmap/pull/141 -.. _142: https://github.com/bluss/indexmap/pull/142 - -- 1.5.0 - - - **MSRV**: Rust 1.32 or later is now required. - - - The inner hash table is now based on ``hashbrown`` by @cuviper in PR 131_. - This also completes the method ``reserve`` and adds ``shrink_to_fit``. - - - Add new methods ``get_key_value``, ``remove_entry``, ``swap_remove_entry``, - and ``shift_remove_entry``, by @cuviper in PR 136_ - - - ``Clone::clone_from`` reuses allocations by @cuviper in PR 125_ - - - Add new method ``reverse`` by @linclelinkpart5 in PR 128_ - -.. _125: https://github.com/bluss/indexmap/pull/125 -.. _128: https://github.com/bluss/indexmap/pull/128 -.. 
_131: https://github.com/bluss/indexmap/pull/131 -.. _136: https://github.com/bluss/indexmap/pull/136 - -- 1.4.0 - - - Add new method ``get_index_of`` by @Thermatrix in PR 115_ and 120_ - - - Fix build script rebuild-if-changed configuration to use "build.rs"; - fixes issue 123_. Fix by @cuviper. - - - Dev-dependencies (rand and quickcheck) have been updated. The crate's tests - now run using Rust 1.32 or later (MSRV for building the crate has not changed). - by @kjeremy and @bluss - -.. _123: https://github.com/bluss/indexmap/issues/123 -.. _115: https://github.com/bluss/indexmap/pull/115 -.. _120: https://github.com/bluss/indexmap/pull/120 - -- 1.3.2 - - - Maintenance update to regenerate the published `Cargo.toml`. - -- 1.3.1 - - - Maintenance update for formatting and ``autocfg`` 1.0. - -- 1.3.0 - - - The deprecation messages in the previous version have been removed. - (The methods have not otherwise changed.) Docs for removal methods have been - improved. - - From Rust 1.36, this crate supports being built **without std**, requiring - ``alloc`` instead. This is enabled automatically when it is detected that - ``std`` is not available. There is no crate feature to enable/disable to - trigger this. The new build-dep ``autocfg`` enables this. - -- 1.2.0 - - - Plain ``.remove()`` now has a deprecation message, it informs the user - about picking one of the removal functions ``swap_remove`` and ``shift_remove`` - which have different performance and order semantics. - Plain ``.remove()`` will not be removed, the warning message and method - will remain until further. - - - Add new method ``shift_remove`` for order preserving removal on the map, - and ``shift_take`` for the corresponding operation on the set. - - - Add methods ``swap_remove``, ``swap_remove_entry`` to ``Entry``. 
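Illustrative aside (not part of the changeset): a usage sketch of the ordering behaviour described in the restored "Interesting Features" list above, written against the 1.1.0 API this patch restores, where plain `.remove()` is the swap-remove shown below and the `shift_remove` mentioned in the deleted changelog entries is not yet available.

    use indexmap::IndexMap;

    fn main() {
        let mut m = IndexMap::new();
        m.insert("a", 1);
        m.insert("b", 2);
        m.insert("c", 3);

        // Iteration follows insertion order: a, b, c.
        assert_eq!(m.keys().cloned().collect::<Vec<_>>(), ["a", "b", "c"]);

        // In 1.1.0, `.remove()` swaps the last entry into the hole (O(1)),
        // so the remaining order is perturbed: a, c.
        m.remove("b");
        assert_eq!(m.keys().cloned().collect::<Vec<_>>(), ["a", "c"]);

        // `.pop()` removes the last entry in O(1).
        assert_eq!(m.pop(), Some(("c", 3)));
    }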
- - - Fix indexset/indexmap to support full paths, like ``indexmap::indexmap!()`` - - - Internal improvements: fix warnings, deprecations and style lints - - 1.1.0 - Added optional feature `"rayon"` that adds parallel iterator support diff --git a/third_party/rust/indexmap/benches/bench.rs b/third_party/rust/indexmap/benches/bench.rs index 102cd49c8c72..95fe8edd09c3 100644 --- a/third_party/rust/indexmap/benches/bench.rs +++ b/third_party/rust/indexmap/benches/bench.rs @@ -1,16 +1,19 @@ #![feature(test)] - extern crate test; +extern crate rand; +extern crate fnv; #[macro_use] extern crate lazy_static; +use std::hash::Hash; use fnv::FnvHasher; use std::hash::BuildHasherDefault; -use std::hash::Hash; type FnvBuilder = BuildHasherDefault; -use test::black_box; use test::Bencher; +use test::black_box; + +extern crate indexmap; use indexmap::IndexMap; @@ -18,33 +21,35 @@ use std::collections::HashMap; use std::iter::FromIterator; use rand::rngs::SmallRng; +use rand::FromEntropy; use rand::seq::SliceRandom; -use rand::SeedableRng; - -/// Use a consistently seeded Rng for benchmark stability -fn small_rng() -> SmallRng { - let seed = u64::from_le_bytes(*b"indexmap"); - SmallRng::seed_from_u64(seed) -} #[bench] fn new_hashmap(b: &mut Bencher) { - b.iter(|| HashMap::::new()); + b.iter(|| { + HashMap::::new() + }); } #[bench] -fn new_indexmap(b: &mut Bencher) { - b.iter(|| IndexMap::::new()); +fn new_orderedmap(b: &mut Bencher) { + b.iter(|| { + IndexMap::::new() + }); } #[bench] fn with_capacity_10e5_hashmap(b: &mut Bencher) { - b.iter(|| HashMap::::with_capacity(10_000)); + b.iter(|| { + HashMap::::with_capacity(10_000) + }); } #[bench] -fn with_capacity_10e5_indexmap(b: &mut Bencher) { - b.iter(|| IndexMap::::with_capacity(10_000)); +fn with_capacity_10e5_orderedmap(b: &mut Bencher) { + b.iter(|| { + IndexMap::::with_capacity(10_000) + }); } #[bench] @@ -60,7 +65,7 @@ fn insert_hashmap_10_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_10_000(b: &mut Bencher) { +fn insert_orderedmap_10_000(b: &mut Bencher) { let c = 10_000; b.iter(|| { let mut map = IndexMap::with_capacity(c); @@ -84,7 +89,7 @@ fn insert_hashmap_string_10_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_string_10_000(b: &mut Bencher) { +fn insert_orderedmap_string_10_000(b: &mut Bencher) { let c = 10_000; b.iter(|| { let mut map = IndexMap::with_capacity(c); @@ -109,7 +114,7 @@ fn insert_hashmap_str_10_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_str_10_000(b: &mut Bencher) { +fn insert_orderedmap_str_10_000(b: &mut Bencher) { let c = 10_000; let ss = Vec::from_iter((0..c).map(|x| x.to_string())); b.iter(|| { @@ -135,7 +140,7 @@ fn insert_hashmap_int_bigvalue_10_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_int_bigvalue_10_000(b: &mut Bencher) { +fn insert_orderedmap_int_bigvalue_10_000(b: &mut Bencher) { let c = 10_000; let value = [0u64; 10]; b.iter(|| { @@ -160,7 +165,7 @@ fn insert_hashmap_100_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_100_000(b: &mut Bencher) { +fn insert_orderedmap_100_000(b: &mut Bencher) { let c = 100_000; b.iter(|| { let mut map = IndexMap::with_capacity(c); @@ -184,7 +189,7 @@ fn insert_hashmap_150(b: &mut Bencher) { } #[bench] -fn insert_indexmap_150(b: &mut Bencher) { +fn insert_orderedmap_150(b: &mut Bencher) { let c = 150; b.iter(|| { let mut map = IndexMap::with_capacity(c); @@ -208,7 +213,7 @@ fn entry_hashmap_150(b: &mut Bencher) { } #[bench] -fn entry_indexmap_150(b: &mut Bencher) { +fn entry_orderedmap_150(b: &mut Bencher) { let c = 150; b.iter(|| { let 
mut map = IndexMap::with_capacity(c); @@ -223,31 +228,35 @@ fn entry_indexmap_150(b: &mut Bencher) { fn iter_sum_hashmap_10_000(b: &mut Bencher) { let c = 10_000; let mut map = HashMap::with_capacity(c); - let len = c - c / 10; + let len = c - c/10; for x in 0..len { map.insert(x, ()); } assert_eq!(map.len(), len); - b.iter(|| map.keys().sum::()); + b.iter(|| { + map.keys().sum::() + }); } #[bench] -fn iter_sum_indexmap_10_000(b: &mut Bencher) { +fn iter_sum_orderedmap_10_000(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); - let len = c - c / 10; + let len = c - c/10; for x in 0..len { map.insert(x, ()); } assert_eq!(map.len(), len); - b.iter(|| map.keys().sum::()); + b.iter(|| { + map.keys().sum::() + }); } #[bench] fn iter_black_box_hashmap_10_000(b: &mut Bencher) { let c = 10_000; let mut map = HashMap::with_capacity(c); - let len = c - c / 10; + let len = c - c/10; for x in 0..len { map.insert(x, ()); } @@ -260,10 +269,10 @@ fn iter_black_box_hashmap_10_000(b: &mut Bencher) { } #[bench] -fn iter_black_box_indexmap_10_000(b: &mut Bencher) { +fn iter_black_box_orderedmap_10_000(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); - let len = c - c / 10; + let len = c - c/10; for x in 0..len { map.insert(x, ()); } @@ -276,11 +285,10 @@ fn iter_black_box_indexmap_10_000(b: &mut Bencher) { } fn shuffled_keys(iter: I) -> Vec -where - I: IntoIterator, + where I: IntoIterator { let mut v = Vec::from_iter(iter); - let mut rng = small_rng(); + let mut rng = SmallRng::from_entropy(); v.shuffle(&mut rng); v } @@ -320,7 +328,7 @@ fn lookup_hashmap_10_000_noexist(b: &mut Bencher) { } #[bench] -fn lookup_indexmap_10_000_exist(b: &mut Bencher) { +fn lookup_orderedmap_10_000_exist(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); let keys = shuffled_keys(0..c); @@ -337,7 +345,7 @@ fn lookup_indexmap_10_000_exist(b: &mut Bencher) { } #[bench] -fn lookup_indexmap_10_000_noexist(b: &mut Bencher) { +fn lookup_orderedmap_10_000_noexist(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); let keys = shuffled_keys(0..c); @@ -358,9 +366,12 @@ const LOOKUP_MAP_SIZE: u32 = 100_000_u32; const LOOKUP_SAMPLE_SIZE: u32 = 5000; const SORT_MAP_SIZE: usize = 10_000; + // use lazy_static so that comparison benchmarks use the exact same inputs lazy_static! { - static ref KEYS: Vec = shuffled_keys(0..LOOKUP_MAP_SIZE); + static ref KEYS: Vec = { + shuffled_keys(0..LOOKUP_MAP_SIZE) + }; } lazy_static! { @@ -376,7 +387,7 @@ lazy_static! { } lazy_static! { - static ref IMAP_100K: IndexMap = { + static ref OMAP_100K: IndexMap = { let c = LOOKUP_MAP_SIZE; let mut map = IndexMap::with_capacity(c as usize); let keys = &*KEYS; @@ -388,7 +399,7 @@ lazy_static! { } lazy_static! { - static ref IMAP_SORT_U32: IndexMap = { + static ref OMAP_SORT_U32: IndexMap = { let mut map = IndexMap::with_capacity(SORT_MAP_SIZE); for &key in &KEYS[..SORT_MAP_SIZE] { map.insert(key, key); @@ -397,7 +408,7 @@ lazy_static! { }; } lazy_static! 
{ - static ref IMAP_SORT_S: IndexMap = { + static ref OMAP_SORT_S: IndexMap = { let mut map = IndexMap::with_capacity(SORT_MAP_SIZE); for &key in &KEYS[..SORT_MAP_SIZE] { map.insert(format!("{:^16x}", &key), String::new()); @@ -418,9 +429,10 @@ fn lookup_hashmap_100_000_multi(b: &mut Bencher) { }); } + #[bench] -fn lookup_indexmap_100_000_multi(b: &mut Bencher) { - let map = &*IMAP_100K; +fn lookup_ordermap_100_000_multi(b: &mut Bencher) { + let map = &*OMAP_100K; b.iter(|| { let mut found = 0; for key in 0..LOOKUP_SAMPLE_SIZE { @@ -444,9 +456,10 @@ fn lookup_hashmap_100_000_inorder_multi(b: &mut Bencher) { }); } + #[bench] -fn lookup_indexmap_100_000_inorder_multi(b: &mut Bencher) { - let map = &*IMAP_100K; +fn lookup_ordermap_100_000_inorder_multi(b: &mut Bencher) { + let map = &*OMAP_100K; let keys = &*KEYS; b.iter(|| { let mut found = 0; @@ -467,9 +480,10 @@ fn lookup_hashmap_100_000_single(b: &mut Bencher) { }); } + #[bench] -fn lookup_indexmap_100_000_single(b: &mut Bencher) { - let map = &*IMAP_100K; +fn lookup_ordermap_100_000_single(b: &mut Bencher) { + let map = &*OMAP_100K; let mut iter = (0..LOOKUP_MAP_SIZE + LOOKUP_SAMPLE_SIZE).cycle(); b.iter(|| { let key = iter.next().unwrap(); @@ -493,7 +507,7 @@ fn grow_fnv_hashmap_100_000(b: &mut Bencher) { } #[bench] -fn grow_fnv_indexmap_100_000(b: &mut Bencher) { +fn grow_fnv_ordermap_100_000(b: &mut Bencher) { b.iter(|| { let mut map: IndexMap<_, _, FnvBuilder> = IndexMap::default(); for x in 0..GROW_SIZE { @@ -503,6 +517,7 @@ fn grow_fnv_indexmap_100_000(b: &mut Bencher) { }); } + const MERGE: u64 = 10_000; #[bench] fn hashmap_merge_simple(b: &mut Bencher) { @@ -520,7 +535,7 @@ fn hashmap_merge_shuffle(b: &mut Bencher) { let first_map: HashMap = (0..MERGE).map(|i| (i, ())).collect(); let second_map: HashMap = (MERGE..MERGE * 2).map(|i| (i, ())).collect(); let mut v = Vec::new(); - let mut rng = small_rng(); + let mut rng = SmallRng::from_entropy(); b.iter(|| { let mut merged = first_map.clone(); v.extend(second_map.iter().map(|(&k, &v)| (k, v))); @@ -532,7 +547,7 @@ fn hashmap_merge_shuffle(b: &mut Bencher) { } #[bench] -fn indexmap_merge_simple(b: &mut Bencher) { +fn ordermap_merge_simple(b: &mut Bencher) { let first_map: IndexMap = (0..MERGE).map(|i| (i, ())).collect(); let second_map: IndexMap = (MERGE..MERGE * 2).map(|i| (i, ())).collect(); b.iter(|| { @@ -543,11 +558,11 @@ fn indexmap_merge_simple(b: &mut Bencher) { } #[bench] -fn indexmap_merge_shuffle(b: &mut Bencher) { +fn ordermap_merge_shuffle(b: &mut Bencher) { let first_map: IndexMap = (0..MERGE).map(|i| (i, ())).collect(); let second_map: IndexMap = (MERGE..MERGE * 2).map(|i| (i, ())).collect(); let mut v = Vec::new(); - let mut rng = small_rng(); + let mut rng = SmallRng::from_entropy(); b.iter(|| { let mut merged = first_map.clone(); v.extend(second_map.iter().map(|(&k, &v)| (k, v))); @@ -559,16 +574,16 @@ fn indexmap_merge_shuffle(b: &mut Bencher) { } #[bench] -fn swap_remove_indexmap_100_000(b: &mut Bencher) { - let map = IMAP_100K.clone(); +fn remove_ordermap_100_000(b: &mut Bencher) { + let map = OMAP_100K.clone(); let mut keys = Vec::from_iter(map.keys().cloned()); - let mut rng = small_rng(); + let mut rng = SmallRng::from_entropy(); keys.shuffle(&mut rng); b.iter(|| { let mut map = map.clone(); for key in &keys { - map.swap_remove(key); + map.remove(key); } assert_eq!(map.len(), 0); map @@ -576,50 +591,12 @@ fn swap_remove_indexmap_100_000(b: &mut Bencher) { } #[bench] -fn shift_remove_indexmap_100_000_few(b: &mut Bencher) { - let map = IMAP_100K.clone(); - let 
mut keys = Vec::from_iter(map.keys().cloned()); - let mut rng = small_rng(); - keys.shuffle(&mut rng); - keys.truncate(50); +fn pop_ordermap_100_000(b: &mut Bencher) { + let map = OMAP_100K.clone(); b.iter(|| { let mut map = map.clone(); - for key in &keys { - map.shift_remove(key); - } - assert_eq!(map.len(), IMAP_100K.len() - keys.len()); - map - }); -} - -#[bench] -fn shift_remove_indexmap_2_000_full(b: &mut Bencher) { - let mut keys = KEYS[..2_000].to_vec(); - let mut map = IndexMap::with_capacity(keys.len()); - for &key in &keys { - map.insert(key, key); - } - let mut rng = small_rng(); - keys.shuffle(&mut rng); - - b.iter(|| { - let mut map = map.clone(); - for key in &keys { - map.shift_remove(key); - } - assert_eq!(map.len(), 0); - map - }); -} - -#[bench] -fn pop_indexmap_100_000(b: &mut Bencher) { - let map = IMAP_100K.clone(); - - b.iter(|| { - let mut map = map.clone(); - while !map.is_empty() { + while map.len() > 0 { map.pop(); } assert_eq!(map.len(), 0); @@ -628,8 +605,8 @@ fn pop_indexmap_100_000(b: &mut Bencher) { } #[bench] -fn few_retain_indexmap_100_000(b: &mut Bencher) { - let map = IMAP_100K.clone(); +fn few_retain_ordermap_100_000(b: &mut Bencher) { + let map = OMAP_100K.clone(); b.iter(|| { let mut map = map.clone(); @@ -650,8 +627,8 @@ fn few_retain_hashmap_100_000(b: &mut Bencher) { } #[bench] -fn half_retain_indexmap_100_000(b: &mut Bencher) { - let map = IMAP_100K.clone(); +fn half_retain_ordermap_100_000(b: &mut Bencher) { + let map = OMAP_100K.clone(); b.iter(|| { let mut map = map.clone(); @@ -672,8 +649,8 @@ fn half_retain_hashmap_100_000(b: &mut Bencher) { } #[bench] -fn many_retain_indexmap_100_000(b: &mut Bencher) { - let map = IMAP_100K.clone(); +fn many_retain_ordermap_100_000(b: &mut Bencher) { + let map = OMAP_100K.clone(); b.iter(|| { let mut map = map.clone(); @@ -693,6 +670,7 @@ fn many_retain_hashmap_100_000(b: &mut Bencher) { }); } + // simple sort impl for comparison pub fn simple_sort(m: &mut IndexMap) { let mut ordered: Vec<_> = m.drain(..).collect(); @@ -700,9 +678,10 @@ pub fn simple_sort(m: &mut IndexMap) { m.extend(ordered); } + #[bench] -fn indexmap_sort_s(b: &mut Bencher) { - let map = IMAP_SORT_S.clone(); +fn ordermap_sort_s(b: &mut Bencher) { + let map = OMAP_SORT_S.clone(); // there's a map clone there, but it's still useful to profile this b.iter(|| { @@ -713,8 +692,8 @@ fn indexmap_sort_s(b: &mut Bencher) { } #[bench] -fn indexmap_simple_sort_s(b: &mut Bencher) { - let map = IMAP_SORT_S.clone(); +fn ordermap_simple_sort_s(b: &mut Bencher) { + let map = OMAP_SORT_S.clone(); // there's a map clone there, but it's still useful to profile this b.iter(|| { @@ -725,8 +704,8 @@ fn indexmap_simple_sort_s(b: &mut Bencher) { } #[bench] -fn indexmap_sort_u32(b: &mut Bencher) { - let map = IMAP_SORT_U32.clone(); +fn ordermap_sort_u32(b: &mut Bencher) { + let map = OMAP_SORT_U32.clone(); // there's a map clone there, but it's still useful to profile this b.iter(|| { @@ -737,8 +716,8 @@ fn indexmap_sort_u32(b: &mut Bencher) { } #[bench] -fn indexmap_simple_sort_u32(b: &mut Bencher) { - let map = IMAP_SORT_U32.clone(); +fn ordermap_simple_sort_u32(b: &mut Bencher) { + let map = OMAP_SORT_U32.clone(); // there's a map clone there, but it's still useful to profile this b.iter(|| { @@ -750,15 +729,20 @@ fn indexmap_simple_sort_u32(b: &mut Bencher) { // measure the fixed overhead of cloning in sort benchmarks #[bench] -fn indexmap_clone_for_sort_s(b: &mut Bencher) { - let map = IMAP_SORT_S.clone(); +fn ordermap_clone_for_sort_s(b: &mut Bencher) { + 
let map = OMAP_SORT_S.clone(); - b.iter(|| map.clone()); + b.iter(|| { + map.clone() + }); } #[bench] -fn indexmap_clone_for_sort_u32(b: &mut Bencher) { - let map = IMAP_SORT_U32.clone(); +fn ordermap_clone_for_sort_u32(b: &mut Bencher) { + let map = OMAP_SORT_U32.clone(); - b.iter(|| map.clone()); + b.iter(|| { + map.clone() + }); } + diff --git a/third_party/rust/indexmap/benches/faststring.rs b/third_party/rust/indexmap/benches/faststring.rs index 86b7e9cf71d0..adca6391bfee 100644 --- a/third_party/rust/indexmap/benches/faststring.rs +++ b/third_party/rust/indexmap/benches/faststring.rs @@ -1,78 +1,80 @@ #![feature(test)] - extern crate test; +extern crate rand; +extern crate lazy_static; use test::Bencher; +extern crate indexmap; + use indexmap::IndexMap; use std::collections::HashMap; use std::iter::FromIterator; -use rand::rngs::SmallRng; +use rand::thread_rng; use rand::seq::SliceRandom; -use rand::SeedableRng; use std::hash::{Hash, Hasher}; use std::borrow::Borrow; use std::ops::Deref; - -/// Use a consistently seeded Rng for benchmark stability -fn small_rng() -> SmallRng { - let seed = u64::from_le_bytes(*b"indexmap"); - SmallRng::seed_from_u64(seed) -} +use std::mem; #[derive(PartialEq, Eq, Copy, Clone)] -#[repr(transparent)] pub struct OneShot(pub T); -impl Hash for OneShot { +impl Hash for OneShot +{ fn hash(&self, h: &mut H) { h.write(self.0.as_bytes()) } } impl<'a, S> From<&'a S> for &'a OneShot -where - S: AsRef, + where S: AsRef { fn from(s: &'a S) -> Self { let s: &str = s.as_ref(); - unsafe { &*(s as *const str as *const OneShot) } + unsafe { + mem::transmute(s) + } } } -impl Hash for OneShot { +impl Hash for OneShot +{ fn hash(&self, h: &mut H) { h.write(self.0.as_bytes()) } } -impl Borrow> for OneShot { +impl Borrow> for OneShot +{ fn borrow(&self) -> &OneShot { <&OneShot>::from(&self.0) } } -impl Deref for OneShot { +impl Deref for OneShot +{ type Target = T; fn deref(&self) -> &T { &self.0 } } + fn shuffled_keys(iter: I) -> Vec -where - I: IntoIterator, + where I: IntoIterator { let mut v = Vec::from_iter(iter); - let mut rng = small_rng(); + let mut rng = thread_rng(); v.shuffle(&mut rng); v } + #[bench] fn insert_hashmap_string_10_000(b: &mut Bencher) { let c = 10_000; @@ -98,7 +100,7 @@ fn insert_hashmap_string_oneshot_10_000(b: &mut Bencher) { } #[bench] -fn insert_indexmap_string_10_000(b: &mut Bencher) { +fn insert_orderedmap_string_10_000(b: &mut Bencher) { let c = 10_000; b.iter(|| { let mut map = IndexMap::with_capacity(c); @@ -135,9 +137,7 @@ fn lookup_hashmap_10_000_exist_string_oneshot(b: &mut Bencher) { for &key in &keys { map.insert(OneShot(key.to_string()), 1); } - let lookups = (5000..c) - .map(|x| OneShot(x.to_string())) - .collect::>(); + let lookups = (5000..c).map(|x| OneShot(x.to_string())).collect::>(); b.iter(|| { let mut found = 0; for key in &lookups { @@ -148,7 +148,7 @@ fn lookup_hashmap_10_000_exist_string_oneshot(b: &mut Bencher) { } #[bench] -fn lookup_indexmap_10_000_exist_string(b: &mut Bencher) { +fn lookup_ordermap_10_000_exist_string(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); let keys = shuffled_keys(0..c); @@ -166,16 +166,14 @@ fn lookup_indexmap_10_000_exist_string(b: &mut Bencher) { } #[bench] -fn lookup_indexmap_10_000_exist_string_oneshot(b: &mut Bencher) { +fn lookup_ordermap_10_000_exist_string_oneshot(b: &mut Bencher) { let c = 10_000; let mut map = IndexMap::with_capacity(c); let keys = shuffled_keys(0..c); for &key in &keys { map.insert(OneShot(key.to_string()), 1); } - let lookups = 
(5000..c) - .map(|x| OneShot(x.to_string())) - .collect::>(); + let lookups = (5000..c).map(|x| OneShot(x.to_string())).collect::>(); b.iter(|| { let mut found = 0; for key in &lookups { diff --git a/third_party/rust/indexmap/build.rs b/third_party/rust/indexmap/build.rs deleted file mode 100644 index 9f9fa054f8c1..000000000000 --- a/third_party/rust/indexmap/build.rs +++ /dev/null @@ -1,8 +0,0 @@ -fn main() { - // If "std" is explicitly requested, don't bother probing the target for it. - match std::env::var_os("CARGO_FEATURE_STD") { - Some(_) => autocfg::emit("has_std"), - None => autocfg::new().emit_sysroot_crate("std"), - } - autocfg::rerun_path("build.rs"); -} diff --git a/third_party/rust/indexmap/src/equivalent.rs b/third_party/rust/indexmap/src/equivalent.rs index ad6635ffacae..d72b2ef3a202 100644 --- a/third_party/rust/indexmap/src/equivalent.rs +++ b/third_party/rust/indexmap/src/equivalent.rs @@ -1,4 +1,5 @@ -use core::borrow::Borrow; + +use std::borrow::Borrow; /// Key equivalence trait. /// @@ -16,9 +17,8 @@ pub trait Equivalent { } impl Equivalent for Q -where - Q: Eq, - K: Borrow, + where Q: Eq, + K: Borrow, { #[inline] fn equivalent(&self, key: &K) -> bool { diff --git a/third_party/rust/indexmap/src/lib.rs b/third_party/rust/indexmap/src/lib.rs index 2df2ceed8a64..cb28caf854f2 100644 --- a/third_party/rust/indexmap/src/lib.rs +++ b/third_party/rust/indexmap/src/lib.rs @@ -1,8 +1,6 @@ -// We *mostly* avoid unsafe code, but `map::core::raw` allows it to use `RawTable` buckets. + #![deny(unsafe_code)] -#![warn(rust_2018_idioms)] #![doc(html_root_url = "https://docs.rs/indexmap/1/")] -#![no_std] //! [`IndexMap`] is a hash table where the iteration order of the key-value //! pairs is independent of the hash values of the keys. @@ -14,170 +12,75 @@ //! [`IndexSet`]: set/struct.IndexSet.html //! //! -//! ### Feature Highlights +//! ## Rust Version //! -//! [`IndexMap`] and [`IndexSet`] are drop-in compatible with the std `HashMap` -//! and `HashSet`, but they also have some features of note: -//! -//! - The ordering semantics (see their documentation for details) -//! - Sorting methods and the [`.pop()`][IndexMap::pop] methods. -//! - The [`Equivalent`] trait, which offers more flexible equality definitions -//! between borrowed and owned versions of keys. -//! - The [`MutableKeys`][map::MutableKeys] trait, which gives opt-in mutable -//! access to hash map keys. -//! -//! ### Alternate Hashers -//! -//! [`IndexMap`] and [`IndexSet`] have a default hasher type `S = RandomState`, -//! just like the standard `HashMap` and `HashSet`, which is resistant to -//! HashDoS attacks but not the most performant. Type aliases can make it easier -//! to use alternate hashers: -//! -//! ``` -//! use fnv::FnvBuildHasher; -//! use fxhash::FxBuildHasher; -//! use indexmap::{IndexMap, IndexSet}; -//! -//! type FnvIndexMap = IndexMap; -//! type FnvIndexSet = IndexSet; -//! -//! type FxIndexMap = IndexMap; -//! type FxIndexSet = IndexSet; -//! -//! let std: IndexSet = (0..100).collect(); -//! let fnv: FnvIndexSet = (0..100).collect(); -//! let fx: FxIndexSet = (0..100).collect(); -//! assert_eq!(std, fnv); -//! assert_eq!(std, fx); -//! ``` -//! -//! ### Rust Version -//! -//! This version of indexmap requires Rust 1.32 or later, or Rust 1.36+ for -//! using with `alloc` (without `std`), see below. +//! This version of indexmap requires Rust 1.18 or later, or 1.30+ for +//! development builds. //! //! The indexmap 1.x release series will use a carefully considered version //! 
upgrade policy, where in a later 1.x version, we will raise the minimum //! required Rust version. -//! -//! ## No Standard Library Targets -//! -//! From Rust 1.36, this crate supports being built without `std`, requiring -//! `alloc` instead. This is enabled automatically when it is detected that -//! `std` is not available. There is no crate feature to enable/disable to -//! trigger this. It can be tested by building for a std-less target. -//! -//! - Creating maps and sets using [`new`][IndexMap::new] and -//! [`with_capacity`][IndexMap::with_capacity] is unavailable without `std`. -//! Use methods [`IndexMap::default`][def], -//! [`with_hasher`][IndexMap::with_hasher], -//! [`with_capacity_and_hasher`][IndexMap::with_capacity_and_hasher] instead. -//! A no-std compatible hasher will be needed as well, for example -//! from the crate `twox-hash`. -//! - Macros [`indexmap!`] and [`indexset!`] are unavailable without `std`. -//! -//! [def]: map/struct.IndexMap.html#impl-Default - -#[cfg(not(has_std))] -extern crate alloc; - -#[cfg(has_std)] -#[macro_use] -extern crate std; - -#[cfg(not(has_std))] -use alloc::vec::{self, Vec}; - -#[cfg(has_std)] -use std::vec::{self, Vec}; #[macro_use] mod macros; -mod equivalent; -mod mutable_keys; #[cfg(feature = "serde-1")] mod serde; mod util; +mod equivalent; +mod mutable_keys; -pub mod map; pub mod set; +pub mod map; // Placed after `map` and `set` so new `rayon` methods on the types // are documented after the "normal" methods. #[cfg(feature = "rayon")] mod rayon; -pub use crate::equivalent::Equivalent; -pub use crate::map::IndexMap; -pub use crate::set::IndexSet; +pub use equivalent::Equivalent; +pub use map::IndexMap; +pub use set::IndexSet; // shared private items /// Hash value newtype. Not larger than usize, since anything larger /// isn't used for selecting position anyway. 
-#[derive(Clone, Copy, Debug, PartialEq)] +#[derive(Copy, Debug)] struct HashValue(usize); impl HashValue { #[inline(always)] - fn get(self) -> u64 { - self.0 as u64 + fn get(self) -> usize { self.0 } +} + +impl Clone for HashValue { + #[inline] + fn clone(&self) -> Self { *self } +} +impl PartialEq for HashValue { + #[inline] + fn eq(&self, rhs: &Self) -> bool { + self.0 == rhs.0 } } -#[derive(Copy, Debug)] +#[derive(Copy, Clone, Debug)] struct Bucket { hash: HashValue, key: K, value: V, } -impl Clone for Bucket -where - K: Clone, - V: Clone, -{ - fn clone(&self) -> Self { - Bucket { - hash: self.hash, - key: self.key.clone(), - value: self.value.clone(), - } - } - - fn clone_from(&mut self, other: &Self) { - self.hash = other.hash; - self.key.clone_from(&other.key); - self.value.clone_from(&other.value); - } -} - impl Bucket { // field accessors -- used for `f` instead of closures in `.map(f)` - fn key_ref(&self) -> &K { - &self.key - } - fn value_ref(&self) -> &V { - &self.value - } - fn value_mut(&mut self) -> &mut V { - &mut self.value - } - fn key(self) -> K { - self.key - } - fn key_value(self) -> (K, V) { - (self.key, self.value) - } - fn refs(&self) -> (&K, &V) { - (&self.key, &self.value) - } - fn ref_mut(&mut self) -> (&K, &mut V) { - (&self.key, &mut self.value) - } - fn muts(&mut self) -> (&mut K, &mut V) { - (&mut self.key, &mut self.value) - } + fn key_ref(&self) -> &K { &self.key } + fn value_ref(&self) -> &V { &self.value } + fn value_mut(&mut self) -> &mut V { &mut self.value } + fn key(self) -> K { self.key } + fn key_value(self) -> (K, V) { (self.key, self.value) } + fn refs(&self) -> (&K, &V) { (&self.key, &self.value) } + fn ref_mut(&mut self) -> (&K, &mut V) { (&self.key, &mut self.value) } + fn muts(&mut self) -> (&mut K, &mut V) { (&mut self.key, &mut self.value) } } trait Entries { @@ -186,6 +89,5 @@ trait Entries { fn as_entries(&self) -> &[Self::Entry]; fn as_entries_mut(&mut self) -> &mut [Self::Entry]; fn with_entries(&mut self, f: F) - where - F: FnOnce(&mut [Self::Entry]); + where F: FnOnce(&mut [Self::Entry]); } diff --git a/third_party/rust/indexmap/src/macros.rs b/third_party/rust/indexmap/src/macros.rs index c4d84217d1a6..b8c6f9b1fc7f 100644 --- a/third_party/rust/indexmap/src/macros.rs +++ b/third_party/rust/indexmap/src/macros.rs @@ -1,11 +1,12 @@ -#[cfg(has_std)] + #[macro_export] /// Create an `IndexMap` from a list of key-value pairs /// /// ## Example /// /// ``` -/// use indexmap::indexmap; +/// #[macro_use] extern crate indexmap; +/// # fn main() { /// /// let map = indexmap!{ /// "a" => 1, @@ -17,15 +18,16 @@ /// /// // "a" is the first key /// assert_eq!(map.keys().next(), Some(&"a")); +/// # } /// ``` macro_rules! indexmap { (@single $($x:tt)*) => (()); - (@count $($rest:expr),*) => (<[()]>::len(&[$($crate::indexmap!(@single $rest)),*])); + (@count $($rest:expr),*) => (<[()]>::len(&[$(indexmap!(@single $rest)),*])); - ($($key:expr => $value:expr,)+) => { $crate::indexmap!($($key => $value),+) }; + ($($key:expr => $value:expr,)+) => { indexmap!($($key => $value),+) }; ($($key:expr => $value:expr),*) => { { - let _cap = $crate::indexmap!(@count $($key),*); + let _cap = indexmap!(@count $($key),*); let mut _map = $crate::IndexMap::with_capacity(_cap); $( _map.insert($key, $value); @@ -35,14 +37,14 @@ macro_rules! 
indexmap { }; } -#[cfg(has_std)] #[macro_export] /// Create an `IndexSet` from a list of values /// /// ## Example /// /// ``` -/// use indexmap::indexset; +/// #[macro_use] extern crate indexmap; +/// # fn main() { /// /// let set = indexset!{ /// "a", @@ -54,15 +56,16 @@ macro_rules! indexmap { /// /// // "a" is the first value /// assert_eq!(set.iter().next(), Some(&"a")); +/// # } /// ``` macro_rules! indexset { (@single $($x:tt)*) => (()); - (@count $($rest:expr),*) => (<[()]>::len(&[$($crate::indexset!(@single $rest)),*])); + (@count $($rest:expr),*) => (<[()]>::len(&[$(indexset!(@single $rest)),*])); - ($($value:expr,)+) => { $crate::indexset!($($value),+) }; + ($($value:expr,)+) => { indexset!($($value),+) }; ($($value:expr),*) => { { - let _cap = $crate::indexset!(@count $($value),*); + let _cap = indexset!(@count $($value),*); let mut _set = $crate::IndexSet::with_capacity(_cap); $( _set.insert($value); @@ -99,14 +102,13 @@ macro_rules! iterator_methods { } fn collect(self) -> C - where - C: FromIterator, + where C: FromIterator { // NB: forwarding this directly to standard iterators will // allow it to leverage unstable traits like `TrustedLen`. self.iter.map($map_elt).collect() } - }; + } } macro_rules! double_ended_iterator_methods { @@ -116,5 +118,5 @@ macro_rules! double_ended_iterator_methods { fn next_back(&mut self) -> Option { self.iter.next_back().map($map_elt) } - }; + } } diff --git a/third_party/rust/indexmap/src/map.rs b/third_party/rust/indexmap/src/map.rs index 6829ef94b8d4..6466078a12da 100644 --- a/third_party/rust/indexmap/src/map.rs +++ b/third_party/rust/indexmap/src/map.rs @@ -1,30 +1,226 @@ //! `IndexMap` is a hash table where the iteration order of the key-value //! pairs is independent of the hash values of the keys. -mod core; - -pub use crate::mutable_keys::MutableKeys; +pub use mutable_keys::MutableKeys; #[cfg(feature = "rayon")] -pub use crate::rayon::map as rayon; +pub use ::rayon::map as rayon; -use crate::vec::{self, Vec}; -use ::core::cmp::Ordering; -use ::core::fmt; -use ::core::hash::{BuildHasher, Hash, Hasher}; -use ::core::iter::FromIterator; -use ::core::ops::{Index, IndexMut, RangeBounds}; -use ::core::slice::{Iter as SliceIter, IterMut as SliceIterMut}; - -#[cfg(has_std)] +use std::hash::Hash; +use std::hash::BuildHasher; +use std::hash::Hasher; +use std::iter::FromIterator; use std::collections::hash_map::RandomState; +use std::ops::RangeFull; -use self::core::IndexMapCore; -use crate::equivalent::Equivalent; -use crate::util::third; -use crate::{Bucket, Entries, HashValue}; +use std::cmp::{max, Ordering}; +use std::fmt; +use std::mem::{replace}; +use std::marker::PhantomData; -pub use self::core::{Entry, OccupiedEntry, VacantEntry}; +use util::{third, ptrdistance, enumerate}; +use equivalent::Equivalent; +use { + Bucket, + Entries, + HashValue, +}; + +fn hash_elem_using(build: &B, k: &K) -> HashValue { + let mut h = build.build_hasher(); + k.hash(&mut h); + HashValue(h.finish() as usize) +} + +/// A possibly truncated hash value. 
+/// +#[derive(Debug)] +struct ShortHash(usize, PhantomData); + +impl ShortHash { + /// Pretend this is a full HashValue, which + /// is completely ok w.r.t determining bucket index + /// + /// - Sz = u32: 32-bit hash is enough to select bucket index + /// - Sz = u64: hash is not truncated + fn into_hash(self) -> HashValue { + HashValue(self.0) + } +} + +impl Copy for ShortHash { } +impl Clone for ShortHash { + #[inline] + fn clone(&self) -> Self { *self } +} + +impl PartialEq for ShortHash { + #[inline] + fn eq(&self, rhs: &Self) -> bool { + self.0 == rhs.0 + } +} + +// Compare ShortHash == HashValue by truncating appropriately +// if applicable before the comparison +impl PartialEq for ShortHash where Sz: Size { + #[inline] + fn eq(&self, rhs: &HashValue) -> bool { + if Sz::is_64_bit() { + self.0 == rhs.0 + } else { + lo32(self.0 as u64) == lo32(rhs.0 as u64) + } + } +} +impl From> for HashValue { + fn from(x: ShortHash) -> Self { HashValue(x.0) } +} + +/// `Pos` is stored in the `indices` array and it points to the index of a +/// `Bucket` in self.core.entries. +/// +/// Pos can be interpreted either as a 64-bit index, or as a 32-bit index and +/// a 32-bit hash. +/// +/// Storing the truncated hash next to the index saves loading the hash from the +/// entry, increasing the cache efficiency. +/// +/// Note that the lower 32 bits of the hash is enough to compute desired +/// position and probe distance in a hash map with less than 2**32 buckets. +/// +/// The IndexMap will simply query its **current raw capacity** to see what its +/// current size class is, and dispatch to the 32-bit or 64-bit lookup code as +/// appropriate. Only the growth code needs some extra logic to handle the +/// transition from one class to another +#[derive(Copy)] +struct Pos { + index: u64, +} + +impl Clone for Pos { + #[inline(always)] + fn clone(&self) -> Self { *self } +} + +impl fmt::Debug for Pos { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.pos() { + Some(i) => write!(f, "Pos({} / {:x})", i, self.index), + None => write!(f, "Pos(None)"), + } + } +} + +impl Pos { + #[inline] + fn none() -> Self { Pos { index: !0 } } + + #[inline] + fn is_none(&self) -> bool { self.index == !0 } + + /// Return the index part of the Pos value inside `Some(_)` if the position + /// is not none, otherwise return `None`. + #[inline] + fn pos(&self) -> Option { + if self.index == !0 { None } else { Some(lo32(self.index as u64)) } + } + + /// Set the index part of the Pos value to `i` + #[inline] + fn set_pos(&mut self, i: usize) + where Sz: Size, + { + debug_assert!(!self.is_none()); + if Sz::is_64_bit() { + self.index = i as u64; + } else { + self.index = i as u64 | ((self.index >> 32) << 32) + } + } + + #[inline] + fn with_hash(i: usize, hash: HashValue) -> Self + where Sz: Size + { + if Sz::is_64_bit() { + Pos { + index: i as u64, + } + } else { + Pos { + index: i as u64 | ((hash.0 as u64) << 32) + } + } + } + + /// “Resolve” the Pos into a combination of its index value and + /// a proxy value to the hash (whether it contains the hash or not + /// depends on the size class of the hash map). 
+ #[inline] + fn resolve(&self) -> Option<(usize, ShortHashProxy)> + where Sz: Size + { + if Sz::is_64_bit() { + if !self.is_none() { + Some((self.index as usize, ShortHashProxy::new(0))) + } else { + None + } + } else { + if !self.is_none() { + let (i, hash) = split_lo_hi(self.index); + Some((i as usize, ShortHashProxy::new(hash as usize))) + } else { + None + } + } + } + + /// Like resolve, but the Pos **must** be non-none. Return its index. + #[inline] + fn resolve_existing_index(&self) -> usize + where Sz: Size + { + debug_assert!(!self.is_none(), "datastructure inconsistent: none where valid Pos expected"); + if Sz::is_64_bit() { + self.index as usize + } else { + let (i, _) = split_lo_hi(self.index); + i as usize + } + } + +} + +#[inline] +fn lo32(x: u64) -> usize { (x & 0xFFFF_FFFF) as usize } + +// split into low, hi parts +#[inline] +fn split_lo_hi(x: u64) -> (u32, u32) { (x as u32, (x >> 32) as u32) } + +// Possibly contains the truncated hash value for an entry, depending on +// the size class. +struct ShortHashProxy(usize, PhantomData); + +impl ShortHashProxy + where Sz: Size +{ + fn new(x: usize) -> Self { + ShortHashProxy(x, PhantomData) + } + + /// Get the hash from either `self` or from a lookup into `entries`, + /// depending on `Sz`. + fn get_short_hash(&self, entries: &[Bucket], index: usize) -> ShortHash { + if Sz::is_64_bit() { + ShortHash(entries[index].hash.0, PhantomData) + } else { + ShortHash(self.0, PhantomData) + } + } +} /// A hash table where the iteration order of the key-value pairs is independent /// of the hash values of the keys. @@ -61,89 +257,130 @@ pub use self::core::{Entry, OccupiedEntry, VacantEntry}; /// for ch in "a short treatise on fungi".chars() { /// *letters.entry(ch).or_insert(0) += 1; /// } -/// +/// /// assert_eq!(letters[&'s'], 2); /// assert_eq!(letters[&'t'], 3); /// assert_eq!(letters[&'u'], 1); /// assert_eq!(letters.get(&'y'), None); /// ``` -#[cfg(has_std)] +#[derive(Clone)] pub struct IndexMap { - core: IndexMapCore, - hash_builder: S, -} -#[cfg(not(has_std))] -pub struct IndexMap { - core: IndexMapCore, + core: OrderMapCore, hash_builder: S, } -impl Clone for IndexMap -where - K: Clone, - V: Clone, - S: Clone, -{ - fn clone(&self) -> Self { - IndexMap { - core: self.core.clone(), - hash_builder: self.hash_builder.clone(), - } - } +// core of the map that does not depend on S +#[derive(Clone)] +struct OrderMapCore { + pub(crate) mask: usize, + /// indices are the buckets. indices.len() == raw capacity + pub(crate) indices: Box<[Pos]>, + /// entries is a dense vec of entries in their order. 
entries.len() == len + pub(crate) entries: Vec>, +} - fn clone_from(&mut self, other: &Self) { - self.core.clone_from(&other.core); - self.hash_builder.clone_from(&other.hash_builder); - } +#[inline(always)] +fn desired_pos(mask: usize, hash: HashValue) -> usize { + hash.0 & mask } impl Entries for IndexMap { type Entry = Bucket; - #[inline] fn into_entries(self) -> Vec { - self.core.into_entries() + self.core.entries } - #[inline] fn as_entries(&self) -> &[Self::Entry] { - self.core.as_entries() + &self.core.entries } - #[inline] fn as_entries_mut(&mut self) -> &mut [Self::Entry] { - self.core.as_entries_mut() + &mut self.core.entries } fn with_entries(&mut self, f: F) - where - F: FnOnce(&mut [Self::Entry]), + where F: FnOnce(&mut [Self::Entry]) { - self.core.with_entries(f); + let side_index = self.core.save_hash_index(); + f(&mut self.core.entries); + self.core.restore_hash_index(side_index); + } +} + +/// The number of steps that `current` is forward of the desired position for hash +#[inline(always)] +fn probe_distance(mask: usize, hash: HashValue, current: usize) -> usize { + current.wrapping_sub(desired_pos(mask, hash)) & mask +} + +enum Inserted { + Done, + Swapped { prev_value: V }, + RobinHood { + probe: usize, + old_pos: Pos, } } impl fmt::Debug for IndexMap -where - K: fmt::Debug, - V: fmt::Debug, + where K: fmt::Debug + Hash + Eq, + V: fmt::Debug, + S: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(f.debug_map().entries(self.iter()).finish()); if cfg!(not(feature = "test_debug")) { - f.debug_map().entries(self.iter()).finish() - } else { - // Let the inner `IndexMapCore` print all of its details - f.debug_struct("IndexMap") - .field("core", &self.core) - .finish() + return Ok(()); + } + try!(writeln!(f, "")); + for (i, index) in enumerate(&*self.core.indices) { + try!(write!(f, "{}: {:?}", i, index)); + if let Some(pos) = index.pos() { + let hash = self.core.entries[pos].hash; + let key = &self.core.entries[pos].key; + let desire = desired_pos(self.core.mask, hash); + try!(write!(f, ", desired={}, probe_distance={}, key={:?}", + desire, + probe_distance(self.core.mask, hash, i), + key)); + } + try!(writeln!(f, "")); + } + try!(writeln!(f, "cap={}, raw_cap={}, entries.cap={}", + self.capacity(), + self.raw_capacity(), + self.core.entries.capacity())); + Ok(()) + } +} + +#[inline] +fn usable_capacity(cap: usize) -> usize { + cap - cap / 4 +} + +#[inline] +fn to_raw_capacity(n: usize) -> usize { + n + n / 3 +} + +// this could not be captured in an efficient iterator +macro_rules! probe_loop { + ($probe_var: ident < $len: expr, $body: expr) => { + loop { + if $probe_var < $len { + $body + $probe_var += 1; + } else { + $probe_var = 0; + } } } } -#[cfg(has_std)] impl IndexMap { /// Create a new map. (Does not allocate.) - #[inline] pub fn new() -> Self { Self::with_capacity(0) } @@ -152,99 +389,355 @@ impl IndexMap { /// allocate if `n` is zero.) /// /// Computes in **O(n)** time. - #[inline] pub fn with_capacity(n: usize) -> Self { Self::with_capacity_and_hasher(n, <_>::default()) } } -impl IndexMap { +impl IndexMap +{ /// Create a new map with capacity for `n` key-value pairs. (Does not /// allocate if `n` is zero.) /// /// Computes in **O(n)** time. 
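Illustrative aside (not part of the changeset): a worked example of the capacity and probe-distance arithmetic restored above (`to_raw_capacity`, `usable_capacity`, `desired_pos`, `probe_distance`), restated standalone so the numbers can be checked.

    // Standalone restatement of the helpers above, for concrete numbers.
    fn to_raw_capacity(n: usize) -> usize { n + n / 3 }
    fn usable_capacity(cap: usize) -> usize { cap - cap / 4 }
    fn desired_pos(mask: usize, hash: usize) -> usize { hash & mask }
    fn probe_distance(mask: usize, hash: usize, current: usize) -> usize {
        current.wrapping_sub(desired_pos(mask, hash)) & mask
    }

    fn main() {
        // Asking for 100 entries: raw table size is the next power of two
        // >= to_raw_capacity(100) = 133, i.e. 256, with a 75% load factor.
        let raw_cap = to_raw_capacity(100).next_power_of_two().max(8);
        assert_eq!(raw_cap, 256);
        assert_eq!(usable_capacity(raw_cap), 192);

        // Robin-hood bookkeeping: a hash of 0x2A in an 8-bucket table wants
        // bucket 2; if it actually sits in bucket 5 it has probed 3 slots.
        let mask = 8 - 1;
        assert_eq!(desired_pos(mask, 0x2A), 2);
        assert_eq!(probe_distance(mask, 0x2A, 5), 3);
    }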
- #[inline] - pub fn with_capacity_and_hasher(n: usize, hash_builder: S) -> Self { + pub fn with_capacity_and_hasher(n: usize, hash_builder: S) -> Self + where S: BuildHasher + { if n == 0 { IndexMap { - core: IndexMapCore::new(), - hash_builder, + core: OrderMapCore { + mask: 0, + indices: Box::new([]), + entries: Vec::new(), + }, + hash_builder: hash_builder, } } else { + let raw = to_raw_capacity(n); + let raw_cap = max(raw.next_power_of_two(), 8); IndexMap { - core: IndexMapCore::with_capacity(n), - hash_builder, + core: OrderMapCore { + mask: raw_cap.wrapping_sub(1), + indices: vec![Pos::none(); raw_cap].into_boxed_slice(), + entries: Vec::with_capacity(usable_capacity(raw_cap)), + }, + hash_builder: hash_builder, } } } + /// Return the number of key-value pairs in the map. + /// + /// Computes in **O(1)** time. + pub fn len(&self) -> usize { self.core.len() } + + /// Returns true if the map contains no elements. + /// + /// Computes in **O(1)** time. + pub fn is_empty(&self) -> bool { self.len() == 0 } + /// Create a new map with `hash_builder` - pub fn with_hasher(hash_builder: S) -> Self { + pub fn with_hasher(hash_builder: S) -> Self + where S: BuildHasher + { Self::with_capacity_and_hasher(0, hash_builder) } + /// Return a reference to the map's `BuildHasher`. + pub fn hasher(&self) -> &S + where S: BuildHasher + { + &self.hash_builder + } + /// Computes in **O(1)** time. pub fn capacity(&self) -> usize { self.core.capacity() } - /// Return a reference to the map's `BuildHasher`. - pub fn hasher(&self) -> &S { - &self.hash_builder - } - - /// Return the number of key-value pairs in the map. - /// - /// Computes in **O(1)** time. #[inline] - pub fn len(&self) -> usize { - self.core.len() + fn size_class_is_64bit(&self) -> bool { + self.core.size_class_is_64bit() } - /// Returns true if the map contains no elements. - /// - /// Computes in **O(1)** time. + #[inline(always)] + fn raw_capacity(&self) -> usize { + self.core.raw_capacity() + } +} + +impl OrderMapCore { + // Return whether we need 32 or 64 bits to specify a bucket or entry index + #[cfg(not(feature = "test_low_transition_point"))] + fn size_class_is_64bit(&self) -> bool { + usize::max_value() > u32::max_value() as usize && + self.raw_capacity() >= u32::max_value() as usize + } + + // for testing + #[cfg(feature = "test_low_transition_point")] + fn size_class_is_64bit(&self) -> bool { + self.raw_capacity() >= 64 + } + + #[inline(always)] + fn raw_capacity(&self) -> usize { + self.indices.len() + } +} + +/// Trait for the "size class". Either u32 or u64 depending on the index +/// size needed to address an entry's indes in self.core.entries. +trait Size { + fn is_64_bit() -> bool; + fn is_same_size() -> bool { + Self::is_64_bit() == T::is_64_bit() + } +} + +impl Size for u32 { #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } + fn is_64_bit() -> bool { false } +} - /// Return an iterator over the key-value pairs of the map, in their order - pub fn iter(&self) -> Iter<'_, K, V> { - Iter { - iter: self.as_entries().iter(), +impl Size for u64 { + #[inline] + fn is_64_bit() -> bool { true } +} + +/// Call self.method(args) with `::` or `::` depending on `self` +/// size class. +/// +/// The u32 or u64 is *prepended* to the type parameter list! +macro_rules! dispatch_32_vs_64 { + // self.methodname with other explicit type params, + // size is prepended + ($self_:ident . 
$method:ident::<$($t:ty),*>($($arg:expr),*)) => { + if $self_.size_class_is_64bit() { + $self_.$method::($($arg),*) + } else { + $self_.$method::($($arg),*) + } + }; + // self.methodname with only one type param, the size. + ($self_:ident . $method:ident ($($arg:expr),*)) => { + if $self_.size_class_is_64bit() { + $self_.$method::($($arg),*) + } else { + $self_.$method::($($arg),*) + } + }; + // functionname with size_class_is_64bit as the first argument, only one + // type param, the size. + ($self_:ident => $function:ident ($($arg:expr),*)) => { + if $self_.size_class_is_64bit() { + $function::($($arg),*) + } else { + $function::($($arg),*) + } + }; +} + +/// Entry for an existing key-value pair or a vacant location to +/// insert one. +pub enum Entry<'a, K: 'a, V: 'a> { + /// Existing slot with equivalent key. + Occupied(OccupiedEntry<'a, K, V>), + /// Vacant slot (no equivalent key in the map). + Vacant(VacantEntry<'a, K, V>), +} + +impl<'a, K, V> Entry<'a, K, V> { + /// Computes in **O(1)** time (amortized average). + pub fn or_insert(self, default: V) -> &'a mut V { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(default), } } - /// Return an iterator over the key-value pairs of the map, in their order - pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { - IterMut { - iter: self.as_entries_mut().iter_mut(), + /// Computes in **O(1)** time (amortized average). + pub fn or_insert_with(self, call: F) -> &'a mut V + where F: FnOnce() -> V, + { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(call()), } } - /// Return an iterator over the keys of the map, in their order - pub fn keys(&self) -> Keys<'_, K, V> { - Keys { - iter: self.as_entries().iter(), + pub fn key(&self) -> &K { + match *self { + Entry::Occupied(ref entry) => entry.key(), + Entry::Vacant(ref entry) => entry.key(), } } - /// Return an iterator over the values of the map, in their order - pub fn values(&self) -> Values<'_, K, V> { - Values { - iter: self.as_entries().iter(), + /// Return the index where the key-value pair exists or will be inserted. + pub fn index(&self) -> usize { + match *self { + Entry::Occupied(ref entry) => entry.index(), + Entry::Vacant(ref entry) => entry.index(), } } - /// Return an iterator over mutable references to the the values of the map, - /// in their order - pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> { - ValuesMut { - iter: self.as_entries_mut().iter_mut(), + /// Modifies the entry if it is occupied. + pub fn and_modify(self, f: F) -> Self + where F: FnOnce(&mut V), + { + match self { + Entry::Occupied(mut o) => { + f(o.get_mut()); + Entry::Occupied(o) + } + x => x, } } + /// Inserts a default-constructed value in the entry if it is vacant and returns a mutable + /// reference to it. Otherwise a mutable reference to an already existent value is returned. + /// + /// Computes in **O(1)** time (amortized average). + pub fn or_default(self) -> &'a mut V + where V: Default + { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(V::default()), + } + } +} + +impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Entry<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Entry::Vacant(ref v) => { + f.debug_tuple("Entry") + .field(v) + .finish() + } + Entry::Occupied(ref o) => { + f.debug_tuple("Entry") + .field(o) + .finish() + } + } + } +} + +/// A view into an occupied entry in a `IndexMap`. 
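// [Editor's note] Usage sketch, not part of the patch: the `Entry` API shown
// above, as a caller of either indexmap version would exercise it. Assumes
// `indexmap::IndexMap` is importable as usual.
fn entry_usage_sketch() {
    use indexmap::IndexMap;

    let mut counts: IndexMap<&str, u32> = IndexMap::new();
    for &word in ["a", "b", "a"].iter() {
        // insert 0 the first time a key is seen, then bump the value in place
        *counts.entry(word).or_insert(0) += 1;
    }
    assert_eq!(counts["a"], 2);

    // `and_modify` + `or_default` combine "update if present" with
    // "default-construct if absent"
    counts.entry("c").and_modify(|v| *v += 10).or_default();
    assert_eq!(counts["c"], 0);
}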
+/// It is part of the [`Entry`] enum. +/// +/// [`Entry`]: enum.Entry.html +pub struct OccupiedEntry<'a, K: 'a, V: 'a> { + map: &'a mut OrderMapCore, + key: K, + probe: usize, + index: usize, +} + +impl<'a, K, V> OccupiedEntry<'a, K, V> { + pub fn key(&self) -> &K { &self.key } + pub fn get(&self) -> &V { + &self.map.entries[self.index].value + } + pub fn get_mut(&mut self) -> &mut V { + &mut self.map.entries[self.index].value + } + + /// Put the new key in the occupied entry's key slot + pub(crate) fn replace_key(self) -> K { + let old_key = &mut self.map.entries[self.index].key; + replace(old_key, self.key) + } + + /// Return the index of the key-value pair + pub fn index(&self) -> usize { + self.index + } + pub fn into_mut(self) -> &'a mut V { + &mut self.map.entries[self.index].value + } + + /// Sets the value of the entry to `value`, and returns the entry's old value. + pub fn insert(&mut self, value: V) -> V { + replace(self.get_mut(), value) + } + + pub fn remove(self) -> V { + self.remove_entry().1 + } + + /// Remove and return the key, value pair stored in the map for this entry + pub fn remove_entry(self) -> (K, V) { + self.map.remove_found(self.probe, self.index) + } +} + +impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for OccupiedEntry<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("OccupiedEntry") + .field("key", self.key()) + .field("value", self.get()) + .finish() + } +} + +/// A view into a vacant entry in a `IndexMap`. +/// It is part of the [`Entry`] enum. +/// +/// [`Entry`]: enum.Entry.html +pub struct VacantEntry<'a, K: 'a, V: 'a> { + map: &'a mut OrderMapCore, + key: K, + hash: HashValue, + probe: usize, +} + +impl<'a, K, V> VacantEntry<'a, K, V> { + pub fn key(&self) -> &K { &self.key } + pub fn into_key(self) -> K { self.key } + /// Return the index where the key-value pair will be inserted. + pub fn index(&self) -> usize { self.map.len() } + pub fn insert(self, value: V) -> &'a mut V { + if self.map.size_class_is_64bit() { + self.insert_impl::(value) + } else { + self.insert_impl::(value) + } + } + + fn insert_impl(self, value: V) -> &'a mut V + where Sz: Size + { + let index = self.map.entries.len(); + self.map.entries.push(Bucket { hash: self.hash, key: self.key, value: value }); + let old_pos = Pos::with_hash::(index, self.hash); + self.map.insert_phase_2::(self.probe, old_pos); + &mut {self.map}.entries[index].value + } +} + +impl<'a, K: 'a + fmt::Debug, V: 'a> fmt::Debug for VacantEntry<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("VacantEntry") + .field(self.key()) + .finish() + } +} + +impl IndexMap + where K: Hash + Eq, + S: BuildHasher, +{ + // FIXME: reduce duplication (compare with insert) + fn entry_phase_1(&mut self, key: K) -> Entry + where Sz: Size + { + let hash = hash_elem_using(&self.hash_builder, &key); + self.core.entry_phase_1::(hash, key) + } + /// Remove all key-value pairs in the map, while preserving its capacity. /// /// Computes in **O(n)** time. @@ -252,52 +745,36 @@ impl IndexMap { self.core.clear(); } - /// Clears the `IndexMap` in the given index range, returning those - /// key-value pairs as a drain iterator. - /// - /// The range may be any type that implements `RangeBounds`, - /// including all of the `std::ops::Range*` types, or even a tuple pair of - /// `Bound` start and end values. To drain the map entirely, use `RangeFull` - /// like `map.drain(..)`. 
- /// - /// This shifts down all entries following the drained range to fill the - /// gap, and keeps the allocated memory for reuse. - /// - /// ***Panics*** if the starting point is greater than the end point or if - /// the end point is greater than the length of the map. - pub fn drain(&mut self, range: R) -> Drain<'_, K, V> - where - R: RangeBounds, - { - Drain { - iter: self.core.drain(range), - } - } -} - -impl IndexMap -where - K: Hash + Eq, - S: BuildHasher, -{ /// Reserve capacity for `additional` more key-value pairs. /// - /// Computes in **O(n)** time. + /// FIXME Not implemented fully yet. pub fn reserve(&mut self, additional: usize) { - self.core.reserve(additional); + if additional > 0 { + self.reserve_one(); + } } - /// Shrink the capacity of the map as much as possible. - /// - /// Computes in **O(n)** time. - pub fn shrink_to_fit(&mut self) { - self.core.shrink_to_fit(); + // First phase: Look for the preferred location for key. + // + // We will know if `key` is already in the map, before we need to insert it. + // When we insert they key, it might be that we need to continue displacing + // entries (robin hood hashing), in which case Inserted::RobinHood is returned + fn insert_phase_1(&mut self, key: K, value: V) -> Inserted + where Sz: Size + { + let hash = hash_elem_using(&self.hash_builder, &key); + self.core.insert_phase_1::(hash, key, value) } - fn hash(&self, key: &Q) -> HashValue { - let mut h = self.hash_builder.build_hasher(); - key.hash(&mut h); - HashValue(h.finish() as usize) + fn reserve_one(&mut self) { + if self.len() == self.capacity() { + dispatch_32_vs_64!(self.double_capacity()); + } + } + fn double_capacity(&mut self) + where Sz: Size, + { + self.core.double_capacity::(); } /// Insert a key-value pair in the map. @@ -314,7 +791,26 @@ where /// See also [`entry`](#method.entry) if you you want to insert *or* modify /// or if you need to get the index of the corresponding key-value pair. pub fn insert(&mut self, key: K, value: V) -> Option { - self.insert_full(key, value).1 + self.reserve_one(); + if self.size_class_is_64bit() { + match self.insert_phase_1::(key, value) { + Inserted::Swapped { prev_value } => Some(prev_value), + Inserted::Done => None, + Inserted::RobinHood { probe, old_pos } => { + self.core.insert_phase_2::(probe, old_pos); + None + } + } + } else { + match self.insert_phase_1::(key, value) { + Inserted::Swapped { prev_value } => Some(prev_value), + Inserted::Done => None, + Inserted::RobinHood { probe, old_pos } => { + self.core.insert_phase_2::(probe, old_pos); + None + } + } + } } /// Insert a key-value pair in the map, and get their index. @@ -331,27 +827,71 @@ where /// See also [`entry`](#method.entry) if you you want to insert *or* modify /// or if you need to get the index of the corresponding key-value pair. pub fn insert_full(&mut self, key: K, value: V) -> (usize, Option) { - let hash = self.hash(&key); - self.core.insert_full(hash, key, value) + let entry = self.entry(key); + let index = entry.index(); + + match entry { + Entry::Occupied(mut entry) => (index, Some(entry.insert(value))), + Entry::Vacant(entry) => { + entry.insert(value); + (index, None) + } + } } /// Get the given key’s corresponding entry in the map for insertion and/or /// in-place manipulation. /// /// Computes in **O(1)** time (amortized average). 
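// [Editor's note] Behavior sketch, not part of the patch: what callers observe
// from `insert` and `insert_full` above, in both the removed and the restored
// implementation.
fn insert_sketch() {
    use indexmap::IndexMap;

    let mut m = IndexMap::new();
    assert_eq!(m.insert("k", 1), None);           // new key, nothing replaced
    assert_eq!(m.insert("k", 2), Some(1));        // same key, old value returned
    assert_eq!(m.insert_full("j", 3), (1, None)); // "j" appended at index 1
}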
- pub fn entry(&mut self, key: K) -> Entry<'_, K, V> { - let hash = self.hash(&key); - self.core.entry(hash, key) + pub fn entry(&mut self, key: K) -> Entry { + self.reserve_one(); + dispatch_32_vs_64!(self.entry_phase_1(key)) + } + + + /// Return an iterator over the key-value pairs of the map, in their order + pub fn iter(&self) -> Iter { + Iter { + iter: self.core.entries.iter() + } + } + + /// Return an iterator over the key-value pairs of the map, in their order + pub fn iter_mut(&mut self) -> IterMut { + IterMut { + iter: self.core.entries.iter_mut() + } + } + + /// Return an iterator over the keys of the map, in their order + pub fn keys(&self) -> Keys { + Keys { + iter: self.core.entries.iter() + } + } + + /// Return an iterator over the values of the map, in their order + pub fn values(&self) -> Values { + Values { + iter: self.core.entries.iter() + } + } + + /// Return an iterator over mutable references to the the values of the map, + /// in their order + pub fn values_mut(&mut self) -> ValuesMut { + ValuesMut { + iter: self.core.entries.iter_mut() + } } /// Return `true` if an equivalent to `key` exists in the map. /// /// Computes in **O(1)** time (average). pub fn contains_key(&self, key: &Q) -> bool - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - self.get_index_of(key).is_some() + self.find(key).is_some() } /// Return a reference to the value stored for `key`, if it is present, @@ -359,127 +899,68 @@ where /// /// Computes in **O(1)** time (average). pub fn get(&self, key: &Q) -> Option<&V> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - if let Some(i) = self.get_index_of(key) { - let entry = &self.as_entries()[i]; - Some(&entry.value) - } else { - None - } - } - - /// Return references to the key-value pair stored for `key`, - /// if it is present, else `None`. - /// - /// Computes in **O(1)** time (average). 
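// [Editor's note] Sketch, not part of the patch: the iterators above yield
// pairs in insertion order, which is the crate's defining property.
fn iteration_order_sketch() {
    use indexmap::IndexMap;

    let mut m = IndexMap::new();
    m.insert("b", 2);
    m.insert("a", 1);
    let keys: Vec<&str> = m.keys().cloned().collect();
    assert_eq!(keys, ["b", "a"]); // insertion order, not sorted order
}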
- pub fn get_key_value(&self, key: &Q) -> Option<(&K, &V)> - where - Q: Hash + Equivalent, - { - if let Some(i) = self.get_index_of(key) { - let entry = &self.as_entries()[i]; - Some((&entry.key, &entry.value)) - } else { - None - } + self.get_full(key).map(third) } /// Return item index, key and value pub fn get_full(&self, key: &Q) -> Option<(usize, &K, &V)> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - if let Some(i) = self.get_index_of(key) { - let entry = &self.as_entries()[i]; - Some((i, &entry.key, &entry.value)) + if let Some((_, found)) = self.find(key) { + let entry = &self.core.entries[found]; + Some((found, &entry.key, &entry.value)) } else { None } } - /// Return item index, if it exists in the map - pub fn get_index_of(&self, key: &Q) -> Option - where - Q: Hash + Equivalent, - { - if self.is_empty() { - None - } else { - let hash = self.hash(key); - self.core.get_index_of(hash, key) - } - } - pub fn get_mut(&mut self, key: &Q) -> Option<&mut V> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - if let Some(i) = self.get_index_of(key) { - let entry = &mut self.as_entries_mut()[i]; - Some(&mut entry.value) + self.get_full_mut(key).map(third) + } + + pub fn get_full_mut(&mut self, key: &Q) + -> Option<(usize, &K, &mut V)> + where Q: Hash + Equivalent, + { + self.get_full_mut2_impl(key).map(|(i, k, v)| (i, &*k, v)) + } + + + pub(crate) fn get_full_mut2_impl(&mut self, key: &Q) + -> Option<(usize, &mut K, &mut V)> + where Q: Hash + Equivalent, + { + if let Some((_, found)) = self.find(key) { + let entry = &mut self.core.entries[found]; + Some((found, &mut entry.key, &mut entry.value)) } else { None } } - pub fn get_full_mut(&mut self, key: &Q) -> Option<(usize, &K, &mut V)> - where - Q: Hash + Equivalent, + + /// Return probe (indices) and position (entries) + pub(crate) fn find(&self, key: &Q) -> Option<(usize, usize)> + where Q: Hash + Equivalent, { - if let Some(i) = self.get_index_of(key) { - let entry = &mut self.as_entries_mut()[i]; - Some((i, &entry.key, &mut entry.value)) - } else { - None - } + if self.len() == 0 { return None; } + let h = hash_elem_using(&self.hash_builder, key); + self.core.find_using(h, move |entry| { Q::equivalent(key, &entry.key) }) } - pub(crate) fn get_full_mut2_impl( - &mut self, - key: &Q, - ) -> Option<(usize, &mut K, &mut V)> - where - Q: Hash + Equivalent, - { - if let Some(i) = self.get_index_of(key) { - let entry = &mut self.as_entries_mut()[i]; - Some((i, &mut entry.key, &mut entry.value)) - } else { - None - } - } - - /// Remove the key-value pair equivalent to `key` and return - /// its value. - /// - /// **NOTE:** This is equivalent to `.swap_remove(key)`, if you need to - /// preserve the order of the keys in the map, use `.shift_remove(key)` - /// instead. + /// NOTE: Same as .swap_remove /// /// Computes in **O(1)** time (average). pub fn remove(&mut self, key: &Q) -> Option - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.swap_remove(key) } - /// Remove and return the key-value pair equivalent to `key`. - /// - /// **NOTE:** This is equivalent to `.swap_remove_entry(key)`, if you need to - /// preserve the order of the keys in the map, use `.shift_remove_entry(key)` - /// instead. - /// - /// Computes in **O(1)** time (average). - pub fn remove_entry(&mut self, key: &Q) -> Option<(K, V)> - where - Q: Hash + Equivalent, - { - self.swap_remove_entry(key) - } - /// Remove the key-value pair equivalent to `key` and return /// its value. 
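// [Editor's note] Sketch, not part of the patch: `get_full` above returns the
// entry's index alongside references to the key and value.
fn get_full_sketch() {
    use indexmap::IndexMap;

    let mut m = IndexMap::new();
    m.insert("a", 1);
    m.insert("b", 2);
    assert_eq!(m.get_full("b"), Some((1, &"b", &2)));
    assert_eq!(m.get("missing"), None);
}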
/// @@ -491,31 +972,11 @@ where /// /// Computes in **O(1)** time (average). pub fn swap_remove(&mut self, key: &Q) -> Option - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.swap_remove_full(key).map(third) } - /// Remove and return the key-value pair equivalent to `key`. - /// - /// Like `Vec::swap_remove`, the pair is removed by swapping it with the - /// last element of the map and popping it off. **This perturbs - /// the postion of what used to be the last element!** - /// - /// Return `None` if `key` is not in map. - /// - /// Computes in **O(1)** time (average). - pub fn swap_remove_entry(&mut self, key: &Q) -> Option<(K, V)> - where - Q: Hash + Equivalent, - { - match self.swap_remove_full(key) { - Some((_, key, value)) => Some((key, value)), - None => None, - } - } - /// Remove the key-value pair equivalent to `key` and return it and /// the index it had. /// @@ -524,81 +985,22 @@ where /// the postion of what used to be the last element!** /// /// Return `None` if `key` is not in map. - /// - /// Computes in **O(1)** time (average). pub fn swap_remove_full(&mut self, key: &Q) -> Option<(usize, K, V)> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - if self.is_empty() { - return None; - } - let hash = self.hash(key); - self.core.swap_remove_full(hash, key) - } - - /// Remove the key-value pair equivalent to `key` and return - /// its value. - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `None` if `key` is not in map. - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove(&mut self, key: &Q) -> Option - where - Q: Hash + Equivalent, - { - self.shift_remove_full(key).map(third) - } - - /// Remove and return the key-value pair equivalent to `key`. - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `None` if `key` is not in map. - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove_entry(&mut self, key: &Q) -> Option<(K, V)> - where - Q: Hash + Equivalent, - { - match self.shift_remove_full(key) { - Some((_, key, value)) => Some((key, value)), - None => None, - } - } - - /// Remove the key-value pair equivalent to `key` and return it and - /// the index it had. - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `None` if `key` is not in map. - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove_full(&mut self, key: &Q) -> Option<(usize, K, V)> - where - Q: Hash + Equivalent, - { - if self.is_empty() { - return None; - } - let hash = self.hash(key); - self.core.shift_remove_full(hash, key) + let (probe, found) = match self.find(key) { + None => return None, + Some(t) => t, + }; + let (k, v) = self.core.remove_found(probe, found); + Some((found, k, v)) } /// Remove the last key-value pair /// /// Computes in **O(1)** time (average). pub fn pop(&mut self) -> Option<(K, V)> { - self.core.pop() + self.core.pop_impl() } /// Scan through each key-value pair in the map and keep those where the @@ -609,29 +1011,31 @@ where /// /// Computes in **O(n)** time (average). 
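// [Editor's note] Sketch, not part of the patch: `swap_remove` above is O(1)
// precisely because it moves the *last* pair into the removed slot, so it
// perturbs the order, as the doc comments warn.
fn swap_remove_sketch() {
    use indexmap::IndexMap;

    let mut m: IndexMap<&str, i32> = vec![("a", 1), ("b", 2), ("c", 3)].into_iter().collect();
    assert_eq!(m.swap_remove("a"), Some(1));
    let keys: Vec<&str> = m.keys().cloned().collect();
    assert_eq!(keys, ["c", "b"]); // "c" took the slot "a" used to occupy
}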
pub fn retain(&mut self, mut keep: F) - where - F: FnMut(&K, &mut V) -> bool, + where F: FnMut(&K, &mut V) -> bool, { - self.core.retain_in_order(move |k, v| keep(k, v)); + self.retain_mut(move |k, v| keep(k, v)); } pub(crate) fn retain_mut(&mut self, keep: F) - where - F: FnMut(&mut K, &mut V) -> bool, + where F: FnMut(&mut K, &mut V) -> bool, { - self.core.retain_in_order(keep); + dispatch_32_vs_64!(self.retain_mut_sz::<_>(keep)); + } + + fn retain_mut_sz(&mut self, keep: F) + where F: FnMut(&mut K, &mut V) -> bool, + Sz: Size, + { + self.core.retain_in_order_impl::(keep); } /// Sort the map’s key-value pairs by the default ordering of the keys. /// /// See `sort_by` for details. pub fn sort_keys(&mut self) - where - K: Ord, + where K: Ord, { - self.with_entries(|entries| { - entries.sort_by(|a, b| Ord::cmp(&a.key, &b.key)); - }); + self.core.sort_by(key_cmp) } /// Sort the map’s key-value pairs in place using the comparison @@ -642,38 +1046,40 @@ where /// /// Computes in **O(n log n + c)** time and **O(n)** space where *n* is /// the length of the map and *c* the capacity. The sort is stable. - pub fn sort_by(&mut self, mut cmp: F) - where - F: FnMut(&K, &V, &K, &V) -> Ordering, + pub fn sort_by(&mut self, compare: F) + where F: FnMut(&K, &V, &K, &V) -> Ordering, { - self.with_entries(move |entries| { - entries.sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value)); - }); + self.core.sort_by(compare) } /// Sort the key-value pairs of the map and return a by value iterator of /// the key-value pairs with the result. /// /// The sort is stable. - pub fn sorted_by(self, mut cmp: F) -> IntoIter - where - F: FnMut(&K, &V, &K, &V) -> Ordering, + pub fn sorted_by(mut self, mut cmp: F) -> IntoIter + where F: FnMut(&K, &V, &K, &V) -> Ordering { - let mut entries = self.into_entries(); - entries.sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value)); - IntoIter { - iter: entries.into_iter(), - } + self.core.entries.sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value)); + self.into_iter() } - /// Reverses the order of the map’s key-value pairs in place. - /// - /// Computes in **O(n)** time and **O(1)** space. - pub fn reverse(&mut self) { - self.core.reverse() + /// Clears the `IndexMap`, returning all key-value pairs as a drain iterator. + /// Keeps the allocated memory for reuse. + pub fn drain(&mut self, range: RangeFull) -> Drain { + self.core.clear_indices(); + + Drain { + iter: self.core.entries.drain(range), + } } } +fn key_cmp(k1: &K, _v1: &V, k2: &K, _v2: &V) -> Ordering + where K: Ord +{ + Ord::cmp(k1, k2) +} + impl IndexMap { /// Get a key-value pair by index /// @@ -681,7 +1087,7 @@ impl IndexMap { /// /// Computes in **O(1)** time. pub fn get_index(&self, index: usize) -> Option<(&K, &V)> { - self.as_entries().get(index).map(Bucket::refs) + self.core.entries.get(index).map(Bucket::refs) } /// Get a key-value pair by index @@ -690,36 +1096,440 @@ impl IndexMap { /// /// Computes in **O(1)** time. pub fn get_index_mut(&mut self, index: usize) -> Option<(&mut K, &mut V)> { - self.as_entries_mut().get_mut(index).map(Bucket::muts) + self.core.entries.get_mut(index).map(Bucket::muts) } /// Remove the key-value pair by index /// /// Valid indices are *0 <= index < self.len()* /// - /// Like `Vec::swap_remove`, the pair is removed by swapping it with the - /// last element of the map and popping it off. **This perturbs - /// the postion of what used to be the last element!** - /// /// Computes in **O(1)** time (average). 
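// [Editor's note] Sketch, not part of the patch: `sort_keys`/`sort_by` above
// reorder the pairs in place, after which positional lookups see the new order.
fn sort_sketch() {
    use indexmap::IndexMap;

    let mut m: IndexMap<&str, i32> = vec![("b", 2), ("a", 1)].into_iter().collect();
    m.sort_keys();
    assert_eq!(m.get_index(0), Some((&"a", &1)));
    assert_eq!(m.get_index(1), Some((&"b", &2)));
}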
pub fn swap_remove_index(&mut self, index: usize) -> Option<(K, V)> { - self.core.swap_remove_index(index) - } - - /// Remove the key-value pair by index - /// - /// Valid indices are *0 <= index < self.len()* - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove_index(&mut self, index: usize) -> Option<(K, V)> { - self.core.shift_remove_index(index) + let (probe, found) = match self.core.entries.get(index) + .map(|e| self.core.find_existing_entry(e)) + { + None => return None, + Some(t) => t, + }; + Some(self.core.remove_found(probe, found)) } } +// Methods that don't use any properties (Hash / Eq) of K. +// +// It's cleaner to separate them out, then the compiler checks that we are not +// using Hash + Eq at all in these methods. +// +// However, we should probably not let this show in the public API or docs. +impl OrderMapCore { + fn len(&self) -> usize { self.entries.len() } + + fn capacity(&self) -> usize { + usable_capacity(self.raw_capacity()) + } + + fn clear(&mut self) { + self.entries.clear(); + self.clear_indices(); + } + + // clear self.indices to the same state as "no elements" + fn clear_indices(&mut self) { + for pos in self.indices.iter_mut() { + *pos = Pos::none(); + } + } + + fn first_allocation(&mut self) { + debug_assert_eq!(self.len(), 0); + let raw_cap = 8usize; + self.mask = raw_cap.wrapping_sub(1); + self.indices = vec![Pos::none(); raw_cap].into_boxed_slice(); + self.entries = Vec::with_capacity(usable_capacity(raw_cap)); + } + + #[inline(never)] + // `Sz` is *current* Size class, before grow + fn double_capacity(&mut self) + where Sz: Size + { + debug_assert!(self.raw_capacity() == 0 || self.len() > 0); + if self.raw_capacity() == 0 { + return self.first_allocation(); + } + + // find first ideally placed element -- start of cluster + let mut first_ideal = 0; + for (i, index) in enumerate(&*self.indices) { + if let Some(pos) = index.pos() { + if 0 == probe_distance(self.mask, self.entries[pos].hash, i) { + first_ideal = i; + break; + } + } + } + + // visit the entries in an order where we can simply reinsert them + // into self.indices without any bucket stealing. + let new_raw_cap = self.indices.len() * 2; + let old_indices = replace(&mut self.indices, vec![Pos::none(); new_raw_cap].into_boxed_slice()); + self.mask = new_raw_cap.wrapping_sub(1); + + // `Sz` is the old size class, and either u32 or u64 is the new + for &pos in &old_indices[first_ideal..] { + dispatch_32_vs_64!(self.reinsert_entry_in_order::(pos)); + } + + for &pos in &old_indices[..first_ideal] { + dispatch_32_vs_64!(self.reinsert_entry_in_order::(pos)); + } + let more = self.capacity() - self.len(); + self.entries.reserve_exact(more); + } + + // write to self.indices + // read from self.entries at `pos` + // + // reinserting rewrites all `Pos` entries anyway. This handles transitioning + // from u32 to u64 size class if needed by using the two type parameters. 
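// [Editor's note] Sketch, not part of the patch: the capacity bookkeeping used
// by the restored core above. Raw (bucket) capacity is a power of two, at
// least 8, and `usable_capacity` keeps the load factor at 75%. The helper
// names below are local to this sketch.
fn usable_capacity_sketch(cap: usize) -> usize {
    cap - cap / 4
}

fn to_raw_capacity_sketch(n: usize) -> usize {
    n + n / 3
}

fn capacity_math_sketch() {
    let requested = 10;
    let raw = std::cmp::max(to_raw_capacity_sketch(requested).next_power_of_two(), 8);
    assert_eq!(raw, 16);                         // 13 rounds up to 16 buckets
    assert_eq!(usable_capacity_sketch(raw), 12); // 12 entries fit before growing
}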
+ fn reinsert_entry_in_order(&mut self, pos: Pos) + where SzNew: Size, + SzOld: Size, + { + if let Some((i, hash_proxy)) = pos.resolve::() { + // only if the size class is conserved can we use the short hash + let entry_hash = if SzOld::is_same_size::() { + hash_proxy.get_short_hash(&self.entries, i).into_hash() + } else { + self.entries[i].hash + }; + // find first empty bucket and insert there + let mut probe = desired_pos(self.mask, entry_hash); + probe_loop!(probe < self.indices.len(), { + if let Some(_) = self.indices[probe].resolve::() { + /* nothing */ + } else { + // empty bucket, insert here + self.indices[probe] = Pos::with_hash::(i, entry_hash); + return; + } + }); + } + } + + fn pop_impl(&mut self) -> Option<(K, V)> { + let (probe, found) = match self.entries.last() + .map(|e| self.find_existing_entry(e)) + { + None => return None, + Some(t) => t, + }; + debug_assert_eq!(found, self.entries.len() - 1); + Some(self.remove_found(probe, found)) + } + + // FIXME: reduce duplication (compare with insert) + fn entry_phase_1(&mut self, hash: HashValue, key: K) -> Entry + where Sz: Size, + K: Eq, + { + let mut probe = desired_pos(self.mask, hash); + let mut dist = 0; + debug_assert!(self.len() < self.raw_capacity()); + probe_loop!(probe < self.indices.len(), { + if let Some((i, hash_proxy)) = self.indices[probe].resolve::() { + let entry_hash = hash_proxy.get_short_hash(&self.entries, i); + // if existing element probed less than us, swap + let their_dist = probe_distance(self.mask, entry_hash.into_hash(), probe); + if their_dist < dist { + // robin hood: steal the spot if it's better for us + return Entry::Vacant(VacantEntry { + map: self, + hash: hash, + key: key, + probe: probe, + }); + } else if entry_hash == hash && self.entries[i].key == key { + return Entry::Occupied(OccupiedEntry { + map: self, + key: key, + probe: probe, + index: i, + }); + } + } else { + // empty bucket, insert here + return Entry::Vacant(VacantEntry { + map: self, + hash: hash, + key: key, + probe: probe, + }); + } + dist += 1; + }); + } + + // First phase: Look for the preferred location for key. + // + // We will know if `key` is already in the map, before we need to insert it. 
+ // When we insert they key, it might be that we need to continue displacing + // entries (robin hood hashing), in which case Inserted::RobinHood is returned + fn insert_phase_1(&mut self, hash: HashValue, key: K, value: V) -> Inserted + where Sz: Size, + K: Eq, + { + let mut probe = desired_pos(self.mask, hash); + let mut dist = 0; + let insert_kind; + debug_assert!(self.len() < self.raw_capacity()); + probe_loop!(probe < self.indices.len(), { + let pos = &mut self.indices[probe]; + if let Some((i, hash_proxy)) = pos.resolve::() { + let entry_hash = hash_proxy.get_short_hash(&self.entries, i); + // if existing element probed less than us, swap + let their_dist = probe_distance(self.mask, entry_hash.into_hash(), probe); + if their_dist < dist { + // robin hood: steal the spot if it's better for us + let index = self.entries.len(); + insert_kind = Inserted::RobinHood { + probe: probe, + old_pos: Pos::with_hash::(index, hash), + }; + break; + } else if entry_hash == hash && self.entries[i].key == key { + return Inserted::Swapped { + prev_value: replace(&mut self.entries[i].value, value), + }; + } + } else { + // empty bucket, insert here + let index = self.entries.len(); + *pos = Pos::with_hash::(index, hash); + insert_kind = Inserted::Done; + break; + } + dist += 1; + }); + self.entries.push(Bucket { hash: hash, key: key, value: value }); + insert_kind + } + + + /// phase 2 is post-insert where we forward-shift `Pos` in the indices. + fn insert_phase_2(&mut self, mut probe: usize, mut old_pos: Pos) + where Sz: Size + { + probe_loop!(probe < self.indices.len(), { + let pos = &mut self.indices[probe]; + if pos.is_none() { + *pos = old_pos; + break; + } else { + old_pos = replace(pos, old_pos); + } + }); + } + + + /// Return probe (indices) and position (entries) + fn find_using(&self, hash: HashValue, key_eq: F) -> Option<(usize, usize)> + where F: Fn(&Bucket) -> bool, + { + dispatch_32_vs_64!(self.find_using_impl::<_>(hash, key_eq)) + } + + fn find_using_impl(&self, hash: HashValue, key_eq: F) -> Option<(usize, usize)> + where F: Fn(&Bucket) -> bool, + Sz: Size, + { + debug_assert!(self.len() > 0); + let mut probe = desired_pos(self.mask, hash); + let mut dist = 0; + probe_loop!(probe < self.indices.len(), { + if let Some((i, hash_proxy)) = self.indices[probe].resolve::() { + let entry_hash = hash_proxy.get_short_hash(&self.entries, i); + if dist > probe_distance(self.mask, entry_hash.into_hash(), probe) { + // give up when probe distance is too long + return None; + } else if entry_hash == hash && key_eq(&self.entries[i]) { + return Some((probe, i)); + } + } else { + return None; + } + dist += 1; + }); + } + + /// Find `entry` which is already placed inside self.entries; + /// return its probe and entry index. 
+ fn find_existing_entry(&self, entry: &Bucket) -> (usize, usize) + { + debug_assert!(self.len() > 0); + + let hash = entry.hash; + let actual_pos = ptrdistance(&self.entries[0], entry); + let probe = dispatch_32_vs_64!(self => + find_existing_entry_at(&self.indices, hash, self.mask, actual_pos)); + (probe, actual_pos) + } + + fn remove_found(&mut self, probe: usize, found: usize) -> (K, V) { + dispatch_32_vs_64!(self.remove_found_impl(probe, found)) + } + + fn remove_found_impl(&mut self, probe: usize, found: usize) -> (K, V) + where Sz: Size + { + // index `probe` and entry `found` is to be removed + // use swap_remove, but then we need to update the index that points + // to the other entry that has to move + self.indices[probe] = Pos::none(); + let entry = self.entries.swap_remove(found); + + // correct index that points to the entry that had to swap places + if let Some(entry) = self.entries.get(found) { + // was not last element + // examine new element in `found` and find it in indices + let mut probe = desired_pos(self.mask, entry.hash); + probe_loop!(probe < self.indices.len(), { + if let Some((i, _)) = self.indices[probe].resolve::() { + if i >= self.entries.len() { + // found it + self.indices[probe] = Pos::with_hash::(found, entry.hash); + break; + } + } + }); + } + + self.backward_shift_after_removal::(probe); + + (entry.key, entry.value) + } + + fn backward_shift_after_removal(&mut self, probe_at_remove: usize) + where Sz: Size + { + // backward shift deletion in self.indices + // after probe, shift all non-ideally placed indices backward + let mut last_probe = probe_at_remove; + let mut probe = probe_at_remove + 1; + probe_loop!(probe < self.indices.len(), { + if let Some((i, hash_proxy)) = self.indices[probe].resolve::() { + let entry_hash = hash_proxy.get_short_hash(&self.entries, i); + if probe_distance(self.mask, entry_hash.into_hash(), probe) > 0 { + self.indices[last_probe] = self.indices[probe]; + self.indices[probe] = Pos::none(); + } else { + break; + } + } else { + break; + } + last_probe = probe; + }); + } + + fn retain_in_order_impl(&mut self, mut keep: F) + where F: FnMut(&mut K, &mut V) -> bool, + Sz: Size, + { + // Like Vec::retain in self.entries; for each removed key-value pair, + // we clear its corresponding spot in self.indices, and run the + // usual backward shift in self.indices. + let len = self.entries.len(); + let mut n_deleted = 0; + for i in 0..len { + let will_keep; + let hash; + { + let ent = &mut self.entries[i]; + hash = ent.hash; + will_keep = keep(&mut ent.key, &mut ent.value); + }; + let probe = find_existing_entry_at::(&self.indices, hash, self.mask, i); + if !will_keep { + n_deleted += 1; + self.indices[probe] = Pos::none(); + self.backward_shift_after_removal::(probe); + } else if n_deleted > 0 { + self.indices[probe].set_pos::(i - n_deleted); + self.entries.swap(i - n_deleted, i); + } + } + self.entries.truncate(len - n_deleted); + } + + fn sort_by(&mut self, mut compare: F) + where F: FnMut(&K, &V, &K, &V) -> Ordering, + { + let side_index = self.save_hash_index(); + self.entries.sort_by(move |ei, ej| compare(&ei.key, &ei.value, &ej.key, &ej.value)); + self.restore_hash_index(side_index); + } + + fn save_hash_index(&mut self) -> Vec { + // Temporarily use the hash field in a bucket to store the old index. + // Save the old hash values in `side_index`. Then we can sort + // `self.entries` in place. 
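// [Editor's note] Miniature sketch, not part of the patch, of the trick used by
// `save_hash_index`/`restore_hash_index` above: temporarily store each entry's
// old position in its hash field (the second tuple element stands in for the
// hash here), sort, then read the real hashes back through the saved positions.
fn sort_via_saved_index_sketch() {
    use std::mem::replace;

    let mut entries = vec![("b", 7usize), ("a", 3)]; // (key, pretend hash)
    let saved: Vec<usize> = entries
        .iter_mut()
        .enumerate()
        .map(|(i, e)| replace(&mut e.1, i))
        .collect();
    entries.sort_by(|x, y| x.0.cmp(&y.0));
    for e in &mut entries {
        e.1 = saved[e.1]; // follow the stored old index to recover the hash
    }
    assert_eq!(entries, vec![("a", 3), ("b", 7)]);
}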
+ Vec::from_iter(enumerate(&mut self.entries).map(|(i, elt)| { + replace(&mut elt.hash, HashValue(i)).get() + })) + } + + fn restore_hash_index(&mut self, mut side_index: Vec) { + // Write back the hash values from side_index and fill `side_index` with + // a mapping from the old to the new index instead. + for (i, ent) in enumerate(&mut self.entries) { + let old_index = ent.hash.get(); + ent.hash = HashValue(replace(&mut side_index[old_index], i)); + } + + // Apply new index to self.indices + dispatch_32_vs_64!(self => apply_new_index(&mut self.indices, &side_index)); + + fn apply_new_index(indices: &mut [Pos], new_index: &[usize]) + where Sz: Size + { + for pos in indices { + if let Some((i, _)) = pos.resolve::() { + pos.set_pos::(new_index[i]); + } + } + } + } +} + +/// Find, in the indices, an entry that already exists at a known position +/// inside self.entries in the IndexMap. +/// +/// This is effectively reverse lookup, from the entries into the hash buckets. +/// +/// Return the probe index (into self.indices) +/// +/// + indices: The self.indices of the map, +/// + hash: The full hash value from the bucket +/// + mask: self.mask. +/// + entry_index: The index of the entry in self.entries +fn find_existing_entry_at(indices: &[Pos], hash: HashValue, + mask: usize, entry_index: usize) -> usize + where Sz: Size, +{ + let mut probe = desired_pos(mask, hash); + probe_loop!(probe < indices.len(), { + // the entry *must* be present; if we hit a Pos::none this was not true + // and there is a debug assertion in resolve_existing_index for that. + let i = indices[probe].resolve_existing_index::(); + if i == entry_index { return probe; } + }); +} + +use std::slice::Iter as SliceIter; +use std::slice::IterMut as SliceIterMut; +use std::vec::IntoIter as VecIntoIter; + /// An iterator over the keys of a `IndexMap`. /// /// This `struct` is created by the [`keys`] method on [`IndexMap`]. 
See its @@ -727,7 +1537,7 @@ impl IndexMap { /// /// [`keys`]: struct.IndexMap.html#method.keys /// [`IndexMap`]: struct.IndexMap.html -pub struct Keys<'a, K, V> { +pub struct Keys<'a, K: 'a, V: 'a> { pub(crate) iter: SliceIter<'a, Bucket>, } @@ -737,30 +1547,30 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> { iterator_methods!(Bucket::key_ref); } -impl DoubleEndedIterator for Keys<'_, K, V> { - fn next_back(&mut self) -> Option { +impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { + fn next_back(&mut self) -> Option<&'a K> { self.iter.next_back().map(Bucket::key_ref) } } -impl ExactSizeIterator for Keys<'_, K, V> { +impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { fn len(&self) -> usize { self.iter.len() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` -impl Clone for Keys<'_, K, V> { - fn clone(&self) -> Self { - Keys { - iter: self.iter.clone(), - } +impl<'a, K, V> Clone for Keys<'a, K, V> { + fn clone(&self) -> Keys<'a, K, V> { + Keys { iter: self.iter.clone() } } } -impl fmt::Debug for Keys<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() +impl<'a, K: fmt::Debug, V> fmt::Debug for Keys<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list() + .entries(self.clone()) + .finish() } } @@ -771,7 +1581,7 @@ impl fmt::Debug for Keys<'_, K, V> { /// /// [`values`]: struct.IndexMap.html#method.values /// [`IndexMap`]: struct.IndexMap.html -pub struct Values<'a, K, V> { +pub struct Values<'a, K: 'a, V: 'a> { iter: SliceIter<'a, Bucket>, } @@ -781,30 +1591,30 @@ impl<'a, K, V> Iterator for Values<'a, K, V> { iterator_methods!(Bucket::value_ref); } -impl DoubleEndedIterator for Values<'_, K, V> { +impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::value_ref) } } -impl ExactSizeIterator for Values<'_, K, V> { +impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { fn len(&self) -> usize { self.iter.len() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` -impl Clone for Values<'_, K, V> { - fn clone(&self) -> Self { - Values { - iter: self.iter.clone(), - } +impl<'a, K, V> Clone for Values<'a, K, V> { + fn clone(&self) -> Values<'a, K, V> { + Values { iter: self.iter.clone() } } } -impl fmt::Debug for Values<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() +impl<'a, K, V: fmt::Debug> fmt::Debug for Values<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list() + .entries(self.clone()) + .finish() } } @@ -815,7 +1625,7 @@ impl fmt::Debug for Values<'_, K, V> { /// /// [`values_mut`]: struct.IndexMap.html#method.values_mut /// [`IndexMap`]: struct.IndexMap.html -pub struct ValuesMut<'a, K, V> { +pub struct ValuesMut<'a, K: 'a, V: 'a> { iter: SliceIterMut<'a, Bucket>, } @@ -825,13 +1635,13 @@ impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { iterator_methods!(Bucket::value_mut); } -impl DoubleEndedIterator for ValuesMut<'_, K, V> { +impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::value_mut) } } -impl ExactSizeIterator for ValuesMut<'_, K, V> { +impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { fn len(&self) -> usize { self.iter.len() } @@ -844,7 +1654,7 @@ impl ExactSizeIterator for ValuesMut<'_, K, V> { /// /// [`iter`]: struct.IndexMap.html#method.iter /// [`IndexMap`]: struct.IndexMap.html -pub struct 
Iter<'a, K, V> { +pub struct Iter<'a, K: 'a, V: 'a> { iter: SliceIter<'a, Bucket>, } @@ -854,30 +1664,30 @@ impl<'a, K, V> Iterator for Iter<'a, K, V> { iterator_methods!(Bucket::refs); } -impl DoubleEndedIterator for Iter<'_, K, V> { +impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::refs) } } -impl ExactSizeIterator for Iter<'_, K, V> { +impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> { fn len(&self) -> usize { self.iter.len() } } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` -impl Clone for Iter<'_, K, V> { - fn clone(&self) -> Self { - Iter { - iter: self.iter.clone(), - } +impl<'a, K, V> Clone for Iter<'a, K, V> { + fn clone(&self) -> Iter<'a, K, V> { + Iter { iter: self.iter.clone() } } } -impl fmt::Debug for Iter<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() +impl<'a, K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list() + .entries(self.clone()) + .finish() } } @@ -888,7 +1698,7 @@ impl fmt::Debug for Iter<'_, K, V> { /// /// [`iter_mut`]: struct.IndexMap.html#method.iter_mut /// [`IndexMap`]: struct.IndexMap.html -pub struct IterMut<'a, K, V> { +pub struct IterMut<'a, K: 'a, V: 'a> { iter: SliceIterMut<'a, Bucket>, } @@ -898,13 +1708,13 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> { iterator_methods!(Bucket::ref_mut); } -impl DoubleEndedIterator for IterMut<'_, K, V> { +impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::ref_mut) } } -impl ExactSizeIterator for IterMut<'_, K, V> { +impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> { fn len(&self) -> usize { self.iter.len() } @@ -918,7 +1728,7 @@ impl ExactSizeIterator for IterMut<'_, K, V> { /// [`into_iter`]: struct.IndexMap.html#method.into_iter /// [`IndexMap`]: struct.IndexMap.html pub struct IntoIter { - pub(crate) iter: vec::IntoIter>, + pub(crate) iter: VecIntoIter>, } impl Iterator for IntoIter { @@ -927,7 +1737,7 @@ impl Iterator for IntoIter { iterator_methods!(Bucket::key_value); } -impl DoubleEndedIterator for IntoIter { +impl<'a, K, V> DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::key_value) } @@ -940,7 +1750,7 @@ impl ExactSizeIterator for IntoIter { } impl fmt::Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.iter.as_slice().iter().map(Bucket::refs); f.debug_list().entries(iter).finish() } @@ -953,21 +1763,25 @@ impl fmt::Debug for IntoIter { /// /// [`drain`]: struct.IndexMap.html#method.drain /// [`IndexMap`]: struct.IndexMap.html -pub struct Drain<'a, K, V> { - pub(crate) iter: vec::Drain<'a, Bucket>, +pub struct Drain<'a, K, V> where K: 'a, V: 'a { + pub(crate) iter: ::std::vec::Drain<'a, Bucket> } -impl Iterator for Drain<'_, K, V> { +impl<'a, K, V> Iterator for Drain<'a, K, V> { type Item = (K, V); iterator_methods!(Bucket::key_value); } -impl DoubleEndedIterator for Drain<'_, K, V> { +impl<'a, K, V> DoubleEndedIterator for Drain<'a, K, V> { double_ended_iterator_methods!(Bucket::key_value); } -impl<'a, K, V, S> IntoIterator for &'a IndexMap { + +impl<'a, K, V, S> IntoIterator for &'a IndexMap + where K: Hash + Eq, + S: BuildHasher, +{ type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; fn into_iter(self) -> 
Self::IntoIter { @@ -975,7 +1789,10 @@ impl<'a, K, V, S> IntoIterator for &'a IndexMap { } } -impl<'a, K, V, S> IntoIterator for &'a mut IndexMap { +impl<'a, K, V, S> IntoIterator for &'a mut IndexMap + where K: Hash + Eq, + S: BuildHasher, +{ type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; fn into_iter(self) -> Self::IntoIter { @@ -983,189 +1800,67 @@ impl<'a, K, V, S> IntoIterator for &'a mut IndexMap { } } -impl IntoIterator for IndexMap { +impl IntoIterator for IndexMap + where K: Hash + Eq, + S: BuildHasher, +{ type Item = (K, V); type IntoIter = IntoIter; fn into_iter(self) -> Self::IntoIter { IntoIter { - iter: self.into_entries().into_iter(), + iter: self.core.entries.into_iter(), } } } -/// Access `IndexMap` values corresponding to a key. -/// -/// # Examples -/// -/// ``` -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// for word in "Lorem ipsum dolor sit amet".split_whitespace() { -/// map.insert(word.to_lowercase(), word.to_uppercase()); -/// } -/// assert_eq!(map["lorem"], "LOREM"); -/// assert_eq!(map["ipsum"], "IPSUM"); -/// ``` -/// -/// ```should_panic -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// map.insert("foo", 1); -/// println!("{:?}", map["bar"]); // panics! -/// ``` -impl Index<&Q> for IndexMap -where - Q: Hash + Equivalent, - K: Hash + Eq, - S: BuildHasher, +use std::ops::{Index, IndexMut}; + +impl<'a, K, V, Q: ?Sized, S> Index<&'a Q> for IndexMap + where Q: Hash + Equivalent, + K: Hash + Eq, + S: BuildHasher, { type Output = V; - /// Returns a reference to the value corresponding to the supplied `key`. - /// /// ***Panics*** if `key` is not present in the map. - fn index(&self, key: &Q) -> &V { - self.get(key).expect("IndexMap: key not found") + fn index(&self, key: &'a Q) -> &V { + if let Some(v) = self.get(key) { + v + } else { + panic!("IndexMap: key not found") + } } } -/// Access `IndexMap` values corresponding to a key. -/// /// Mutable indexing allows changing / updating values of key-value /// pairs that are already present. /// /// You can **not** insert new pairs with index syntax, use `.insert()`. -/// -/// # Examples -/// -/// ``` -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// for word in "Lorem ipsum dolor sit amet".split_whitespace() { -/// map.insert(word.to_lowercase(), word.to_string()); -/// } -/// let lorem = &mut map["lorem"]; -/// assert_eq!(lorem, "Lorem"); -/// lorem.retain(char::is_lowercase); -/// assert_eq!(map["lorem"], "orem"); -/// ``` -/// -/// ```should_panic -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// map.insert("foo", 1); -/// map["bar"] = 1; // panics! -/// ``` -impl IndexMut<&Q> for IndexMap -where - Q: Hash + Equivalent, - K: Hash + Eq, - S: BuildHasher, +impl<'a, K, V, Q: ?Sized, S> IndexMut<&'a Q> for IndexMap + where Q: Hash + Equivalent, + K: Hash + Eq, + S: BuildHasher, { - /// Returns a mutable reference to the value corresponding to the supplied `key`. - /// /// ***Panics*** if `key` is not present in the map. - fn index_mut(&mut self, key: &Q) -> &mut V { - self.get_mut(key).expect("IndexMap: key not found") - } -} - -/// Access `IndexMap` values at indexed positions. 
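// [Editor's note] Sketch, not part of the patch: the `Index` impl above panics
// on a missing key, so fallible lookups should go through `get` instead.
fn index_by_key_sketch() {
    use indexmap::IndexMap;

    let mut m = IndexMap::new();
    m.insert("present", 1);
    assert_eq!(m["present"], 1);
    assert_eq!(m.get("absent"), None);
    // m["absent"] would panic with "IndexMap: key not found"
}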
-/// -/// # Examples -/// -/// ``` -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// for word in "Lorem ipsum dolor sit amet".split_whitespace() { -/// map.insert(word.to_lowercase(), word.to_uppercase()); -/// } -/// assert_eq!(map[0], "LOREM"); -/// assert_eq!(map[1], "IPSUM"); -/// map.reverse(); -/// assert_eq!(map[0], "AMET"); -/// assert_eq!(map[1], "SIT"); -/// map.sort_keys(); -/// assert_eq!(map[0], "AMET"); -/// assert_eq!(map[1], "DOLOR"); -/// ``` -/// -/// ```should_panic -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// map.insert("foo", 1); -/// println!("{:?}", map[10]); // panics! -/// ``` -impl Index for IndexMap { - type Output = V; - - /// Returns a reference to the value at the supplied `index`. - /// - /// ***Panics*** if `index` is out of bounds. - fn index(&self, index: usize) -> &V { - self.get_index(index) - .expect("IndexMap: index out of bounds") - .1 - } -} - -/// Access `IndexMap` values at indexed positions. -/// -/// Mutable indexing allows changing / updating indexed values -/// that are already present. -/// -/// You can **not** insert new values with index syntax, use `.insert()`. -/// -/// # Examples -/// -/// ``` -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// for word in "Lorem ipsum dolor sit amet".split_whitespace() { -/// map.insert(word.to_lowercase(), word.to_string()); -/// } -/// let lorem = &mut map[0]; -/// assert_eq!(lorem, "Lorem"); -/// lorem.retain(char::is_lowercase); -/// assert_eq!(map["lorem"], "orem"); -/// ``` -/// -/// ```should_panic -/// use indexmap::IndexMap; -/// -/// let mut map = IndexMap::new(); -/// map.insert("foo", 1); -/// map[10] = 1; // panics! -/// ``` -impl IndexMut for IndexMap { - /// Returns a mutable reference to the value at the supplied `index`. - /// - /// ***Panics*** if `index` is out of bounds. - fn index_mut(&mut self, index: usize) -> &mut V { - self.get_index_mut(index) - .expect("IndexMap: index out of bounds") - .1 + fn index_mut(&mut self, key: &'a Q) -> &mut V { + if let Some(v) = self.get_mut(key) { + v + } else { + panic!("IndexMap: key not found") + } } } impl FromIterator<(K, V)> for IndexMap -where - K: Hash + Eq, - S: BuildHasher + Default, + where K: Hash + Eq, + S: BuildHasher + Default, { /// Create an `IndexMap` from the sequence of key-value pairs in the /// iterable. /// /// `from_iter` uses the same logic as `extend`. See /// [`extend`](#method.extend) for more details. - fn from_iter>(iterable: I) -> Self { + fn from_iter>(iterable: I) -> Self { let iter = iterable.into_iter(); let (low, _) = iter.size_hint(); let mut map = Self::with_capacity_and_hasher(low, <_>::default()); @@ -1175,9 +1870,8 @@ where } impl Extend<(K, V)> for IndexMap -where - K: Hash + Eq, - S: BuildHasher, + where K: Hash + Eq, + S: BuildHasher, { /// Extend the map with all key-value pairs in the iterable. /// @@ -1185,45 +1879,29 @@ where /// them in order, which means that for keys that already existed /// in the map, their value is updated but it keeps the existing order. /// - /// New keys are inserted in the order they appear in the sequence. If + /// New keys are inserted inserted in the order in the sequence. If /// equivalents of a key occur more than once, the last corresponding value /// prevails. - fn extend>(&mut self, iterable: I) { - // (Note: this is a copy of `std`/`hashbrown`'s reservation logic.) - // Keys may be already present or show multiple times in the iterator. 
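// [Editor's note] Sketch, not part of the patch: the duplicate-key rule spelled
// out in the `FromIterator`/`Extend` docs above -- the first occurrence fixes
// the position and the last corresponding value prevails.
fn duplicate_key_sketch() {
    use indexmap::IndexMap;

    let m: IndexMap<&str, i32> = vec![("a", 1), ("b", 2), ("a", 3)].into_iter().collect();
    assert_eq!(m.len(), 2);
    assert_eq!(m.get_index(0), Some((&"a", &3))); // position from the first "a", value from the last
}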
- // Reserve the entire hint lower bound if the map is empty. - // Otherwise reserve half the hint (rounded up), so the map - // will only resize twice in the worst case. - let iter = iterable.into_iter(); - let reserve = if self.is_empty() { - iter.size_hint().0 - } else { - (iter.size_hint().0 + 1) / 2 - }; - self.reserve(reserve); - iter.for_each(move |(k, v)| { - self.insert(k, v); - }); + fn extend>(&mut self, iterable: I) { + for (k, v) in iterable { self.insert(k, v); } } } impl<'a, K, V, S> Extend<(&'a K, &'a V)> for IndexMap -where - K: Hash + Eq + Copy, - V: Copy, - S: BuildHasher, + where K: Hash + Eq + Copy, + V: Copy, + S: BuildHasher, { /// Extend the map with all key-value pairs in the iterable. /// /// See the first extend method for more details. - fn extend>(&mut self, iterable: I) { + fn extend>(&mut self, iterable: I) { self.extend(iterable.into_iter().map(|(&key, &value)| (key, value))); } } impl Default for IndexMap -where - S: Default, + where S: BuildHasher + Default, { /// Return an empty `IndexMap` fn default() -> Self { @@ -1232,35 +1910,31 @@ where } impl PartialEq> for IndexMap -where - K: Hash + Eq, - V1: PartialEq, - S1: BuildHasher, - S2: BuildHasher, + where K: Hash + Eq, + V1: PartialEq, + S1: BuildHasher, + S2: BuildHasher { fn eq(&self, other: &IndexMap) -> bool { if self.len() != other.len() { return false; } - self.iter() - .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v)) + self.iter().all(|(key, value)| other.get(key).map_or(false, |v| *value == *v)) } } impl Eq for IndexMap -where - K: Eq + Hash, - V: Eq, - S: BuildHasher, + where K: Eq + Hash, + V: Eq, + S: BuildHasher { } #[cfg(test)] mod tests { use super::*; - use crate::util::enumerate; - use std::string::String; + use util::enumerate; #[test] fn it_works() { @@ -1337,7 +2011,7 @@ mod tests { let old_map = map.clone(); map.insert(i, ()); for key in old_map.keys() { - if map.get(key).is_none() { + if !map.get(key).is_some() { println!("old_map: {:?}", old_map); println!("map: {:?}", map); panic!("did not find {} in map", key); @@ -1375,6 +2049,7 @@ mod tests { let not_present = [1, 3, 6, 9, 10]; let mut map = IndexMap::with_capacity(insert.len()); + for (i, &elt) in enumerate(&insert) { assert_eq!(map.len(), i); map.insert(elt, elt); @@ -1399,43 +2074,6 @@ mod tests { } } - #[test] - fn reserve() { - let mut map = IndexMap::::new(); - assert_eq!(map.capacity(), 0); - map.reserve(100); - let capacity = map.capacity(); - assert!(capacity >= 100); - for i in 0..capacity { - assert_eq!(map.len(), i); - map.insert(i, i * i); - assert_eq!(map.len(), i + 1); - assert_eq!(map.capacity(), capacity); - assert_eq!(map.get(&i), Some(&(i * i))); - } - map.insert(capacity, std::usize::MAX); - assert_eq!(map.len(), capacity + 1); - assert!(map.capacity() > capacity); - assert_eq!(map.get(&capacity), Some(&std::usize::MAX)); - } - - #[test] - fn shrink_to_fit() { - let mut map = IndexMap::::new(); - assert_eq!(map.capacity(), 0); - for i in 0..100 { - assert_eq!(map.len(), i); - map.insert(i, i * i); - assert_eq!(map.len(), i + 1); - assert!(map.capacity() >= i + 1); - assert_eq!(map.get(&i), Some(&(i * i))); - map.shrink_to_fit(); - assert_eq!(map.len(), i + 1); - assert_eq!(map.capacity(), i + 1); - assert_eq!(map.get(&i), Some(&(i * i))); - } - } - #[test] fn remove() { let insert = [0, 4, 2, 12, 8, 7, 11, 5, 3, 17, 19, 22, 23]; @@ -1459,7 +2097,7 @@ mod tests { } println!("{:?}", map); for &key in &remove { - //println!("{:?}", map); + //println!("{:?}", map); let index = 
map.get_full(&key).unwrap().0; assert_eq!(map.swap_remove_full(&key), Some((index, key, key))); } @@ -1513,10 +2151,10 @@ mod tests { map_a.insert(2, "2"); let mut map_b = map_a.clone(); assert_eq!(map_a, map_b); - map_b.swap_remove(&1); + map_b.remove(&1); assert_ne!(map_a, map_b); - let map_c: IndexMap<_, String> = map_b.into_iter().map(|(k, v)| (k, v.into())).collect(); + let map_c: IndexMap<_, String> = map_b.into_iter().map(|(k, v)| (k, v.to_owned())).collect(); assert_ne!(map_a, map_c); assert_ne!(map_c, map_a); } @@ -1526,16 +2164,13 @@ mod tests { let mut map = IndexMap::new(); map.extend(vec![(&1, &2), (&3, &4)]); map.extend(vec![(5, 6)]); - assert_eq!( - map.into_iter().collect::>(), - vec![(1, 2), (3, 4), (5, 6)] - ); + assert_eq!(map.into_iter().collect::>(), vec![(1, 2), (3, 4), (5, 6)]); } #[test] fn entry() { let mut map = IndexMap::new(); - + map.insert(1, "1"); map.insert(2, "2"); { @@ -1544,13 +2179,13 @@ mod tests { let e = e.or_insert("3"); assert_eq!(e, &"3"); } - + let e = map.entry(2); assert_eq!(e.index(), 1); assert_eq!(e.key(), &2); match e { Entry::Occupied(ref e) => assert_eq!(e.get(), &"2"), - Entry::Vacant(_) => panic!(), + Entry::Vacant(_) => panic!() } assert_eq!(e.or_insert("4"), &"2"); } @@ -1616,7 +2251,7 @@ mod tests { let vec = vec![(1, 1), (2, 2), (3, 3)]; let mut map: IndexMap<_, _> = vec.into_iter().collect(); for value in map.values_mut() { - *value *= 2 + *value = (*value) * 2 } let values: Vec<_> = map.values().cloned().collect(); assert_eq!(values.len(), 3); diff --git a/third_party/rust/indexmap/src/map/core.rs b/third_party/rust/indexmap/src/map/core.rs deleted file mode 100644 index 7adb69a13333..000000000000 --- a/third_party/rust/indexmap/src/map/core.rs +++ /dev/null @@ -1,410 +0,0 @@ -//! This is the core implementation that doesn't depend on the hasher at all. -//! -//! The methods of `IndexMapCore` don't use any Hash properties of K. -//! -//! It's cleaner to separate them out, then the compiler checks that we are not -//! using Hash at all in these methods. -//! -//! However, we should probably not let this show in the public API or docs. - -mod raw; - -use hashbrown::raw::RawTable; - -use crate::vec::{Drain, Vec}; -use core::cmp; -use core::fmt; -use core::mem::replace; -use core::ops::RangeBounds; - -use crate::equivalent::Equivalent; -use crate::util::{enumerate, simplify_range}; -use crate::{Bucket, Entries, HashValue}; - -/// Core of the map that does not depend on S -pub(crate) struct IndexMapCore { - /// indices mapping from the entry hash to its index. - indices: RawTable, - /// entries is a dense vec of entries in their order. 
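// [Editor's note] Sketch, not part of the patch: the `PartialEq` impl above
// compares maps as sets of key-value pairs, so insertion order does not affect
// equality even though it affects iteration.
fn equality_sketch() {
    use indexmap::IndexMap;

    let a: IndexMap<i32, &str> = vec![(1, "x"), (2, "y")].into_iter().collect();
    let b: IndexMap<i32, &str> = vec![(2, "y"), (1, "x")].into_iter().collect();
    assert_eq!(a, b);
    assert_ne!(a.keys().collect::<Vec<_>>(), b.keys().collect::<Vec<_>>());
}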
- entries: Vec>, -} - -#[inline(always)] -fn get_hash(entries: &[Bucket]) -> impl Fn(&usize) -> u64 + '_ { - move |&i| entries[i].hash.get() -} - -impl Clone for IndexMapCore -where - K: Clone, - V: Clone, -{ - fn clone(&self) -> Self { - let indices = self.indices.clone(); - let mut entries = Vec::with_capacity(indices.capacity()); - entries.clone_from(&self.entries); - IndexMapCore { indices, entries } - } - - fn clone_from(&mut self, other: &Self) { - let hasher = get_hash(&other.entries); - self.indices.clone_from_with_hasher(&other.indices, hasher); - if self.entries.capacity() < other.entries.len() { - // If we must resize, match the indices capacity - self.reserve_entries(); - } - self.entries.clone_from(&other.entries); - } -} - -impl fmt::Debug for IndexMapCore -where - K: fmt::Debug, - V: fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("IndexMapCore") - .field("indices", &raw::DebugIndices(&self.indices)) - .field("entries", &self.entries) - .finish() - } -} - -impl Entries for IndexMapCore { - type Entry = Bucket; - - #[inline] - fn into_entries(self) -> Vec { - self.entries - } - - #[inline] - fn as_entries(&self) -> &[Self::Entry] { - &self.entries - } - - #[inline] - fn as_entries_mut(&mut self) -> &mut [Self::Entry] { - &mut self.entries - } - - fn with_entries(&mut self, f: F) - where - F: FnOnce(&mut [Self::Entry]), - { - f(&mut self.entries); - self.rebuild_hash_table(); - } -} - -impl IndexMapCore { - #[inline] - pub(crate) fn new() -> Self { - IndexMapCore { - indices: RawTable::new(), - entries: Vec::new(), - } - } - - #[inline] - pub(crate) fn with_capacity(n: usize) -> Self { - IndexMapCore { - indices: RawTable::with_capacity(n), - entries: Vec::with_capacity(n), - } - } - - #[inline] - pub(crate) fn len(&self) -> usize { - self.indices.len() - } - - #[inline] - pub(crate) fn capacity(&self) -> usize { - cmp::min(self.indices.capacity(), self.entries.capacity()) - } - - pub(crate) fn clear(&mut self) { - self.indices.clear(); - self.entries.clear(); - } - - pub(crate) fn drain(&mut self, range: R) -> Drain<'_, Bucket> - where - R: RangeBounds, - { - let range = simplify_range(range, self.entries.len()); - self.erase_indices(range.start, range.end); - self.entries.drain(range) - } - - /// Reserve capacity for `additional` more key-value pairs. - pub(crate) fn reserve(&mut self, additional: usize) { - self.indices.reserve(additional, get_hash(&self.entries)); - self.reserve_entries(); - } - - /// Reserve entries capacity to match the indices - fn reserve_entries(&mut self) { - let additional = self.indices.capacity() - self.entries.len(); - self.entries.reserve_exact(additional); - } - - /// Shrink the capacity of the map as much as possible. - pub(crate) fn shrink_to_fit(&mut self) { - self.indices.shrink_to(0, get_hash(&self.entries)); - self.entries.shrink_to_fit(); - } - - /// Remove the last key-value pair - pub(crate) fn pop(&mut self) -> Option<(K, V)> { - if let Some(entry) = self.entries.pop() { - let last = self.entries.len(); - self.erase_index(entry.hash, last); - Some((entry.key, entry.value)) - } else { - None - } - } - - /// Append a key-value pair, *without* checking whether it already exists, - /// and return the pair's new index. 
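// [Editor's note] Hypothetical sketch, not part of the patch, of the shape of
// the removed 1.6.0 core's `insert_full` in the deleted map/core.rs shown here:
// look the key up first, then either replace the value in place or append a
// new bucket. The linear scan below merely stands in for the real hash-table
// lookup (`get_index_of`); the helper name is invented for illustration.
fn insert_full_shape_sketch<K: Eq, V>(entries: &mut Vec<(K, V)>, key: K, value: V) -> (usize, Option<V>) {
    use std::mem::replace;

    if let Some(i) = entries.iter().position(|(k, _)| *k == key) {
        (i, Some(replace(&mut entries[i].1, value)))
    } else {
        entries.push((key, value));
        (entries.len() - 1, None)
    }
}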
- fn push(&mut self, hash: HashValue, key: K, value: V) -> usize { - let i = self.entries.len(); - self.indices.insert(hash.get(), i, get_hash(&self.entries)); - if i == self.entries.capacity() { - // Reserve our own capacity synced to the indices, - // rather than letting `Vec::push` just double it. - self.reserve_entries(); - } - self.entries.push(Bucket { hash, key, value }); - i - } - - pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option) - where - K: Eq, - { - match self.get_index_of(hash, &key) { - Some(i) => (i, Some(replace(&mut self.entries[i].value, value))), - None => (self.push(hash, key, value), None), - } - } - - pub(crate) fn retain_in_order(&mut self, mut keep: F) - where - F: FnMut(&mut K, &mut V) -> bool, - { - // Like Vec::retain in self.entries, but with mutable K and V. - // We swap-shift all the items we want to keep, truncate the rest, - // then rebuild the raw hash table with the new indexes. - let len = self.entries.len(); - let mut n_deleted = 0; - for i in 0..len { - let will_keep = { - let entry = &mut self.entries[i]; - keep(&mut entry.key, &mut entry.value) - }; - if !will_keep { - n_deleted += 1; - } else if n_deleted > 0 { - self.entries.swap(i - n_deleted, i); - } - } - if n_deleted > 0 { - self.entries.truncate(len - n_deleted); - self.rebuild_hash_table(); - } - } - - fn rebuild_hash_table(&mut self) { - self.indices.clear(); - debug_assert!(self.indices.capacity() >= self.entries.len()); - for (i, entry) in enumerate(&self.entries) { - // We should never have to reallocate, so there's no need for a real hasher. - self.indices.insert_no_grow(entry.hash.get(), i); - } - } -} - -/// Entry for an existing key-value pair or a vacant location to -/// insert one. -pub enum Entry<'a, K, V> { - /// Existing slot with equivalent key. - Occupied(OccupiedEntry<'a, K, V>), - /// Vacant slot (no equivalent key in the map). - Vacant(VacantEntry<'a, K, V>), -} - -impl<'a, K, V> Entry<'a, K, V> { - /// Computes in **O(1)** time (amortized average). - pub fn or_insert(self, default: V) -> &'a mut V { - match self { - Entry::Occupied(entry) => entry.into_mut(), - Entry::Vacant(entry) => entry.insert(default), - } - } - - /// Computes in **O(1)** time (amortized average). - pub fn or_insert_with(self, call: F) -> &'a mut V - where - F: FnOnce() -> V, - { - match self { - Entry::Occupied(entry) => entry.into_mut(), - Entry::Vacant(entry) => entry.insert(call()), - } - } - - pub fn key(&self) -> &K { - match *self { - Entry::Occupied(ref entry) => entry.key(), - Entry::Vacant(ref entry) => entry.key(), - } - } - - /// Return the index where the key-value pair exists or will be inserted. - pub fn index(&self) -> usize { - match *self { - Entry::Occupied(ref entry) => entry.index(), - Entry::Vacant(ref entry) => entry.index(), - } - } - - /// Modifies the entry if it is occupied. - pub fn and_modify(self, f: F) -> Self - where - F: FnOnce(&mut V), - { - match self { - Entry::Occupied(mut o) => { - f(o.get_mut()); - Entry::Occupied(o) - } - x => x, - } - } - - /// Inserts a default-constructed value in the entry if it is vacant and returns a mutable - /// reference to it. Otherwise a mutable reference to an already existent value is returned. - /// - /// Computes in **O(1)** time (amortized average). 
- pub fn or_default(self) -> &'a mut V - where - V: Default, - { - match self { - Entry::Occupied(entry) => entry.into_mut(), - Entry::Vacant(entry) => entry.insert(V::default()), - } - } -} - -impl fmt::Debug for Entry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Entry::Vacant(ref v) => f.debug_tuple(stringify!(Entry)).field(v).finish(), - Entry::Occupied(ref o) => f.debug_tuple(stringify!(Entry)).field(o).finish(), - } - } -} - -pub use self::raw::OccupiedEntry; - -// Extra methods that don't threaten the unsafe encapsulation. -impl OccupiedEntry<'_, K, V> { - /// Sets the value of the entry to `value`, and returns the entry's old value. - pub fn insert(&mut self, value: V) -> V { - replace(self.get_mut(), value) - } - - /// Remove the key, value pair stored in the map for this entry, and return the value. - /// - /// **NOTE:** This is equivalent to `.swap_remove()`. - pub fn remove(self) -> V { - self.swap_remove() - } - - /// Remove the key, value pair stored in the map for this entry, and return the value. - /// - /// Like `Vec::swap_remove`, the pair is removed by swapping it with the - /// last element of the map and popping it off. **This perturbs - /// the postion of what used to be the last element!** - /// - /// Computes in **O(1)** time (average). - pub fn swap_remove(self) -> V { - self.swap_remove_entry().1 - } - - /// Remove the key, value pair stored in the map for this entry, and return the value. - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove(self) -> V { - self.shift_remove_entry().1 - } - - /// Remove and return the key, value pair stored in the map for this entry - /// - /// **NOTE:** This is equivalent to `.swap_remove_entry()`. - pub fn remove_entry(self) -> (K, V) { - self.swap_remove_entry() - } -} - -impl fmt::Debug for OccupiedEntry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct(stringify!(OccupiedEntry)) - .field("key", self.key()) - .field("value", self.get()) - .finish() - } -} - -/// A view into a vacant entry in a `IndexMap`. -/// It is part of the [`Entry`] enum. -/// -/// [`Entry`]: enum.Entry.html -pub struct VacantEntry<'a, K, V> { - map: &'a mut IndexMapCore, - hash: HashValue, - key: K, -} - -impl<'a, K, V> VacantEntry<'a, K, V> { - pub fn key(&self) -> &K { - &self.key - } - - pub fn into_key(self) -> K { - self.key - } - - /// Return the index where the key-value pair will be inserted. - pub fn index(&self) -> usize { - self.map.len() - } - - pub fn insert(self, value: V) -> &'a mut V { - let i = self.map.push(self.hash, self.key, value); - &mut self.map.entries[i].value - } -} - -impl fmt::Debug for VacantEntry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple(stringify!(VacantEntry)) - .field(self.key()) - .finish() - } -} - -#[test] -fn assert_send_sync() { - fn assert_send_sync() {} - assert_send_sync::>(); - assert_send_sync::>(); -} diff --git a/third_party/rust/indexmap/src/map/core/raw.rs b/third_party/rust/indexmap/src/map/core/raw.rs deleted file mode 100644 index fdfadb91c1d6..000000000000 --- a/third_party/rust/indexmap/src/map/core/raw.rs +++ /dev/null @@ -1,335 +0,0 @@ -#![allow(unsafe_code)] -//! This module encapsulates the `unsafe` access to `hashbrown::raw::RawTable`, -//! 
mostly in dealing with its bucket "pointers". - -use super::{Entry, Equivalent, HashValue, IndexMapCore, VacantEntry}; -use crate::util::enumerate; -use core::fmt; -use core::mem::replace; -use hashbrown::raw::RawTable; - -type RawBucket = hashbrown::raw::Bucket; - -pub(super) struct DebugIndices<'a>(pub &'a RawTable); -impl fmt::Debug for DebugIndices<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let indices = unsafe { self.0.iter().map(|raw_bucket| raw_bucket.read()) }; - f.debug_list().entries(indices).finish() - } -} - -impl IndexMapCore { - /// Return the raw bucket with an equivalent key - fn find_equivalent(&self, hash: HashValue, key: &Q) -> Option - where - Q: ?Sized + Equivalent, - { - self.indices.find(hash.get(), { - move |&i| Q::equivalent(key, &self.entries[i].key) - }) - } - - /// Return the raw bucket for the given index - fn find_index(&self, hash: HashValue, index: usize) -> Option { - self.indices.find(hash.get(), move |&i| i == index) - } - - /// Return the index in `entries` where an equivalent key can be found - pub(crate) fn get_index_of(&self, hash: HashValue, key: &Q) -> Option - where - Q: ?Sized + Equivalent, - { - match self.find_equivalent(hash, key) { - Some(raw_bucket) => Some(unsafe { raw_bucket.read() }), - None => None, - } - } - - /// Erase the given index from `indices`. - /// - /// The index doesn't need to be valid in `entries` while calling this. No other index - /// adjustments are made -- this is only used by `pop` for the greatest index. - pub(super) fn erase_index(&mut self, hash: HashValue, index: usize) { - debug_assert_eq!(index, self.indices.len() - 1); - let raw_bucket = self.find_index(hash, index).unwrap(); - unsafe { self.indices.erase(raw_bucket) }; - } - - /// Erase `start..end` from `indices`, and shift `end..` indices down to `start..` - /// - /// All of these items should still be at their original location in `entries`. - /// This is used by `drain`, which will let `Vec::drain` do the work on `entries`. 
- pub(super) fn erase_indices(&mut self, start: usize, end: usize) { - let (init, shifted_entries) = self.entries.split_at(end); - let (start_entries, erased_entries) = init.split_at(start); - - let erased = erased_entries.len(); - let shifted = shifted_entries.len(); - let half_capacity = self.indices.buckets() / 2; - - // Use a heuristic between different strategies - if erased == 0 { - // Degenerate case, nothing to do - } else if start + shifted < half_capacity && start < erased { - // Reinsert everything, as there are few kept indices - self.indices.clear(); - - // Reinsert stable indices - for (i, entry) in enumerate(start_entries) { - self.indices.insert_no_grow(entry.hash.get(), i); - } - - // Reinsert shifted indices - for (i, entry) in (start..).zip(shifted_entries) { - self.indices.insert_no_grow(entry.hash.get(), i); - } - } else if erased + shifted < half_capacity { - // Find each affected index, as there are few to adjust - - // Find erased indices - for (i, entry) in (start..).zip(erased_entries) { - let bucket = self.find_index(entry.hash, i).unwrap(); - unsafe { self.indices.erase(bucket) }; - } - - // Find shifted indices - for ((new, old), entry) in (start..).zip(end..).zip(shifted_entries) { - let bucket = self.find_index(entry.hash, old).unwrap(); - unsafe { bucket.write(new) }; - } - } else { - // Sweep the whole table for adjustments - unsafe { - for bucket in self.indices.iter() { - let i = bucket.read(); - if i >= end { - bucket.write(i - erased); - } else if i >= start { - self.indices.erase(bucket); - } - } - } - } - - debug_assert_eq!(self.indices.len(), start + shifted); - } - - pub(crate) fn entry(&mut self, hash: HashValue, key: K) -> Entry<'_, K, V> - where - K: Eq, - { - match self.find_equivalent(hash, &key) { - // Safety: The entry is created with a live raw bucket, at the same time we have a &mut - // reference to the map, so it can not be modified further. - Some(raw_bucket) => Entry::Occupied(OccupiedEntry { - map: self, - raw_bucket, - key, - }), - None => Entry::Vacant(VacantEntry { - map: self, - hash, - key, - }), - } - } - - /// Remove an entry by shifting all entries that follow it - pub(crate) fn shift_remove_full(&mut self, hash: HashValue, key: &Q) -> Option<(usize, K, V)> - where - Q: ?Sized + Equivalent, - { - match self.find_equivalent(hash, key) { - Some(raw_bucket) => unsafe { Some(self.shift_remove_bucket(raw_bucket)) }, - None => None, - } - } - - /// Remove an entry by shifting all entries that follow it - pub(crate) fn shift_remove_index(&mut self, index: usize) -> Option<(K, V)> { - let raw_bucket = match self.entries.get(index) { - Some(entry) => self.find_index(entry.hash, index).unwrap(), - None => return None, - }; - unsafe { - let (_, key, value) = self.shift_remove_bucket(raw_bucket); - Some((key, value)) - } - } - - /// Remove an entry by shifting all entries that follow it - /// - /// Safety: The caller must pass a live `raw_bucket`. - #[allow(unused_unsafe)] - unsafe fn shift_remove_bucket(&mut self, raw_bucket: RawBucket) -> (usize, K, V) { - // use Vec::remove, but then we need to update the indices that point - // to all of the other entries that have to move - let index = unsafe { self.indices.remove(raw_bucket) }; - let entry = self.entries.remove(index); - - // correct indices that point to the entries that followed the removed entry. - // use a heuristic between a full sweep vs. a `find()` for every shifted item. 
- let raw_capacity = self.indices.buckets(); - let shifted_entries = &self.entries[index..]; - if shifted_entries.len() > raw_capacity / 2 { - // shift all indices greater than `index` - unsafe { - for bucket in self.indices.iter() { - let i = bucket.read(); - if i > index { - bucket.write(i - 1); - } - } - } - } else { - // find each following entry to shift its index - for (i, entry) in (index + 1..).zip(shifted_entries) { - let shifted_bucket = self.find_index(entry.hash, i).unwrap(); - unsafe { shifted_bucket.write(i - 1) }; - } - } - - (index, entry.key, entry.value) - } - - /// Remove an entry by swapping it with the last - pub(crate) fn swap_remove_full(&mut self, hash: HashValue, key: &Q) -> Option<(usize, K, V)> - where - Q: ?Sized + Equivalent, - { - match self.find_equivalent(hash, key) { - Some(raw_bucket) => unsafe { Some(self.swap_remove_bucket(raw_bucket)) }, - None => None, - } - } - - /// Remove an entry by swapping it with the last - pub(crate) fn swap_remove_index(&mut self, index: usize) -> Option<(K, V)> { - let raw_bucket = match self.entries.get(index) { - Some(entry) => self.find_index(entry.hash, index).unwrap(), - None => return None, - }; - unsafe { - let (_, key, value) = self.swap_remove_bucket(raw_bucket); - Some((key, value)) - } - } - - /// Remove an entry by swapping it with the last - /// - /// Safety: The caller must pass a live `raw_bucket`. - #[allow(unused_unsafe)] - unsafe fn swap_remove_bucket(&mut self, raw_bucket: RawBucket) -> (usize, K, V) { - // use swap_remove, but then we need to update the index that points - // to the other entry that has to move - let index = unsafe { self.indices.remove(raw_bucket) }; - let entry = self.entries.swap_remove(index); - - // correct index that points to the entry that had to swap places - if let Some(entry) = self.entries.get(index) { - // was not last element - // examine new element in `index` and find it in indices - let last = self.entries.len(); - let swapped_bucket = self.find_index(entry.hash, last).unwrap(); - unsafe { swapped_bucket.write(index) }; - } - - (index, entry.key, entry.value) - } - - pub(crate) fn reverse(&mut self) { - self.entries.reverse(); - - // No need to save hash indices, can easily calculate what they should - // be, given that this is an in-place reversal. - let len = self.entries.len(); - unsafe { - for raw_bucket in self.indices.iter() { - let i = raw_bucket.read(); - raw_bucket.write(len - i - 1); - } - } - } -} - -/// A view into an occupied entry in a `IndexMap`. -/// It is part of the [`Entry`] enum. -/// -/// [`Entry`]: enum.Entry.html -// SAFETY: The lifetime of the map reference also constrains the raw bucket, -// which is essentially a raw pointer into the map indices. -pub struct OccupiedEntry<'a, K, V> { - map: &'a mut IndexMapCore, - raw_bucket: RawBucket, - key: K, -} - -// `hashbrown::raw::Bucket` is only `Send`, not `Sync`. -// SAFETY: `&self` only accesses the bucket to read it. -unsafe impl Sync for OccupiedEntry<'_, K, V> {} - -// The parent module also adds methods that don't threaten the unsafe encapsulation. 
-impl<'a, K, V> OccupiedEntry<'a, K, V> { - pub fn key(&self) -> &K { - &self.key - } - - pub fn get(&self) -> &V { - &self.map.entries[self.index()].value - } - - pub fn get_mut(&mut self) -> &mut V { - let index = self.index(); - &mut self.map.entries[index].value - } - - /// Put the new key in the occupied entry's key slot - pub(crate) fn replace_key(self) -> K { - let index = self.index(); - let old_key = &mut self.map.entries[index].key; - replace(old_key, self.key) - } - - /// Return the index of the key-value pair - #[inline] - pub fn index(&self) -> usize { - unsafe { self.raw_bucket.read() } - } - - pub fn into_mut(self) -> &'a mut V { - let index = self.index(); - &mut self.map.entries[index].value - } - - /// Remove and return the key, value pair stored in the map for this entry - /// - /// Like `Vec::swap_remove`, the pair is removed by swapping it with the - /// last element of the map and popping it off. **This perturbs - /// the postion of what used to be the last element!** - /// - /// Computes in **O(1)** time (average). - pub fn swap_remove_entry(self) -> (K, V) { - // This is safe because it can only happen once (self is consumed) - // and map.indices have not been modified since entry construction - unsafe { - let (_, key, value) = self.map.swap_remove_bucket(self.raw_bucket); - (key, value) - } - } - - /// Remove and return the key, value pair stored in the map for this entry - /// - /// Like `Vec::remove`, the pair is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove_entry(self) -> (K, V) { - // This is safe because it can only happen once (self is consumed) - // and map.indices have not been modified since entry construction - unsafe { - let (_, key, value) = self.map.shift_remove_bucket(self.raw_bucket); - (key, value) - } - } -} diff --git a/third_party/rust/indexmap/src/mutable_keys.rs b/third_party/rust/indexmap/src/mutable_keys.rs index 0688441ee9a8..9291f96f2802 100644 --- a/third_party/rust/indexmap/src/mutable_keys.rs +++ b/third_party/rust/indexmap/src/mutable_keys.rs @@ -1,8 +1,10 @@ -use core::hash::{BuildHasher, Hash}; -use super::{Equivalent, IndexMap}; +use std::hash::Hash; +use std::hash::BuildHasher; -pub struct PrivateMarker {} +use super::{IndexMap, Equivalent}; + +pub struct PrivateMarker { } /// Opt-in mutable access to keys. /// @@ -19,14 +21,11 @@ pub struct PrivateMarker {} pub trait MutableKeys { type Key; type Value; - + /// Return item index, mutable reference to key and value - fn get_full_mut2( - &mut self, - key: &Q, - ) -> Option<(usize, &mut Self::Key, &mut Self::Value)> - where - Q: Hash + Equivalent; + fn get_full_mut2(&mut self, key: &Q) + -> Option<(usize, &mut Self::Key, &mut Self::Value)> + where Q: Hash + Equivalent; /// Scan through each key-value pair in the map and keep those where the /// closure `keep` returns `true`. @@ -36,8 +35,7 @@ pub trait MutableKeys { /// /// Computes in **O(n)** time (average). fn retain2(&mut self, keep: F) - where - F: FnMut(&mut Self::Key, &mut Self::Value) -> bool; + where F: FnMut(&mut Self::Key, &mut Self::Value) -> bool; /// This method is not useful in itself – it is there to “seal” the trait /// for external implementation, so that we can add methods without @@ -49,27 +47,25 @@ pub trait MutableKeys { /// /// See [`MutableKeys`](trait.MutableKeys.html) for more information. 
impl MutableKeys for IndexMap -where - K: Eq + Hash, - S: BuildHasher, + where K: Eq + Hash, + S: BuildHasher, { type Key = K; type Value = V; - fn get_full_mut2(&mut self, key: &Q) -> Option<(usize, &mut K, &mut V)> - where - Q: Hash + Equivalent, + fn get_full_mut2(&mut self, key: &Q) + -> Option<(usize, &mut K, &mut V)> + where Q: Hash + Equivalent, { self.get_full_mut2_impl(key) } fn retain2(&mut self, keep: F) - where - F: FnMut(&mut K, &mut V) -> bool, + where F: FnMut(&mut K, &mut V) -> bool, { self.retain_mut(keep) } fn __private_marker(&self) -> PrivateMarker { - PrivateMarker {} + PrivateMarker { } } } diff --git a/third_party/rust/indexmap/src/rayon/map.rs b/third_party/rust/indexmap/src/rayon/map.rs index ed4c74e43876..0e1bc869da80 100644 --- a/third_party/rust/indexmap/src/rayon/map.rs +++ b/third_party/rust/indexmap/src/rayon/map.rs @@ -6,23 +6,23 @@ //! Requires crate feature `"rayon"` use super::collect; -use rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer}; -use rayon::prelude::*; +use super::rayon::prelude::*; +use super::rayon::iter::plumbing::{Consumer, UnindexedConsumer, ProducerCallback}; -use crate::vec::Vec; -use core::cmp::Ordering; -use core::fmt; -use core::hash::{BuildHasher, Hash}; +use std::cmp::Ordering; +use std::fmt; +use std::hash::Hash; +use std::hash::BuildHasher; -use crate::Bucket; -use crate::Entries; -use crate::IndexMap; +use Bucket; +use Entries; +use IndexMap; /// Requires crate feature `"rayon"`. impl IntoParallelIterator for IndexMap -where - K: Send, - V: Send, + where K: Hash + Eq + Send, + V: Send, + S: BuildHasher, { type Item = (K, V); type Iter = IntoParIter; @@ -46,7 +46,7 @@ pub struct IntoParIter { } impl fmt::Debug for IntoParIter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::refs); f.debug_list().entries(iter).finish() } @@ -62,11 +62,12 @@ impl IndexedParallelIterator for IntoParIter { indexed_parallel_iterator_methods!(Bucket::key_value); } + /// Requires crate feature `"rayon"`. impl<'a, K, V, S> IntoParallelIterator for &'a IndexMap -where - K: Sync, - V: Sync, + where K: Hash + Eq + Sync, + V: Sync, + S: BuildHasher, { type Item = (&'a K, &'a V); type Iter = ParIter<'a, K, V>; @@ -85,18 +86,18 @@ where /// /// [`par_iter`]: ../struct.IndexMap.html#method.par_iter /// [`IndexMap`]: ../struct.IndexMap.html -pub struct ParIter<'a, K, V> { +pub struct ParIter<'a, K: 'a, V: 'a> { entries: &'a [Bucket], } -impl Clone for ParIter<'_, K, V> { - fn clone(&self) -> Self { +impl<'a, K, V> Clone for ParIter<'a, K, V> { + fn clone(&self) -> ParIter<'a, K, V> { ParIter { ..*self } } } -impl fmt::Debug for ParIter<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl<'a, K: fmt::Debug, V: fmt::Debug> fmt::Debug for ParIter<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::refs); f.debug_list().entries(iter).finish() } @@ -108,15 +109,16 @@ impl<'a, K: Sync, V: Sync> ParallelIterator for ParIter<'a, K, V> { parallel_iterator_methods!(Bucket::refs); } -impl IndexedParallelIterator for ParIter<'_, K, V> { +impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParIter<'a, K, V> { indexed_parallel_iterator_methods!(Bucket::refs); } + /// Requires crate feature `"rayon"`. 
impl<'a, K, V, S> IntoParallelIterator for &'a mut IndexMap -where - K: Sync + Send, - V: Send, + where K: Hash + Eq + Sync + Send, + V: Send, + S: BuildHasher, { type Item = (&'a K, &'a mut V); type Iter = ParIterMut<'a, K, V>; @@ -135,7 +137,7 @@ where /// /// [`par_iter_mut`]: ../struct.IndexMap.html#method.par_iter_mut /// [`IndexMap`]: ../struct.IndexMap.html -pub struct ParIterMut<'a, K, V> { +pub struct ParIterMut<'a, K: 'a, V: 'a> { entries: &'a mut [Bucket], } @@ -145,25 +147,22 @@ impl<'a, K: Sync + Send, V: Send> ParallelIterator for ParIterMut<'a, K, V> { parallel_iterator_methods!(Bucket::ref_mut); } -impl IndexedParallelIterator for ParIterMut<'_, K, V> { +impl<'a, K: Sync + Send, V: Send> IndexedParallelIterator for ParIterMut<'a, K, V> { indexed_parallel_iterator_methods!(Bucket::ref_mut); } -/// Parallel iterator methods and other parallel methods. -/// -/// The following methods **require crate feature `"rayon"`**. -/// -/// See also the `IntoParallelIterator` implementations. + +/// Requires crate feature `"rayon"`. impl IndexMap -where - K: Sync, - V: Sync, + where K: Hash + Eq + Sync, + V: Sync, + S: BuildHasher, { /// Return a parallel iterator over the keys of the map. /// /// While parallel iterators can process items in any order, their relative order /// in the map is still preserved for operations like `reduce` and `collect`. - pub fn par_keys(&self) -> ParKeys<'_, K, V> { + pub fn par_keys(&self) -> ParKeys { ParKeys { entries: self.as_entries(), } @@ -173,31 +172,23 @@ where /// /// While parallel iterators can process items in any order, their relative order /// in the map is still preserved for operations like `reduce` and `collect`. - pub fn par_values(&self) -> ParValues<'_, K, V> { + pub fn par_values(&self) -> ParValues { ParValues { entries: self.as_entries(), } } -} -impl IndexMap -where - K: Hash + Eq + Sync, - V: Sync, - S: BuildHasher, -{ /// Returns `true` if `self` contains all of the same key-value pairs as `other`, /// regardless of each map's indexed order, determined in parallel. 
pub fn par_eq(&self, other: &IndexMap) -> bool - where - V: PartialEq, - V2: Sync, - S2: BuildHasher + Sync, + where V: PartialEq, + V2: Sync, + S2: BuildHasher + Sync { - self.len() == other.len() - && self - .par_iter() - .all(move |(key, value)| other.get(key).map_or(false, |v| *value == *v)) + self.len() == other.len() && + self.par_iter().all(move |(key, value)| { + other.get(key).map_or(false, |v| *value == *v) + }) } } @@ -208,18 +199,18 @@ where /// /// [`par_keys`]: ../struct.IndexMap.html#method.par_keys /// [`IndexMap`]: ../struct.IndexMap.html -pub struct ParKeys<'a, K, V> { +pub struct ParKeys<'a, K: 'a, V: 'a> { entries: &'a [Bucket], } -impl Clone for ParKeys<'_, K, V> { - fn clone(&self) -> Self { +impl<'a, K, V> Clone for ParKeys<'a, K, V> { + fn clone(&self) -> ParKeys<'a, K, V> { ParKeys { ..*self } } } -impl fmt::Debug for ParKeys<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl<'a, K: fmt::Debug, V> fmt::Debug for ParKeys<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::key_ref); f.debug_list().entries(iter).finish() } @@ -231,7 +222,7 @@ impl<'a, K: Sync, V: Sync> ParallelIterator for ParKeys<'a, K, V> { parallel_iterator_methods!(Bucket::key_ref); } -impl IndexedParallelIterator for ParKeys<'_, K, V> { +impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParKeys<'a, K, V> { indexed_parallel_iterator_methods!(Bucket::key_ref); } @@ -242,18 +233,18 @@ impl IndexedParallelIterator for ParKeys<'_, K, V> { /// /// [`par_values`]: ../struct.IndexMap.html#method.par_values /// [`IndexMap`]: ../struct.IndexMap.html -pub struct ParValues<'a, K, V> { +pub struct ParValues<'a, K: 'a, V: 'a> { entries: &'a [Bucket], } -impl Clone for ParValues<'_, K, V> { - fn clone(&self) -> Self { +impl<'a, K, V> Clone for ParValues<'a, K, V> { + fn clone(&self) -> ParValues<'a, K, V> { ParValues { ..*self } } } -impl fmt::Debug for ParValues<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl<'a, K, V: fmt::Debug> fmt::Debug for ParValues<'a, K, V> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::value_ref); f.debug_list().entries(iter).finish() } @@ -265,37 +256,30 @@ impl<'a, K: Sync, V: Sync> ParallelIterator for ParValues<'a, K, V> { parallel_iterator_methods!(Bucket::value_ref); } -impl IndexedParallelIterator for ParValues<'_, K, V> { +impl<'a, K: Sync, V: Sync> IndexedParallelIterator for ParValues<'a, K, V> { indexed_parallel_iterator_methods!(Bucket::value_ref); } + /// Requires crate feature `"rayon"`. impl IndexMap -where - K: Send, - V: Send, + where K: Hash + Eq + Send, + V: Send, + S: BuildHasher, { /// Return a parallel iterator over mutable references to the the values of the map /// /// While parallel iterators can process items in any order, their relative order /// in the map is still preserved for operations like `reduce` and `collect`. - pub fn par_values_mut(&mut self) -> ParValuesMut<'_, K, V> { + pub fn par_values_mut(&mut self) -> ParValuesMut { ParValuesMut { entries: self.as_entries_mut(), } } -} -impl IndexMap -where - K: Hash + Eq + Send, - V: Send, - S: BuildHasher, -{ /// Sort the map’s key-value pairs in parallel, by the default ordering of the keys. 
pub fn par_sort_keys(&mut self) - where - K: Ord, + where K: Ord, { self.with_entries(|entries| { entries.par_sort_by(|a, b| K::cmp(&a.key, &b.key)); @@ -308,8 +292,7 @@ where /// The comparison function receives two key and value pairs to compare (you /// can sort by keys or values or their combination as needed). pub fn par_sort_by(&mut self, cmp: F) - where - F: Fn(&K, &V, &K, &V) -> Ordering + Sync, + where F: Fn(&K, &V, &K, &V) -> Ordering + Sync, { self.with_entries(|entries| { entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value)); @@ -319,8 +302,7 @@ where /// Sort the key-value pairs of the map in parallel and return a by value parallel /// iterator of the key-value pairs with the result. pub fn par_sorted_by(self, cmp: F) -> IntoParIter - where - F: Fn(&K, &V, &K, &V) -> Ordering + Sync, + where F: Fn(&K, &V, &K, &V) -> Ordering + Sync { let mut entries = self.into_entries(); entries.par_sort_by(move |a, b| cmp(&a.key, &a.value, &b.key, &b.value)); @@ -335,7 +317,7 @@ where /// /// [`par_values_mut`]: ../struct.IndexMap.html#method.par_values_mut /// [`IndexMap`]: ../struct.IndexMap.html -pub struct ParValuesMut<'a, K, V> { +pub struct ParValuesMut<'a, K: 'a, V: 'a> { entries: &'a mut [Bucket], } @@ -345,20 +327,19 @@ impl<'a, K: Send, V: Send> ParallelIterator for ParValuesMut<'a, K, V> { parallel_iterator_methods!(Bucket::value_mut); } -impl IndexedParallelIterator for ParValuesMut<'_, K, V> { +impl<'a, K: Send, V: Send> IndexedParallelIterator for ParValuesMut<'a, K, V> { indexed_parallel_iterator_methods!(Bucket::value_mut); } + /// Requires crate feature `"rayon"`. impl FromParallelIterator<(K, V)> for IndexMap -where - K: Eq + Hash + Send, - V: Send, - S: BuildHasher + Default + Send, + where K: Eq + Hash + Send, + V: Send, + S: BuildHasher + Default + Send, { fn from_par_iter(iter: I) -> Self - where - I: IntoParallelIterator, + where I: IntoParallelIterator { let list = collect(iter); let len = list.iter().map(Vec::len).sum(); @@ -372,14 +353,12 @@ where /// Requires crate feature `"rayon"`. impl ParallelExtend<(K, V)> for IndexMap -where - K: Eq + Hash + Send, - V: Send, - S: BuildHasher + Send, + where K: Eq + Hash + Send, + V: Send, + S: BuildHasher + Send, { fn par_extend(&mut self, iter: I) - where - I: IntoParallelIterator, + where I: IntoParallelIterator { for vec in collect(iter) { self.extend(vec); @@ -389,14 +368,12 @@ where /// Requires crate feature `"rayon"`. 
impl<'a, K: 'a, V: 'a, S> ParallelExtend<(&'a K, &'a V)> for IndexMap -where - K: Copy + Eq + Hash + Send + Sync, - V: Copy + Send + Sync, - S: BuildHasher + Send, + where K: Copy + Eq + Hash + Send + Sync, + V: Copy + Send + Sync, + S: BuildHasher + Send, { fn par_extend(&mut self, iter: I) - where - I: IntoParallelIterator, + where I: IntoParallelIterator { for vec in collect(iter) { self.extend(vec); @@ -404,10 +381,10 @@ where } } + #[cfg(test)] mod tests { use super::*; - use std::string::String; #[test] fn insert_order() { @@ -423,12 +400,9 @@ mod tests { insert.par_iter().zip(map.par_keys()).for_each(|(a, b)| { assert_eq!(a, b); }); - (0..insert.len()) - .into_par_iter() - .zip(map.par_keys()) - .for_each(|(i, k)| { - assert_eq!(map.get_index(i).unwrap().0, k); - }); + (0..insert.len()).into_par_iter().zip(map.par_keys()).for_each(|(i, k)| { + assert_eq!(map.get_index(i).unwrap().0, k); + }); } #[test] @@ -438,13 +412,13 @@ mod tests { map_a.insert(2, "2"); let mut map_b = map_a.clone(); assert!(map_a.par_eq(&map_b)); - map_b.swap_remove(&1); + map_b.remove(&1); assert!(!map_a.par_eq(&map_b)); map_b.insert(3, "3"); assert!(!map_a.par_eq(&map_b)); - let map_c: IndexMap<_, String> = - map_b.into_par_iter().map(|(k, v)| (k, v.into())).collect(); + let map_c: IndexMap<_, String> + = map_b.into_par_iter().map(|(k, v)| (k, v.to_owned())).collect(); assert!(!map_a.par_eq(&map_c)); assert!(!map_c.par_eq(&map_a)); } @@ -454,10 +428,7 @@ mod tests { let mut map = IndexMap::new(); map.par_extend(vec![(&1, &2), (&3, &4)]); map.par_extend(vec![(5, 6)]); - assert_eq!( - map.into_par_iter().collect::>(), - vec![(1, 2), (3, 4), (5, 6)] - ); + assert_eq!(map.into_par_iter().collect::>(), vec![(1, 2), (3, 4), (5, 6)]); } #[test] @@ -486,7 +457,9 @@ mod tests { fn values_mut() { let vec = vec![(1, 1), (2, 2), (3, 3)]; let mut map: IndexMap<_, _> = vec.into_par_iter().collect(); - map.par_values_mut().for_each(|value| *value *= 2); + map.par_values_mut().for_each(|value| { + *value = (*value) * 2 + }); let values: Vec<_> = map.par_values().cloned().collect(); assert_eq!(values.len(), 3); assert!(values.contains(&2)); diff --git a/third_party/rust/indexmap/src/rayon/mod.rs b/third_party/rust/indexmap/src/rayon/mod.rs index 57c810beeae1..d8603361af51 100644 --- a/third_party/rust/indexmap/src/rayon/mod.rs +++ b/third_party/rust/indexmap/src/rayon/mod.rs @@ -1,24 +1,19 @@ -use rayon::prelude::*; -#[cfg(not(has_std))] -use alloc::collections::LinkedList; +extern crate rayon; + +use self::rayon::prelude::*; -#[cfg(has_std)] use std::collections::LinkedList; -use crate::vec::Vec; - // generate `ParallelIterator` methods by just forwarding to the underlying // self.entries and mapping its elements. macro_rules! parallel_iterator_methods { // $map_elt is the mapping function from the underlying iterator's element ($map_elt:expr) => { fn drive_unindexed(self, consumer: C) -> C::Result - where - C: UnindexedConsumer, + where C: UnindexedConsumer { - self.entries - .into_par_iter() + self.entries.into_par_iter() .map($map_elt) .drive_unindexed(consumer) } @@ -29,7 +24,7 @@ macro_rules! parallel_iterator_methods { fn opt_len(&self) -> Option { Some(self.entries.len()) } - }; + } } // generate `IndexedParallelIterator` methods by just forwarding to the underlying @@ -38,10 +33,11 @@ macro_rules! 
indexed_parallel_iterator_methods { // $map_elt is the mapping function from the underlying iterator's element ($map_elt:expr) => { fn drive(self, consumer: C) -> C::Result - where - C: Consumer, + where C: Consumer { - self.entries.into_par_iter().map($map_elt).drive(consumer) + self.entries.into_par_iter() + .map($map_elt) + .drive(consumer) } fn len(&self) -> usize { @@ -49,20 +45,20 @@ macro_rules! indexed_parallel_iterator_methods { } fn with_producer(self, callback: CB) -> CB::Output - where - CB: ProducerCallback, + where CB: ProducerCallback { - self.entries - .into_par_iter() + self.entries.into_par_iter() .map($map_elt) .with_producer(callback) } - }; + } } + pub mod map; pub mod set; + // This form of intermediate collection is also how Rayon collects `HashMap`. // Note that the order will also be preserved! fn collect(iter: I) -> LinkedList> { diff --git a/third_party/rust/indexmap/src/rayon/set.rs b/third_party/rust/indexmap/src/rayon/set.rs index f6e08d48373d..a8acb7b74ce5 100644 --- a/third_party/rust/indexmap/src/rayon/set.rs +++ b/third_party/rust/indexmap/src/rayon/set.rs @@ -6,23 +6,23 @@ //! Requires crate feature `"rayon"`. use super::collect; -use rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer}; -use rayon::prelude::*; +use super::rayon::prelude::*; +use super::rayon::iter::plumbing::{Consumer, UnindexedConsumer, ProducerCallback}; -use crate::vec::Vec; -use core::cmp::Ordering; -use core::fmt; -use core::hash::{BuildHasher, Hash}; +use std::cmp::Ordering; +use std::fmt; +use std::hash::Hash; +use std::hash::BuildHasher; -use crate::Entries; -use crate::IndexSet; +use Entries; +use IndexSet; -type Bucket = crate::Bucket; +type Bucket = ::Bucket; /// Requires crate feature `"rayon"`. impl IntoParallelIterator for IndexSet -where - T: Send, + where T: Hash + Eq + Send, + S: BuildHasher, { type Item = T; type Iter = IntoParIter; @@ -46,7 +46,7 @@ pub struct IntoParIter { } impl fmt::Debug for IntoParIter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::key_ref); f.debug_list().entries(iter).finish() } @@ -62,10 +62,11 @@ impl IndexedParallelIterator for IntoParIter { indexed_parallel_iterator_methods!(Bucket::key); } + /// Requires crate feature `"rayon"`. impl<'a, T, S> IntoParallelIterator for &'a IndexSet -where - T: Sync, + where T: Hash + Eq + Sync, + S: BuildHasher, { type Item = &'a T; type Iter = ParIter<'a, T>; @@ -84,18 +85,18 @@ where /// /// [`IndexSet`]: ../struct.IndexSet.html /// [`par_iter`]: ../struct.IndexSet.html#method.par_iter -pub struct ParIter<'a, T> { +pub struct ParIter<'a, T: 'a> { entries: &'a [Bucket], } -impl Clone for ParIter<'_, T> { +impl<'a, T> Clone for ParIter<'a, T> { fn clone(&self) -> Self { ParIter { ..*self } } } -impl fmt::Debug for ParIter<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl<'a, T: fmt::Debug> fmt::Debug for ParIter<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.entries.iter().map(Bucket::key_ref); f.debug_list().entries(iter).finish() } @@ -107,30 +108,23 @@ impl<'a, T: Sync> ParallelIterator for ParIter<'a, T> { parallel_iterator_methods!(Bucket::key_ref); } -impl IndexedParallelIterator for ParIter<'_, T> { +impl<'a, T: Sync> IndexedParallelIterator for ParIter<'a, T> { indexed_parallel_iterator_methods!(Bucket::key_ref); } -/// Parallel iterator methods and other parallel methods. 
-/// -/// The following methods **require crate feature `"rayon"`**. -/// -/// See also the `IntoParallelIterator` implementations. + +/// Requires crate feature `"rayon"`. impl IndexSet -where - T: Hash + Eq + Sync, - S: BuildHasher + Sync, + where T: Hash + Eq + Sync, + S: BuildHasher + Sync, { /// Return a parallel iterator over the values that are in `self` but not `other`. /// /// While parallel iterators can process items in any order, their relative order /// in the `self` set is still preserved for operations like `reduce` and `collect`. - pub fn par_difference<'a, S2>( - &'a self, - other: &'a IndexSet, - ) -> ParDifference<'a, T, S, S2> - where - S2: BuildHasher + Sync, + pub fn par_difference<'a, S2>(&'a self, other: &'a IndexSet) + -> ParDifference<'a, T, S, S2> + where S2: BuildHasher + Sync { ParDifference { set1: self, @@ -145,12 +139,9 @@ where /// in the sets is still preserved for operations like `reduce` and `collect`. /// Values from `self` are produced in their original order, followed by /// values from `other` in their original order. - pub fn par_symmetric_difference<'a, S2>( - &'a self, - other: &'a IndexSet, - ) -> ParSymmetricDifference<'a, T, S, S2> - where - S2: BuildHasher + Sync, + pub fn par_symmetric_difference<'a, S2>(&'a self, other: &'a IndexSet) + -> ParSymmetricDifference<'a, T, S, S2> + where S2: BuildHasher + Sync { ParSymmetricDifference { set1: self, @@ -162,12 +153,9 @@ where /// /// While parallel iterators can process items in any order, their relative order /// in the `self` set is still preserved for operations like `reduce` and `collect`. - pub fn par_intersection<'a, S2>( - &'a self, - other: &'a IndexSet, - ) -> ParIntersection<'a, T, S, S2> - where - S2: BuildHasher + Sync, + pub fn par_intersection<'a, S2>(&'a self, other: &'a IndexSet) + -> ParIntersection<'a, T, S, S2> + where S2: BuildHasher + Sync { ParIntersection { set1: self, @@ -181,9 +169,9 @@ where /// in the sets is still preserved for operations like `reduce` and `collect`. /// Values from `self` are produced in their original order, followed by /// values that are unique to `other` in their original order. - pub fn par_union<'a, S2>(&'a self, other: &'a IndexSet) -> ParUnion<'a, T, S, S2> - where - S2: BuildHasher + Sync, + pub fn par_union<'a, S2>(&'a self, other: &'a IndexSet) + -> ParUnion<'a, T, S, S2> + where S2: BuildHasher + Sync { ParUnion { set1: self, @@ -194,8 +182,7 @@ where /// Returns `true` if `self` contains all of the same values as `other`, /// regardless of each set's indexed order, determined in parallel. pub fn par_eq(&self, other: &IndexSet) -> bool - where - S2: BuildHasher + Sync, + where S2: BuildHasher + Sync { self.len() == other.len() && self.par_is_subset(other) } @@ -203,8 +190,7 @@ where /// Returns `true` if `self` has no elements in common with `other`, /// determined in parallel. pub fn par_is_disjoint(&self, other: &IndexSet) -> bool - where - S2: BuildHasher + Sync, + where S2: BuildHasher + Sync { if self.len() <= other.len() { self.par_iter().all(move |value| !other.contains(value)) @@ -216,8 +202,7 @@ where /// Returns `true` if all elements of `other` are contained in `self`, /// determined in parallel. pub fn par_is_superset(&self, other: &IndexSet) -> bool - where - S2: BuildHasher + Sync, + where S2: BuildHasher + Sync { other.par_is_subset(self) } @@ -225,8 +210,7 @@ where /// Returns `true` if all elements of `self` are contained in `other`, /// determined in parallel. 
pub fn par_is_subset(&self, other: &IndexSet) -> bool - where - S2: BuildHasher + Sync, + where S2: BuildHasher + Sync { self.len() <= other.len() && self.par_iter().all(move |value| other.contains(value)) } @@ -239,41 +223,36 @@ where /// /// [`IndexSet`]: ../struct.IndexSet.html /// [`par_difference`]: ../struct.IndexSet.html#method.par_difference -pub struct ParDifference<'a, T, S1, S2> { +pub struct ParDifference<'a, T: 'a, S1: 'a, S2: 'a> { set1: &'a IndexSet, set2: &'a IndexSet, } -impl Clone for ParDifference<'_, T, S1, S2> { +impl<'a, T, S1, S2> Clone for ParDifference<'a, T, S1, S2> { fn clone(&self) -> Self { ParDifference { ..*self } } } -impl fmt::Debug for ParDifference<'_, T, S1, S2> -where - T: fmt::Debug + Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> fmt::Debug for ParDifference<'a, T, S1, S2> + where T: fmt::Debug + Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list() - .entries(self.set1.difference(&self.set2)) - .finish() + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.set1.difference(&self.set2)).finish() } } impl<'a, T, S1, S2> ParallelIterator for ParDifference<'a, T, S1, S2> -where - T: Hash + Eq + Sync, - S1: BuildHasher + Sync, - S2: BuildHasher + Sync, + where T: Hash + Eq + Sync, + S1: BuildHasher + Sync, + S2: BuildHasher + Sync, { type Item = &'a T; fn drive_unindexed(self, consumer: C) -> C::Result - where - C: UnindexedConsumer, + where C: UnindexedConsumer { let Self { set1, set2 } = self; @@ -290,41 +269,36 @@ where /// /// [`IndexSet`]: ../struct.IndexSet.html /// [`par_intersection`]: ../struct.IndexSet.html#method.par_intersection -pub struct ParIntersection<'a, T, S1, S2> { +pub struct ParIntersection<'a, T: 'a, S1: 'a, S2: 'a> { set1: &'a IndexSet, set2: &'a IndexSet, } -impl Clone for ParIntersection<'_, T, S1, S2> { +impl<'a, T, S1, S2> Clone for ParIntersection<'a, T, S1, S2> { fn clone(&self) -> Self { ParIntersection { ..*self } } } -impl fmt::Debug for ParIntersection<'_, T, S1, S2> -where - T: fmt::Debug + Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> fmt::Debug for ParIntersection<'a, T, S1, S2> + where T: fmt::Debug + Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list() - .entries(self.set1.intersection(&self.set2)) - .finish() + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.set1.intersection(&self.set2)).finish() } } impl<'a, T, S1, S2> ParallelIterator for ParIntersection<'a, T, S1, S2> -where - T: Hash + Eq + Sync, - S1: BuildHasher + Sync, - S2: BuildHasher + Sync, + where T: Hash + Eq + Sync, + S1: BuildHasher + Sync, + S2: BuildHasher + Sync, { type Item = &'a T; fn drive_unindexed(self, consumer: C) -> C::Result - where - C: UnindexedConsumer, + where C: UnindexedConsumer { let Self { set1, set2 } = self; @@ -341,41 +315,36 @@ where /// /// [`IndexSet`]: ../struct.IndexSet.html /// [`par_symmetric_difference`]: ../struct.IndexSet.html#method.par_symmetric_difference -pub struct ParSymmetricDifference<'a, T, S1, S2> { +pub struct ParSymmetricDifference<'a, T: 'a, S1: 'a, S2: 'a> { set1: &'a IndexSet, set2: &'a IndexSet, } -impl Clone for ParSymmetricDifference<'_, T, S1, S2> { +impl<'a, T, S1, S2> Clone for ParSymmetricDifference<'a, T, S1, S2> { fn clone(&self) -> Self { ParSymmetricDifference { ..*self } } } -impl fmt::Debug for ParSymmetricDifference<'_, T, S1, 
S2> -where - T: fmt::Debug + Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> fmt::Debug for ParSymmetricDifference<'a, T, S1, S2> + where T: fmt::Debug + Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list() - .entries(self.set1.symmetric_difference(&self.set2)) - .finish() + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.set1.symmetric_difference(&self.set2)).finish() } } impl<'a, T, S1, S2> ParallelIterator for ParSymmetricDifference<'a, T, S1, S2> -where - T: Hash + Eq + Sync, - S1: BuildHasher + Sync, - S2: BuildHasher + Sync, + where T: Hash + Eq + Sync, + S1: BuildHasher + Sync, + S2: BuildHasher + Sync, { type Item = &'a T; fn drive_unindexed(self, consumer: C) -> C::Result - where - C: UnindexedConsumer, + where C: UnindexedConsumer { let Self { set1, set2 } = self; @@ -392,39 +361,36 @@ where /// /// [`IndexSet`]: ../struct.IndexSet.html /// [`par_union`]: ../struct.IndexSet.html#method.par_union -pub struct ParUnion<'a, T, S1, S2> { +pub struct ParUnion<'a, T: 'a, S1: 'a, S2: 'a> { set1: &'a IndexSet, set2: &'a IndexSet, } -impl Clone for ParUnion<'_, T, S1, S2> { +impl<'a, T, S1, S2> Clone for ParUnion<'a, T, S1, S2> { fn clone(&self) -> Self { ParUnion { ..*self } } } -impl fmt::Debug for ParUnion<'_, T, S1, S2> -where - T: fmt::Debug + Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> fmt::Debug for ParUnion<'a, T, S1, S2> + where T: fmt::Debug + Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.set1.union(&self.set2)).finish() } } impl<'a, T, S1, S2> ParallelIterator for ParUnion<'a, T, S1, S2> -where - T: Hash + Eq + Sync, - S1: BuildHasher + Sync, - S2: BuildHasher + Sync, + where T: Hash + Eq + Sync, + S1: BuildHasher + Sync, + S2: BuildHasher + Sync, { type Item = &'a T; fn drive_unindexed(self, consumer: C) -> C::Result - where - C: UnindexedConsumer, + where C: UnindexedConsumer { let Self { set1, set2 } = self; @@ -434,18 +400,15 @@ where } } -/// Parallel sorting methods. -/// -/// The following methods **require crate feature `"rayon"`**. + +/// Requires crate feature `"rayon"`. impl IndexSet -where - T: Hash + Eq + Send, - S: BuildHasher + Send, + where T: Hash + Eq + Send, + S: BuildHasher + Send, { /// Sort the set’s values in parallel by their default ordering. pub fn par_sort(&mut self) - where - T: Ord, + where T: Ord, { self.with_entries(|entries| { entries.par_sort_by(|a, b| T::cmp(&a.key, &b.key)); @@ -454,8 +417,7 @@ where /// Sort the set’s values in place and in parallel, using the comparison function `compare`. pub fn par_sort_by(&mut self, cmp: F) - where - F: Fn(&T, &T) -> Ordering + Sync, + where F: Fn(&T, &T) -> Ordering + Sync, { self.with_entries(|entries| { entries.par_sort_by(move |a, b| cmp(&a.key, &b.key)); @@ -465,8 +427,7 @@ where /// Sort the values of the set in parallel and return a by value parallel iterator of /// the values with the result. pub fn par_sorted_by(self, cmp: F) -> IntoParIter - where - F: Fn(&T, &T) -> Ordering + Sync, + where F: Fn(&T, &T) -> Ordering + Sync { let mut entries = self.into_entries(); entries.par_sort_by(move |a, b| cmp(&a.key, &b.key)); @@ -474,15 +435,14 @@ where } } + /// Requires crate feature `"rayon"`. 
impl FromParallelIterator for IndexSet -where - T: Eq + Hash + Send, - S: BuildHasher + Default + Send, + where T: Eq + Hash + Send, + S: BuildHasher + Default + Send, { fn from_par_iter(iter: I) -> Self - where - I: IntoParallelIterator, + where I: IntoParallelIterator { let list = collect(iter); let len = list.iter().map(Vec::len).sum(); @@ -495,14 +455,12 @@ where } /// Requires crate feature `"rayon"`. -impl ParallelExtend for IndexSet -where - T: Eq + Hash + Send, - S: BuildHasher + Send, +impl ParallelExtend<(T)> for IndexSet + where T: Eq + Hash + Send, + S: BuildHasher + Send, { fn par_extend(&mut self, iter: I) - where - I: IntoParallelIterator, + where I: IntoParallelIterator { for vec in collect(iter) { self.extend(vec); @@ -512,13 +470,11 @@ where /// Requires crate feature `"rayon"`. impl<'a, T: 'a, S> ParallelExtend<&'a T> for IndexSet -where - T: Copy + Eq + Hash + Send + Sync, - S: BuildHasher + Send, + where T: Copy + Eq + Hash + Send + Sync, + S: BuildHasher + Send, { fn par_extend(&mut self, iter: I) - where - I: IntoParallelIterator, + where I: IntoParallelIterator { for vec in collect(iter) { self.extend(vec); @@ -526,6 +482,7 @@ where } } + #[cfg(test)] mod tests { use super::*; @@ -544,12 +501,9 @@ mod tests { insert.par_iter().zip(&set).for_each(|(a, b)| { assert_eq!(a, b); }); - (0..insert.len()) - .into_par_iter() - .zip(&set) - .for_each(|(i, v)| { - assert_eq!(set.get_index(i).unwrap(), v); - }); + (0..insert.len()).into_par_iter().zip(&set).for_each(|(i, v)| { + assert_eq!(set.get_index(i).unwrap(), v); + }); } #[test] @@ -559,7 +513,7 @@ mod tests { set_a.insert(2); let mut set_b = set_a.clone(); assert!(set_a.par_eq(&set_b)); - set_b.swap_remove(&1); + set_b.remove(&1); assert!(!set_a.par_eq(&set_b)); set_b.insert(3); assert!(!set_a.par_eq(&set_b)); @@ -574,10 +528,7 @@ mod tests { let mut set = IndexSet::new(); set.par_extend(vec![&1, &2, &3, &4]); set.par_extend(vec![5, 6]); - assert_eq!( - set.into_par_iter().collect::>(), - vec![1, 2, 3, 4, 5, 6] - ); + assert_eq!(set.into_par_iter().collect::>(), vec![1, 2, 3, 4, 5, 6]); } #[test] @@ -657,14 +608,8 @@ mod tests { check(set_c.par_difference(&set_d), 0..3); check(set_d.par_difference(&set_c), (6..9).rev()); - check( - set_c.par_symmetric_difference(&set_d), - (0..3).chain((6..9).rev()), - ); - check( - set_d.par_symmetric_difference(&set_c), - (6..9).rev().chain(0..3), - ); + check(set_c.par_symmetric_difference(&set_d), (0..3).chain((6..9).rev())); + check(set_d.par_symmetric_difference(&set_c), (6..9).rev().chain(0..3)); check(set_c.par_intersection(&set_d), 3..6); check(set_d.par_intersection(&set_c), (3..6).rev()); check(set_c.par_union(&set_d), (0..6).chain((6..9).rev())); diff --git a/third_party/rust/indexmap/src/serde.rs b/third_party/rust/indexmap/src/serde.rs index 853c6b9aa301..eb54e01f444a 100644 --- a/third_party/rust/indexmap/src/serde.rs +++ b/third_party/rust/indexmap/src/serde.rs @@ -1,56 +1,52 @@ -use serde::de::value::{MapDeserializer, SeqDeserializer}; -use serde::de::{ - Deserialize, Deserializer, Error, IntoDeserializer, MapAccess, SeqAccess, Visitor, -}; -use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer}; -use core::fmt::{self, Formatter}; -use core::hash::{BuildHasher, Hash}; -use core::marker::PhantomData; +extern crate serde; -use crate::IndexMap; +use self::serde::ser::{Serialize, Serializer, SerializeMap, SerializeSeq}; +use self::serde::de::{Deserialize, Deserializer, Error, IntoDeserializer, MapAccess, SeqAccess, Visitor}; +use 
self::serde::de::value::{MapDeserializer, SeqDeserializer}; + +use std::fmt::{self, Formatter}; +use std::hash::{BuildHasher, Hash}; +use std::marker::PhantomData; + +use IndexMap; /// Requires crate feature `"serde-1"` impl Serialize for IndexMap -where - K: Serialize + Hash + Eq, - V: Serialize, - S: BuildHasher, + where K: Serialize + Hash + Eq, + V: Serialize, + S: BuildHasher { fn serialize(&self, serializer: T) -> Result - where - T: Serializer, + where T: Serializer { - let mut map_serializer = serializer.serialize_map(Some(self.len()))?; + let mut map_serializer = try!(serializer.serialize_map(Some(self.len()))); for (key, value) in self { - map_serializer.serialize_entry(key, value)?; + try!(map_serializer.serialize_entry(key, value)); } map_serializer.end() } } -struct IndexMapVisitor(PhantomData<(K, V, S)>); +struct OrderMapVisitor(PhantomData<(K, V, S)>); -impl<'de, K, V, S> Visitor<'de> for IndexMapVisitor -where - K: Deserialize<'de> + Eq + Hash, - V: Deserialize<'de>, - S: Default + BuildHasher, +impl<'de, K, V, S> Visitor<'de> for OrderMapVisitor + where K: Deserialize<'de> + Eq + Hash, + V: Deserialize<'de>, + S: Default + BuildHasher { type Value = IndexMap; - fn expecting(&self, formatter: &mut Formatter<'_>) -> fmt::Result { + fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { write!(formatter, "a map") } fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, + where A: MapAccess<'de> { - let mut values = - IndexMap::with_capacity_and_hasher(map.size_hint().unwrap_or(0), S::default()); + let mut values = IndexMap::with_capacity_and_hasher(map.size_hint().unwrap_or(0), S::default()); - while let Some((key, value)) = map.next_entry()? { + while let Some((key, value)) = try!(map.next_entry()) { values.insert(key, value); } @@ -60,25 +56,22 @@ where /// Requires crate feature `"serde-1"` impl<'de, K, V, S> Deserialize<'de> for IndexMap -where - K: Deserialize<'de> + Eq + Hash, - V: Deserialize<'de>, - S: Default + BuildHasher, + where K: Deserialize<'de> + Eq + Hash, + V: Deserialize<'de>, + S: Default + BuildHasher { fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, + where D: Deserializer<'de> { - deserializer.deserialize_map(IndexMapVisitor(PhantomData)) + deserializer.deserialize_map(OrderMapVisitor(PhantomData)) } } impl<'de, K, V, S, E> IntoDeserializer<'de, E> for IndexMap -where - K: IntoDeserializer<'de, E> + Eq + Hash, - V: IntoDeserializer<'de, E>, - S: BuildHasher, - E: Error, + where K: IntoDeserializer<'de, E> + Eq + Hash, + V: IntoDeserializer<'de, E>, + S: BuildHasher, + E: Error, { type Deserializer = MapDeserializer<'de, ::IntoIter, E>; @@ -87,47 +80,43 @@ where } } -use crate::IndexSet; + +use IndexSet; /// Requires crate feature `"serde-1"` impl Serialize for IndexSet -where - T: Serialize + Hash + Eq, - S: BuildHasher, + where T: Serialize + Hash + Eq, + S: BuildHasher { fn serialize(&self, serializer: Se) -> Result - where - Se: Serializer, + where Se: Serializer { - let mut set_serializer = serializer.serialize_seq(Some(self.len()))?; + let mut set_serializer = try!(serializer.serialize_seq(Some(self.len()))); for value in self { - set_serializer.serialize_element(value)?; + try!(set_serializer.serialize_element(value)); } set_serializer.end() } } -struct IndexSetVisitor(PhantomData<(T, S)>); +struct OrderSetVisitor(PhantomData<(T, S)>); -impl<'de, T, S> Visitor<'de> for IndexSetVisitor -where - T: Deserialize<'de> + Eq + Hash, - S: Default + BuildHasher, +impl<'de, T, S> Visitor<'de> for 
OrderSetVisitor + where T: Deserialize<'de> + Eq + Hash, + S: Default + BuildHasher { type Value = IndexSet; - fn expecting(&self, formatter: &mut Formatter<'_>) -> fmt::Result { + fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { write!(formatter, "a set") } fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, + where A: SeqAccess<'de> { - let mut values = - IndexSet::with_capacity_and_hasher(seq.size_hint().unwrap_or(0), S::default()); + let mut values = IndexSet::with_capacity_and_hasher(seq.size_hint().unwrap_or(0), S::default()); - while let Some(value) = seq.next_element()? { + while let Some(value) = try!(seq.next_element()) { values.insert(value); } @@ -137,23 +126,20 @@ where /// Requires crate feature `"serde-1"` impl<'de, T, S> Deserialize<'de> for IndexSet -where - T: Deserialize<'de> + Eq + Hash, - S: Default + BuildHasher, + where T: Deserialize<'de> + Eq + Hash, + S: Default + BuildHasher { fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, + where D: Deserializer<'de> { - deserializer.deserialize_seq(IndexSetVisitor(PhantomData)) + deserializer.deserialize_seq(OrderSetVisitor(PhantomData)) } } impl<'de, T, S, E> IntoDeserializer<'de, E> for IndexSet -where - T: IntoDeserializer<'de, E> + Eq + Hash, - S: BuildHasher, - E: Error, + where T: IntoDeserializer<'de, E> + Eq + Hash, + S: BuildHasher, + E: Error, { type Deserializer = SeqDeserializer<::IntoIter, E>; diff --git a/third_party/rust/indexmap/src/set.rs b/third_party/rust/indexmap/src/set.rs index 99493891871b..f89123261e92 100644 --- a/third_party/rust/indexmap/src/set.rs +++ b/third_party/rust/indexmap/src/set.rs @@ -1,20 +1,19 @@ //! A hash set implemented using `IndexMap` #[cfg(feature = "rayon")] -pub use crate::rayon::set as rayon; +pub use ::rayon::set as rayon; -#[cfg(has_std)] +use std::cmp::Ordering; use std::collections::hash_map::RandomState; +use std::fmt; +use std::iter::{FromIterator, Chain}; +use std::hash::{Hash, BuildHasher}; +use std::ops::RangeFull; +use std::ops::{BitAnd, BitOr, BitXor, Sub}; +use std::slice; +use std::vec; -use crate::vec::{self, Vec}; -use core::cmp::Ordering; -use core::fmt; -use core::hash::{BuildHasher, Hash}; -use core::iter::{Chain, FromIterator}; -use core::ops::{BitAnd, BitOr, BitXor, Index, RangeBounds, Sub}; -use core::slice; - -use super::{Entries, Equivalent, IndexMap}; +use super::{IndexMap, Equivalent, Entries}; type Bucket = super::Bucket; @@ -53,68 +52,44 @@ type Bucket = super::Bucket; /// /// // Collects which letters appear in a sentence. 
/// let letters: IndexSet<_> = "a short treatise on fungi".chars().collect(); -/// +/// /// assert!(letters.contains(&'s')); /// assert!(letters.contains(&'t')); /// assert!(letters.contains(&'u')); /// assert!(!letters.contains(&'y')); /// ``` -#[cfg(has_std)] +#[derive(Clone)] pub struct IndexSet { map: IndexMap, } -#[cfg(not(has_std))] -pub struct IndexSet { - map: IndexMap, -} - -impl Clone for IndexSet -where - T: Clone, - S: Clone, -{ - fn clone(&self) -> Self { - IndexSet { - map: self.map.clone(), - } - } - - fn clone_from(&mut self, other: &Self) { - self.map.clone_from(&other.map); - } -} impl Entries for IndexSet { type Entry = Bucket; - #[inline] fn into_entries(self) -> Vec { self.map.into_entries() } - #[inline] fn as_entries(&self) -> &[Self::Entry] { self.map.as_entries() } - #[inline] fn as_entries_mut(&mut self) -> &mut [Self::Entry] { self.map.as_entries_mut() } fn with_entries(&mut self, f: F) - where - F: FnOnce(&mut [Self::Entry]), + where F: FnOnce(&mut [Self::Entry]) { self.map.with_entries(f); } } impl fmt::Debug for IndexSet -where - T: fmt::Debug, + where T: fmt::Debug + Hash + Eq, + S: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if cfg!(not(feature = "test_debug")) { f.debug_set().entries(self.iter()).finish() } else { @@ -124,13 +99,10 @@ where } } -#[cfg(has_std)] impl IndexSet { /// Create a new set. (Does not allocate.) pub fn new() -> Self { - IndexSet { - map: IndexMap::new(), - } + IndexSet { map: IndexMap::new() } } /// Create a new set with capacity for `n` elements. @@ -138,9 +110,7 @@ impl IndexSet { /// /// Computes in **O(n)** time. pub fn with_capacity(n: usize) -> Self { - IndexSet { - map: IndexMap::with_capacity(n), - } + IndexSet { map: IndexMap::with_capacity(n) } } } @@ -149,27 +119,10 @@ impl IndexSet { /// (Does not allocate if `n` is zero.) /// /// Computes in **O(n)** time. - pub fn with_capacity_and_hasher(n: usize, hash_builder: S) -> Self { - IndexSet { - map: IndexMap::with_capacity_and_hasher(n, hash_builder), - } - } - - /// Create a new set with `hash_builder` - pub fn with_hasher(hash_builder: S) -> Self { - IndexSet { - map: IndexMap::with_hasher(hash_builder), - } - } - - /// Computes in **O(1)** time. - pub fn capacity(&self) -> usize { - self.map.capacity() - } - - /// Return a reference to the set's `BuildHasher`. - pub fn hasher(&self) -> &S { - self.map.hasher() + pub fn with_capacity_and_hasher(n: usize, hash_builder: S) -> Self + where S: BuildHasher + { + IndexSet { map: IndexMap::with_capacity_and_hasher(n, hash_builder) } } /// Return the number of elements in the set. @@ -186,13 +139,30 @@ impl IndexSet { self.map.is_empty() } - /// Return an iterator over the values of the set, in their order - pub fn iter(&self) -> Iter<'_, T> { - Iter { - iter: self.map.keys().iter, - } + /// Create a new set with `hash_builder` + pub fn with_hasher(hash_builder: S) -> Self + where S: BuildHasher + { + IndexSet { map: IndexMap::with_hasher(hash_builder) } } + /// Return a reference to the set's `BuildHasher`. + pub fn hasher(&self) -> &S + where S: BuildHasher + { + self.map.hasher() + } + + /// Computes in **O(1)** time. + pub fn capacity(&self) -> usize { + self.map.capacity() + } +} + +impl IndexSet + where T: Hash + Eq, + S: BuildHasher, +{ /// Remove all elements in the set, while preserving its capacity. /// /// Computes in **O(n)** time. 
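For reference, a minimal usage sketch (not part of this patch, assuming the vendored `indexmap` crate and a std hasher) of the constructors restored in the hunks above: `with_capacity_and_hasher` pre-allocates room and stores the `BuildHasher`, and first-insertion order is preserved just as in the doc example.

use indexmap::IndexSet;
use std::collections::hash_map::RandomState;

fn main() {
    // Pre-allocate room for four elements with an explicit hasher,
    // mirroring `IndexSet::with_capacity_and_hasher` from the hunk above.
    let mut set: IndexSet<&str, RandomState> =
        IndexSet::with_capacity_and_hasher(4, RandomState::new());

    for &word in &["delta", "alpha", "charlie", "alpha"] {
        set.insert(word);
    }

    // Duplicates are ignored and first-insertion order is kept.
    assert_eq!(set.len(), 3);
    let in_order: Vec<&str> = set.iter().cloned().collect();
    assert_eq!(in_order, ["delta", "alpha", "charlie"]);
}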
@@ -200,48 +170,11 @@ impl IndexSet { self.map.clear(); } - /// Clears the `IndexSet` in the given index range, returning those values - /// as a drain iterator. - /// - /// The range may be any type that implements `RangeBounds`, - /// including all of the `std::ops::Range*` types, or even a tuple pair of - /// `Bound` start and end values. To drain the set entirely, use `RangeFull` - /// like `set.drain(..)`. - /// - /// This shifts down all entries following the drained range to fill the - /// gap, and keeps the allocated memory for reuse. - /// - /// ***Panics*** if the starting point is greater than the end point or if - /// the end point is greater than the length of the set. - pub fn drain(&mut self, range: R) -> Drain<'_, T> - where - R: RangeBounds, - { - Drain { - iter: self.map.drain(range).iter, - } - } -} - -impl IndexSet -where - T: Hash + Eq, - S: BuildHasher, -{ - /// Reserve capacity for `additional` more values. - /// - /// Computes in **O(n)** time. + /// FIXME Not implemented fully yet pub fn reserve(&mut self, additional: usize) { self.map.reserve(additional); } - /// Shrink the capacity of the set as much as possible. - /// - /// Computes in **O(n)** time. - pub fn shrink_to_fit(&mut self) { - self.map.shrink_to_fit(); - } - /// Insert the value into the set. /// /// If an equivalent item already exists in the set, it returns @@ -276,16 +209,22 @@ where } } + /// Return an iterator over the values of the set, in their order + pub fn iter(&self) -> Iter { + Iter { + iter: self.map.keys().iter + } + } + /// Return an iterator over the values that are in `self` but not `other`. /// /// Values are produced in the same order that they appear in `self`. pub fn difference<'a, S2>(&'a self, other: &'a IndexSet) -> Difference<'a, T, S2> - where - S2: BuildHasher, + where S2: BuildHasher { Difference { iter: self.iter(), - other, + other: other, } } @@ -294,12 +233,9 @@ where /// /// Values from `self` are produced in their original order, followed by /// values from `other` in their original order. - pub fn symmetric_difference<'a, S2>( - &'a self, - other: &'a IndexSet, - ) -> SymmetricDifference<'a, T, S, S2> - where - S2: BuildHasher, + pub fn symmetric_difference<'a, S2>(&'a self, other: &'a IndexSet) + -> SymmetricDifference<'a, T, S, S2> + where S2: BuildHasher { SymmetricDifference { iter: self.difference(other).chain(other.difference(self)), @@ -310,12 +246,11 @@ where /// /// Values are produced in the same order that they appear in `self`. pub fn intersection<'a, S2>(&'a self, other: &'a IndexSet) -> Intersection<'a, T, S2> - where - S2: BuildHasher, + where S2: BuildHasher { Intersection { iter: self.iter(), - other, + other: other, } } @@ -324,8 +259,7 @@ where /// Values from `self` are produced in their original order, followed by /// values that are unique to `other` in their original order. pub fn union<'a, S2>(&'a self, other: &'a IndexSet) -> Union<'a, T, S> - where - S2: BuildHasher, + where S2: BuildHasher { Union { iter: self.iter().chain(other.difference(self)), @@ -336,8 +270,7 @@ where /// /// Computes in **O(1)** time (average). pub fn contains(&self, value: &Q) -> bool - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.map.contains_key(value) } @@ -347,53 +280,37 @@ where /// /// Computes in **O(1)** time (average). 
pub fn get(&self, value: &Q) -> Option<&T> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - self.map.get_key_value(value).map(|(x, &())| x) + self.map.get_full(value).map(|(_, x, &())| x) } /// Return item index and value pub fn get_full(&self, value: &Q) -> Option<(usize, &T)> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.map.get_full(value).map(|(i, x, &())| (i, x)) } - /// Return item index, if it exists in the set - pub fn get_index_of(&self, value: &Q) -> Option - where - Q: Hash + Equivalent, - { - self.map.get_index_of(value) - } - /// Adds a value to the set, replacing the existing value, if any, that is /// equal to the given one. Returns the replaced value. /// /// Computes in **O(1)** time (average). - pub fn replace(&mut self, value: T) -> Option { + pub fn replace(&mut self, value: T) -> Option + { use super::map::Entry::*; match self.map.entry(value) { - Vacant(e) => { - e.insert(()); - None - } + Vacant(e) => { e.insert(()); None }, Occupied(e) => Some(e.replace_key()), } } - /// Remove the value from the set, and return `true` if it was present. - /// - /// **NOTE:** This is equivalent to `.swap_remove(value)`, if you want - /// to preserve the order of the values in the set, use `.shift_remove(value)`. + /// FIXME Same as .swap_remove /// /// Computes in **O(1)** time (average). pub fn remove(&mut self, value: &Q) -> bool - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.swap_remove(value) } @@ -408,39 +325,16 @@ where /// /// Computes in **O(1)** time (average). pub fn swap_remove(&mut self, value: &Q) -> bool - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.map.swap_remove(value).is_some() } - /// Remove the value from the set, and return `true` if it was present. - /// - /// Like `Vec::remove`, the value is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `false` if `value` was not in the set. - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove(&mut self, value: &Q) -> bool - where - Q: Hash + Equivalent, - { - self.map.shift_remove(value).is_some() - } - - /// Removes and returns the value in the set, if any, that is equal to the - /// given one. - /// - /// **NOTE:** This is equivalent to `.swap_take(value)`, if you need to - /// preserve the order of the values in the set, use `.shift_take(value)` - /// instead. + /// FIXME Same as .swap_take /// /// Computes in **O(1)** time (average). pub fn take(&mut self, value: &Q) -> Option - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.swap_take(value) } @@ -456,27 +350,9 @@ where /// /// Computes in **O(1)** time (average). pub fn swap_take(&mut self, value: &Q) -> Option - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { - self.map.swap_remove_entry(value).map(|(x, ())| x) - } - - /// Removes and returns the value in the set, if any, that is equal to the - /// given one. - /// - /// Like `Vec::remove`, the value is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `None` if `value` was not in the set. - /// - /// Computes in **O(n)** time (average). 
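The doc comments above contrast swap-based and shift-based removal; here is a std-only illustration (not from the patch) of the same trade-off on `Vec`, which is exactly the analogy those comments use.

fn main() {
    // swap_remove: O(1), but the last element is moved into the hole.
    let mut swapped = vec!['a', 'b', 'c', 'd', 'e'];
    swapped.swap_remove(1);                      // remove 'b'
    assert_eq!(swapped, ['a', 'e', 'c', 'd']);   // 'e' now sits at index 1

    // remove (shift): O(n), but the relative order of the rest is preserved.
    let mut shifted = vec!['a', 'b', 'c', 'd', 'e'];
    shifted.remove(1);                           // remove 'b'
    assert_eq!(shifted, ['a', 'c', 'd', 'e']);   // order unchanged
}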
- pub fn shift_take(&mut self, value: &Q) -> Option - where - Q: Hash + Equivalent, - { - self.map.shift_remove_entry(value).map(|(x, ())| x) + self.map.swap_remove_full(value).map(|(_, x, ())| x) } /// Remove the value from the set return it and the index it had. @@ -487,26 +363,11 @@ where /// /// Return `None` if `value` was not in the set. pub fn swap_remove_full(&mut self, value: &Q) -> Option<(usize, T)> - where - Q: Hash + Equivalent, + where Q: Hash + Equivalent, { self.map.swap_remove_full(value).map(|(i, x, ())| (i, x)) } - /// Remove the value from the set return it and the index it had. - /// - /// Like `Vec::remove`, the value is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Return `None` if `value` was not in the set. - pub fn shift_remove_full(&mut self, value: &Q) -> Option<(usize, T)> - where - Q: Hash + Equivalent, - { - self.map.shift_remove_full(value).map(|(i, x, ())| (i, x)) - } - /// Remove the last value /// /// Computes in **O(1)** time (average). @@ -522,8 +383,7 @@ where /// /// Computes in **O(n)** time (average). pub fn retain(&mut self, mut keep: F) - where - F: FnMut(&T) -> bool, + where F: FnMut(&T) -> bool, { self.map.retain(move |x, &mut ()| keep(x)) } @@ -532,8 +392,7 @@ where /// /// See `sort_by` for details. pub fn sort(&mut self) - where - T: Ord, + where T: Ord, { self.map.sort_keys() } @@ -542,8 +401,7 @@ where /// /// Computes in **O(n log n)** time and **O(n)** space. The sort is stable. pub fn sort_by(&mut self, mut compare: F) - where - F: FnMut(&T, &T) -> Ordering, + where F: FnMut(&T, &T) -> Ordering, { self.map.sort_by(move |a, _, b, _| compare(a, b)); } @@ -553,19 +411,19 @@ where /// /// The sort is stable. pub fn sorted_by(self, mut cmp: F) -> IntoIter - where - F: FnMut(&T, &T) -> Ordering, + where F: FnMut(&T, &T) -> Ordering { IntoIter { iter: self.map.sorted_by(move |a, &(), b, &()| cmp(a, b)).iter, } } - /// Reverses the order of the set’s values in place. - /// - /// Computes in **O(n)** time and **O(1)** space. - pub fn reverse(&mut self) { - self.map.reverse() + /// Clears the `IndexSet`, returning all values as a drain iterator. + /// Keeps the allocated memory for reuse. + pub fn drain(&mut self, range: RangeFull) -> Drain { + Drain { + iter: self.map.drain(range).iter, + } } } @@ -583,68 +441,12 @@ impl IndexSet { /// /// Valid indices are *0 <= index < self.len()* /// - /// Like `Vec::swap_remove`, the value is removed by swapping it with the - /// last element of the set and popping it off. **This perturbs - /// the postion of what used to be the last element!** - /// /// Computes in **O(1)** time (average). pub fn swap_remove_index(&mut self, index: usize) -> Option { self.map.swap_remove_index(index).map(|(x, ())| x) } - - /// Remove the key-value pair by index - /// - /// Valid indices are *0 <= index < self.len()* - /// - /// Like `Vec::remove`, the value is removed by shifting all of the - /// elements that follow it, preserving their relative order. - /// **This perturbs the index of all of those elements!** - /// - /// Computes in **O(n)** time (average). - pub fn shift_remove_index(&mut self, index: usize) -> Option { - self.map.shift_remove_index(index).map(|(x, ())| x) - } } -/// Access `IndexSet` values at indexed positions. 
-/// -/// # Examples -/// -/// ``` -/// use indexmap::IndexSet; -/// -/// let mut set = IndexSet::new(); -/// for word in "Lorem ipsum dolor sit amet".split_whitespace() { -/// set.insert(word.to_string()); -/// } -/// assert_eq!(set[0], "Lorem"); -/// assert_eq!(set[1], "ipsum"); -/// set.reverse(); -/// assert_eq!(set[0], "amet"); -/// assert_eq!(set[1], "sit"); -/// set.sort(); -/// assert_eq!(set[0], "Lorem"); -/// assert_eq!(set[1], "amet"); -/// ``` -/// -/// ```should_panic -/// use indexmap::IndexSet; -/// -/// let mut set = IndexSet::new(); -/// set.insert("foo"); -/// println!("{:?}", set[10]); // panics! -/// ``` -impl Index for IndexSet { - type Output = T; - - /// Returns a reference to the value at the supplied `index`. - /// - /// ***Panics*** if `index` is out of bounds. - fn index(&self, index: usize) -> &T { - self.get_index(index) - .expect("IndexSet: index out of bounds") - } -} /// An owning iterator over the items of a `IndexSet`. /// @@ -676,12 +478,13 @@ impl ExactSizeIterator for IntoIter { } impl fmt::Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let iter = self.iter.as_slice().iter().map(Bucket::key_ref); f.debug_list().entries(iter).finish() } } + /// An iterator over the items of a `IndexSet`. /// /// This `struct` is created by the [`iter`] method on [`IndexSet`]. @@ -689,7 +492,7 @@ impl fmt::Debug for IntoIter { /// /// [`IndexSet`]: struct.IndexSet.html /// [`iter`]: struct.IndexSet.html#method.iter -pub struct Iter<'a, T> { +pub struct Iter<'a, T: 'a> { iter: slice::Iter<'a, Bucket>, } @@ -699,28 +502,26 @@ impl<'a, T> Iterator for Iter<'a, T> { iterator_methods!(Bucket::key_ref); } -impl DoubleEndedIterator for Iter<'_, T> { +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { fn next_back(&mut self) -> Option { self.iter.next_back().map(Bucket::key_ref) } } -impl ExactSizeIterator for Iter<'_, T> { +impl<'a, T> ExactSizeIterator for Iter<'a, T> { fn len(&self) -> usize { self.iter.len() } } -impl Clone for Iter<'_, T> { +impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Self { - Iter { - iter: self.iter.clone(), - } + Iter { iter: self.iter.clone() } } } -impl fmt::Debug for Iter<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl<'a, T: fmt::Debug> fmt::Debug for Iter<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } @@ -732,21 +533,24 @@ impl fmt::Debug for Iter<'_, T> { /// /// [`IndexSet`]: struct.IndexSet.html /// [`drain`]: struct.IndexSet.html#method.drain -pub struct Drain<'a, T> { +pub struct Drain<'a, T: 'a> { iter: vec::Drain<'a, Bucket>, } -impl Iterator for Drain<'_, T> { +impl<'a, T> Iterator for Drain<'a, T> { type Item = T; iterator_methods!(Bucket::key); } -impl DoubleEndedIterator for Drain<'_, T> { +impl<'a, T> DoubleEndedIterator for Drain<'a, T> { double_ended_iterator_methods!(Bucket::key); } -impl<'a, T, S> IntoIterator for &'a IndexSet { +impl<'a, T, S> IntoIterator for &'a IndexSet + where T: Hash + Eq, + S: BuildHasher, +{ type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -755,7 +559,10 @@ impl<'a, T, S> IntoIterator for &'a IndexSet { } } -impl IntoIterator for IndexSet { +impl IntoIterator for IndexSet + where T: Hash + Eq, + S: BuildHasher, +{ type Item = T; type IntoIter = IntoIter; @@ -767,57 +574,49 @@ impl IntoIterator for IndexSet { } impl FromIterator for IndexSet -where - T: Hash + Eq, - S: BuildHasher + Default, + where T: Hash + Eq, 
+ S: BuildHasher + Default, { - fn from_iter>(iterable: I) -> Self { + fn from_iter>(iterable: I) -> Self { let iter = iterable.into_iter().map(|x| (x, ())); - IndexSet { - map: IndexMap::from_iter(iter), - } + IndexSet { map: IndexMap::from_iter(iter) } } } impl Extend for IndexSet -where - T: Hash + Eq, - S: BuildHasher, + where T: Hash + Eq, + S: BuildHasher, { - fn extend>(&mut self, iterable: I) { + fn extend>(&mut self, iterable: I) { let iter = iterable.into_iter().map(|x| (x, ())); self.map.extend(iter); } } impl<'a, T, S> Extend<&'a T> for IndexSet -where - T: Hash + Eq + Copy + 'a, - S: BuildHasher, + where T: Hash + Eq + Copy, + S: BuildHasher, { - fn extend>(&mut self, iterable: I) { - let iter = iterable.into_iter().cloned(); // FIXME: use `copied` in Rust 1.36 + fn extend>(&mut self, iterable: I) { + let iter = iterable.into_iter().map(|&x| x); self.extend(iter); } } + impl Default for IndexSet -where - S: Default, + where S: BuildHasher + Default, { /// Return an empty `IndexSet` fn default() -> Self { - IndexSet { - map: IndexMap::default(), - } + IndexSet { map: IndexMap::default() } } } impl PartialEq> for IndexSet -where - T: Hash + Eq, - S1: BuildHasher, - S2: BuildHasher, + where T: Hash + Eq, + S1: BuildHasher, + S2: BuildHasher { fn eq(&self, other: &IndexSet) -> bool { self.len() == other.len() && self.is_subset(other) @@ -825,21 +624,18 @@ where } impl Eq for IndexSet -where - T: Eq + Hash, - S: BuildHasher, + where T: Eq + Hash, + S: BuildHasher { } impl IndexSet -where - T: Eq + Hash, - S: BuildHasher, + where T: Eq + Hash, + S: BuildHasher { /// Returns `true` if `self` has no elements in common with `other`. pub fn is_disjoint(&self, other: &IndexSet) -> bool - where - S2: BuildHasher, + where S2: BuildHasher { if self.len() <= other.len() { self.iter().all(move |value| !other.contains(value)) @@ -850,21 +646,20 @@ where /// Returns `true` if all elements of `self` are contained in `other`. pub fn is_subset(&self, other: &IndexSet) -> bool - where - S2: BuildHasher, + where S2: BuildHasher { self.len() <= other.len() && self.iter().all(move |value| other.contains(value)) } /// Returns `true` if all elements of `other` are contained in `self`. pub fn is_superset(&self, other: &IndexSet) -> bool - where - S2: BuildHasher, + where S2: BuildHasher { other.is_subset(self) } } + /// A lazy iterator producing elements in the difference of `IndexSet`s. /// /// This `struct` is created by the [`difference`] method on [`IndexSet`]. 
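A small usage sketch (assuming the vendored `indexmap` crate) of the `FromIterator` and `Extend` impls restored above, both of which route through the inner `IndexMap` with `()` values.

use indexmap::IndexSet;

fn main() {
    // `collect` goes through FromIterator and keeps first-seen order.
    let mut set: IndexSet<i32> = [3, 1, 2, 3, 1].iter().cloned().collect();
    assert_eq!(set.iter().cloned().collect::<Vec<_>>(), [3, 1, 2]);

    // `extend` appends only the values not already present.
    set.extend([2, 4, 5].iter().cloned());
    assert_eq!(set.iter().cloned().collect::<Vec<_>>(), [3, 1, 2, 4, 5]);
}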
@@ -872,15 +667,14 @@ where /// /// [`IndexSet`]: struct.IndexSet.html /// [`difference`]: struct.IndexSet.html#method.difference -pub struct Difference<'a, T, S> { +pub struct Difference<'a, T: 'a, S: 'a> { iter: Iter<'a, T>, other: &'a IndexSet, } impl<'a, T, S> Iterator for Difference<'a, T, S> -where - T: Eq + Hash, - S: BuildHasher, + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; @@ -898,10 +692,9 @@ where } } -impl DoubleEndedIterator for Difference<'_, T, S> -where - T: Eq + Hash, - S: BuildHasher, +impl<'a, T, S> DoubleEndedIterator for Difference<'a, T, S> + where T: Eq + Hash, + S: BuildHasher { fn next_back(&mut self) -> Option { while let Some(item) = self.iter.next_back() { @@ -913,25 +706,22 @@ where } } -impl Clone for Difference<'_, T, S> { +impl<'a, T, S> Clone for Difference<'a, T, S> { fn clone(&self) -> Self { - Difference { - iter: self.iter.clone(), - ..*self - } + Difference { iter: self.iter.clone(), ..*self } } } -impl fmt::Debug for Difference<'_, T, S> -where - T: fmt::Debug + Eq + Hash, - S: BuildHasher, +impl<'a, T, S> fmt::Debug for Difference<'a, T, S> + where T: fmt::Debug + Eq + Hash, + S: BuildHasher { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } + /// A lazy iterator producing elements in the intersection of `IndexSet`s. /// /// This `struct` is created by the [`intersection`] method on [`IndexSet`]. @@ -939,15 +729,14 @@ where /// /// [`IndexSet`]: struct.IndexSet.html /// [`intersection`]: struct.IndexSet.html#method.intersection -pub struct Intersection<'a, T, S> { +pub struct Intersection<'a, T: 'a, S: 'a> { iter: Iter<'a, T>, other: &'a IndexSet, } impl<'a, T, S> Iterator for Intersection<'a, T, S> -where - T: Eq + Hash, - S: BuildHasher, + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; @@ -965,10 +754,9 @@ where } } -impl DoubleEndedIterator for Intersection<'_, T, S> -where - T: Eq + Hash, - S: BuildHasher, +impl<'a, T, S> DoubleEndedIterator for Intersection<'a, T, S> + where T: Eq + Hash, + S: BuildHasher { fn next_back(&mut self) -> Option { while let Some(item) = self.iter.next_back() { @@ -980,25 +768,22 @@ where } } -impl Clone for Intersection<'_, T, S> { +impl<'a, T, S> Clone for Intersection<'a, T, S> { fn clone(&self) -> Self { - Intersection { - iter: self.iter.clone(), - ..*self - } + Intersection { iter: self.iter.clone(), ..*self } } } -impl fmt::Debug for Intersection<'_, T, S> -where - T: fmt::Debug + Eq + Hash, - S: BuildHasher, +impl<'a, T, S> fmt::Debug for Intersection<'a, T, S> + where T: fmt::Debug + Eq + Hash, + S: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } + /// A lazy iterator producing elements in the symmetric difference of `IndexSet`s. 
/// /// This `struct` is created by the [`symmetric_difference`] method on @@ -1006,15 +791,14 @@ where /// /// [`IndexSet`]: struct.IndexSet.html /// [`symmetric_difference`]: struct.IndexSet.html#method.symmetric_difference -pub struct SymmetricDifference<'a, T, S1, S2> { +pub struct SymmetricDifference<'a, T: 'a, S1: 'a, S2: 'a> { iter: Chain, Difference<'a, T, S1>>, } impl<'a, T, S1, S2> Iterator for SymmetricDifference<'a, T, S1, S2> -where - T: Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, + where T: Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { type Item = &'a T; @@ -1027,43 +811,39 @@ where } fn fold(self, init: B, f: F) -> B - where - F: FnMut(B, Self::Item) -> B, + where F: FnMut(B, Self::Item) -> B { self.iter.fold(init, f) } } -impl DoubleEndedIterator for SymmetricDifference<'_, T, S1, S2> -where - T: Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> DoubleEndedIterator for SymmetricDifference<'a, T, S1, S2> + where T: Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { fn next_back(&mut self) -> Option { self.iter.next_back() } } -impl Clone for SymmetricDifference<'_, T, S1, S2> { +impl<'a, T, S1, S2> Clone for SymmetricDifference<'a, T, S1, S2> { fn clone(&self) -> Self { - SymmetricDifference { - iter: self.iter.clone(), - } + SymmetricDifference { iter: self.iter.clone() } } } -impl fmt::Debug for SymmetricDifference<'_, T, S1, S2> -where - T: fmt::Debug + Eq + Hash, - S1: BuildHasher, - S2: BuildHasher, +impl<'a, T, S1, S2> fmt::Debug for SymmetricDifference<'a, T, S1, S2> + where T: fmt::Debug + Eq + Hash, + S1: BuildHasher, + S2: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } + /// A lazy iterator producing elements in the union of `IndexSet`s. /// /// This `struct` is created by the [`union`] method on [`IndexSet`]. 
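A usage sketch (assuming the vendored `indexmap` crate) of the lazy set-operation adapters defined in these hunks: each borrows both sets and yields references in a deterministic, insertion-based order, while the operator impls further below clone the result into a new set.

use indexmap::IndexSet;

fn main() {
    let a: IndexSet<i32> = (0..6).collect();        // 0 1 2 3 4 5
    let b: IndexSet<i32> = (3..9).collect();        // 3 4 5 6 7 8

    let diff: Vec<i32> = a.difference(&b).cloned().collect();
    assert_eq!(diff, [0, 1, 2]);                    // order of `a`

    let inter: Vec<i32> = a.intersection(&b).cloned().collect();
    assert_eq!(inter, [3, 4, 5]);                   // order of `a`

    let sym: Vec<i32> = a.symmetric_difference(&b).cloned().collect();
    assert_eq!(sym, [0, 1, 2, 6, 7, 8]);            // `a` first, then `b`

    // The BitOr operator impl clones the union into a new IndexSet.
    let both: IndexSet<i32> = &a | &b;
    assert_eq!(both.len(), 9);
}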
@@ -1071,14 +851,13 @@ where /// /// [`IndexSet`]: struct.IndexSet.html /// [`union`]: struct.IndexSet.html#method.union -pub struct Union<'a, T, S> { +pub struct Union<'a, T: 'a, S: 'a> { iter: Chain, Difference<'a, T, S>>, } impl<'a, T, S> Iterator for Union<'a, T, S> -where - T: Eq + Hash, - S: BuildHasher, + where T: Eq + Hash, + S: BuildHasher, { type Item = &'a T; @@ -1091,62 +870,56 @@ where } fn fold(self, init: B, f: F) -> B - where - F: FnMut(B, Self::Item) -> B, + where F: FnMut(B, Self::Item) -> B { self.iter.fold(init, f) } } -impl DoubleEndedIterator for Union<'_, T, S> -where - T: Eq + Hash, - S: BuildHasher, +impl<'a, T, S> DoubleEndedIterator for Union<'a, T, S> + where T: Eq + Hash, + S: BuildHasher, { fn next_back(&mut self) -> Option { self.iter.next_back() } } -impl Clone for Union<'_, T, S> { +impl<'a, T, S> Clone for Union<'a, T, S> { fn clone(&self) -> Self { - Union { - iter: self.iter.clone(), - } + Union { iter: self.iter.clone() } } } -impl fmt::Debug for Union<'_, T, S> -where - T: fmt::Debug + Eq + Hash, - S: BuildHasher, +impl<'a, T, S> fmt::Debug for Union<'a, T, S> + where T: fmt::Debug + Eq + Hash, + S: BuildHasher, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } -impl BitAnd<&IndexSet> for &IndexSet -where - T: Eq + Hash + Clone, - S1: BuildHasher + Default, - S2: BuildHasher, + +impl<'a, 'b, T, S1, S2> BitAnd<&'b IndexSet> for &'a IndexSet + where T: Eq + Hash + Clone, + S1: BuildHasher + Default, + S2: BuildHasher, { type Output = IndexSet; /// Returns the set intersection, cloned into a new set. /// /// Values are collected in the same order that they appear in `self`. - fn bitand(self, other: &IndexSet) -> Self::Output { + fn bitand(self, other: &'b IndexSet) -> Self::Output { self.intersection(other).cloned().collect() } } -impl BitOr<&IndexSet> for &IndexSet -where - T: Eq + Hash + Clone, - S1: BuildHasher + Default, - S2: BuildHasher, +impl<'a, 'b, T, S1, S2> BitOr<&'b IndexSet> for &'a IndexSet + where T: Eq + Hash + Clone, + S1: BuildHasher + Default, + S2: BuildHasher, { type Output = IndexSet; @@ -1154,16 +927,15 @@ where /// /// Values from `self` are collected in their original order, followed by /// values that are unique to `other` in their original order. - fn bitor(self, other: &IndexSet) -> Self::Output { + fn bitor(self, other: &'b IndexSet) -> Self::Output { self.union(other).cloned().collect() } } -impl BitXor<&IndexSet> for &IndexSet -where - T: Eq + Hash + Clone, - S1: BuildHasher + Default, - S2: BuildHasher, +impl<'a, 'b, T, S1, S2> BitXor<&'b IndexSet> for &'a IndexSet + where T: Eq + Hash + Clone, + S1: BuildHasher + Default, + S2: BuildHasher, { type Output = IndexSet; @@ -1171,32 +943,31 @@ where /// /// Values from `self` are collected in their original order, followed by /// values from `other` in their original order. - fn bitxor(self, other: &IndexSet) -> Self::Output { + fn bitxor(self, other: &'b IndexSet) -> Self::Output { self.symmetric_difference(other).cloned().collect() } } -impl Sub<&IndexSet> for &IndexSet -where - T: Eq + Hash + Clone, - S1: BuildHasher + Default, - S2: BuildHasher, +impl<'a, 'b, T, S1, S2> Sub<&'b IndexSet> for &'a IndexSet + where T: Eq + Hash + Clone, + S1: BuildHasher + Default, + S2: BuildHasher, { type Output = IndexSet; /// Returns the set difference, cloned into a new set. /// /// Values are collected in the same order that they appear in `self`. 
- fn sub(self, other: &IndexSet) -> Self::Output { + fn sub(self, other: &'b IndexSet) -> Self::Output { self.difference(other).cloned().collect() } } + #[cfg(test)] mod tests { use super::*; - use crate::util::enumerate; - use std::string::String; + use util::enumerate; #[test] fn it_works() { @@ -1272,7 +1043,7 @@ mod tests { let old_set = set.clone(); set.insert(i); for value in old_set.iter() { - if set.get(value).is_none() { + if !set.get(value).is_some() { println!("old_set: {:?}", old_set); println!("set: {:?}", set); panic!("did not find {} in set", value); @@ -1330,6 +1101,7 @@ mod tests { let not_present = [1, 3, 6, 9, 10]; let mut set = IndexSet::with_capacity(insert.len()); + for (i, &elt) in enumerate(&insert) { assert_eq!(set.len(), i); set.insert(elt); @@ -1353,43 +1125,6 @@ mod tests { } } - #[test] - fn reserve() { - let mut set = IndexSet::::new(); - assert_eq!(set.capacity(), 0); - set.reserve(100); - let capacity = set.capacity(); - assert!(capacity >= 100); - for i in 0..capacity { - assert_eq!(set.len(), i); - set.insert(i); - assert_eq!(set.len(), i + 1); - assert_eq!(set.capacity(), capacity); - assert_eq!(set.get(&i), Some(&i)); - } - set.insert(capacity); - assert_eq!(set.len(), capacity + 1); - assert!(set.capacity() > capacity); - assert_eq!(set.get(&capacity), Some(&capacity)); - } - - #[test] - fn shrink_to_fit() { - let mut set = IndexSet::::new(); - assert_eq!(set.capacity(), 0); - for i in 0..100 { - assert_eq!(set.len(), i); - set.insert(i); - assert_eq!(set.len(), i + 1); - assert!(set.capacity() >= i + 1); - assert_eq!(set.get(&i), Some(&i)); - set.shrink_to_fit(); - assert_eq!(set.len(), i + 1); - assert_eq!(set.capacity(), i + 1); - assert_eq!(set.get(&i), Some(&i)); - } - } - #[test] fn remove() { let insert = [0, 4, 2, 12, 8, 7, 11, 5, 3, 17, 19, 22, 23]; @@ -1413,7 +1148,7 @@ mod tests { } println!("{:?}", set); for &value in &remove { - //println!("{:?}", set); + //println!("{:?}", set); let index = set.get_full(&value).unwrap().0; assert_eq!(set.swap_remove_full(&value), Some((index, value))); } @@ -1458,7 +1193,7 @@ mod tests { set_a.insert(2); let mut set_b = set_a.clone(); assert_eq!(set_a, set_b); - set_b.swap_remove(&1); + set_b.remove(&1); assert_ne!(set_a, set_b); let set_c: IndexSet<_> = set_b.into_iter().collect(); @@ -1512,9 +1247,8 @@ mod tests { use std::iter::empty; fn check<'a, I1, I2>(iter1: I1, iter2: I2) - where - I1: Iterator, - I2: Iterator, + where I1: Iterator, + I2: Iterator, { assert!(iter1.cloned().eq(iter2)); } @@ -1549,10 +1283,7 @@ mod tests { check(set_c.difference(&set_d), 0..3); check(set_d.difference(&set_c), (6..9).rev()); - check( - set_c.symmetric_difference(&set_d), - (0..3).chain((6..9).rev()), - ); + check(set_c.symmetric_difference(&set_d), (0..3).chain((6..9).rev())); check(set_d.symmetric_difference(&set_c), (6..9).rev().chain(0..3)); check(set_c.intersection(&set_d), 3..6); check(set_d.intersection(&set_c), (3..6).rev()); @@ -1568,14 +1299,10 @@ mod tests { let set_c: IndexSet<_> = (0..6).collect(); let set_d: IndexSet<_> = (3..9).rev().collect(); - // FIXME: #[allow(clippy::eq_op)] in Rust 1.31 - #[cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints, eq_op))] - { - assert_eq!(&set_a & &set_a, set_a); - assert_eq!(&set_a | &set_a, set_a); - assert_eq!(&set_a ^ &set_a, empty); - assert_eq!(&set_a - &set_a, empty); - } + assert_eq!(&set_a & &set_a, set_a); + assert_eq!(&set_a | &set_a, set_a); + assert_eq!(&set_a ^ &set_a, empty); + assert_eq!(&set_a - &set_a, empty); assert_eq!(&set_a & 
&set_b, empty); assert_eq!(&set_b & &set_a, empty); diff --git a/third_party/rust/indexmap/src/util.rs b/third_party/rust/indexmap/src/util.rs index 5388f470cf08..90d3e7e33491 100644 --- a/third_party/rust/indexmap/src/util.rs +++ b/third_party/rust/indexmap/src/util.rs @@ -1,39 +1,17 @@ -use core::iter::Enumerate; -use core::ops::{Bound, Range, RangeBounds}; -pub(crate) fn third(t: (A, B, C)) -> C { - t.2 -} +use std::iter::Enumerate; +use std::mem::size_of; -pub(crate) fn enumerate(iterable: I) -> Enumerate -where - I: IntoIterator, +pub fn third(t: (A, B, C)) -> C { t.2 } + +pub fn enumerate(iterable: I) -> Enumerate + where I: IntoIterator { iterable.into_iter().enumerate() } -pub(crate) fn simplify_range(range: R, len: usize) -> Range -where - R: RangeBounds, -{ - let start = match range.start_bound() { - Bound::Unbounded => 0, - Bound::Included(&i) if i <= len => i, - Bound::Excluded(&i) if i < len => i + 1, - bound => panic!("range start {:?} should be <= length {}", bound, len), - }; - let end = match range.end_bound() { - Bound::Unbounded => len, - Bound::Excluded(&i) if i <= len => i, - Bound::Included(&i) if i < len => i + 1, - bound => panic!("range end {:?} should be <= length {}", bound, len), - }; - if start > end { - panic!( - "range start {:?} should be <= range end {:?}", - range.start_bound(), - range.end_bound() - ); - } - start..end +/// return the number of steps from a to b +pub fn ptrdistance(a: *const T, b: *const T) -> usize { + debug_assert!(a as usize <= b as usize); + (b as usize - a as usize) / size_of::() } diff --git a/third_party/rust/indexmap/tests/equivalent_trait.rs b/third_party/rust/indexmap/tests/equivalent_trait.rs index ff5943a3edb8..8b79e20a19b2 100644 --- a/third_party/rust/indexmap/tests/equivalent_trait.rs +++ b/third_party/rust/indexmap/tests/equivalent_trait.rs @@ -1,4 +1,6 @@ -use indexmap::indexmap; + +#[macro_use] extern crate indexmap; + use indexmap::Equivalent; use std::hash::Hash; @@ -7,20 +9,20 @@ use std::hash::Hash; pub struct Pair(pub A, pub B); impl PartialEq<(A, B)> for Pair -where - C: PartialEq, - D: PartialEq, + where C: PartialEq, + D: PartialEq, { fn eq(&self, rhs: &(A, B)) -> bool { - self.0 == rhs.0 && self.1 == rhs.1 + self.0 == rhs.0 && + self.1 == rhs.1 && + true } } impl Equivalent for Pair -where - Pair: PartialEq, - A: Hash + Eq, - B: Hash + Eq, + where Pair: PartialEq, + A: Hash + Eq, + B: Hash + Eq, { fn equivalent(&self, other: &X) -> bool { *self == *other @@ -49,5 +51,5 @@ fn test_string_str() { assert!(map.contains_key("a")); assert!(!map.contains_key("z")); - assert_eq!(map.swap_remove("b"), Some(2)); + assert_eq!(map.remove("b"), Some(2)); } diff --git a/third_party/rust/indexmap/tests/macros_full_path.rs b/third_party/rust/indexmap/tests/macros_full_path.rs deleted file mode 100644 index 2467d9b4f5d4..000000000000 --- a/third_party/rust/indexmap/tests/macros_full_path.rs +++ /dev/null @@ -1,19 +0,0 @@ -#[test] -fn test_create_map() { - let _m = indexmap::indexmap! { - 1 => 2, - 7 => 1, - 2 => 2, - 3 => 3, - }; -} - -#[test] -fn test_create_set() { - let _s = indexmap::indexset! 
{ - 1, - 7, - 2, - 3, - }; -} diff --git a/third_party/rust/indexmap/tests/quick.rs b/third_party/rust/indexmap/tests/quick.rs index 10e11b5285df..0c6e8aa4e4ac 100644 --- a/third_party/rust/indexmap/tests/quick.rs +++ b/third_party/rust/indexmap/tests/quick.rs @@ -1,42 +1,48 @@ -use indexmap::{IndexMap, IndexSet}; + +extern crate indexmap; +extern crate itertools; +#[macro_use] +extern crate quickcheck; +extern crate rand; + +extern crate fnv; + +use indexmap::IndexMap; use itertools::Itertools; -use quickcheck::quickcheck; use quickcheck::Arbitrary; use quickcheck::Gen; -use quickcheck::TestResult; use rand::Rng; use fnv::FnvHasher; use std::hash::{BuildHasher, BuildHasherDefault}; type FnvBuilder = BuildHasherDefault; -type IndexMapFnv = IndexMap; +type OrderMapFnv = IndexMap; -use std::cmp::min; -use std::collections::HashMap; use std::collections::HashSet; -use std::fmt::Debug; -use std::hash::Hash; +use std::collections::HashMap; use std::iter::FromIterator; -use std::ops::Bound; +use std::hash::Hash; +use std::fmt::Debug; use std::ops::Deref; +use std::cmp::min; + use indexmap::map::Entry as OEntry; use std::collections::hash_map::Entry as HEntry; + fn set<'a, T: 'a, I>(iter: I) -> HashSet -where - I: IntoIterator, - T: Copy + Hash + Eq, + where I: IntoIterator, + T: Copy + Hash + Eq { iter.into_iter().cloned().collect() } fn indexmap<'a, T: 'a, I>(iter: I) -> IndexMap -where - I: IntoIterator, - T: Copy + Hash + Eq, + where I: IntoIterator, + T: Copy + Hash + Eq, { IndexMap::from_iter(iter.into_iter().cloned().map(|k| (k, ()))) } @@ -102,7 +108,7 @@ quickcheck! { map.capacity() >= cap } - fn drain_full(insert: Vec) -> bool { + fn drain(insert: Vec) -> bool { let mut map = IndexMap::new(); for &key in &insert { map.insert(key, ()); @@ -110,82 +116,13 @@ quickcheck! { let mut clone = map.clone(); let drained = clone.drain(..); for (key, _) in drained { - map.swap_remove(&key); + map.remove(&key); } map.is_empty() } - - fn drain_bounds(insert: Vec, range: (Bound, Bound)) -> TestResult { - let mut map = IndexMap::new(); - for &key in &insert { - map.insert(key, ()); - } - - // First see if `Vec::drain` is happy with this range. - let result = std::panic::catch_unwind(|| { - let mut keys: Vec = map.keys().cloned().collect(); - keys.drain(range); - keys - }); - - if let Ok(keys) = result { - map.drain(range); - // Check that our `drain` matches the same key order. - assert!(map.keys().eq(&keys)); - // Check that hash lookups all work too. - assert!(keys.iter().all(|key| map.contains_key(key))); - TestResult::passed() - } else { - // If `Vec::drain` panicked, so should we. 
- TestResult::must_fail(move || { map.drain(range); }) - } - } - - fn shift_remove(insert: Vec, remove: Vec) -> bool { - let mut map = IndexMap::new(); - for &key in &insert { - map.insert(key, ()); - } - for &key in &remove { - map.shift_remove(&key); - } - let elements = &set(&insert) - &set(&remove); - - // Check that order is preserved after removals - let mut iter = map.keys(); - for &key in insert.iter().unique() { - if elements.contains(&key) { - assert_eq!(Some(key), iter.next().cloned()); - } - } - - map.len() == elements.len() && map.iter().count() == elements.len() && - elements.iter().all(|k| map.get(k).is_some()) - } - - fn indexing(insert: Vec) -> bool { - let mut map: IndexMap<_, _> = insert.into_iter().map(|x| (x, x)).collect(); - let set: IndexSet<_> = map.keys().cloned().collect(); - assert_eq!(map.len(), set.len()); - - for (i, &key) in set.iter().enumerate() { - assert_eq!(map.get_index(i), Some((&key, &key))); - assert_eq!(set.get_index(i), Some(&key)); - assert_eq!(map[i], key); - assert_eq!(set[i], key); - - *map.get_index_mut(i).unwrap().1 >>= 1; - map[i] <<= 1; - } - - set.iter().enumerate().all(|(i, &key)| { - let value = key & !1; - map[&key] == value && map[i] == value - }) - } } -use crate::Op::*; +use Op::*; #[derive(Copy, Clone, Debug)] enum Op { Add(K, V), @@ -195,9 +132,8 @@ enum Op { } impl Arbitrary for Op -where - K: Arbitrary, - V: Arbitrary, + where K: Arbitrary, + V: Arbitrary, { fn arbitrary(g: &mut G) -> Self { match g.gen::() % 4 { @@ -210,10 +146,9 @@ where } fn do_ops(ops: &[Op], a: &mut IndexMap, b: &mut HashMap) -where - K: Hash + Eq + Clone, - V: Clone, - S: BuildHasher, + where K: Hash + Eq + Clone, + V: Clone, + S: BuildHasher, { for op in ops { match *op { @@ -222,19 +157,21 @@ where b.insert(k.clone(), v.clone()); } AddEntry(ref k, ref v) => { - a.entry(k.clone()).or_insert_with(|| v.clone()); - b.entry(k.clone()).or_insert_with(|| v.clone()); + a.entry(k.clone()).or_insert(v.clone()); + b.entry(k.clone()).or_insert(v.clone()); } Remove(ref k) => { a.swap_remove(k); b.remove(k); } RemoveEntry(ref k) => { - if let OEntry::Occupied(ent) = a.entry(k.clone()) { - ent.swap_remove_entry(); + match a.entry(k.clone()) { + OEntry::Occupied(ent) => { ent.remove_entry(); }, + _ => { } } - if let HEntry::Occupied(ent) = b.entry(k.clone()) { - ent.remove_entry(); + match b.entry(k.clone()) { + HEntry::Occupied(ent) => { ent.remove_entry(); }, + _ => { } } } } @@ -243,9 +180,8 @@ where } fn assert_maps_equivalent(a: &IndexMap, b: &HashMap) -> bool -where - K: Hash + Eq + Debug, - V: Eq + Debug, + where K: Hash + Eq + Debug, + V: Eq + Debug, { assert_eq!(a.len(), b.len()); assert_eq!(a.iter().next().is_some(), b.iter().next().is_some()); @@ -316,7 +252,7 @@ quickcheck! { ops2.remove(i); } } - let mut map2 = IndexMapFnv::default(); + let mut map2 = OrderMapFnv::default(); let mut reference2 = HashMap::new(); do_ops(&ops2, &mut map2, &mut reference2); assert_eq!(map == map2, reference == reference2); @@ -371,67 +307,13 @@ quickcheck! { map.sort_by(|_, v1, _, v2| Ord::cmp(v1, v2)); assert_sorted_by_key(map, |t| t.1); } - - fn reverse(keyvals: Large>) -> () { - let mut map: IndexMap<_, _> = IndexMap::from_iter(keyvals.to_vec()); - - fn generate_answer(input: &Vec<(i8, i8)>) -> Vec<(i8, i8)> { - // to mimic what `IndexMap::from_iter` does: - // need to get (A) the unique keys in forward order, and (B) the - // last value of each of those keys. 
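The quickcheck properties above all follow the same differential-testing pattern; a condensed, hand-rolled sketch of that idea (assuming the vendored `indexmap` crate; the real tests let quickcheck generate the operation sequences):

use indexmap::IndexMap;
use std::collections::HashMap;

// Apply the same operations to an IndexMap and a HashMap, then check that
// both agree on length, membership, and values; only iteration order differs.
fn check_against_hashmap(ops: &[(u8, i32)]) {
    let mut a = IndexMap::new();
    let mut b = HashMap::new();
    for &(k, v) in ops {
        if v % 2 == 0 {
            a.insert(k, v);
            b.insert(k, v);
        } else {
            a.swap_remove(&k);
            b.remove(&k);
        }
    }
    assert_eq!(a.len(), b.len());
    for (k, v) in &b {
        assert_eq!(a.get(k), Some(v));
    }
}

fn main() {
    check_against_hashmap(&[(1, 2), (2, 4), (1, 3), (5, 6)]);
}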
- - // create (A): an iterable that yields the unique keys in ltr order - let mut seen_keys = HashSet::new(); - let unique_keys_forward = input.iter().filter_map(move |(k, _)| { - if seen_keys.contains(k) { None } - else { seen_keys.insert(*k); Some(*k) } - }); - - // create (B): a mapping of keys to the last value seen for that key - // this is the same as reversing the input and taking the first - // value seen for that key! - let mut last_val_per_key = HashMap::new(); - for &(k, v) in input.iter().rev() { - if !last_val_per_key.contains_key(&k) { - last_val_per_key.insert(k, v); - } - } - - // iterate over the keys in (A) in order, and match each one with - // the corresponding last value from (B) - let mut ans: Vec<_> = unique_keys_forward - .map(|k| (k, *last_val_per_key.get(&k).unwrap())) - .collect(); - - // finally, since this test is testing `.reverse()`, reverse the - // answer in-place - ans.reverse(); - - ans - } - - let answer = generate_answer(&keyvals.0); - - // perform the work - map.reverse(); - - // check it contains all the values it should - for &(key, val) in &answer { - assert_eq!(map[&key], val); - } - - // check the order - let mapv = Vec::from_iter(map); - assert_eq!(answer, mapv); - } } fn assert_sorted_by_key(iterable: I, key: Key) -where - I: IntoIterator, - I::Item: Ord + Clone + Debug, - Key: Fn(&I::Item) -> X, - X: Ord, + where I: IntoIterator, + I::Item: Ord + Clone + Debug, + Key: Fn(&I::Item) -> X, + X: Ord, { let input = Vec::from_iter(iterable); let mut sorted = input.clone(); @@ -444,25 +326,21 @@ struct Alpha(String); impl Deref for Alpha { type Target = String; - fn deref(&self) -> &String { - &self.0 - } + fn deref(&self) -> &String { &self.0 } } -const ALPHABET: &[u8] = b"abcdefghijklmnopqrstuvwxyz"; +const ALPHABET: &'static [u8] = b"abcdefghijklmnopqrstuvwxyz"; impl Arbitrary for Alpha { fn arbitrary(g: &mut G) -> Self { let len = g.next_u32() % g.size() as u32; let len = min(len, 16); - Alpha( - (0..len) - .map(|_| ALPHABET[g.next_u32() as usize % ALPHABET.len()] as char) - .collect(), - ) + Alpha((0..len).map(|_| { + ALPHABET[g.next_u32() as usize % ALPHABET.len()] as char + }).collect()) } - fn shrink(&self) -> Box> { + fn shrink(&self) -> Box> { Box::new((**self).shrink().map(Alpha)) } } @@ -473,21 +351,18 @@ struct Large(T); impl Deref for Large { type Target = T; - fn deref(&self) -> &T { - &self.0 - } + fn deref(&self) -> &T { &self.0 } } impl Arbitrary for Large> -where - T: Arbitrary, + where T: Arbitrary { fn arbitrary(g: &mut G) -> Self { let len = g.next_u32() % (g.size() * 10) as u32; Large((0..len).map(|_| T::arbitrary(g)).collect()) } - fn shrink(&self) -> Box> { + fn shrink(&self) -> Box> { Box::new((**self).shrink().map(Large)) } } diff --git a/third_party/rust/indexmap/tests/serde.rs b/third_party/rust/indexmap/tests/serde.rs new file mode 100644 index 000000000000..dbb23573e4c2 --- /dev/null +++ b/third_party/rust/indexmap/tests/serde.rs @@ -0,0 +1,59 @@ +#![cfg(feature = "serde-1")] + +#[macro_use] +extern crate indexmap; +extern crate serde_test; +extern crate fnv; + +use serde_test::{Token, assert_tokens}; + +#[test] +fn test_serde() { + let map = indexmap! { 1 => 2, 3 => 4 }; + assert_tokens(&map, + &[Token::Map { len: Some(2) }, + Token::I32(1), + Token::I32(2), + Token::I32(3), + Token::I32(4), + Token::MapEnd]); +} + +#[test] +fn test_serde_set() { + let set = indexset! 
{ 1, 2, 3, 4 }; + assert_tokens(&set, + &[Token::Seq { len: Some(4) }, + Token::I32(1), + Token::I32(2), + Token::I32(3), + Token::I32(4), + Token::SeqEnd]); +} + +#[test] +fn test_serde_fnv_hasher() { + let mut map: ::indexmap::IndexMap = Default::default(); + map.insert(1, 2); + map.insert(3, 4); + assert_tokens(&map, + &[Token::Map { len: Some(2) }, + Token::I32(1), + Token::I32(2), + Token::I32(3), + Token::I32(4), + Token::MapEnd]); +} + +#[test] +fn test_serde_map_fnv_hasher() { + let mut set: ::indexmap::IndexSet = Default::default(); + set.extend(1..5); + assert_tokens(&set, + &[Token::Seq { len: Some(4) }, + Token::I32(1), + Token::I32(2), + Token::I32(3), + Token::I32(4), + Token::SeqEnd]); +} diff --git a/third_party/rust/indexmap/tests/tests.rs b/third_party/rust/indexmap/tests/tests.rs index 7d522f1c9708..4a7f4db7a493 100644 --- a/third_party/rust/indexmap/tests/tests.rs +++ b/third_party/rust/indexmap/tests/tests.rs @@ -1,4 +1,8 @@ -use indexmap::{indexmap, indexset}; + +#[macro_use] +extern crate indexmap; +extern crate itertools; + #[test] fn test_sort() { @@ -9,12 +13,11 @@ fn test_sort() { 3 => 3, }; - itertools::assert_equal( - m.sorted_by(|_k1, v1, _k2, v2| v1.cmp(v2)), - vec![(7, 1), (1, 2), (2, 2), (3, 3)], - ); + itertools::assert_equal(m.sorted_by(|_k1, v1, _k2, v2| v1.cmp(v2)), + vec![(7, 1), (1, 2), (2, 2), (3, 3)]); } + #[test] fn test_sort_set() { let s = indexset! { @@ -24,5 +27,6 @@ fn test_sort_set() { 3, }; - itertools::assert_equal(s.sorted_by(|v1, v2| v1.cmp(v2)), vec![1, 2, 3, 7]); + itertools::assert_equal(s.sorted_by(|v1, v2| v1.cmp(v2)), + vec![1, 2, 3, 7]); }
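Returning to the src/serde.rs hunks earlier in this patch, the serialization side boils down to emitting one map entry per pair in stored order. A minimal sketch of that pattern (assuming serde 1.x as a dependency; `Ordered` is a hypothetical stand-in, not a type from this patch):

use serde::ser::{Serialize, SerializeMap, Serializer};

struct Ordered<K, V>(Vec<(K, V)>);

impl<K: Serialize, V: Serialize> Serialize for Ordered<K, V> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Length hint first, then one entry per pair, in stored order.
        let mut map = serializer.serialize_map(Some(self.0.len()))?;
        for (k, v) in &self.0 {
            map.serialize_entry(k, v)?;
        }
        map.end()
    }
}

fn main() {
    let m = Ordered(vec![("b", 2), ("a", 1)]);
    // With serde_json this would print {"b":2,"a":1}, preserving order:
    // println!("{}", serde_json::to_string(&m).unwrap());
    let _ = m;
}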