Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1633289 - Upgrade rust-url to 2.4.1 r=necko-reviewers,webdriver-reviewers,glandium,places-reviewers,jesup,whimboo,lina,supply-chain-reviewers

- ran `cargo vet`
- ran `cargo vet prune`
- updated rust-url to 2.4.1
- ran `./mach vendor rust`

Differential Revision: https://phabricator.services.mozilla.com/D186796

This commit is contained in:
Parent: a05fb32f2b
Commit: acb150212e
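For context only (not part of the patch), the sketch below exercises the stable url 2.x parsing API that the Gecko crates whose Cargo.toml files are touched in this diff keep using across the 2.1.0 → 2.4.1 bump; the example URL is made up.

```rust
// Illustrative only: basic use of the url crate's parsing API after the upgrade.
use url::Url;

fn main() -> Result<(), url::ParseError> {
    let u = Url::parse("https://example.com/rust-lang/rust/issues?labels=E-easy")?;
    assert_eq!(u.scheme(), "https");
    assert_eq!(u.host_str(), Some("example.com"));
    assert_eq!(u.path(), "/rust-lang/rust/issues");

    // Query pairs come back percent-decoded as (Cow<str>, Cow<str>) tuples.
    let (k, v) = u.query_pairs().next().expect("one query pair");
    assert_eq!((k.as_ref(), v.as_ref()), ("labels", "E-easy"));
    Ok(())
}
```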
@@ -1868,9 +1868,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"

[[package]]
name = "form_urlencoded"
version = "1.1.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
 "percent-encoding",
]

@@ -2748,11 +2748,10 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

[[package]]
name = "idna"
version = "0.2.3"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
 "matches",
 "unicode-bidi",
 "unicode-normalization",
]

@@ -4214,9 +4213,9 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"

[[package]]
name = "percent-encoding"
version = "2.2.0"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"

[[package]]
name = "phf"

@@ -6009,12 +6008,12 @@ checksum = "2ace0b4755d0a2959962769239d56267f8a024fef2d9b32666b3dcd0946b0906"

[[package]]
name = "url"
version = "2.1.0"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
 "form_urlencoded",
 "idna",
 "matches",
 "percent-encoding",
 "serde",
]
@@ -5,7 +5,7 @@ authors = ["Nika Layzell <nika@thelayzells.com>"]
license = "MPL-2.0"

[dependencies]
url = "2.0"
url = "2.4"
nserror = { path = "../../../xpcom/rust/nserror" }
nsstring = { path = "../../../xpcom/rust/nsstring" }
xpcom = { path = "../../../xpcom/rust/xpcom" }
@@ -46,7 +46,7 @@ smallvec = "1.0"
string_cache = { version = "0.8", optional = true }
thin-vec = { version = "0.2.1", features = ["gecko-ffi"] }
time = { version = "0.1.17", optional = true }
url = { version = "2.0", optional = true }
url = { version = "2.4", optional = true }
uuid = { version = "0.8", features = ["v4"], optional = true }
void = "1.0.2"
webrender_api = { git = "https://github.com/servo/webrender", optional = true }
@@ -1543,6 +1543,11 @@ who = "Teodor Tanasoaia <ttanasoaia@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.1.1 -> 0.3.1"

[[audits.form_urlencoded]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
version = "1.2.0"

[[audits.fs-err]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"

@@ -2581,6 +2586,11 @@ criteria = "safe-to-deploy"
delta = "1.0.0 -> 0.1.2"
notes = "Small refactor of some simple iterator logic, no unsafe code or capabilities."

[[audits.percent-encoding]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
delta = "2.2.0 -> 2.3.0"

[[audits.phf]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"

@@ -3866,6 +3876,16 @@ criteria = "safe-to-deploy"
version = "0.23.0"
notes = "Maintained by the Glean and Application Services team."

[[audits.url]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
version = "2.4.0"

[[audits.url]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
delta = "2.4.0 -> 2.4.1"

[[audits.uuid]]
who = "Gabriele Svelto <gsvelto@mozilla.com>"
criteria = "safe-to-deploy"
@@ -811,10 +811,6 @@ criteria = "safe-to-run"
version = "1.0.0"
criteria = "safe-to-run"

[[exemptions.url]]
version = "2.1.0"
criteria = "safe-to-deploy"

[[exemptions.uuid]]
version = "0.8.2"
criteria = "safe-to-deploy"
@@ -922,16 +922,6 @@ who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"
version = "0.1.1"

[[audits.bytecode-alliance.audits.form_urlencoded]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
version = "1.1.0"
notes = """
This is a small crate for working with url-encoded forms which doesn't have any
more than what it says on the tin. Contains one `unsafe` block related to
performance around utf-8 validation which is fairly easy to verify as correct.
"""

[[audits.bytecode-alliance.audits.futures-channel]]
who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"

@@ -1142,6 +1132,12 @@ criteria = "safe-to-deploy"
version = "1.0.1"
notes = "No unsafe usage or ambient capabilities"

[[audits.embark-studios.audits.idna]]
who = "Johan Andersson <opensource@embark-studios.com>"
criteria = "safe-to-deploy"
delta = "0.3.0 -> 0.4.0"
notes = "No unsafe usage or ambient capabilities"

[[audits.embark-studios.audits.line-wrap]]
who = "Johan Andersson <opensource@embark-studios.com>"
criteria = "safe-to-deploy"
@@ -39,7 +39,7 @@ serde_json = "1.0"
serde_yaml = "0.8"
tempfile = "3"
unicode-segmentation = "1.9"
url = "2.0"
url = "2.4"
uuid = { version = "1.0", features = ["v4"] }
webdriver = { path = "../webdriver", version="0.49.0" }
zip = { version = "0.6", default-features = false, features = ["deflate"] }
@@ -34,5 +34,5 @@ time = "0.3"
tokio = { version = "1.0", features = ["rt", "net"], optional = true}
tokio-stream = { version = "0.1", features = ["net"], optional = true}
unicode-segmentation = "1.2"
url = "2.0"
url = "2.4"
warp = { version = "0.3", default-features = false, optional = true }
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"1771c35bc67b544a7ebc6f31168d7bc08d29ae76323e7ee10352d43b5a38afac","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","src/lib.rs":"c1a0db36086e4fd0e8bb1489f9c2bb55945dd233614bf5dc3a0c41cb49292dd3"},"package":"a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"}
|
||||
{"files":{"Cargo.toml":"85081d9e0386029e6e983d830975837967803e476cbfd48e576d276a5bcc77b6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","src/lib.rs":"ab14e1d39ed5ffaf5ded448f3cdf7157e721c61321963c9b6eb6ae27a0ab8d48"},"package":"a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"}
|
|
@@ -13,9 +13,10 @@
edition = "2018"
rust-version = "1.51"
name = "form_urlencoded"
version = "1.1.0"
version = "1.2.0"
authors = ["The rust-url developers"]
description = "Parser and serializer for the application/x-www-form-urlencoded syntax, as used by HTML forms."
categories = ["no_std"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/servo/rust-url"

@@ -23,4 +24,13 @@ repository = "https://github.com/servo/rust-url"
test = false

[dependencies.percent-encoding]
version = "2.2.0"
version = "2.3.0"
default-features = false

[features]
alloc = ["percent-encoding/alloc"]
default = ["std"]
std = [
    "alloc",
    "percent-encoding/std",
]
@@ -12,10 +12,21 @@
//!
//! Converts between a string (such as an URL’s query string)
//! and a sequence of (name, value) pairs.
#![no_std]

// For forwards compatibility
#[cfg(feature = "std")]
extern crate std as _;

extern crate alloc;

#[cfg(not(feature = "alloc"))]
compile_error!("the `alloc` feature must currently be enabled");

use alloc::borrow::{Borrow, Cow, ToOwned};
use alloc::string::String;
use core::str;
use percent_encoding::{percent_decode, percent_encode_byte};
use std::borrow::{Borrow, Cow};
use std::str;

/// Convert a byte string in the `application/x-www-form-urlencoded` syntax
/// into a iterator of (name, value) pairs.

@@ -186,7 +197,7 @@ impl Target for String {

impl<'a> Target for &'a mut String {
    fn as_mut_string(&mut self) -> &mut String {
        &mut **self
        self
    }
    fn finish(self) -> Self {
        self

@@ -282,7 +293,7 @@ impl<'a, T: Target> Serializer<'a, T> {
        {
            let string = string(&mut self.target);
            for pair in iter {
                let &(ref k, ref v) = pair.borrow();
                let (k, v) = pair.borrow();
                append_pair(
                    string,
                    self.start_position,
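As a quick illustration (not part of the patch) of the form_urlencoded API whose vendored sources change above, the round trip below uses the crate's own documented example; the expected string matches the doc-test that appears in the removed src/form_urlencoded.rs later in this diff.

```rust
// Illustrative only: form_urlencoded serialize/parse round trip.
fn main() {
    let encoded: String = form_urlencoded::Serializer::new(String::new())
        .append_pair("foo", "bar & baz")
        .append_pair("saison", "Été+hiver")
        .finish();
    assert_eq!(encoded, "foo=bar+%26+baz&saison=%C3%89t%C3%A9%2Bhiver");

    // Parsing percent-decodes the names and values again.
    let pairs: Vec<(String, String)> = form_urlencoded::parse(encoded.as_bytes())
        .into_owned()
        .collect();
    assert_eq!(pairs[0], ("foo".to_string(), "bar & baz".to_string()));
}
```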
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"fa141dcb135262e5fda9f680671699045326d96779bb1acf38d48c70c712bcdf","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","benches/all.rs":"e734b9c9092ed66986725f86cfe90f3756cfddb058af308b796ba494f9beefc2","src/IdnaMappingTable.txt":"87d6553a4b86bc49dcade38bf26b745cd81800eb8af295dc3fb99b4729eaea38","src/lib.rs":"d61b2bfcf4265b9a41eedd1de33ab49ea615e3c06df944321b30c57950a85342","src/make_uts46_mapping_table.py":"917055fa841f813de2bcf79cc79b595da3d5551559ee768db8660ab77cb26c34","src/punycode.rs":"07edf5293bc384a164eebb01bc18fe3d4b2d009b4565a36b74a3030978ea6e04","src/uts46.rs":"40521a01e5b8c38667252d5b1e0141c5a71f63aeae2f451b986792984e633b09","src/uts46_mapping_table.rs":"942fff78147c61da942f5f3a7ff4e90f9d7a00a29285733ac3fc3357eb2ed06f","tests/IdnaTestV2.txt":"c6f3778b0545fd150c8063286c7f5adc901e16557eddccc3751213646d07593d","tests/punycode.rs":"e6fb978f48445d1525a6b97351c41c5393a1612a35f85b9a7f45b8794fce9aba","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"de7425a3e4e6e871255721107803704d1431246601fa9c87105224d88dfe60d6","tests/unit.rs":"be025a7d9bab3bd1ce134c87f9d848269e157b31ca5ba0ea03426c1ac736b69e","tests/uts46.rs":"ca91d48811d366fb9e32d7aa79cfda1261b93c271b6ed7fb5535de9a2500205b"},"package":"418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"}
|
||||
{"files":{"Cargo.toml":"32d6c206ec373e92afd20d07a6337c45e069b00e4a80344c82f010e72eb22605","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","benches/all.rs":"e734b9c9092ed66986725f86cfe90f3756cfddb058af308b796ba494f9beefc2","src/IdnaMappingTable.txt":"87d6553a4b86bc49dcade38bf26b745cd81800eb8af295dc3fb99b4729eaea38","src/lib.rs":"e7fd80070a7e52dfd1e9fe785bf092eddc9fb421fd0f9a1ba1c2189b8d40d3ed","src/make_uts46_mapping_table.py":"917055fa841f813de2bcf79cc79b595da3d5551559ee768db8660ab77cb26c34","src/punycode.rs":"0e4a879cffee5af747ab3042928b4ff682b1c4881b40f9f241511668109ae124","src/uts46.rs":"f2dc62e5d7472eba2676e073bf5c0a4c635da5487740d8dc4310f61831859702","src/uts46_mapping_table.rs":"942fff78147c61da942f5f3a7ff4e90f9d7a00a29285733ac3fc3357eb2ed06f","tests/IdnaTestV2.txt":"c6f3778b0545fd150c8063286c7f5adc901e16557eddccc3751213646d07593d","tests/punycode.rs":"cd55e1ba1150a3a2a05aec6617affd1fed38c53cb344024a6556918a02ced78d","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"d205a2bfb29dfee73e014faebd3207a55ef0d40121e6dbd52f5d611b37ac111e","tests/unit.rs":"be025a7d9bab3bd1ce134c87f9d848269e157b31ca5ba0ea03426c1ac736b69e","tests/uts46.rs":"06c97bf7dc20f5372b542fa46922d6dd63fe15e0aa34d799d08df9e3a241aa21"},"package":"7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"}
|
|
@ -3,21 +3,22 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
rust-version = "1.51"
|
||||
name = "idna"
|
||||
version = "0.2.3"
|
||||
version = "0.4.0"
|
||||
authors = ["The rust-url developers"]
|
||||
autotests = false
|
||||
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
|
||||
license = "MIT/Apache-2.0"
|
||||
categories = ["no_std"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/servo/rust-url/"
|
||||
|
||||
[lib]
|
||||
|
@ -33,22 +34,33 @@ name = "unit"
|
|||
[[bench]]
|
||||
name = "all"
|
||||
harness = false
|
||||
[dependencies.matches]
|
||||
version = "0.1"
|
||||
|
||||
[dependencies.unicode-bidi]
|
||||
version = "0.3"
|
||||
version = "0.3.10"
|
||||
features = ["hardcoded-data"]
|
||||
default-features = false
|
||||
|
||||
[dependencies.unicode-normalization]
|
||||
version = "0.1.17"
|
||||
version = "0.1.22"
|
||||
default-features = false
|
||||
|
||||
[dev-dependencies.assert_matches]
|
||||
version = "1.3"
|
||||
|
||||
[dev-dependencies.bencher]
|
||||
version = "0.1"
|
||||
|
||||
[dev-dependencies.rustc-test]
|
||||
version = "0.3"
|
||||
|
||||
[dev-dependencies.serde_json]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.tester]
|
||||
version = "0.9"
|
||||
|
||||
[features]
|
||||
alloc = []
|
||||
default = ["std"]
|
||||
std = [
|
||||
"alloc",
|
||||
"unicode-bidi/std",
|
||||
"unicode-normalization/std",
|
||||
]
|
||||
|
|
|
@@ -1,4 +1,4 @@
Copyright (c) 2013-2016 The rust-url developers
Copyright (c) 2013-2022 The rust-url developers

Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -31,9 +31,22 @@
//! > This document specifies a mechanism
//! > that minimizes the impact of this transition for client software,
//! > allowing client software to access domains that are valid under either system.
#![no_std]

// For forwards compatibility
#[cfg(feature = "std")]
extern crate std;

extern crate alloc;

#[cfg(not(feature = "alloc"))]
compile_error!("the `alloc` feature must be enabled");

#[cfg(test)]
#[macro_use]
extern crate matches;
extern crate assert_matches;

use alloc::string::String;

pub mod punycode;
mod uts46;
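For reference, a sketch (illustrative, not from the patch) of the high-level UTS #46 entry point that the uts46.rs hunks further below refactor, assuming the `Config` builder methods shown in those hunks (`use_std3_ascii_rules`, `verify_dns_length`, `to_ascii`); the domain string is made up.

```rust
// Illustrative only: UTS #46 ToASCII through the idna crate's Config API.
use idna::Config;

fn main() -> Result<(), idna::Errors> {
    let ascii = Config::default()
        .use_std3_ascii_rules(true)
        .verify_dns_length(true)
        .to_ascii("straße.example")?;
    // Non-ASCII labels come back Punycode-encoded ("xn--..." form).
    println!("{}", ascii);
    Ok(())
}
```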
@ -13,8 +13,9 @@
|
|||
//! `encode_str` and `decode_to_string` provide convenience wrappers
|
||||
//! that convert from and to Rust’s UTF-8 based `str` and `String` types.
|
||||
|
||||
use std::char;
|
||||
use std::u32;
|
||||
use alloc::{string::String, vec::Vec};
|
||||
use core::char;
|
||||
use core::u32;
|
||||
|
||||
// Bootstring parameters for Punycode
|
||||
static BASE: u32 = 36;
|
||||
|
@ -168,7 +169,7 @@ impl Decoder {
|
|||
}
|
||||
|
||||
pub(crate) struct Decode<'a> {
|
||||
base: std::str::Chars<'a>,
|
||||
base: core::str::Chars<'a>,
|
||||
pub(crate) insertions: &'a [(usize, char)],
|
||||
inserted: usize,
|
||||
position: usize,
|
||||
|
|
|
@ -11,7 +11,9 @@
|
|||
|
||||
use self::Mapping::*;
|
||||
use crate::punycode;
|
||||
use std::{error::Error as StdError, fmt};
|
||||
|
||||
use alloc::string::String;
|
||||
use core::fmt;
|
||||
use unicode_bidi::{bidi_class, BidiClass};
|
||||
use unicode_normalization::char::is_combining_mark;
|
||||
use unicode_normalization::{is_nfc, UnicodeNormalization};
|
||||
|
@ -70,10 +72,10 @@ fn find_char(codepoint: char) -> &'static Mapping {
|
|||
}
|
||||
|
||||
struct Mapper<'a> {
|
||||
chars: std::str::Chars<'a>,
|
||||
chars: core::str::Chars<'a>,
|
||||
config: Config,
|
||||
errors: &'a mut Errors,
|
||||
slice: Option<std::str::Chars<'static>>,
|
||||
slice: Option<core::str::Chars<'static>>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Mapper<'a> {
|
||||
|
@ -156,7 +158,7 @@ fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
|
|||
// LTR label
|
||||
BidiClass::L => {
|
||||
// Rule 5
|
||||
while let Some(c) = chars.next() {
|
||||
for c in chars.by_ref() {
|
||||
if !matches!(
|
||||
bidi_class(c),
|
||||
BidiClass::L
|
||||
|
@ -274,7 +276,7 @@ fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
|
|||
/// http://www.unicode.org/reports/tr46/#Validity_Criteria
|
||||
fn check_validity(label: &str, config: Config, errors: &mut Errors) {
|
||||
let first_char = label.chars().next();
|
||||
if first_char == None {
|
||||
if first_char.is_none() {
|
||||
// Empty string, pass
|
||||
return;
|
||||
}
|
||||
|
@ -318,51 +320,48 @@ fn check_validity(label: &str, config: Config, errors: &mut Errors) {
|
|||
// V8: Bidi rules are checked inside `processing()`
|
||||
}
|
||||
|
||||
/// http://www.unicode.org/reports/tr46/#Processing
|
||||
#[allow(clippy::manual_strip)] // introduced in 1.45, MSRV is 1.36
|
||||
fn processing(
|
||||
domain: &str,
|
||||
config: Config,
|
||||
normalized: &mut String,
|
||||
output: &mut String,
|
||||
) -> Errors {
|
||||
// Weed out the simple cases: only allow all lowercase ASCII characters and digits where none
|
||||
// of the labels start with PUNYCODE_PREFIX and labels don't start or end with hyphen.
|
||||
let (mut prev, mut simple, mut puny_prefix) = ('?', !domain.is_empty(), 0);
|
||||
// Detect simple cases: all lowercase ASCII characters and digits where none
|
||||
// of the labels start with PUNYCODE_PREFIX and labels don't start or end with hyphen.
|
||||
fn is_simple(domain: &str) -> bool {
|
||||
if domain.is_empty() {
|
||||
return false;
|
||||
}
|
||||
let (mut prev, mut puny_prefix) = ('?', 0);
|
||||
for c in domain.chars() {
|
||||
if c == '.' {
|
||||
if prev == '-' {
|
||||
simple = false;
|
||||
break;
|
||||
return false;
|
||||
}
|
||||
puny_prefix = 0;
|
||||
continue;
|
||||
} else if puny_prefix == 0 && c == '-' {
|
||||
simple = false;
|
||||
break;
|
||||
return false;
|
||||
} else if puny_prefix < 5 {
|
||||
if c == ['x', 'n', '-', '-'][puny_prefix] {
|
||||
puny_prefix += 1;
|
||||
if puny_prefix == 4 {
|
||||
simple = false;
|
||||
break;
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
puny_prefix = 5;
|
||||
}
|
||||
}
|
||||
if !c.is_ascii_lowercase() && !c.is_ascii_digit() {
|
||||
simple = false;
|
||||
break;
|
||||
return false;
|
||||
}
|
||||
prev = c;
|
||||
}
|
||||
|
||||
if simple {
|
||||
output.push_str(domain);
|
||||
return Errors::default();
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
/// http://www.unicode.org/reports/tr46/#Processing
|
||||
fn processing(
|
||||
domain: &str,
|
||||
config: Config,
|
||||
normalized: &mut String,
|
||||
output: &mut String,
|
||||
) -> Errors {
|
||||
normalized.clear();
|
||||
let mut errors = Errors::default();
|
||||
let offset = output.len();
|
||||
|
@ -384,8 +383,8 @@ fn processing(
|
|||
output.push('.');
|
||||
}
|
||||
first = false;
|
||||
if label.starts_with(PUNYCODE_PREFIX) {
|
||||
match decoder.decode(&label[PUNYCODE_PREFIX.len()..]) {
|
||||
if let Some(remainder) = label.strip_prefix(PUNYCODE_PREFIX) {
|
||||
match decoder.decode(remainder) {
|
||||
Ok(decode) => {
|
||||
let start = output.len();
|
||||
output.extend(decode);
|
||||
|
@ -396,7 +395,7 @@ fn processing(
|
|||
}
|
||||
|
||||
if !errors.is_err() {
|
||||
if !is_nfc(&decoded_label) {
|
||||
if !is_nfc(decoded_label) {
|
||||
errors.nfc = true;
|
||||
} else {
|
||||
check_validity(decoded_label, non_transitional, &mut errors);
|
||||
|
@ -448,11 +447,13 @@ impl Idna {
|
|||
}
|
||||
}
|
||||
|
||||
/// http://www.unicode.org/reports/tr46/#ToASCII
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
pub fn to_ascii<'a>(&'a mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
|
||||
let mut errors = processing(domain, self.config, &mut self.normalized, &mut self.output);
|
||||
|
||||
pub fn to_ascii_inner(&mut self, domain: &str, out: &mut String) -> Errors {
|
||||
if is_simple(domain) {
|
||||
out.push_str(domain);
|
||||
return Errors::default();
|
||||
}
|
||||
let mut errors = processing(domain, self.config, &mut self.normalized, out);
|
||||
self.output = std::mem::replace(out, String::with_capacity(out.len()));
|
||||
let mut first = true;
|
||||
for label in self.output.split('.') {
|
||||
if !first {
|
||||
|
@ -471,6 +472,13 @@ impl Idna {
|
|||
}
|
||||
}
|
||||
}
|
||||
errors
|
||||
}
|
||||
|
||||
/// http://www.unicode.org/reports/tr46/#ToASCII
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
pub fn to_ascii(&mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
|
||||
let mut errors = self.to_ascii_inner(domain, out);
|
||||
|
||||
if self.config.verify_dns_length {
|
||||
let domain = if out.ends_with('.') {
|
||||
|
@ -491,7 +499,11 @@ impl Idna {
|
|||
|
||||
/// http://www.unicode.org/reports/tr46/#ToUnicode
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
pub fn to_unicode<'a>(&'a mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
|
||||
pub fn to_unicode(&mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
|
||||
if is_simple(domain) {
|
||||
out.push_str(domain);
|
||||
return Errors::default().into();
|
||||
}
|
||||
processing(domain, self.config, &mut self.normalized, out).into()
|
||||
}
|
||||
}
|
||||
|
@ -555,7 +567,7 @@ impl Config {
|
|||
|
||||
/// http://www.unicode.org/reports/tr46/#ToASCII
|
||||
pub fn to_ascii(self, domain: &str) -> Result<String, Errors> {
|
||||
let mut result = String::new();
|
||||
let mut result = String::with_capacity(domain.len());
|
||||
let mut codec = Idna::new(self);
|
||||
codec.to_ascii(domain, &mut result).map(|()| result)
|
||||
}
|
||||
|
@ -675,7 +687,7 @@ impl fmt::Debug for Errors {
|
|||
if !empty {
|
||||
f.write_str(", ")?;
|
||||
}
|
||||
f.write_str(*name)?;
|
||||
f.write_str(name)?;
|
||||
empty = false;
|
||||
}
|
||||
}
|
||||
|
@ -698,7 +710,8 @@ impl From<Errors> for Result<(), Errors> {
|
|||
}
|
||||
}
|
||||
|
||||
impl StdError for Errors {}
|
||||
#[cfg(feature = "std")]
|
||||
impl std::error::Error for Errors {}
|
||||
|
||||
impl fmt::Display for Errors {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
|
|
|
@ -41,7 +41,7 @@ fn one_test(decoded: &str, encoded: &str) {
|
|||
|
||||
fn get_string<'a>(map: &'a Map<String, Value>, key: &str) -> &'a str {
|
||||
match map.get(&key.to_string()) {
|
||||
Some(&Value::String(ref s)) => s,
|
||||
Some(Value::String(s)) => s,
|
||||
None => "",
|
||||
_ => panic!(),
|
||||
}
|
||||
|
@ -63,9 +63,9 @@ pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
|
|||
};
|
||||
add_test(
|
||||
test_name,
|
||||
TestFn::dyn_test_fn(move || {
|
||||
TestFn::DynTestFn(Box::new(move || {
|
||||
one_test(get_string(&o, "decoded"), get_string(&o, "encoded"))
|
||||
}),
|
||||
})),
|
||||
)
|
||||
}
|
||||
_ => panic!(),
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use rustc_test as test;
|
||||
use tester as test;
|
||||
|
||||
mod punycode;
|
||||
mod uts46;
|
||||
|
@ -8,12 +8,18 @@ fn main() {
|
|||
{
|
||||
let mut add_test = |name, run| {
|
||||
tests.push(test::TestDescAndFn {
|
||||
desc: test::TestDesc::new(test::DynTestName(name)),
|
||||
desc: test::TestDesc {
|
||||
name: test::DynTestName(name),
|
||||
ignore: false,
|
||||
should_panic: test::ShouldPanic::No,
|
||||
allow_fail: false,
|
||||
test_type: test::TestType::Unknown,
|
||||
},
|
||||
testfn: run,
|
||||
})
|
||||
};
|
||||
punycode::collect_tests(&mut add_test);
|
||||
uts46::collect_tests(&mut add_test);
|
||||
}
|
||||
test::test_main(&std::env::args().collect::<Vec<_>>(), tests)
|
||||
test::test_main(&std::env::args().collect::<Vec<_>>(), tests, None)
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
|
||||
use crate::test::TestFn;
|
||||
use std::char;
|
||||
use std::fmt::Write;
|
||||
|
||||
use idna::Errors;
|
||||
|
||||
|
@ -25,10 +26,10 @@ pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
|
|||
};
|
||||
|
||||
let mut pieces = line.split(';').map(|x| x.trim()).collect::<Vec<&str>>();
|
||||
let source = unescape(&pieces.remove(0));
|
||||
let source = unescape(pieces.remove(0));
|
||||
|
||||
// ToUnicode
|
||||
let mut to_unicode = unescape(&pieces.remove(0));
|
||||
let mut to_unicode = unescape(pieces.remove(0));
|
||||
if to_unicode.is_empty() {
|
||||
to_unicode = source.clone();
|
||||
}
|
||||
|
@ -65,7 +66,7 @@ pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
|
|||
let test_name = format!("UTS #46 line {}", i + 1);
|
||||
add_test(
|
||||
test_name,
|
||||
TestFn::dyn_test_fn(move || {
|
||||
TestFn::DynTestFn(Box::new(move || {
|
||||
let config = idna::Config::default()
|
||||
.use_std3_ascii_rules(true)
|
||||
.verify_dns_length(true)
|
||||
|
@ -109,7 +110,7 @@ pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
|
|||
to_ascii_t_result,
|
||||
|e| e.starts_with('C') || e == "V2",
|
||||
);
|
||||
}),
|
||||
})),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -160,8 +161,8 @@ fn unescape(input: &str) -> String {
|
|||
match char::from_u32(((c1 * 16 + c2) * 16 + c3) * 16 + c4) {
|
||||
Some(c) => output.push(c),
|
||||
None => {
|
||||
output
|
||||
.push_str(&format!("\\u{:X}{:X}{:X}{:X}", c1, c2, c3, c4));
|
||||
write!(&mut output, "\\u{:X}{:X}{:X}{:X}", c1, c2, c3, c4)
|
||||
.expect("Could not write to output");
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"7330cca9fe75c0f7cadf87d42f1af587e57d7f3e90efad7b4476d8d78580e435","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","src/lib.rs":"5eafa40dda1d5de9770181b836c37b078f790ccdee4f461d4601fc5bc17716b8"},"package":"478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"}
|
||||
{"files":{"Cargo.toml":"876192ec0c492d9e09d2835252e1f75c7988cebb0b9bc9cfcfe47250d9481429","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","src/lib.rs":"9f55aebbabb7e07c64382218a16477b95ba7120d5655d318c3d4dfe7721aa531"},"package":"9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"}
|
|
@@ -13,12 +13,14 @@
edition = "2018"
rust-version = "1.51"
name = "percent-encoding"
version = "2.2.0"
version = "2.3.0"
authors = ["The rust-url developers"]
description = "Percent encoding and decoding"
categories = ["no_std"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/servo/rust-url/"

[features]
alloc = []
default = ["alloc"]
default = ["std"]
std = ["alloc"]
@@ -36,8 +36,12 @@
//!
//! assert_eq!(utf8_percent_encode("foo <bar>", FRAGMENT).to_string(), "foo%20%3Cbar%3E");
//! ```

#![no_std]

// For forwards compatibility
#[cfg(feature = "std")]
extern crate std as _;

#[cfg(feature = "alloc")]
extern crate alloc;

@@ -180,9 +184,9 @@ pub const NON_ALPHANUMERIC: &AsciiSet = &CONTROLS
/// assert_eq!("foo bar".bytes().map(percent_encode_byte).collect::<String>(),
///            "%66%6F%6F%20%62%61%72");
/// ```
#[inline]
pub fn percent_encode_byte(byte: u8) -> &'static str {
    let index = usize::from(byte) * 3;
    &"\
    static ENC_TABLE: &[u8; 768] = b"\
%00%01%02%03%04%05%06%07%08%09%0A%0B%0C%0D%0E%0F\
%10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E%1F\
%20%21%22%23%24%25%26%27%28%29%2A%2B%2C%2D%2E%2F\

@@ -199,7 +203,12 @@ pub fn percent_encode_byte(byte: u8) -> &'static str {
%D0%D1%D2%D3%D4%D5%D6%D7%D8%D9%DA%DB%DC%DD%DE%DF\
%E0%E1%E2%E3%E4%E5%E6%E7%E8%E9%EA%EB%EC%ED%EE%EF\
%F0%F1%F2%F3%F4%F5%F6%F7%F8%F9%FA%FB%FC%FD%FE%FF\
"[index..index + 3]
";

    let index = usize::from(byte) * 3;
    // SAFETY: ENC_TABLE is ascii-only, so any subset if it should be
    // ascii-only too, which is valid utf8.
    unsafe { str::from_utf8_unchecked(&ENC_TABLE[index..index + 3]) }
}

/// Percent-encode the given bytes with the given set.
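A small usage sketch (illustrative, not part of the patch) of the percent-encoding API changed above, using the `FRAGMENT` set from the crate's own documentation together with `utf8_percent_encode` and `percent_decode_str`:

```rust
// Illustrative only: percent-encoding round trip with a custom AsciiSet.
use percent_encoding::{percent_decode_str, utf8_percent_encode, AsciiSet, CONTROLS};

// Encode controls plus the characters not allowed in a URL fragment.
const FRAGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');

fn main() {
    let encoded = utf8_percent_encode("foo <bar>", FRAGMENT).to_string();
    assert_eq!(encoded, "foo%20%3Cbar%3E");

    let decoded = percent_decode_str(&encoded).decode_utf8_lossy();
    assert_eq!(decoded, "foo <bar>");
}
```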
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"911d5d7758567d81f098c351b005ca2b9a9963dce59b2e4c9601990b23d32bc1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"20c7855c364d57ea4c97889a5e8d98470a9952dade37bd9248b9a54431670e5e","README.md":"8f7f1699e885e867f16e2beacf161751735582fbc080ada0762ba512244f28fc","UPGRADING.md":"fbcc2d39bdf17db0745793db6626fcd5c909dddd4ce13b27566cfabece22c368","appveyor.yml":"c78486dbfbe6ebbf3d808afb9a19f7ec18c4704ce451c6305f0716999b70a1a6","benches/parse_url.rs":"821ecb051c3c6c40eb3b268ba7337b2988333627d0af0c8e1afc84734ffbbf2b","src/form_urlencoded.rs":"97b948b959460ef1323bdbb9147eef3edac9db2e236f75cb49668cbab8d1f708","src/host.rs":"8401138bbda58771e0377e5e45b695844a1ad3320584115d931e933c2097e4d1","src/lib.rs":"f822ca47da43bb283af3b590d541321ff1ec828045e12a8d97fb688e7e3f8610","src/origin.rs":"5ee6d1dc360a191362e2bee9840bdc26df8f20cb1b70282024cebd3e14490e92","src/parser.rs":"912733b8f62bf765e077dde2bc21918bef39f6f41d262059cd1738f3d6a8824c","src/path_segments.rs":"c322c048a075db47dd73289e83876cbb25b69a3b17cad6054ddb47a93fab85dd","src/query_encoding.rs":"88d31936327461af1382393117fc07bbdf064c6930aaff3cd8b38d2343e41b51","src/quirks.rs":"a5be1ade22b29e86b432d0340cc5737614b28c7db0df36d7c1b6ea84e60e3c83","src/slicing.rs":"a59ec10a3c3a6b454f66014ca7fd949ea48159a13b100fca2712c784021ccdc3","tests/data.rs":"3c8c1255d86c1d1cfb17d4c979c18dd13d4f7386512b95be9a47dd755502fe68","tests/setters_tests.json":"08ddaa632ad19c81e83b904bfaa94bc971f26e2bdfcef27d2f93fd033ad57340","tests/unit.rs":"6068d8386d56d7a4495eb81bbd9b21bca7696755cf5b14efc3d7b33cfad24a19","tests/urltestdata.json":"1b0c7c727d8d7e79dfb0d0aa347ff05675ddb68bc4ead38f83fd8e89bc59cc32"},"package":"75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"}
|
||||
{"files":{"Cargo.toml":"e8485127c0165af45af4d307166b2db764ee5e1299b9a2cb81c144376361614e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","src/host.rs":"5e25476aaec0153b64d35b53940a72a1ec58e29a0e1fde36944f52eeb945c5f6","src/lib.rs":"1491370cc37bcf7e86dcaa060e24fa83db5ee49c2b91637a188d1407caa1611e","src/origin.rs":"19a4b451e8615bfef7239d2fc719c489398fe5044edb0df7c84b54eef4ceba1b","src/parser.rs":"5427cd15caedc8e3c1418cc576a7263e96df26a51ad3ce88f8c32d3eb7d6dd2c","src/path_segments.rs":"29db87b6902da4ab1ae925b3874afdeff42b8ddfb46356af6a83b86f34e03b14","src/quirks.rs":"c9311e3dd6f701fb4b8e438b3e3960ff6f8c78a67ae763f3640b178f15c60e45","src/slicing.rs":"39f4e624adbdbf952b7da8bfe6abdfa6e344193d6e1bbca1b0ccbfc821573f10","tests/expected_failures.txt":"1afb47b036a9fd2a9db671f233ebfa2802b5cd27c4e65839b518ab0a24380ff2","tests/setters_tests.json":"a3a4cbd7b798bc2c4d9656dc50be7397a5a5ed1f0b52daa1da1ad654d38c1dcd","tests/unit.rs":"1abe0a410c5078e1ad9de8c93f2f2ae660ddb47b7efaac9047e952457b068ded","tests/urltestdata.json":"58d67bea710d5f46324fe6841df5fd82090fe4ec2d882bc0fc7c1784d4771884","tests/wpt.rs":"6302c008cde6e7c0df8626701cc825731363722d02e35804bb370c385b455145"},"package":"143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"}
|
|
@ -3,46 +3,50 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
rust-version = "1.56"
|
||||
name = "url"
|
||||
version = "2.1.0"
|
||||
version = "2.4.1"
|
||||
authors = ["The rust-url developers"]
|
||||
include = ["src/**/*", "LICENSE-*", "README.md", "tests/**"]
|
||||
description = "URL library for Rust, based on the WHATWG URL Standard"
|
||||
documentation = "https://docs.rs/url"
|
||||
readme = "README.md"
|
||||
readme = "../README.md"
|
||||
keywords = ["url", "parser"]
|
||||
categories = ["parser-implementations", "web-programming", "encoding"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/servo/rust-url"
|
||||
[package.metadata.docs.rs]
|
||||
features = ["serde"]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
[package.metadata.playground]
|
||||
features = ["serde"]
|
||||
|
||||
[[test]]
|
||||
name = "unit"
|
||||
|
||||
[[test]]
|
||||
name = "data"
|
||||
name = "url_wpt"
|
||||
path = "tests/wpt.rs"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "parse_url"
|
||||
path = "benches/parse_url.rs"
|
||||
harness = false
|
||||
[dependencies.idna]
|
||||
version = "0.2.0"
|
||||
[dependencies.form_urlencoded]
|
||||
version = "1.2.0"
|
||||
|
||||
[dependencies.matches]
|
||||
version = "0.1"
|
||||
[dependencies.idna]
|
||||
version = "0.4.0"
|
||||
|
||||
[dependencies.percent-encoding]
|
||||
version = "2.0.0"
|
||||
version = "2.3.0"
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0"
|
||||
|
@ -51,13 +55,14 @@ optional = true
|
|||
[dev-dependencies.bencher]
|
||||
version = "0.1"
|
||||
|
||||
[dev-dependencies.rustc-test]
|
||||
version = "0.3"
|
||||
[dev-dependencies.serde]
|
||||
version = "1.0"
|
||||
features = ["derive"]
|
||||
|
||||
[dev-dependencies.serde_json]
|
||||
version = "1.0"
|
||||
[badges.appveyor]
|
||||
repository = "Manishearth/rust-url"
|
||||
|
||||
[badges.travis-ci]
|
||||
repository = "servo/rust-url"
|
||||
[features]
|
||||
debugger_visualizer = []
|
||||
default = []
|
||||
expose_internals = []
|
||||
|
|
|
@@ -1,4 +1,4 @@
Copyright (c) 2013-2016 The rust-url developers
Copyright (c) 2013-2022 The rust-url developers

Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
@@ -1,10 +0,0 @@
rust-url
========

[![Travis build Status](https://travis-ci.com/servo/rust-url.svg?branch=master)](https://travis-ci.com/servo/rust-url) [![Appveyor build status](https://ci.appveyor.com/api/projects/status/ulkqx2xcemyod6xa?svg=true)](https://ci.appveyor.com/project/Manishearth/rust-url)

URL library for Rust, based on the [URL Standard](https://url.spec.whatwg.org/).

[Documentation](https://docs.rs/url/)

Please see [UPGRADING.md](https://github.com/servo/rust-url/blob/master/UPGRADING.md) if you are upgrading from 0.x to 1.x.
@ -1,263 +0,0 @@
|
|||
# Guide to upgrading from url 0.x to 1.x
|
||||
|
||||
* The fields of `Url` are now private because the `Url` constructor, parser,
|
||||
and setters maintain invariants that could be violated if you were to set the fields directly.
|
||||
Instead of accessing, for example, `url.scheme`, use the getter method, such as `url.scheme()`.
|
||||
Instead of assigning directly to a field, for example `url.scheme = "https".to_string()`,
|
||||
use the setter method, such as `url.set_scheme("https").unwrap()`.
|
||||
(Some setters validate the new value and return a `Result` that must be used).
|
||||
|
||||
* The methods of `Url` now return `&str` instead of `String`,
|
||||
thus reducing allocations and making serialization cheap.
|
||||
|
||||
* The `path()` method on `url::Url` instances used to return `Option<&[String]>`;
|
||||
now it returns `&str`.
|
||||
If you would like functionality more similar to the old behavior of `path()`,
|
||||
use `path_segments()` that returns `Option<str::Split<char>>`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let issue_list_url = Url::parse(
|
||||
"https://github.com/rust-lang/rust/issues?labels=E-easy&state=open"
|
||||
).unwrap();
|
||||
assert_eq!(issue_list_url.path(), Some(&["rust-lang".to_string(),
|
||||
"rust".to_string(),
|
||||
"issues".to_string()][..]));
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let issue_list_url = Url::parse(
|
||||
"https://github.com/rust-lang/rust/issues?labels=E-easy&state=open"
|
||||
).unwrap();
|
||||
assert_eq!(issue_list_url.path(), "/rust-lang/rust/issues");
|
||||
assert_eq!(issue_list_url.path_segments().map(|c| c.collect::<Vec<_>>()),
|
||||
Some(vec!["rust-lang", "rust", "issues"]));
|
||||
```
|
||||
|
||||
* The `path_mut()` method on `url::Url` instances that allowed modification of a URL's path
|
||||
has been replaced by `path_segments_mut()`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let mut url = Url::parse("https://github.com/rust-lang/rust").unwrap();
|
||||
url.path_mut().unwrap().push("issues");
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let mut url = Url::parse("https://github.com/rust-lang/rust").unwrap();
|
||||
url.path_segments_mut().unwrap().push("issues");
|
||||
```
|
||||
|
||||
* The `domain_mut()` method on `url::Url` instances that allowed modification of a URL's domain
|
||||
has been replaced by `set_host()` and `set_ip_host()`.
|
||||
|
||||
* The `host()` method on `url::Url` instances used to return `Option<&Host>`;
|
||||
now it returns `Option<Host<&str>>`.
|
||||
The `serialize_host()` method that returned `Option<String>`
|
||||
has been replaced by the `host_str()` method that returns `Option<&str>`.
|
||||
|
||||
* The `serialize()` method on `url::Url` instances that returned `String`
|
||||
has been replaced by an `as_str()` method that returns `&str`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
|
||||
assert_eq!(this_document.serialize(), "http://servo.github.io/rust-url/url/index.html".to_string());
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
|
||||
assert_eq!(this_document.as_str(), "http://servo.github.io/rust-url/url/index.html");
|
||||
```
|
||||
|
||||
* `url::UrlParser` has been replaced by `url::Url::parse()` and `url::Url::join()`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
|
||||
let css_url = UrlParser::new().base_url(&this_document).parse("../main.css").unwrap();
|
||||
assert_eq!(css_url.serialize(), "http://servo.github.io/rust-url/main.css".to_string());
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let this_document = Url::parse("http://servo.github.io/rust-url/url/index.html").unwrap();
|
||||
let css_url = this_document.join("../main.css").unwrap();
|
||||
assert_eq!(css_url.as_str(), "http://servo.github.io/rust-url/main.css");
|
||||
```
|
||||
|
||||
* `url::parse_path()` and `url::UrlParser::parse_path()` have been removed without replacement.
|
||||
As a workaround, you can give a base URL that you then ignore too `url::Url::parse()`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let (path, query, fragment) = url::parse_path("/foo/bar/../baz?q=42").unwrap();
|
||||
assert_eq!(path, vec!["foo".to_string(), "baz".to_string()]);
|
||||
assert_eq!(query, Some("q=42".to_string()));
|
||||
assert_eq!(fragment, None);
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let base = Url::parse("http://example.com").unwrap();
|
||||
let with_path = base.join("/foo/bar/../baz?q=42").unwrap();
|
||||
assert_eq!(with_path.path(), "/foo/baz");
|
||||
assert_eq!(with_path.query(), Some("q=42"));
|
||||
assert_eq!(with_path.fragment(), None);
|
||||
```
|
||||
|
||||
* The `url::form_urlencoded::serialize()` method
|
||||
has been replaced with the `url::form_urlencoded::Serializer` struct.
|
||||
Instead of calling `serialize()` with key/value pairs,
|
||||
create a new `Serializer` with a new string,
|
||||
call the `extend_pairs()` method on the `Serializer` instance with the key/value pairs as the argument,
|
||||
then call `finish()`.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let form = url::form_urlencoded::serialize(form.iter().map(|(k, v)| {
|
||||
(&k[..], &v[..])
|
||||
}));
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let form = url::form_urlencoded::Serializer::new(String::new()).extend_pairs(
|
||||
form.iter().map(|(k, v)| { (&k[..], &v[..]) })
|
||||
).finish();
|
||||
```
|
||||
|
||||
* The `set_query_from_pairs()` method on `url::Url` instances that took key/value pairs
|
||||
has been replaced with `query_pairs_mut()`, which allows you to modify the `url::Url`'s query pairs.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let mut url = Url::parse("https://duckduckgo.com/").unwrap();
|
||||
let pairs = vec![
|
||||
("q", "test"),
|
||||
("ia", "images"),
|
||||
];
|
||||
url.set_query_from_pairs(pairs.iter().map(|&(k, v)| {
|
||||
(&k[..], &v[..])
|
||||
}));
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let mut url = Url::parse("https://duckduckgo.com/").unwrap();
|
||||
let pairs = vec![
|
||||
("q", "test"),
|
||||
("ia", "images"),
|
||||
];
|
||||
url.query_pairs_mut().clear().extend_pairs(
|
||||
pairs.iter().map(|&(k, v)| { (&k[..], &v[..]) })
|
||||
);
|
||||
```
|
||||
|
||||
* `url::SchemeData`, its variants `Relative` and `NonRelative`,
|
||||
and the struct `url::RelativeSchemeData` have been removed.
|
||||
Instead of matching on these variants
|
||||
to determine if you have a URL in a relative scheme such as HTTP
|
||||
versus a URL in a non-relative scheme as data,
|
||||
use the `cannot_be_a_base()` method to determine which kind you have.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
match url.scheme_data {
|
||||
url::SchemeData::Relative(..) => {}
|
||||
url::SchemeData::NonRelative(..) => {
|
||||
return Err(human(format!("`{}` must have relative scheme \
|
||||
data: {}", field, url)))
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
if url.cannot_be_a_base() {
|
||||
return Err(human(format!("`{}` must have relative scheme \
|
||||
data: {}", field, url)))
|
||||
}
|
||||
```
|
||||
|
||||
* The functions `url::whatwg_scheme_type_mapper()`, the `SchemeType` enum,
|
||||
and the `scheme_type_mapper()` method on `url::UrlParser` instances have been removed.
|
||||
`SchemeType` had a method for getting the `default_port()`;
|
||||
to replicate this functionality, use the method `port_or_known_default()` on `url::Url` instances.
|
||||
The `port_or_default()` method on `url::Url` instances has been removed;
|
||||
use `port_or_known_default()` instead.
|
||||
|
||||
Before upgrading:
|
||||
|
||||
```rust
|
||||
let port = match whatwg_scheme_type_mapper(&url.scheme) {
|
||||
SchemeType::Relative(port) => port,
|
||||
_ => return Err(format!("Invalid special scheme: `{}`",
|
||||
raw_url.scheme)),
|
||||
};
|
||||
```
|
||||
|
||||
After upgrading:
|
||||
|
||||
```rust
|
||||
let port = match url.port_or_known_default() {
|
||||
Some(port) => port,
|
||||
_ => return Err(format!("Invalid special scheme: `{}`",
|
||||
url.scheme())),
|
||||
};
|
||||
```
|
||||
|
||||
* The following formatting utilities have been removed without replacement;
|
||||
look at their linked previous implementations
|
||||
if you would like to replicate the functionality in your code:
|
||||
* [`url::format::PathFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL24)
|
||||
* [`url::format::UserInfoFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL50)
|
||||
* [`url::format::UrlNoFragmentFormatter`](https://github.com/servo/rust-url/pull/176/commits/9e759f18726c8e1343162922b87163d4dd08fe3c#diff-0bb16ac13b75e9b568fa4aff61b0e71dL70)
|
||||
|
||||
* `url::percent_encoding::percent_decode()` used to have a return type of `Vec<u8>`;
|
||||
now it returns an iterator of decoded `u8` bytes that also implements `Into<Cow<u8>>`.
|
||||
Use `.into().to_owned()` to obtain a `Vec<u8>`.
|
||||
(`.collect()` also works but might not be as efficient.)
|
||||
|
||||
* The `url::percent_encoding::EncodeSet` struct and constant instances
|
||||
used with `url::percent_encoding::percent_encode()`
|
||||
have been changed to structs that implement the trait `url::percent_encoding::EncodeSet`.
|
||||
* `SIMPLE_ENCODE_SET`, `QUERY_ENCODE_SET`, `DEFAULT_ENCODE_SET`,
|
||||
and `USERINFO_ENCODE_SET` have the same behavior.
|
||||
* `USERNAME_ENCODE_SET` and `PASSWORD_ENCODE_SET` have been removed;
|
||||
use `USERINFO_ENCODE_SET` instead.
|
||||
* `HTTP_VALUE_ENCODE_SET` has been removed;
|
||||
an implementation of it in the new types can be found [in hyper's source](
|
||||
https://github.com/hyperium/hyper/blob/67436c5bf615cf5a55a71e32b788afef5985570e/src/header/parsing.rs#L131-L138)
|
||||
if you need to replicate this functionality in your code.
|
||||
* `FORM_URLENCODED_ENCODE_SET` has been removed;
|
||||
instead, use the functionality in `url::form_urlencoded`.
|
||||
* `PATH_SEGMENT_ENCODE_SET` has been added for use on '/'-separated path segments.
|
||||
|
||||
* `url::percent_encoding::percent_decode_to()` has been removed.
|
||||
Use `url::percent_encoding::percent_decode()` which returns an iterator.
|
||||
You can then use the iterator’s `collect()` method
|
||||
or give it to some data structure’s `extend()` method.
|
||||
* A number of `ParseError` variants have changed.
|
||||
[See the documentation for the current set](http://servo.github.io/rust-url/url/enum.ParseError.html).
|
||||
* `url::OpaqueOrigin::new()` and `url::Origin::UID(OpaqueOrigin)`
|
||||
have been replaced by `url::Origin::new_opaque()` and `url::Origin::Opaque(OpaqueOrigin)`, respectively.
|
|
@@ -1,13 +0,0 @@
install:
- ps: Start-FileDownload 'https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe'
- rust-nightly-i686-pc-windows-gnu.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin
- rustc -V
- cargo -V
- git submodule update --init --recursive

build: false

test_script:
- cargo build
- cargo test --verbose
@@ -1,18 +0,0 @@
#[macro_use]
extern crate bencher;

extern crate url;

use bencher::{black_box, Bencher};

use url::Url;

fn short(bench: &mut Bencher) {
    let url = "https://example.com/bench";

    bench.bytes = url.len() as u64;
    bench.iter(|| black_box(url).parse::<Url>().unwrap());
}

benchmark_group!(benches, short);
benchmark_main!(benches);
@ -1,347 +0,0 @@
|
|||
// Copyright 2013-2016 The rust-url developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Parser and serializer for the [`application/x-www-form-urlencoded` syntax](
|
||||
//! http://url.spec.whatwg.org/#application/x-www-form-urlencoded),
|
||||
//! as used by HTML forms.
|
||||
//!
|
||||
//! Converts between a string (such as an URL’s query string)
|
||||
//! and a sequence of (name, value) pairs.
|
||||
|
||||
use percent_encoding::{percent_decode, percent_encode_byte};
|
||||
use query_encoding::{self, decode_utf8_lossy, EncodingOverride};
|
||||
use std::borrow::{Borrow, Cow};
|
||||
use std::str;
|
||||
|
||||
/// Convert a byte string in the `application/x-www-form-urlencoded` syntax
|
||||
/// into a iterator of (name, value) pairs.
|
||||
///
|
||||
/// Use `parse(input.as_bytes())` to parse a `&str` string.
|
||||
///
|
||||
/// The names and values are percent-decoded. For instance, `%23first=%25try%25` will be
|
||||
/// converted to `[("#first", "%try%")]`.
|
||||
#[inline]
|
||||
pub fn parse(input: &[u8]) -> Parse {
|
||||
Parse { input }
|
||||
}
|
||||
/// The return type of `parse()`.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct Parse<'a> {
|
||||
input: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Parse<'a> {
|
||||
type Item = (Cow<'a, str>, Cow<'a, str>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
if self.input.is_empty() {
|
||||
return None;
|
||||
}
|
||||
let mut split2 = self.input.splitn(2, |&b| b == b'&');
|
||||
let sequence = split2.next().unwrap();
|
||||
self.input = split2.next().unwrap_or(&[][..]);
|
||||
if sequence.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let mut split2 = sequence.splitn(2, |&b| b == b'=');
|
||||
let name = split2.next().unwrap();
|
||||
let value = split2.next().unwrap_or(&[][..]);
|
||||
return Some((decode(name), decode(value)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn decode(input: &[u8]) -> Cow<str> {
|
||||
let replaced = replace_plus(input);
|
||||
decode_utf8_lossy(match percent_decode(&replaced).into() {
|
||||
Cow::Owned(vec) => Cow::Owned(vec),
|
||||
Cow::Borrowed(_) => replaced,
|
||||
})
|
||||
}
|
||||
|
||||
/// Replace b'+' with b' '
|
||||
fn replace_plus(input: &[u8]) -> Cow<[u8]> {
|
||||
match input.iter().position(|&b| b == b'+') {
|
||||
None => Cow::Borrowed(input),
|
||||
Some(first_position) => {
|
||||
let mut replaced = input.to_owned();
|
||||
replaced[first_position] = b' ';
|
||||
for byte in &mut replaced[first_position + 1..] {
|
||||
if *byte == b'+' {
|
||||
*byte = b' ';
|
||||
}
|
||||
}
|
||||
Cow::Owned(replaced)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Parse<'a> {
|
||||
/// Return a new iterator that yields pairs of `String` instead of pairs of `Cow<str>`.
|
||||
pub fn into_owned(self) -> ParseIntoOwned<'a> {
|
||||
ParseIntoOwned { inner: self }
|
||||
}
|
||||
}
|
||||
|
||||
/// Like `Parse`, but yields pairs of `String` instead of pairs of `Cow<str>`.
|
||||
pub struct ParseIntoOwned<'a> {
|
||||
inner: Parse<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for ParseIntoOwned<'a> {
|
||||
type Item = (String, String);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.inner
|
||||
.next()
|
||||
.map(|(k, v)| (k.into_owned(), v.into_owned()))
|
||||
}
|
||||
}
|
||||
|
||||
/// The [`application/x-www-form-urlencoded` byte serializer](
|
||||
/// https://url.spec.whatwg.org/#concept-urlencoded-byte-serializer).
|
||||
///
|
||||
/// Return an iterator of `&str` slices.
|
||||
pub fn byte_serialize(input: &[u8]) -> ByteSerialize {
|
||||
ByteSerialize { bytes: input }
|
||||
}
|
||||
|
||||
/// Return value of `byte_serialize()`.
|
||||
#[derive(Debug)]
|
||||
pub struct ByteSerialize<'a> {
|
||||
bytes: &'a [u8],
|
||||
}
|
||||
|
||||
fn byte_serialized_unchanged(byte: u8) -> bool {
|
||||
matches!(byte, b'*' | b'-' | b'.' | b'0' ..= b'9' | b'A' ..= b'Z' | b'_' | b'a' ..= b'z')
|
||||
}
|
||||
|
||||
impl<'a> Iterator for ByteSerialize<'a> {
|
||||
type Item = &'a str;
|
||||
|
||||
fn next(&mut self) -> Option<&'a str> {
|
||||
if let Some((&first, tail)) = self.bytes.split_first() {
|
||||
if !byte_serialized_unchanged(first) {
|
||||
self.bytes = tail;
|
||||
return Some(if first == b' ' {
|
||||
"+"
|
||||
} else {
|
||||
percent_encode_byte(first)
|
||||
});
|
||||
}
|
||||
let position = tail.iter().position(|&b| !byte_serialized_unchanged(b));
|
||||
let (unchanged_slice, remaining) = match position {
|
||||
// 1 for first_byte + i unchanged in tail
|
||||
Some(i) => self.bytes.split_at(1 + i),
|
||||
None => (self.bytes, &[][..]),
|
||||
};
|
||||
self.bytes = remaining;
|
||||
Some(unsafe { str::from_utf8_unchecked(unchanged_slice) })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
if self.bytes.is_empty() {
|
||||
(0, Some(0))
|
||||
} else {
|
||||
(1, Some(self.bytes.len()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The [`application/x-www-form-urlencoded` serializer](
|
||||
/// https://url.spec.whatwg.org/#concept-urlencoded-serializer).
|
||||
pub struct Serializer<'a, T: Target> {
|
||||
target: Option<T>,
|
||||
start_position: usize,
|
||||
encoding: EncodingOverride<'a>,
|
||||
}
|
||||
|
||||
pub trait Target {
|
||||
fn as_mut_string(&mut self) -> &mut String;
|
||||
fn finish(self) -> Self::Finished;
|
||||
type Finished;
|
||||
}
|
||||
|
||||
impl Target for String {
|
||||
fn as_mut_string(&mut self) -> &mut String {
|
||||
self
|
||||
}
|
||||
fn finish(self) -> Self {
|
||||
self
|
||||
}
|
||||
type Finished = Self;
|
||||
}
|
||||
|
||||
impl<'a> Target for &'a mut String {
|
||||
fn as_mut_string(&mut self) -> &mut String {
|
||||
&mut **self
|
||||
}
|
||||
fn finish(self) -> Self {
|
||||
self
|
||||
}
|
||||
type Finished = Self;
|
||||
}
|
||||
|
||||
// `as_mut_string` string here exposes the internal serialization of an `Url`,
|
||||
// which should not be exposed to users.
|
||||
// We achieve that by not giving users direct access to `UrlQuery`:
|
||||
// * Its fields are private
|
||||
// (and so can not be constructed with struct literal syntax outside of this crate),
|
||||
// * It has no constructor
|
||||
// * It is only visible (on the type level) to users in the return type of
|
||||
// `Url::query_pairs_mut` which is `Serializer<UrlQuery>`
|
||||
// * `Serializer` keeps its target in a private field
|
||||
// * Unlike in other `Target` impls, `UrlQuery::finished` does not return `Self`.
|
||||
impl<'a> Target for ::UrlQuery<'a> {
|
||||
fn as_mut_string(&mut self) -> &mut String {
|
||||
&mut self.url.as_mut().unwrap().serialization
|
||||
}
|
||||
|
||||
fn finish(mut self) -> &'a mut ::Url {
|
||||
let url = self.url.take().unwrap();
|
||||
url.restore_already_parsed_fragment(self.fragment.take());
|
||||
url
|
||||
}
|
||||
|
||||
type Finished = &'a mut ::Url;
|
||||
}
|
||||
|
||||
impl<'a, T: Target> Serializer<'a, T> {
|
||||
/// Create a new `application/x-www-form-urlencoded` serializer for the given target.
|
||||
///
|
||||
/// If the target is non-empty,
|
||||
/// its content is assumed to already be in `application/x-www-form-urlencoded` syntax.
|
||||
pub fn new(target: T) -> Self {
|
||||
Self::for_suffix(target, 0)
|
||||
}
|
||||
|
||||
/// Create a new `application/x-www-form-urlencoded` serializer
|
||||
/// for a suffix of the given target.
|
||||
///
|
||||
/// If that suffix is non-empty,
|
||||
/// its content is assumed to already be in `application/x-www-form-urlencoded` syntax.
|
||||
pub fn for_suffix(mut target: T, start_position: usize) -> Self {
|
||||
&target.as_mut_string()[start_position..]; // Panic if out of bounds
|
||||
Serializer {
|
||||
target: Some(target),
|
||||
start_position,
|
||||
encoding: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Remove any existing name/value pair.
|
||||
///
|
||||
/// Panics if called after `.finish()`.
|
||||
pub fn clear(&mut self) -> &mut Self {
|
||||
string(&mut self.target).truncate(self.start_position);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the character encoding to be used for names and values before percent-encoding.
|
||||
pub fn encoding_override(&mut self, new: EncodingOverride<'a>) -> &mut Self {
|
||||
self.encoding = new;
|
||||
self
|
||||
}
|
||||
|
||||
/// Serialize and append a name/value pair.
|
||||
///
|
||||
/// Panics if called after `.finish()`.
|
||||
pub fn append_pair(&mut self, name: &str, value: &str) -> &mut Self {
|
||||
append_pair(
|
||||
string(&mut self.target),
|
||||
self.start_position,
|
||||
self.encoding,
|
||||
name,
|
||||
value,
|
||||
);
|
||||
self
|
||||
}
|
||||
|
||||
/// Serialize and append a number of name/value pairs.
|
||||
///
|
||||
/// This simply calls `append_pair` repeatedly.
|
||||
/// This can be more convenient, so the user doesn’t need to introduce a block
|
||||
/// to limit the scope of `Serializer`’s borrow of its string.
|
||||
///
|
||||
/// Panics if called after `.finish()`.
|
||||
pub fn extend_pairs<I, K, V>(&mut self, iter: I) -> &mut Self
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: Borrow<(K, V)>,
|
||||
K: AsRef<str>,
|
||||
V: AsRef<str>,
|
||||
{
|
||||
{
|
||||
let string = string(&mut self.target);
|
||||
for pair in iter {
|
||||
let &(ref k, ref v) = pair.borrow();
|
||||
append_pair(
|
||||
string,
|
||||
self.start_position,
|
||||
self.encoding,
|
||||
k.as_ref(),
|
||||
v.as_ref(),
|
||||
);
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
/// If this serializer was constructed with a string, take and return that string.
|
||||
///
|
||||
/// ```rust
|
||||
/// use url::form_urlencoded;
|
||||
/// let encoded: String = form_urlencoded::Serializer::new(String::new())
|
||||
/// .append_pair("foo", "bar & baz")
|
||||
/// .append_pair("saison", "Été+hiver")
|
||||
/// .finish();
|
||||
/// assert_eq!(encoded, "foo=bar+%26+baz&saison=%C3%89t%C3%A9%2Bhiver");
|
||||
/// ```
|
||||
///
|
||||
/// Panics if called more than once.
|
||||
pub fn finish(&mut self) -> T::Finished {
|
||||
self.target
|
||||
.take()
|
||||
.expect("url::form_urlencoded::Serializer double finish")
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
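// A short sketch of `extend_pairs` (hypothetical values), complementing the
// `append_pair` example in the docs above:
fn _extend_pairs_sketch() {
    let encoded: String = Serializer::new(String::new())
        .extend_pairs([("page", "2"), ("sort", "desc")])
        .finish();
    assert_eq!(encoded, "page=2&sort=desc");
}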
|
||||
fn append_separator_if_needed(string: &mut String, start_position: usize) {
|
||||
if string.len() > start_position {
|
||||
string.push('&')
|
||||
}
|
||||
}
|
||||
|
||||
fn string<T: Target>(target: &mut Option<T>) -> &mut String {
|
||||
target
|
||||
.as_mut()
|
||||
.expect("url::form_urlencoded::Serializer finished")
|
||||
.as_mut_string()
|
||||
}
|
||||
|
||||
fn append_pair(
|
||||
string: &mut String,
|
||||
start_position: usize,
|
||||
encoding: EncodingOverride,
|
||||
name: &str,
|
||||
value: &str,
|
||||
) {
|
||||
append_separator_if_needed(string, start_position);
|
||||
append_encoded(name, string, encoding);
|
||||
string.push('=');
|
||||
append_encoded(value, string, encoding);
|
||||
}
|
||||
|
||||
fn append_encoded(s: &str, string: &mut String, encoding: EncodingOverride) {
|
||||
string.extend(byte_serialize(&query_encoding::encode(encoding, s.into())))
|
||||
}
|
|
@ -6,15 +6,16 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use idna;
|
||||
use parser::{ParseError, ParseResult};
|
||||
use percent_encoding::{percent_decode, utf8_percent_encode, CONTROLS};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::cmp;
|
||||
use std::fmt::{self, Formatter};
|
||||
use std::net::{Ipv4Addr, Ipv6Addr};
|
||||
|
||||
use percent_encoding::{percent_decode, utf8_percent_encode, CONTROLS};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::parser::{ParseError, ParseResult};
|
||||
|
||||
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub(crate) enum HostInternal {
|
||||
|
@ -24,9 +25,10 @@ pub(crate) enum HostInternal {
|
|||
Ipv6(Ipv6Addr),
|
||||
}
|
||||
|
||||
impl<S> From<Host<S>> for HostInternal {
|
||||
fn from(host: Host<S>) -> HostInternal {
|
||||
impl From<Host<String>> for HostInternal {
|
||||
fn from(host: Host<String>) -> HostInternal {
|
||||
match host {
|
||||
Host::Domain(ref s) if s.is_empty() => HostInternal::None,
|
||||
Host::Domain(_) => HostInternal::Domain,
|
||||
Host::Ipv4(address) => HostInternal::Ipv4(address),
|
||||
Host::Ipv6(address) => HostInternal::Ipv6(address),
|
||||
|
@ -36,7 +38,7 @@ impl<S> From<Host<S>> for HostInternal {
|
|||
|
||||
/// The host name of an URL.
|
||||
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
#[derive(Clone, Debug, Eq, Ord, PartialOrd, Hash)]
|
||||
pub enum Host<S = String> {
|
||||
/// A DNS domain name, as '.' dot-separated labels.
|
||||
/// Non-ASCII labels are encoded in punycode per IDNA if this is the host of
|
||||
|
@ -80,34 +82,42 @@ impl Host<String> {
|
|||
return parse_ipv6addr(&input[1..input.len() - 1]).map(Host::Ipv6);
|
||||
}
|
||||
let domain = percent_decode(input.as_bytes()).decode_utf8_lossy();
|
||||
let domain = idna::domain_to_ascii(&domain)?;
|
||||
if domain
|
||||
.find(|c| {
|
||||
matches!(
|
||||
c,
|
||||
'\0' | '\t'
|
||||
| '\n'
|
||||
| '\r'
|
||||
| ' '
|
||||
| '#'
|
||||
| '%'
|
||||
| '/'
|
||||
| ':'
|
||||
| '?'
|
||||
| '@'
|
||||
| '['
|
||||
| '\\'
|
||||
| ']'
|
||||
)
|
||||
})
|
||||
.is_some()
|
||||
{
|
||||
return Err(ParseError::InvalidDomainCharacter);
|
||||
|
||||
let domain = Self::domain_to_ascii(&domain)?;
|
||||
|
||||
if domain.is_empty() {
|
||||
return Err(ParseError::EmptyHost);
|
||||
}
|
||||
if let Some(address) = parse_ipv4addr(&domain)? {
|
||||
|
||||
let is_invalid_domain_char = |c| {
|
||||
matches!(
|
||||
c,
|
||||
'\0'..='\u{001F}'
|
||||
| ' '
|
||||
| '#'
|
||||
| '%'
|
||||
| '/'
|
||||
| ':'
|
||||
| '<'
|
||||
| '>'
|
||||
| '?'
|
||||
| '@'
|
||||
| '['
|
||||
| '\\'
|
||||
| ']'
|
||||
| '^'
|
||||
| '\u{007F}'
|
||||
| '|'
|
||||
)
|
||||
};
|
||||
|
||||
if domain.find(is_invalid_domain_char).is_some() {
|
||||
Err(ParseError::InvalidDomainCharacter)
|
||||
} else if ends_in_a_number(&domain) {
|
||||
let address = parse_ipv4addr(&domain)?;
|
||||
Ok(Host::Ipv4(address))
|
||||
} else {
|
||||
Ok(Host::Domain(domain.into()))
|
||||
Ok(Host::Domain(domain))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -119,35 +129,46 @@ impl Host<String> {
|
|||
}
|
||||
return parse_ipv6addr(&input[1..input.len() - 1]).map(Host::Ipv6);
|
||||
}
|
||||
if input
|
||||
.find(|c| {
|
||||
matches!(
|
||||
c,
|
||||
'\0' | '\t'
|
||||
| '\n'
|
||||
| '\r'
|
||||
| ' '
|
||||
| '#'
|
||||
| '/'
|
||||
| ':'
|
||||
| '?'
|
||||
| '@'
|
||||
| '['
|
||||
| '\\'
|
||||
| ']'
|
||||
)
|
||||
})
|
||||
.is_some()
|
||||
{
|
||||
return Err(ParseError::InvalidDomainCharacter);
|
||||
|
||||
let is_invalid_host_char = |c| {
|
||||
matches!(
|
||||
c,
|
||||
'\0' | '\t'
|
||||
| '\n'
|
||||
| '\r'
|
||||
| ' '
|
||||
| '#'
|
||||
| '/'
|
||||
| ':'
|
||||
| '<'
|
||||
| '>'
|
||||
| '?'
|
||||
| '@'
|
||||
| '['
|
||||
| '\\'
|
||||
| ']'
|
||||
| '^'
|
||||
| '|'
|
||||
)
|
||||
};
|
||||
|
||||
if input.find(is_invalid_host_char).is_some() {
|
||||
Err(ParseError::InvalidDomainCharacter)
|
||||
} else {
|
||||
Ok(Host::Domain(
|
||||
utf8_percent_encode(input, CONTROLS).to_string(),
|
||||
))
|
||||
}
|
||||
let s = utf8_percent_encode(input, CONTROLS).to_string();
|
||||
Ok(Host::Domain(s))
|
||||
}
|
||||
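// Behaviour sketch of the two code paths above, via the public API (assumed
// inputs): hosts of special schemes go through IDNA, while non-special
// ("opaque") hosts skip IDNA and keep non-ASCII bytes percent-encoded.
fn _host_parsing_sketch() {
    let special = crate::Url::parse("https://BÜCHER.example/").unwrap();
    assert_eq!(special.host_str(), Some("xn--bcher-kva.example"));

    let opaque = crate::Url::parse("foo://BÜCHER.example/").unwrap();
    assert_eq!(opaque.host_str(), Some("B%C3%9CCHER.example"));
}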
|
||||
/// Convert the domain to ASCII using IDNA.
|
||||
fn domain_to_ascii(domain: &str) -> Result<String, ParseError> {
|
||||
idna::domain_to_ascii(domain).map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: AsRef<str>> fmt::Display for Host<S> {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
match *self {
|
||||
Host::Domain(ref domain) => domain.as_ref().fmt(f),
|
||||
Host::Ipv4(ref addr) => addr.fmt(f),
|
||||
|
@ -160,7 +181,21 @@ impl<S: AsRef<str>> fmt::Display for Host<S> {
|
|||
}
|
||||
}
|
||||
|
||||
fn write_ipv6(addr: &Ipv6Addr, f: &mut Formatter) -> fmt::Result {
|
||||
impl<S, T> PartialEq<Host<T>> for Host<S>
|
||||
where
|
||||
S: PartialEq<T>,
|
||||
{
|
||||
fn eq(&self, other: &Host<T>) -> bool {
|
||||
match (self, other) {
|
||||
(Host::Domain(a), Host::Domain(b)) => a == b,
|
||||
(Host::Ipv4(a), Host::Ipv4(b)) => a == b,
|
||||
(Host::Ipv6(a), Host::Ipv6(b)) => a == b,
|
||||
(_, _) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn write_ipv6(addr: &Ipv6Addr, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let segments = addr.segments();
|
||||
let (compress_start, compress_end) = longest_zero_sequence(&segments);
|
||||
let mut i = 0;
|
||||
|
@ -221,8 +256,33 @@ fn longest_zero_sequence(pieces: &[u16; 8]) -> (isize, isize) {
|
|||
}
|
||||
}
|
||||
|
||||
/// <https://url.spec.whatwg.org/#ends-in-a-number-checker>
|
||||
fn ends_in_a_number(input: &str) -> bool {
|
||||
let mut parts = input.rsplit('.');
|
||||
let last = parts.next().unwrap();
|
||||
let last = if last.is_empty() {
|
||||
if let Some(last) = parts.next() {
|
||||
last
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
last
|
||||
};
|
||||
if !last.is_empty() && last.as_bytes().iter().all(|c| c.is_ascii_digit()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
parse_ipv4number(last).is_ok()
|
||||
}
|
||||
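// Behaviour sketch of the check above, observed through `Host::parse`
// (assumed inputs): a final label that is a number makes the whole host
// an IPv4 address.
fn _ends_in_a_number_sketch() {
    use std::net::Ipv4Addr;
    // "0x7f.1" ends in a number, so it is parsed as the IPv4 address 127.0.0.1.
    match Host::parse("0x7f.1").unwrap() {
        Host::Ipv4(addr) => assert_eq!(addr, Ipv4Addr::new(127, 0, 0, 1)),
        other => panic!("expected an IPv4 host, got {:?}", other),
    }
    // "example.com" does not, so it stays a domain.
    assert_eq!(
        Host::parse("example.com").unwrap(),
        Host::Domain("example.com".to_string())
    );
}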
|
||||
/// <https://url.spec.whatwg.org/#ipv4-number-parser>
|
||||
/// Ok(None) means the input is a valid number, but it overflows a `u32`.
|
||||
fn parse_ipv4number(mut input: &str) -> Result<Option<u32>, ()> {
|
||||
if input.is_empty() {
|
||||
return Err(());
|
||||
}
|
||||
|
||||
let mut r = 10;
|
||||
if input.starts_with("0x") || input.starts_with("0X") {
|
||||
input = &input[2..];
|
||||
|
@ -232,62 +292,44 @@ fn parse_ipv4number(mut input: &str) -> Result<Option<u32>, ()> {
|
|||
r = 8;
|
||||
}
|
||||
|
||||
// At the moment we can't know the reason why from_str_radix fails
|
||||
// https://github.com/rust-lang/rust/issues/22639
|
||||
// So instead we check if the input looks like a real number and only return
|
||||
// an error when it's an overflow.
|
||||
let valid_number = match r {
|
||||
8 => input.chars().all(|c| c >= '0' && c <= '7'),
|
||||
10 => input.chars().all(|c| c >= '0' && c <= '9'),
|
||||
16 => input
|
||||
.chars()
|
||||
.all(|c| (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
if !valid_number {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if input.is_empty() {
|
||||
return Ok(Some(0));
|
||||
}
|
||||
if input.starts_with('+') {
|
||||
return Ok(None);
|
||||
|
||||
let valid_number = match r {
|
||||
8 => input.as_bytes().iter().all(|c| (b'0'..=b'7').contains(c)),
|
||||
10 => input.as_bytes().iter().all(|c| c.is_ascii_digit()),
|
||||
16 => input.as_bytes().iter().all(|c| c.is_ascii_hexdigit()),
|
||||
_ => false,
|
||||
};
|
||||
if !valid_number {
|
||||
return Err(());
|
||||
}
|
||||
|
||||
match u32::from_str_radix(input, r) {
|
||||
Ok(number) => Ok(Some(number)),
|
||||
Err(_) => Err(()),
|
||||
Ok(num) => Ok(Some(num)),
|
||||
Err(_) => Ok(None), // The only possible error kind here is an integer overflow.
|
||||
// The validity of the chars in the input is checked above.
|
||||
}
|
||||
}
|
||||
|
||||
/// <https://url.spec.whatwg.org/#concept-ipv4-parser>
|
||||
fn parse_ipv4addr(input: &str) -> ParseResult<Option<Ipv4Addr>> {
|
||||
if input.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
fn parse_ipv4addr(input: &str) -> ParseResult<Ipv4Addr> {
|
||||
let mut parts: Vec<&str> = input.split('.').collect();
|
||||
if parts.last() == Some(&"") {
|
||||
parts.pop();
|
||||
}
|
||||
if parts.len() > 4 {
|
||||
return Ok(None);
|
||||
return Err(ParseError::InvalidIpv4Address);
|
||||
}
|
||||
let mut numbers: Vec<u32> = Vec::new();
|
||||
let mut overflow = false;
|
||||
for part in parts {
|
||||
if part == "" {
|
||||
return Ok(None);
|
||||
}
|
||||
match parse_ipv4number(part) {
|
||||
Ok(Some(n)) => numbers.push(n),
|
||||
Ok(None) => return Ok(None),
|
||||
Err(()) => overflow = true,
|
||||
Ok(None) => return Err(ParseError::InvalidIpv4Address), // u32 overflow
|
||||
Err(()) => return Err(ParseError::InvalidIpv4Address),
|
||||
};
|
||||
}
|
||||
if overflow {
|
||||
return Err(ParseError::InvalidIpv4Address);
|
||||
}
|
||||
let mut ipv4 = numbers.pop().expect("a non-empty list of numbers");
|
||||
// Equivalent to: ipv4 >= 256 ** (4 − numbers.len())
|
||||
if ipv4 > u32::max_value() >> (8 * numbers.len() as u32) {
|
||||
|
@ -299,7 +341,7 @@ fn parse_ipv4addr(input: &str) -> ParseResult<Option<Ipv4Addr>> {
|
|||
for (counter, n) in numbers.iter().enumerate() {
|
||||
ipv4 += n << (8 * (3 - counter as u32))
|
||||
}
|
||||
Ok(Some(Ipv4Addr::from(ipv4)))
|
||||
Ok(Ipv4Addr::from(ipv4))
|
||||
}
|
||||
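// Worked example of the accumulation loop above, using the same shifts
// (assumed input "0x7f.1", i.e. numbers [0x7f] plus a trailing 1):
fn _ipv4_accumulation_sketch() {
    let numbers = [0x7fu32]; // everything except the last dotted number
    let mut ipv4 = 1u32; // the last number fills the remaining low octets
    for (counter, n) in numbers.iter().enumerate() {
        ipv4 += n << (8 * (3 - counter as u32));
    }
    assert_eq!(Ipv4Addr::from(ipv4), Ipv4Addr::new(127, 0, 0, 1));
}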
|
||||
/// <https://url.spec.whatwg.org/#concept-ipv6-parser>
|
||||
|
|
Diff for this file not shown because of its large size.
|
@ -6,11 +6,10 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use host::Host;
|
||||
use idna::domain_to_unicode;
|
||||
use parser::default_port;
|
||||
use crate::host::Host;
|
||||
use crate::parser::default_port;
|
||||
use crate::Url;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use Url;
|
||||
|
||||
pub fn url_origin(url: &Url) -> Origin {
|
||||
let scheme = url.scheme();
|
||||
|
@ -22,7 +21,7 @@ pub fn url_origin(url: &Url) -> Origin {
|
|||
Err(_) => Origin::new_opaque(),
|
||||
}
|
||||
}
|
||||
"ftp" | "gopher" | "http" | "https" | "ws" | "wss" => Origin::Tuple(
|
||||
"ftp" | "http" | "https" | "ws" | "wss" => Origin::Tuple(
|
||||
scheme.to_owned(),
|
||||
url.host().unwrap().to_owned(),
|
||||
url.port_or_known_default().unwrap(),
|
||||
|
@ -44,7 +43,7 @@ pub fn url_origin(url: &Url) -> Origin {
|
|||
/// - If the scheme is "blob" the origin is the origin of the
|
||||
/// URL contained in the path component. If parsing fails,
|
||||
/// it is an opaque origin.
|
||||
/// - If the scheme is "ftp", "gopher", "http", "https", "ws", or "wss",
|
||||
/// - If the scheme is "ftp", "http", "https", "ws", or "wss",
|
||||
/// then the origin is a tuple of the scheme, host, and port.
|
||||
/// - If the scheme is anything else, the origin is opaque, meaning
|
||||
/// the URL does not have the same origin as any other URL.
|
||||
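///
/// A behaviour sketch of that distinction, using the public API
/// (assumed example URLs):
///
/// ```rust
/// use url::Url;
/// let tuple = Url::parse("https://example.com/a").unwrap().origin();
/// let opaque = Url::parse("data:text/plain,hello").unwrap().origin();
/// assert!(tuple.is_tuple());
/// assert!(!opaque.is_tuple());
/// ```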
|
@ -93,7 +92,7 @@ impl Origin {
|
|||
Origin::Tuple(ref scheme, ref host, port) => {
|
||||
let host = match *host {
|
||||
Host::Domain(ref domain) => {
|
||||
let (domain, _errors) = domain_to_unicode(domain);
|
||||
let (domain, _errors) = idna::domain_to_unicode(domain);
|
||||
Host::Domain(domain)
|
||||
}
|
||||
_ => host.clone(),
|
||||
|
|
Diff for this file not shown because of its large size.
|
@ -6,9 +6,9 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use parser::{self, to_u32, SchemeType};
|
||||
use crate::parser::{self, to_u32, SchemeType};
|
||||
use crate::Url;
|
||||
use std::str;
|
||||
use Url;
|
||||
|
||||
/// Exposes methods to manipulate the path of an URL that is not cannot-be-base.
|
||||
///
|
||||
|
@ -21,7 +21,7 @@ use Url;
|
|||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// # fn run() -> Result<(), Box<dyn Error>> {
|
||||
/// let mut url = Url::parse("mailto:me@example.com")?;
|
||||
/// assert!(url.path_segments_mut().is_err());
|
||||
///
|
||||
|
@ -42,10 +42,18 @@ pub struct PathSegmentsMut<'a> {
|
|||
}
|
||||
|
||||
// Not re-exported outside the crate
|
||||
pub fn new(url: &mut Url) -> PathSegmentsMut {
|
||||
pub fn new(url: &mut Url) -> PathSegmentsMut<'_> {
|
||||
let after_path = url.take_after_path();
|
||||
let old_after_path_position = to_u32(url.serialization.len()).unwrap();
|
||||
debug_assert!(url.byte_at(url.path_start) == b'/');
|
||||
// Special urls always have a non empty path
|
||||
if SchemeType::from(url.scheme()).is_special() {
|
||||
debug_assert!(url.byte_at(url.path_start) == b'/');
|
||||
} else {
|
||||
debug_assert!(
|
||||
url.serialization.len() == url.path_start as usize
|
||||
|| url.byte_at(url.path_start) == b'/'
|
||||
);
|
||||
}
|
||||
PathSegmentsMut {
|
||||
after_first_slash: url.path_start as usize + "/".len(),
|
||||
url,
|
||||
|
@ -72,7 +80,7 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// # fn run() -> Result<(), Box<dyn Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .clear().push("logout");
|
||||
|
@ -100,7 +108,7 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// # fn run() -> Result<(), Box<dyn Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .push("pulls");
|
||||
|
@ -115,6 +123,9 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
/// # run().unwrap();
|
||||
/// ```
|
||||
pub fn pop_if_empty(&mut self) -> &mut Self {
|
||||
if self.after_first_slash >= self.url.serialization.len() {
|
||||
return self;
|
||||
}
|
||||
if self.url.serialization[self.after_first_slash..].ends_with('/') {
|
||||
self.url.serialization.pop();
|
||||
}
|
||||
|
@ -127,6 +138,9 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
///
|
||||
/// Returns `&mut Self` so that method calls can be chained.
|
||||
pub fn pop(&mut self) -> &mut Self {
|
||||
if self.after_first_slash >= self.url.serialization.len() {
|
||||
return self;
|
||||
}
|
||||
let last_slash = self.url.serialization[self.after_first_slash..]
|
||||
.rfind('/')
|
||||
.unwrap_or(0);
|
||||
|
@ -169,7 +183,7 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// # fn run() -> Result<(), Box<dyn Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/")?;
|
||||
/// let org = "servo";
|
||||
/// let repo = "rust-url";
|
||||
|
@ -189,7 +203,7 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
/// use url::Url;
|
||||
/// # use std::error::Error;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<Error>> {
|
||||
/// # fn run() -> Result<(), Box<dyn Error>> {
|
||||
/// let mut url = Url::parse("https://github.com/servo")?;
|
||||
/// url.path_segments_mut().map_err(|_| "cannot be base")?
|
||||
/// .extend(&["..", "rust-url", ".", "pulls"]);
|
||||
|
@ -212,7 +226,10 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
if matches!(segment, "." | "..") {
|
||||
continue;
|
||||
}
|
||||
if parser.serialization.len() > path_start + 1 {
|
||||
if parser.serialization.len() > path_start + 1
|
||||
// A non-special URL's path might still be empty
|
||||
|| parser.serialization.len() == path_start
|
||||
{
|
||||
parser.serialization.push('/');
|
||||
}
|
||||
let mut has_host = true; // FIXME account for this?
|
||||
|
@ -220,7 +237,7 @@ impl<'a> PathSegmentsMut<'a> {
|
|||
scheme_type,
|
||||
&mut has_host,
|
||||
path_start,
|
||||
parser::Input::new(segment),
|
||||
parser::Input::new_no_trim(segment),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
// Copyright 2019 The rust-url developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
pub type EncodingOverride<'a> = Option<&'a dyn Fn(&str) -> Cow<[u8]>>;
|
||||
|
||||
pub(crate) fn encode<'a>(encoding_override: EncodingOverride, input: &'a str) -> Cow<'a, [u8]> {
|
||||
if let Some(o) = encoding_override {
|
||||
return o(input);
|
||||
}
|
||||
input.as_bytes().into()
|
||||
}
|
||||
|
||||
pub(crate) fn decode_utf8_lossy(input: Cow<[u8]>) -> Cow<str> {
|
||||
match input {
|
||||
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
|
||||
Cow::Owned(bytes) => {
|
||||
let raw_utf8: *const [u8];
|
||||
match String::from_utf8_lossy(&bytes) {
|
||||
Cow::Borrowed(utf8) => raw_utf8 = utf8.as_bytes(),
|
||||
Cow::Owned(s) => return s.into(),
|
||||
}
|
||||
// from_utf8_lossy returned a borrow of `bytes` unchanged.
|
||||
debug_assert!(raw_utf8 == &*bytes as *const [u8]);
|
||||
// Reuse the existing `Vec` allocation.
|
||||
unsafe { String::from_utf8_unchecked(bytes) }.into()
|
||||
}
|
||||
}
|
||||
}
|
|
@ -11,8 +11,51 @@
|
|||
//! Unless you need to be interoperable with web browsers,
|
||||
//! you probably want to use the `Url` methods instead.
|
||||
|
||||
use parser::{default_port, Context, Input, Parser, SchemeType};
|
||||
use {idna, Host, ParseError, Position, Url};
|
||||
use crate::parser::{default_port, Context, Input, Parser, SchemeType};
|
||||
use crate::{Host, ParseError, Position, Url};
|
||||
|
||||
/// Internal components / offsets of a URL.
|
||||
///
|
||||
/// https://user:pass@example.com:1234/foo/bar?baz#quux
///      |      |     |          |^^^^|       |   |
///      |      |     |          ||   |       |   `----- fragment_start
///      |      |     |          ||   |       `--------- query_start
///      |      |     |          ||   `----------------- path_start
///      |      |     |          |`--------------------- port
///      |      |     |          `---------------------- host_end
///      |      |     `--------------------------------- host_start
///      |      `--------------------------------------- username_end
///      `---------------------------------------------- scheme_end
|
||||
#[derive(Copy, Clone)]
|
||||
#[cfg(feature = "expose_internals")]
|
||||
pub struct InternalComponents {
|
||||
pub scheme_end: u32,
|
||||
pub username_end: u32,
|
||||
pub host_start: u32,
|
||||
pub host_end: u32,
|
||||
pub port: Option<u16>,
|
||||
pub path_start: u32,
|
||||
pub query_start: Option<u32>,
|
||||
pub fragment_start: Option<u32>,
|
||||
}
|
||||
|
||||
/// Internal component / parsed offsets of the URL.
|
||||
///
|
||||
/// This can be useful for implementing efficient serialization
|
||||
/// for the URL.
|
||||
#[cfg(feature = "expose_internals")]
|
||||
pub fn internal_components(url: &Url) -> InternalComponents {
|
||||
InternalComponents {
|
||||
scheme_end: url.scheme_end,
|
||||
username_end: url.username_end,
|
||||
host_start: url.host_start,
|
||||
host_end: url.host_end,
|
||||
port: url.port,
|
||||
path_start: url.path_start,
|
||||
query_start: url.query_start,
|
||||
fragment_start: url.fragment_start,
|
||||
}
|
||||
}
|
||||
|
||||
/// https://url.spec.whatwg.org/#dom-url-domaintoascii
|
||||
pub fn domain_to_ascii(domain: &str) -> String {
|
||||
|
@ -56,6 +99,7 @@ pub fn protocol(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-protocol
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_protocol(url: &mut Url, mut new_protocol: &str) -> Result<(), ()> {
|
||||
// The scheme state in the spec ignores everything after the first `:`,
|
||||
// but `set_scheme` errors if there is more.
|
||||
|
@ -72,6 +116,7 @@ pub fn username(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-username
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_username(url: &mut Url, new_username: &str) -> Result<(), ()> {
|
||||
url.set_username(new_username)
|
||||
}
|
||||
|
@ -83,6 +128,7 @@ pub fn password(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-password
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_password(url: &mut Url, new_password: &str) -> Result<(), ()> {
|
||||
url.set_password(if new_password.is_empty() {
|
||||
None
|
||||
|
@ -98,29 +144,48 @@ pub fn host(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-host
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_host(url: &mut Url, new_host: &str) -> Result<(), ()> {
|
||||
// If context object’s url’s cannot-be-a-base-URL flag is set, then return.
|
||||
if url.cannot_be_a_base() {
|
||||
return Err(());
|
||||
}
|
||||
// Host parsing rules are strict,
|
||||
// We don't want to trim the input
|
||||
let input = Input::new_no_trim(new_host);
|
||||
let host;
|
||||
let opt_port;
|
||||
{
|
||||
let scheme = url.scheme();
|
||||
let result = Parser::parse_host(Input::new(new_host), SchemeType::from(scheme));
|
||||
match result {
|
||||
Ok((h, remaining)) => {
|
||||
host = h;
|
||||
opt_port = if let Some(remaining) = remaining.split_prefix(':') {
|
||||
let scheme_type = SchemeType::from(scheme);
|
||||
if scheme_type == SchemeType::File && new_host.is_empty() {
|
||||
url.set_host_internal(Host::Domain(String::new()), None);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Ok((h, remaining)) = Parser::parse_host(input, scheme_type) {
|
||||
host = h;
|
||||
opt_port = if let Some(remaining) = remaining.split_prefix(':') {
|
||||
if remaining.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Parser::parse_port(remaining, || default_port(scheme), Context::Setter)
|
||||
.ok()
|
||||
.map(|(port, _remaining)| port)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
}
|
||||
Err(_) => return Err(()),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
} else {
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
// Make sure we won't set an empty host to a url with a username or a port
|
||||
if host == Host::Domain("".to_string())
|
||||
&& (!username(url).is_empty() || matches!(opt_port, Some(Some(_))) || url.port().is_some())
|
||||
{
|
||||
return Err(());
|
||||
}
|
||||
url.set_host_internal(host, opt_port);
|
||||
Ok(())
|
||||
}
|
||||
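// Behaviour sketch of the empty-host guard above (assumed example URLs):
// clearing the host is refused while the URL still carries credentials or a port.
fn _set_host_guard_sketch() {
    let mut with_user = Url::parse("foo://user@example.com/").unwrap();
    assert!(set_host(&mut with_user, "").is_err());

    let mut plain = Url::parse("foo://example.com/").unwrap();
    assert!(set_host(&mut plain, "").is_ok());
}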
|
@ -132,12 +197,34 @@ pub fn hostname(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-hostname
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_hostname(url: &mut Url, new_hostname: &str) -> Result<(), ()> {
|
||||
if url.cannot_be_a_base() {
|
||||
return Err(());
|
||||
}
|
||||
let result = Parser::parse_host(Input::new(new_hostname), SchemeType::from(url.scheme()));
|
||||
if let Ok((host, _remaining)) = result {
|
||||
// Host parsing rules are strict; we don't want to trim the input
|
||||
let input = Input::new_no_trim(new_hostname);
|
||||
let scheme_type = SchemeType::from(url.scheme());
|
||||
if scheme_type == SchemeType::File && new_hostname.is_empty() {
|
||||
url.set_host_internal(Host::Domain(String::new()), None);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Ok((host, _remaining)) = Parser::parse_host(input, scheme_type) {
|
||||
if let Host::Domain(h) = &host {
|
||||
if h.is_empty() {
|
||||
// Empty host on special not file url
|
||||
if SchemeType::from(url.scheme()) == SchemeType::SpecialNotFile
|
||||
// Port with an empty host
|
||||
||!port(url).is_empty()
|
||||
// Empty host that includes credentials
|
||||
|| !url.username().is_empty()
|
||||
|| !url.password().unwrap_or("").is_empty()
|
||||
{
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
}
|
||||
url.set_host_internal(host, None);
|
||||
Ok(())
|
||||
} else {
|
||||
|
@ -152,6 +239,7 @@ pub fn port(url: &Url) -> &str {
|
|||
}
|
||||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-port
|
||||
#[allow(clippy::result_unit_err)]
|
||||
pub fn set_port(url: &mut Url, new_port: &str) -> Result<(), ()> {
|
||||
let result;
|
||||
{
|
||||
|
@ -161,7 +249,7 @@ pub fn set_port(url: &mut Url, new_port: &str) -> Result<(), ()> {
|
|||
return Err(());
|
||||
}
|
||||
result = Parser::parse_port(
|
||||
Input::new(new_port),
|
||||
Input::new_no_trim(new_port),
|
||||
|| default_port(scheme),
|
||||
Context::Setter,
|
||||
)
|
||||
|
@ -182,8 +270,19 @@ pub fn pathname(url: &Url) -> &str {
|
|||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-pathname
|
||||
pub fn set_pathname(url: &mut Url, new_pathname: &str) {
|
||||
if !url.cannot_be_a_base() {
|
||||
if url.cannot_be_a_base() {
|
||||
return;
|
||||
}
|
||||
if new_pathname.starts_with('/')
|
||||
|| (SchemeType::from(url.scheme()).is_special()
|
||||
// \ is a segment delimiter for 'special' URLs"
|
||||
&& new_pathname.starts_with('\\'))
|
||||
{
|
||||
url.set_path(new_pathname)
|
||||
} else {
|
||||
let mut path_to_set = String::from("/");
|
||||
path_to_set.push_str(new_pathname);
|
||||
url.set_path(&path_to_set)
|
||||
}
|
||||
}
|
||||
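// Behaviour sketch for the setter above (assumed example URL): a value without a
// leading '/' gets one prepended before being applied.
fn _set_pathname_sketch() {
    let mut url = Url::parse("https://example.com/old").unwrap();
    set_pathname(&mut url, "new/path");
    assert_eq!(url.path(), "/new/path");
}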
|
||||
|
@ -208,13 +307,14 @@ pub fn hash(url: &Url) -> &str {
|
|||
|
||||
/// Setter for https://url.spec.whatwg.org/#dom-url-hash
|
||||
pub fn set_hash(url: &mut Url, new_hash: &str) {
|
||||
if url.scheme() != "javascript" {
|
||||
url.set_fragment(match new_hash {
|
||||
"" => None,
|
||||
_ if new_hash.starts_with('#') => Some(&new_hash[1..]),
|
||||
_ => Some(new_hash),
|
||||
})
|
||||
}
|
||||
url.set_fragment(match new_hash {
|
||||
// If the given value is the empty string,
|
||||
// then set context object’s url’s fragment to null and return.
|
||||
"" => None,
|
||||
// Let input be the given value with a single leading U+0023 (#) removed, if any.
|
||||
_ if new_hash.starts_with('#') => Some(&new_hash[1..]),
|
||||
_ => Some(new_hash),
|
||||
})
|
||||
}
|
||||
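// Behaviour sketch for the fragment setter above (assumed example URL):
// a leading '#' is stripped, and the empty string clears the fragment.
fn _set_hash_sketch() {
    let mut url = Url::parse("https://example.com/#old").unwrap();
    set_hash(&mut url, "#new");
    assert_eq!(url.fragment(), Some("new"));
    set_hash(&mut url, "");
    assert_eq!(url.fragment(), None);
}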
|
||||
fn trim(s: &str) -> &str {
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use crate::Url;
|
||||
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
|
||||
use Url;
|
||||
|
||||
impl Index<RangeFull> for Url {
|
||||
type Output = str;
|
||||
|
@ -37,6 +37,29 @@ impl Index<Range<Position>> for Url {
|
|||
}
|
||||
}
|
||||
|
||||
// Counts how many base-10 digits are required to represent n
|
||||
fn count_digits(n: u16) -> usize {
|
||||
match n {
|
||||
0..=9 => 1,
|
||||
10..=99 => 2,
|
||||
100..=999 => 3,
|
||||
1000..=9999 => 4,
|
||||
10000..=65535 => 5,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_count_digits() {
|
||||
assert_eq!(count_digits(0), 1);
|
||||
assert_eq!(count_digits(1), 1);
|
||||
assert_eq!(count_digits(9), 1);
|
||||
assert_eq!(count_digits(10), 2);
|
||||
assert_eq!(count_digits(99), 2);
|
||||
assert_eq!(count_digits(100), 3);
|
||||
assert_eq!(count_digits(9999), 4);
|
||||
assert_eq!(count_digits(65535), 5);
|
||||
}
|
||||
|
||||
/// Indicates a position within a URL based on its components.
|
||||
///
|
||||
/// A range of positions can be used for slicing `Url`:
|
||||
|
@ -149,7 +172,14 @@ impl Url {
|
|||
}
|
||||
}
|
||||
|
||||
Position::AfterPort => self.path_start as usize,
|
||||
Position::AfterPort => {
|
||||
if let Some(port) = self.port {
|
||||
debug_assert!(self.byte_at(self.host_end) == b':');
|
||||
self.host_end as usize + ":".len() + count_digits(port)
|
||||
} else {
|
||||
self.host_end as usize
|
||||
}
|
||||
}
|
||||
|
||||
Position::BeforePath => self.path_start as usize,
|
||||
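// Usage sketch of the offsets computed above (assumed example URL): slicing with
// `Position` ranges works on the serialized string, so `AfterPort` must account
// for the ':' and the printed port digits via `count_digits`.
fn _after_port_sketch() {
    let url = Url::parse("https://example.com:8080/path?q=1").unwrap();
    assert_eq!(
        &url[Position::BeforeHost..Position::AfterPort],
        "example.com:8080"
    );
}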
|
||||
|
|
|
@ -1,210 +0,0 @@
|
|||
// Copyright 2013-2014 The rust-url developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Data-driven tests
|
||||
|
||||
extern crate rustc_test as test;
|
||||
extern crate serde_json;
|
||||
extern crate url;
|
||||
|
||||
use serde_json::Value;
|
||||
use std::str::FromStr;
|
||||
use url::{quirks, Url};
|
||||
|
||||
fn check_invariants(url: &Url) {
|
||||
url.check_invariants().unwrap();
|
||||
#[cfg(feature = "serde")]
|
||||
{
|
||||
let bytes = serde_json::to_vec(url).unwrap();
|
||||
let new_url: Url = serde_json::from_slice(&bytes).unwrap();
|
||||
assert_eq!(url, &new_url);
|
||||
}
|
||||
}
|
||||
|
||||
fn run_parsing(input: &str, base: &str, expected: Result<ExpectedAttributes, ()>) {
|
||||
let base = match Url::parse(&base) {
|
||||
Ok(base) => base,
|
||||
Err(_) if expected.is_err() => return,
|
||||
Err(message) => panic!("Error parsing base {:?}: {}", base, message),
|
||||
};
|
||||
let (url, expected) = match (base.join(&input), expected) {
|
||||
(Ok(url), Ok(expected)) => (url, expected),
|
||||
(Err(_), Err(())) => return,
|
||||
(Err(message), Ok(_)) => panic!("Error parsing URL {:?}: {}", input, message),
|
||||
(Ok(_), Err(())) => panic!("Expected a parse error for URL {:?}", input),
|
||||
};
|
||||
|
||||
check_invariants(&url);
|
||||
|
||||
macro_rules! assert_eq {
|
||||
($expected: expr, $got: expr) => {{
|
||||
let expected = $expected;
|
||||
let got = $got;
|
||||
assert!(
|
||||
expected == got,
|
||||
"\n{:?}\n!= {}\n{:?}\nfor URL {:?}\n",
|
||||
got,
|
||||
stringify!($expected),
|
||||
expected,
|
||||
url
|
||||
);
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! assert_attributes {
|
||||
($($attr: ident)+) => {
|
||||
{
|
||||
$(
|
||||
assert_eq!(expected.$attr, quirks::$attr(&url));
|
||||
)+;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_attributes!(href protocol username password host hostname port pathname search hash);
|
||||
|
||||
if let Some(expected_origin) = expected.origin {
|
||||
assert_eq!(expected_origin, quirks::origin(&url));
|
||||
}
|
||||
}
|
||||
|
||||
struct ExpectedAttributes {
|
||||
href: String,
|
||||
origin: Option<String>,
|
||||
protocol: String,
|
||||
username: String,
|
||||
password: String,
|
||||
host: String,
|
||||
hostname: String,
|
||||
port: String,
|
||||
pathname: String,
|
||||
search: String,
|
||||
hash: String,
|
||||
}
|
||||
|
||||
trait JsonExt {
|
||||
fn take_key(&mut self, key: &str) -> Option<Value>;
|
||||
fn string(self) -> String;
|
||||
fn take_string(&mut self, key: &str) -> String;
|
||||
}
|
||||
|
||||
impl JsonExt for Value {
|
||||
fn take_key(&mut self, key: &str) -> Option<Value> {
|
||||
self.as_object_mut().unwrap().remove(key)
|
||||
}
|
||||
|
||||
fn string(self) -> String {
|
||||
if let Value::String(s) = self {
|
||||
s
|
||||
} else {
|
||||
panic!("Not a Value::String")
|
||||
}
|
||||
}
|
||||
|
||||
fn take_string(&mut self, key: &str) -> String {
|
||||
self.take_key(key).unwrap().string()
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_parsing<F: FnMut(String, test::TestFn)>(add_test: &mut F) {
|
||||
// Copied form https://github.com/w3c/web-platform-tests/blob/master/url/
|
||||
let mut json = Value::from_str(include_str!("urltestdata.json"))
|
||||
.expect("JSON parse error in urltestdata.json");
|
||||
for entry in json.as_array_mut().unwrap() {
|
||||
if entry.is_string() {
|
||||
continue; // ignore comments
|
||||
}
|
||||
let base = entry.take_string("base");
|
||||
let input = entry.take_string("input");
|
||||
let expected = if entry.take_key("failure").is_some() {
|
||||
Err(())
|
||||
} else {
|
||||
Ok(ExpectedAttributes {
|
||||
href: entry.take_string("href"),
|
||||
origin: entry.take_key("origin").map(|s| s.string()),
|
||||
protocol: entry.take_string("protocol"),
|
||||
username: entry.take_string("username"),
|
||||
password: entry.take_string("password"),
|
||||
host: entry.take_string("host"),
|
||||
hostname: entry.take_string("hostname"),
|
||||
port: entry.take_string("port"),
|
||||
pathname: entry.take_string("pathname"),
|
||||
search: entry.take_string("search"),
|
||||
hash: entry.take_string("hash"),
|
||||
})
|
||||
};
|
||||
add_test(
|
||||
format!("{:?} @ base {:?}", input, base),
|
||||
test::TestFn::dyn_test_fn(move || run_parsing(&input, &base, expected)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_setters<F>(add_test: &mut F)
|
||||
where
|
||||
F: FnMut(String, test::TestFn),
|
||||
{
|
||||
let mut json = Value::from_str(include_str!("setters_tests.json"))
|
||||
.expect("JSON parse error in setters_tests.json");
|
||||
|
||||
macro_rules! setter {
|
||||
($attr: expr, $setter: ident) => {{
|
||||
let mut tests = json.take_key($attr).unwrap();
|
||||
for mut test in tests.as_array_mut().unwrap().drain(..) {
|
||||
let comment = test.take_key("comment")
|
||||
.map(|s| s.string())
|
||||
.unwrap_or(String::new());
|
||||
let href = test.take_string("href");
|
||||
let new_value = test.take_string("new_value");
|
||||
let name = format!("{:?}.{} = {:?} {}", href, $attr, new_value, comment);
|
||||
let mut expected = test.take_key("expected").unwrap();
|
||||
add_test(name, test::TestFn::dyn_test_fn(move || {
|
||||
let mut url = Url::parse(&href).unwrap();
|
||||
check_invariants(&url);
|
||||
let _ = quirks::$setter(&mut url, &new_value);
|
||||
assert_attributes!(url, expected,
|
||||
href protocol username password host hostname port pathname search hash);
|
||||
check_invariants(&url);
|
||||
}))
|
||||
}
|
||||
}}
|
||||
}
|
||||
macro_rules! assert_attributes {
|
||||
($url: expr, $expected: expr, $($attr: ident)+) => {
|
||||
$(
|
||||
if let Some(value) = $expected.take_key(stringify!($attr)) {
|
||||
assert_eq!(quirks::$attr(&$url), value.string())
|
||||
}
|
||||
)+
|
||||
}
|
||||
}
|
||||
setter!("protocol", set_protocol);
|
||||
setter!("username", set_username);
|
||||
setter!("password", set_password);
|
||||
setter!("hostname", set_hostname);
|
||||
setter!("host", set_host);
|
||||
setter!("port", set_port);
|
||||
setter!("pathname", set_pathname);
|
||||
setter!("search", set_search);
|
||||
setter!("hash", set_hash);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let mut tests = Vec::new();
|
||||
{
|
||||
let mut add_one = |name: String, run: test::TestFn| {
|
||||
tests.push(test::TestDescAndFn {
|
||||
desc: test::TestDesc::new(test::DynTestName(name)),
|
||||
testfn: run,
|
||||
})
|
||||
};
|
||||
collect_parsing(&mut add_one);
|
||||
collect_setters(&mut add_one);
|
||||
}
|
||||
test::test_main(&std::env::args().collect::<Vec<_>>(), tests)
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
</> against <file://h/C:/a/b>
|
||||
<file:\\\\//>
|
||||
<file:\\\\\\\\>
|
||||
<file:\\\\\\\\?fox>
|
||||
<file:\\\\\\\\#guppy>
|
||||
<file://spider///>
|
||||
<file:\\\\localhost//>
|
||||
<file://\\/localhost//cat>
|
||||
<file://localhost//a//../..//>
|
||||
</////mouse> against <file:///elephant>
|
||||
<\\/localhost//pig> against <file://lion/>
|
||||
<//localhost//pig> against <file://lion/>
|
||||
</..//localhost//pig> against <file://lion/>
|
||||
<C|> against <file://host/dir/file>
|
||||
<C|> against <file://host/D:/dir1/dir2/file>
|
||||
<C|#> against <file://host/dir/file>
|
||||
<C|?> against <file://host/dir/file>
|
||||
<C|/> against <file://host/dir/file>
|
||||
<C|\n/> against <file://host/dir/file>
|
||||
<C|\\> against <file://host/dir/file>
|
||||
</c:/foo/bar> against <file://host/path>
|
||||
<file://example.net/C:/>
|
||||
<file://1.2.3.4/C:/>
|
||||
<file://[1::8]/C:/>
|
||||
<C|/> against <file://host/>
|
||||
</C:/> against <file://host/>
|
||||
<file:C:/> against <file://host/>
|
||||
<file:/C:/> against <file://host/>
|
||||
<file://localhost//a//../..//foo>
|
||||
<file://localhost////foo>
|
||||
<file:////foo>
|
||||
<file:////one/two> against <file:///>
|
||||
<////one/two> against <file:///>
|
||||
<file:///.//> against <file:////>
|
||||
<file:.//p>
|
||||
<file:/.//p>
|
||||
<http://example.net/path> set hostname to <example.com:8080>
|
||||
<http://example.net:8080/path> set hostname to <example.com:>
|
||||
<non-spec:/.//p> set hostname to <h>
|
||||
<non-spec:/.//p> set hostname to <>
|
||||
<foo://somehost/some/path> set pathname to <>
|
||||
<foo:///some/path> set pathname to <>
|
||||
<http://example.net:8080/path> set port to <randomstring>
|
||||
<file:///var/log/system.log> set href to <http://0300.168.0xF0>
|
||||
<data:space ?query#fragment> set search to <>
|
||||
<sc:space ?query#fragment> set search to <>
|
||||
<file://monkey/> set pathname to <\\\\>
|
||||
<file:///unicorn> set pathname to <//\\/>
|
||||
<file:///unicorn> set pathname to <//monkey/..//>
|
||||
<non-spec:/> set pathname to </.//p>
|
||||
<non-spec:/> set pathname to </..//p>
|
||||
<non-spec:/> set pathname to <//p>
|
||||
<non-spec:/.//> set pathname to <p>
|
Diff for this file not shown because of its large size.
|
@ -8,14 +8,11 @@
|
|||
|
||||
//! Unit tests
|
||||
|
||||
extern crate percent_encoding;
|
||||
extern crate url;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::net::{Ipv4Addr, Ipv6Addr};
|
||||
use std::path::{Path, PathBuf};
|
||||
use url::{form_urlencoded, Host, Url};
|
||||
use url::{form_urlencoded, Host, Origin, Url};
|
||||
|
||||
#[test]
|
||||
fn size() {
|
||||
|
@ -23,6 +20,103 @@ fn size() {
|
|||
assert_eq!(size_of::<Url>(), size_of::<Option<Url>>());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative() {
|
||||
let base: Url = "sc://%C3%B1".parse().unwrap();
|
||||
let url = base.join("/resources/testharness.js").unwrap();
|
||||
assert_eq!(url.as_str(), "sc://%C3%B1/resources/testharness.js");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_empty() {
|
||||
let base: Url = "sc://%C3%B1".parse().unwrap();
|
||||
let url = base.join("").unwrap();
|
||||
assert_eq!(url.as_str(), "sc://%C3%B1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_strip_trailing_spaces_from_opaque_path() {
|
||||
let mut url: Url = "data:space ?query".parse().unwrap();
|
||||
url.set_query(None);
|
||||
assert_eq!(url.as_str(), "data:space");
|
||||
|
||||
let mut url: Url = "data:space #hash".parse().unwrap();
|
||||
url.set_fragment(None);
|
||||
assert_eq!(url.as_str(), "data:space");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_empty_host() {
|
||||
let mut base: Url = "moz://foo:bar@servo/baz".parse().unwrap();
|
||||
base.set_username("").unwrap();
|
||||
assert_eq!(base.as_str(), "moz://:bar@servo/baz");
|
||||
base.set_host(None).unwrap();
|
||||
assert_eq!(base.as_str(), "moz:/baz");
|
||||
base.set_host(Some("servo")).unwrap();
|
||||
assert_eq!(base.as_str(), "moz://servo/baz");
|
||||
|
||||
let mut base: Url = "file://server/share/foo/bar".parse().unwrap();
|
||||
base.set_host(None).unwrap();
|
||||
assert_eq!(base.as_str(), "file:///share/foo/bar");
|
||||
|
||||
let mut base: Url = "file://server/share/foo/bar".parse().unwrap();
|
||||
base.set_host(Some("foo")).unwrap();
|
||||
assert_eq!(base.as_str(), "file://foo/share/foo/bar");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_empty_username_and_password() {
|
||||
let mut base: Url = "moz://foo:bar@servo/baz".parse().unwrap();
|
||||
base.set_username("").unwrap();
|
||||
assert_eq!(base.as_str(), "moz://:bar@servo/baz");
|
||||
|
||||
base.set_password(Some("")).unwrap();
|
||||
assert_eq!(base.as_str(), "moz://servo/baz");
|
||||
|
||||
base.set_password(None).unwrap();
|
||||
assert_eq!(base.as_str(), "moz://servo/baz");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_empty_password() {
|
||||
let mut base: Url = "moz://foo:bar@servo/baz".parse().unwrap();
|
||||
|
||||
base.set_password(Some("")).unwrap();
|
||||
assert_eq!(base.as_str(), "moz://foo@servo/baz");
|
||||
|
||||
base.set_password(None).unwrap();
|
||||
assert_eq!(base.as_str(), "moz://foo@servo/baz");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_empty_hostname() {
|
||||
use url::quirks;
|
||||
let mut base: Url = "moz://foo@servo/baz".parse().unwrap();
|
||||
assert!(
|
||||
quirks::set_hostname(&mut base, "").is_err(),
|
||||
"setting an empty hostname to a url with a username should fail"
|
||||
);
|
||||
base = "moz://:pass@servo/baz".parse().unwrap();
|
||||
assert!(
|
||||
quirks::set_hostname(&mut base, "").is_err(),
|
||||
"setting an empty hostname to a url with a password should fail"
|
||||
);
|
||||
base = "moz://servo/baz".parse().unwrap();
|
||||
quirks::set_hostname(&mut base, "").unwrap();
|
||||
assert_eq!(base.as_str(), "moz:///baz");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_empty_query() {
|
||||
let mut base: Url = "moz://example.com/path?query".parse().unwrap();
|
||||
|
||||
base.set_query(Some(""));
|
||||
assert_eq!(base.as_str(), "moz://example.com/path?");
|
||||
|
||||
base.set_query(None);
|
||||
assert_eq!(base.as_str(), "moz://example.com/path");
|
||||
}
|
||||
|
||||
macro_rules! assert_from_file_path {
|
||||
($path: expr) => {
|
||||
assert_from_file_path!($path, $path)
|
||||
|
@ -216,7 +310,6 @@ fn host() {
|
|||
0x2001, 0x0db8, 0x85a3, 0x08d3, 0x1319, 0x8a2e, 0x0370, 0x7344,
|
||||
)),
|
||||
);
|
||||
assert_host("http://1.35.+33.49", Host::Domain("1.35.+33.49"));
|
||||
assert_host(
|
||||
"http://[::]",
|
||||
Host::Ipv6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)),
|
||||
|
@ -231,9 +324,13 @@ fn host() {
|
|||
);
|
||||
assert_host("http://0x1232131", Host::Ipv4(Ipv4Addr::new(1, 35, 33, 49)));
|
||||
assert_host("http://111", Host::Ipv4(Ipv4Addr::new(0, 0, 0, 111)));
|
||||
assert_host("http://2..2.3", Host::Domain("2..2.3"));
|
||||
assert!(Url::parse("http://1.35.+33.49").is_err());
|
||||
assert!(Url::parse("http://2..2.3").is_err());
|
||||
assert!(Url::parse("http://42.0x1232131").is_err());
|
||||
assert!(Url::parse("http://192.168.0.257").is_err());
|
||||
|
||||
assert_eq!(Host::Domain("foo"), Host::Domain("foo").to_owned());
|
||||
assert_ne!(Host::Domain("foo"), Host::Domain("bar").to_owned());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -294,7 +391,7 @@ fn test_serialization() {
|
|||
|
||||
#[test]
|
||||
fn test_form_urlencoded() {
|
||||
let pairs: &[(Cow<str>, Cow<str>)] = &[
|
||||
let pairs: &[(Cow<'_, str>, Cow<'_, str>)] = &[
|
||||
("foo".into(), "é&".into()),
|
||||
("bar".into(), "".into()),
|
||||
("foo".into(), "#".into()),
|
||||
|
@ -315,8 +412,9 @@ fn test_form_serialize() {
|
|||
.append_pair("foo", "é&")
|
||||
.append_pair("bar", "")
|
||||
.append_pair("foo", "#")
|
||||
.append_key_only("json")
|
||||
.finish();
|
||||
assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23");
|
||||
assert_eq!(encoded, "foo=%C3%A9%26&bar=&foo=%23&json");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -324,8 +422,9 @@ fn form_urlencoded_encoding_override() {
|
|||
let encoded = form_urlencoded::Serializer::new(String::new())
|
||||
.encoding_override(Some(&|s| s.as_bytes().to_ascii_uppercase().into()))
|
||||
.append_pair("foo", "bar")
|
||||
.append_key_only("xml")
|
||||
.finish();
|
||||
assert_eq!(encoded, "FOO=BAR");
|
||||
assert_eq!(encoded, "FOO=BAR&XML");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -373,7 +472,7 @@ fn append_trailing_slash() {
|
|||
fn extend_query_pairs_then_mutate() {
|
||||
let mut url: Url = "http://localhost:6767/foo/bar".parse().unwrap();
|
||||
url.query_pairs_mut()
|
||||
.extend_pairs(vec![("auth", "my-token")].into_iter());
|
||||
.extend_pairs(vec![("auth", "my-token")]);
|
||||
url.check_invariants().unwrap();
|
||||
assert_eq!(
|
||||
url.to_string(),
|
||||
|
@ -413,9 +512,9 @@ fn test_set_host() {
|
|||
assert_eq!(url.as_str(), "foobar:/hello");
|
||||
|
||||
let mut url = Url::parse("foo://ș").unwrap();
|
||||
assert_eq!(url.as_str(), "foo://%C8%99/");
|
||||
assert_eq!(url.as_str(), "foo://%C8%99");
|
||||
url.set_host(Some("goșu.ro")).unwrap();
|
||||
assert_eq!(url.as_str(), "foo://go%C8%99u.ro/");
|
||||
assert_eq!(url.as_str(), "foo://go%C8%99u.ro");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -472,6 +571,237 @@ fn test_origin_hash() {
|
|||
assert_ne!(hash(&opaque_origin), hash(&other_opaque_origin));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_origin_blob_equality() {
|
||||
let origin = &Url::parse("http://example.net/").unwrap().origin();
|
||||
let blob_origin = &Url::parse("blob:http://example.net/").unwrap().origin();
|
||||
|
||||
assert_eq!(origin, blob_origin);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_origin_opaque() {
|
||||
assert!(!Origin::new_opaque().is_tuple());
|
||||
assert!(!&Url::parse("blob:malformed//").unwrap().origin().is_tuple())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_origin_unicode_serialization() {
|
||||
let data = [
|
||||
("http://😅.com", "http://😅.com"),
|
||||
("ftp://😅:🙂@🙂.com", "ftp://🙂.com"),
|
||||
("https://user@😅.com", "https://😅.com"),
|
||||
("http://😅.🙂:40", "http://😅.🙂:40"),
|
||||
];
|
||||
for &(unicode_url, expected_serialization) in &data {
|
||||
let origin = Url::parse(unicode_url).unwrap().origin();
|
||||
assert_eq!(origin.unicode_serialization(), *expected_serialization);
|
||||
}
|
||||
|
||||
let ascii_origins = [
|
||||
Url::parse("http://example.net/").unwrap().origin(),
|
||||
Url::parse("http://example.net:80/").unwrap().origin(),
|
||||
Url::parse("http://example.net:81/").unwrap().origin(),
|
||||
Url::parse("http://example.net").unwrap().origin(),
|
||||
Url::parse("http://example.net/hello").unwrap().origin(),
|
||||
Url::parse("https://example.net").unwrap().origin(),
|
||||
Url::parse("ftp://example.net").unwrap().origin(),
|
||||
Url::parse("file://example.net").unwrap().origin(),
|
||||
Url::parse("http://user@example.net/").unwrap().origin(),
|
||||
Url::parse("http://user:pass@example.net/")
|
||||
.unwrap()
|
||||
.origin(),
|
||||
Url::parse("http://127.0.0.1").unwrap().origin(),
|
||||
];
|
||||
for ascii_origin in &ascii_origins {
|
||||
assert_eq!(
|
||||
ascii_origin.ascii_serialization(),
|
||||
ascii_origin.unicode_serialization()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_socket_addrs() {
|
||||
use std::net::ToSocketAddrs;
|
||||
|
||||
let data = [
|
||||
("https://127.0.0.1/", "127.0.0.1", 443),
|
||||
("https://127.0.0.1:9742/", "127.0.0.1", 9742),
|
||||
("custom-protocol://127.0.0.1:9742/", "127.0.0.1", 9742),
|
||||
("custom-protocol://127.0.0.1/", "127.0.0.1", 9743),
|
||||
("https://[::1]/", "::1", 443),
|
||||
("https://[::1]:9742/", "::1", 9742),
|
||||
("custom-protocol://[::1]:9742/", "::1", 9742),
|
||||
("custom-protocol://[::1]/", "::1", 9743),
|
||||
("https://localhost/", "localhost", 443),
|
||||
("https://localhost:9742/", "localhost", 9742),
|
||||
("custom-protocol://localhost:9742/", "localhost", 9742),
|
||||
("custom-protocol://localhost/", "localhost", 9743),
|
||||
];
|
||||
|
||||
for (url_string, host, port) in &data {
|
||||
let url = url::Url::parse(url_string).unwrap();
|
||||
let addrs = url
|
||||
.socket_addrs(|| match url.scheme() {
|
||||
"custom-protocol" => Some(9743),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
Some(addrs[0]),
|
||||
(*host, *port).to_socket_addrs().unwrap().next()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_no_base_url() {
|
||||
let mut no_base_url = Url::parse("mailto:test@example.net").unwrap();
|
||||
|
||||
assert!(no_base_url.cannot_be_a_base());
|
||||
assert!(no_base_url.path_segments().is_none());
|
||||
assert!(no_base_url.path_segments_mut().is_err());
|
||||
assert!(no_base_url.set_host(Some("foo")).is_err());
|
||||
assert!(no_base_url
|
||||
.set_ip_host("127.0.0.1".parse().unwrap())
|
||||
.is_err());
|
||||
|
||||
no_base_url.set_path("/foo");
|
||||
assert_eq!(no_base_url.path(), "%2Ffoo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_domain() {
|
||||
let url = Url::parse("https://127.0.0.1/").unwrap();
|
||||
assert_eq!(url.domain(), None);
|
||||
|
||||
let url = Url::parse("mailto:test@example.net").unwrap();
|
||||
assert_eq!(url.domain(), None);
|
||||
|
||||
let url = Url::parse("https://example.com/").unwrap();
|
||||
assert_eq!(url.domain(), Some("example.com"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_query() {
|
||||
let url = Url::parse("https://example.com/products?page=2#fragment").unwrap();
|
||||
assert_eq!(url.query(), Some("page=2"));
|
||||
assert_eq!(
|
||||
url.query_pairs().next(),
|
||||
Some((Cow::Borrowed("page"), Cow::Borrowed("2")))
|
||||
);
|
||||
|
||||
let url = Url::parse("https://example.com/products").unwrap();
|
||||
assert!(url.query().is_none());
|
||||
assert_eq!(url.query_pairs().count(), 0);
|
||||
|
||||
let url = Url::parse("https://example.com/?country=español").unwrap();
|
||||
assert_eq!(url.query(), Some("country=espa%C3%B1ol"));
|
||||
assert_eq!(
|
||||
url.query_pairs().next(),
|
||||
Some((Cow::Borrowed("country"), Cow::Borrowed("español")))
|
||||
);
|
||||
|
||||
let url = Url::parse("https://example.com/products?page=2&sort=desc").unwrap();
|
||||
assert_eq!(url.query(), Some("page=2&sort=desc"));
|
||||
let mut pairs = url.query_pairs();
|
||||
assert_eq!(pairs.count(), 2);
|
||||
assert_eq!(
|
||||
pairs.next(),
|
||||
Some((Cow::Borrowed("page"), Cow::Borrowed("2")))
|
||||
);
|
||||
assert_eq!(
|
||||
pairs.next(),
|
||||
Some((Cow::Borrowed("sort"), Cow::Borrowed("desc")))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fragment() {
|
||||
let url = Url::parse("https://example.com/#fragment").unwrap();
|
||||
assert_eq!(url.fragment(), Some("fragment"));
|
||||
|
||||
let url = Url::parse("https://example.com/").unwrap();
|
||||
assert_eq!(url.fragment(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_ip_host() {
|
||||
let mut url = Url::parse("http://example.com").unwrap();
|
||||
|
||||
url.set_ip_host("127.0.0.1".parse().unwrap()).unwrap();
|
||||
assert_eq!(url.host_str(), Some("127.0.0.1"));
|
||||
|
||||
url.set_ip_host("::1".parse().unwrap()).unwrap();
|
||||
assert_eq!(url.host_str(), Some("[::1]"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_href() {
|
||||
use url::quirks::set_href;
|
||||
|
||||
let mut url = Url::parse("https://existing.url").unwrap();
|
||||
|
||||
assert!(set_href(&mut url, "mal//formed").is_err());
|
||||
|
||||
assert!(set_href(
|
||||
&mut url,
|
||||
"https://user:pass@domain.com:9742/path/file.ext?key=val&key2=val2#fragment"
|
||||
)
|
||||
.is_ok());
|
||||
assert_eq!(
|
||||
url,
|
||||
Url::parse("https://user:pass@domain.com:9742/path/file.ext?key=val&key2=val2#fragment")
|
||||
.unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_domain_encoding_quirks() {
|
||||
use url::quirks::{domain_to_ascii, domain_to_unicode};
|
||||
|
||||
let data = [
|
||||
("http://example.com", "", ""),
|
||||
("😅.🙂", "xn--j28h.xn--938h", "😅.🙂"),
|
||||
("example.com", "example.com", "example.com"),
|
||||
("mailto:test@example.net", "", ""),
|
||||
];
|
||||
|
||||
for url in &data {
|
||||
assert_eq!(domain_to_ascii(url.0), url.1);
|
||||
assert_eq!(domain_to_unicode(url.0), url.2);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "expose_internals")]
|
||||
#[test]
|
||||
fn test_expose_internals() {
|
||||
use url::quirks::internal_components;
|
||||
use url::quirks::InternalComponents;
|
||||
|
||||
let url = Url::parse("https://example.com/path/file.ext?key=val&key2=val2#fragment").unwrap();
|
||||
let InternalComponents {
|
||||
scheme_end,
|
||||
username_end,
|
||||
host_start,
|
||||
host_end,
|
||||
port,
|
||||
path_start,
|
||||
query_start,
|
||||
fragment_start,
|
||||
} = internal_components(&url);
|
||||
|
||||
assert_eq!(scheme_end, 5);
|
||||
assert_eq!(username_end, 8);
|
||||
assert_eq!(host_start, 8);
|
||||
assert_eq!(host_end, 19);
|
||||
assert_eq!(port, None);
|
||||
assert_eq!(path_start, 19);
|
||||
assert_eq!(query_start, Some(33));
|
||||
assert_eq!(fragment_start, Some(51));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_windows_unc_path() {
|
||||
if !cfg!(windows) {
|
||||
|
@ -536,6 +866,38 @@ fn test_syntax_violation_callback_lifetimes() {
|
|||
assert_eq!(violation.take(), Some(Backslash));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_syntax_violation_callback_types() {
|
||||
use url::SyntaxViolation::*;
|
||||
|
||||
let data = [
|
||||
("http://mozilla.org/\\foo", Backslash, "backslash"),
|
||||
(" http://mozilla.org", C0SpaceIgnored, "leading or trailing control or space character are ignored in URLs"),
|
||||
("http://user:pass@mozilla.org", EmbeddedCredentials, "embedding authentication information (username or password) in an URL is not recommended"),
|
||||
("http:///mozilla.org", ExpectedDoubleSlash, "expected //"),
|
||||
("file:/foo.txt", ExpectedFileDoubleSlash, "expected // after file:"),
|
||||
("file://mozilla.org/c:/file.txt", FileWithHostAndWindowsDrive, "file: with host and Windows drive letter"),
|
||||
("http://mozilla.org/^", NonUrlCodePoint, "non-URL code point"),
|
||||
("http://mozilla.org/#\x000", NullInFragment, "NULL characters are ignored in URL fragment identifiers"),
|
||||
("http://mozilla.org/%1", PercentDecode, "expected 2 hex digits after %"),
|
||||
("http://mozilla.org\t/foo", TabOrNewlineIgnored, "tabs or newlines are ignored in URLs"),
|
||||
("http://user@:pass@mozilla.org", UnencodedAtSign, "unencoded @ sign in username or password")
|
||||
];
|
||||
|
||||
for test_case in &data {
|
||||
let violation = Cell::new(None);
|
||||
Url::options()
|
||||
.syntax_violation_callback(Some(&|v| violation.set(Some(v))))
|
||||
.parse(test_case.0)
|
||||
.unwrap();
|
||||
|
||||
let v = violation.take();
|
||||
assert_eq!(v, Some(test_case.1));
|
||||
assert_eq!(v.unwrap().description(), test_case.2);
|
||||
assert_eq!(v.unwrap().to_string(), test_case.2);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_options_reuse() {
|
||||
use url::SyntaxViolation::*;
|
||||
|
@ -550,3 +912,397 @@ fn test_options_reuse() {
|
|||
assert_eq!(url.as_str(), "http://mozilla.org/sub/path");
|
||||
assert_eq!(*violations.borrow(), vec!(ExpectedDoubleSlash, Backslash));
|
||||
}
|
||||
|
||||
/// https://github.com/servo/rust-url/issues/505
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_url_from_file_path() {
|
||||
use std::path::PathBuf;
|
||||
use url::Url;
|
||||
|
||||
let p = PathBuf::from("c:///");
|
||||
let u = Url::from_file_path(p).unwrap();
|
||||
let path = u.to_file_path().unwrap();
|
||||
assert_eq!("C:\\", path.to_str().unwrap());
|
||||
}
|
||||
|
||||
/// https://github.com/servo/rust-url/issues/505
|
||||
#[cfg(not(windows))]
|
||||
#[test]
|
||||
fn test_url_from_file_path() {
|
||||
use std::path::PathBuf;
|
||||
use url::Url;
|
||||
|
||||
let p = PathBuf::from("/c:/");
|
||||
let u = Url::from_file_path(p).unwrap();
|
||||
let path = u.to_file_path().unwrap();
|
||||
assert_eq!("/c:/", path.to_str().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_special_path() {
|
||||
let mut db_url = url::Url::parse("postgres://postgres@localhost/").unwrap();
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/");
|
||||
db_url.set_path("diesel_foo");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/diesel_foo");
|
||||
assert_eq!(db_url.path(), "/diesel_foo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_special_path2() {
|
||||
let mut db_url = url::Url::parse("postgres://postgres@localhost/").unwrap();
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/");
|
||||
db_url.set_path("");
|
||||
assert_eq!(db_url.path(), "");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost");
|
||||
db_url.set_path("foo");
|
||||
assert_eq!(db_url.path(), "/foo");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/foo");
|
||||
db_url.set_path("/bar");
|
||||
assert_eq!(db_url.path(), "/bar");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/bar");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_special_path3() {
|
||||
let mut db_url = url::Url::parse("postgres://postgres@localhost/").unwrap();
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/");
|
||||
db_url.set_path("/");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/");
|
||||
assert_eq!(db_url.path(), "/");
|
||||
db_url.set_path("/foo");
|
||||
assert_eq!(db_url.as_str(), "postgres://postgres@localhost/foo");
|
||||
assert_eq!(db_url.path(), "/foo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_scheme_to_file_with_host() {
|
||||
let mut url: Url = "http://localhost:6767/foo/bar".parse().unwrap();
|
||||
let result = url.set_scheme("file");
|
||||
assert_eq!(url.to_string(), "http://localhost:6767/foo/bar");
|
||||
assert_eq!(result, Err(()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_panic() {
|
||||
let mut url = Url::parse("arhttpsps:/.//eom/dae.com/\\\\t\\:").unwrap();
|
||||
url::quirks::set_hostname(&mut url, "//eom/datcom/\\\\t\\://eom/data.cs").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_null_host_with_leading_empty_path_segment() {
|
||||
// since Note in item 3 of URL serializing in the URL Standard
|
||||
// https://url.spec.whatwg.org/#url-serializing
|
||||
let url = Url::parse("m:/.//\\").unwrap();
|
||||
let encoded = url.as_str();
|
||||
let reparsed = Url::parse(encoded).unwrap();
|
||||
assert_eq!(reparsed, url);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pop_if_empty_in_bounds() {
|
||||
let mut url = Url::parse("m://").unwrap();
|
||||
let mut segments = url.path_segments_mut().unwrap();
|
||||
segments.pop_if_empty();
|
||||
segments.pop();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_slicing() {
|
||||
use url::Position::*;
|
||||
|
||||
#[derive(Default)]
|
||||
struct ExpectedSlices<'a> {
|
||||
full: &'a str,
|
||||
scheme: &'a str,
|
||||
username: &'a str,
|
||||
password: &'a str,
|
||||
host: &'a str,
|
||||
port: &'a str,
|
||||
path: &'a str,
|
||||
query: &'a str,
|
||||
fragment: &'a str,
|
||||
}
|
||||
|
||||
let data = [
|
||||
ExpectedSlices {
|
||||
full: "https://user:pass@domain.com:9742/path/file.ext?key=val&key2=val2#fragment",
|
||||
scheme: "https",
|
||||
username: "user",
|
||||
password: "pass",
|
||||
host: "domain.com",
|
||||
port: "9742",
|
||||
path: "/path/file.ext",
|
||||
query: "key=val&key2=val2",
|
||||
fragment: "fragment",
|
||||
},
|
||||
ExpectedSlices {
|
||||
full: "https://domain.com:9742/path/file.ext#fragment",
|
||||
scheme: "https",
|
||||
host: "domain.com",
|
||||
port: "9742",
|
||||
path: "/path/file.ext",
|
||||
fragment: "fragment",
|
||||
..Default::default()
|
||||
},
|
||||
ExpectedSlices {
|
||||
full: "https://domain.com:9742/path/file.ext",
|
||||
scheme: "https",
|
||||
host: "domain.com",
|
||||
port: "9742",
|
||||
path: "/path/file.ext",
|
||||
..Default::default()
|
||||
},
|
||||
ExpectedSlices {
|
||||
full: "blob:blob-info",
|
||||
scheme: "blob",
|
||||
path: "blob-info",
|
||||
..Default::default()
|
||||
},
|
||||
];
|
||||
|
||||
for expected_slices in &data {
|
||||
let url = Url::parse(expected_slices.full).unwrap();
|
||||
assert_eq!(&url[..], expected_slices.full);
|
||||
assert_eq!(&url[BeforeScheme..AfterScheme], expected_slices.scheme);
|
||||
assert_eq!(
|
||||
&url[BeforeUsername..AfterUsername],
|
||||
expected_slices.username
|
||||
);
|
||||
assert_eq!(
|
||||
&url[BeforePassword..AfterPassword],
|
||||
expected_slices.password
|
||||
);
|
||||
assert_eq!(&url[BeforeHost..AfterHost], expected_slices.host);
|
||||
assert_eq!(&url[BeforePort..AfterPort], expected_slices.port);
|
||||
assert_eq!(&url[BeforePath..AfterPath], expected_slices.path);
|
||||
assert_eq!(&url[BeforeQuery..AfterQuery], expected_slices.query);
|
||||
assert_eq!(
|
||||
&url[BeforeFragment..AfterFragment],
|
||||
expected_slices.fragment
|
||||
);
|
||||
assert_eq!(&url[..AfterFragment], expected_slices.full);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_make_relative() {
|
||||
let tests = [
|
||||
(
|
||||
"http://127.0.0.1:8080/test",
|
||||
"http://127.0.0.1:8080/test",
|
||||
"",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test",
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"test/",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test",
|
||||
"../test",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/",
|
||||
"http://127.0.0.1:8080/?foo=bar#123",
|
||||
"?foo=bar#123",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/",
|
||||
"http://127.0.0.1:8080/test/video",
|
||||
"test/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test",
|
||||
"http://127.0.0.1:8080/test/video",
|
||||
"test/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test/video",
|
||||
"video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test",
|
||||
"http://127.0.0.1:8080/test2/video",
|
||||
"test2/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test2/video",
|
||||
"../test2/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/bla",
|
||||
"http://127.0.0.1:8080/test2/video",
|
||||
"../test2/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/bla/",
|
||||
"http://127.0.0.1:8080/test2/video",
|
||||
"../../test2/video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/?foo=bar#123",
|
||||
"http://127.0.0.1:8080/test/video",
|
||||
"video",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test/video?baz=meh#456",
|
||||
"video?baz=meh#456",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test",
|
||||
"http://127.0.0.1:8080/test?baz=meh#456",
|
||||
"?baz=meh#456",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test?baz=meh#456",
|
||||
"../test?baz=meh#456",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/",
|
||||
"http://127.0.0.1:8080/test/?baz=meh#456",
|
||||
"?baz=meh#456",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/test/?foo=bar#123",
|
||||
"http://127.0.0.1:8080/test/video?baz=meh#456",
|
||||
"video?baz=meh#456",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/file.txt",
|
||||
"http://127.0.0.1:8080/test/file.txt",
|
||||
"test/file.txt",
|
||||
),
|
||||
(
|
||||
"http://127.0.0.1:8080/not_equal.txt",
|
||||
"http://127.0.0.1:8080/test/file.txt",
|
||||
"test/file.txt",
|
||||
),
|
||||
];
|
||||
|
||||
for (base, uri, relative) in &tests {
|
||||
let base_uri = url::Url::parse(base).unwrap();
|
||||
let relative_uri = url::Url::parse(uri).unwrap();
|
||||
let make_relative = base_uri.make_relative(&relative_uri).unwrap();
|
||||
assert_eq!(
|
||||
make_relative, *relative,
|
||||
"base: {}, uri: {}, relative: {}",
|
||||
base, uri, relative
|
||||
);
|
||||
assert_eq!(
|
||||
base_uri.join(relative).unwrap().as_str(),
|
||||
*uri,
|
||||
"base: {}, uri: {}, relative: {}",
|
||||
base,
|
||||
uri,
|
||||
relative
|
||||
);
|
||||
}
|
||||
|
||||
let error_tests = [
|
||||
("http://127.0.0.1:8080/", "https://127.0.0.1:8080/test/"),
|
||||
("http://127.0.0.1:8080/", "http://127.0.0.1:8081/test/"),
|
||||
("http://127.0.0.1:8080/", "http://127.0.0.2:8080/test/"),
|
||||
("mailto:a@example.com", "mailto:b@example.com"),
|
||||
];
|
||||
|
||||
for (base, uri) in &error_tests {
|
||||
let base_uri = url::Url::parse(base).unwrap();
|
||||
let relative_uri = url::Url::parse(uri).unwrap();
|
||||
let make_relative = base_uri.make_relative(&relative_uri);
|
||||
assert_eq!(make_relative, None, "base: {}, uri: {}", base, uri);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_has_authority() {
|
||||
let url = Url::parse("mailto:joe@example.com").unwrap();
|
||||
assert!(!url.has_authority());
|
||||
let url = Url::parse("unix:/run/foo.socket").unwrap();
|
||||
assert!(!url.has_authority());
|
||||
let url = Url::parse("file:///tmp/foo").unwrap();
|
||||
assert!(url.has_authority());
|
||||
let url = Url::parse("http://example.com/tmp/foo").unwrap();
|
||||
assert!(url.has_authority());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_authority() {
|
||||
let url = Url::parse("mailto:joe@example.com").unwrap();
|
||||
assert_eq!(url.authority(), "");
|
||||
let url = Url::parse("unix:/run/foo.socket").unwrap();
|
||||
assert_eq!(url.authority(), "");
|
||||
let url = Url::parse("file:///tmp/foo").unwrap();
|
||||
assert_eq!(url.authority(), "");
|
||||
let url = Url::parse("http://example.com/tmp/foo").unwrap();
|
||||
assert_eq!(url.authority(), "example.com");
|
||||
let url = Url::parse("ftp://127.0.0.1:21/").unwrap();
|
||||
assert_eq!(url.authority(), "127.0.0.1");
|
||||
let url = Url::parse("ftp://user@127.0.0.1:2121/").unwrap();
|
||||
assert_eq!(url.authority(), "user@127.0.0.1:2121");
|
||||
let url = Url::parse("https://:@example.com/").unwrap();
|
||||
assert_eq!(url.authority(), "example.com");
|
||||
let url = Url::parse("https://:password@[::1]:8080/").unwrap();
|
||||
assert_eq!(url.authority(), ":password@[::1]:8080");
|
||||
let url = Url::parse("gopher://user:@àlex.example.com:70").unwrap();
|
||||
assert_eq!(url.authority(), "user@%C3%A0lex.example.com:70");
|
||||
let url = Url::parse("irc://àlex:àlex@àlex.рф.example.com:6667/foo").unwrap();
|
||||
assert_eq!(
|
||||
url.authority(),
|
||||
"%C3%A0lex:%C3%A0lex@%C3%A0lex.%D1%80%D1%84.example.com:6667"
|
||||
);
|
||||
let url = Url::parse("https://àlex:àlex@àlex.рф.example.com:443/foo").unwrap();
|
||||
assert_eq!(
|
||||
url.authority(),
|
||||
"%C3%A0lex:%C3%A0lex@xn--lex-8ka.xn--p1ai.example.com"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// https://github.com/servo/rust-url/issues/838
|
||||
fn test_file_with_drive() {
|
||||
let s1 = "fIlE:p:?../";
|
||||
let url = url::Url::parse(s1).unwrap();
|
||||
assert_eq!(url.to_string(), "file:///p:?../");
|
||||
assert_eq!(url.path(), "/p:");
|
||||
|
||||
let testcases = [
|
||||
("a", "file:///p:/a"),
|
||||
("", "file:///p:?../"),
|
||||
("?x", "file:///p:?x"),
|
||||
(".", "file:///p:/"),
|
||||
("..", "file:///p:/"),
|
||||
("../", "file:///p:/"),
|
||||
];
|
||||
|
||||
for case in &testcases {
|
||||
let url2 = url::Url::join(&url, case.0).unwrap();
|
||||
assert_eq!(url2.to_string(), case.1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Similar to test_file_with_drive, but with a path
|
||||
/// that could be confused for a drive.
|
||||
fn test_file_with_drive_and_path() {
|
||||
let s1 = "fIlE:p:/x|?../";
|
||||
let url = url::Url::parse(s1).unwrap();
|
||||
assert_eq!(url.to_string(), "file:///p:/x|?../");
|
||||
assert_eq!(url.path(), "/p:/x|");
|
||||
let s2 = "a";
|
||||
let url2 = url::Url::join(&url, s2).unwrap();
|
||||
assert_eq!(url2.to_string(), "file:///p:/a");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn issue_864() {
|
||||
let mut url = url::Url::parse("file://").unwrap();
|
||||
dbg!(&url);
|
||||
url.set_path("x");
|
||||
dbg!(&url);
|
||||
}
|
||||
|
|
The diff for this file is not shown because it is too large
|
@ -0,0 +1,477 @@
|
|||
// Copyright 2013-2014 The rust-url developers.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Data-driven tests imported from web-platform-tests
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Write;
|
||||
use std::panic;
|
||||
|
||||
use serde_json::Value;
|
||||
use url::Url;
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
struct UrlTest {
|
||||
input: String,
|
||||
base: Option<String>,
|
||||
#[serde(flatten)]
|
||||
result: UrlTestResult,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
#[serde(untagged)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
enum UrlTestResult {
|
||||
Ok(UrlTestOk),
|
||||
Fail(UrlTestFail),
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
struct UrlTestOk {
|
||||
href: String,
|
||||
protocol: String,
|
||||
username: String,
|
||||
password: String,
|
||||
host: String,
|
||||
hostname: String,
|
||||
port: String,
|
||||
pathname: String,
|
||||
search: String,
|
||||
hash: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
struct UrlTestFail {
|
||||
failure: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
struct SetterTest {
|
||||
href: String,
|
||||
new_value: String,
|
||||
expected: SetterTestExpected,
|
||||
}
|
||||
|
||||
#[derive(Debug, serde::Deserialize)]
|
||||
struct SetterTestExpected {
|
||||
href: Option<String>,
|
||||
protocol: Option<String>,
|
||||
username: Option<String>,
|
||||
password: Option<String>,
|
||||
host: Option<String>,
|
||||
hostname: Option<String>,
|
||||
port: Option<String>,
|
||||
pathname: Option<String>,
|
||||
search: Option<String>,
|
||||
hash: Option<String>,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let mut filter = None;
|
||||
let mut args = std::env::args().skip(1);
|
||||
while filter.is_none() {
|
||||
if let Some(arg) = args.next() {
|
||||
if arg == "--test-threads" {
|
||||
args.next();
|
||||
continue;
|
||||
}
|
||||
filter = Some(arg);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let mut expected_failures = include_str!("expected_failures.txt")
|
||||
.lines()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut errors = vec![];
|
||||
|
||||
// Copied from https://github.com/web-platform-tests/wpt/blob/master/url/
|
||||
let url_json: Vec<Value> = serde_json::from_str(include_str!("urltestdata.json"))
|
||||
.expect("JSON parse error in urltestdata.json");
|
||||
let url_tests = url_json
|
||||
.into_iter()
|
||||
.filter(|val| val.is_object())
|
||||
.map(|val| serde_json::from_value::<UrlTest>(val).expect("parsing failed"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let setter_json: HashMap<String, Value> =
|
||||
serde_json::from_str(include_str!("setters_tests.json"))
|
||||
.expect("JSON parse error in setters_tests.json");
|
||||
let setter_tests = setter_json
|
||||
.into_iter()
|
||||
.filter(|(k, _)| k != "comment")
|
||||
.map(|(k, v)| {
|
||||
let test = serde_json::from_value::<Vec<SetterTest>>(v).expect("parsing failed");
|
||||
(k, test)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
for url_test in url_tests {
|
||||
let mut name = format!("<{}>", url_test.input.escape_default());
|
||||
if let Some(base) = &url_test.base {
|
||||
write!(&mut name, " against <{}>", base.escape_default()).unwrap();
|
||||
}
|
||||
if should_skip(&name, filter.as_deref()) {
|
||||
continue;
|
||||
}
|
||||
print!("{} ... ", name);
|
||||
|
||||
let res = run_url_test(url_test);
|
||||
report(name, res, &mut errors, &mut expected_failures);
|
||||
}
|
||||
|
||||
for (kind, tests) in setter_tests {
|
||||
for test in tests {
|
||||
let name = format!(
|
||||
"<{}> set {} to <{}>",
|
||||
test.href.escape_default(),
|
||||
kind,
|
||||
test.new_value.escape_default()
|
||||
);
|
||||
if should_skip(&name, filter.as_deref()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
print!("{} ... ", name);
|
||||
|
||||
let res = run_setter_test(&kind, test);
|
||||
report(name, res, &mut errors, &mut expected_failures);
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
println!("====================");
|
||||
println!();
|
||||
|
||||
if !errors.is_empty() {
|
||||
println!("errors:");
|
||||
println!();
|
||||
|
||||
for (name, err) in errors {
|
||||
println!(" name: {}", name);
|
||||
println!(" err: {}", err);
|
||||
println!();
|
||||
}
|
||||
|
||||
std::process::exit(1);
|
||||
} else {
|
||||
println!("all tests passed");
|
||||
}
|
||||
|
||||
if !expected_failures.is_empty() && filter.is_none() {
|
||||
println!();
|
||||
println!("====================");
|
||||
println!();
|
||||
println!("tests were expected to fail but did not run:");
|
||||
println!();
|
||||
|
||||
for name in expected_failures {
|
||||
println!(" {}", name);
|
||||
}
|
||||
|
||||
println!();
|
||||
println!("if these tests were removed, update expected_failures.txt");
|
||||
println!();
|
||||
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
fn should_skip(name: &str, filter: Option<&str>) -> bool {
|
||||
match filter {
|
||||
Some(filter) => !name.contains(filter),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn report(
|
||||
name: String,
|
||||
res: Result<(), String>,
|
||||
errors: &mut Vec<(String, String)>,
|
||||
expected_failures: &mut Vec<&str>,
|
||||
) {
|
||||
let expected_failure = expected_failures.contains(&&*name);
|
||||
expected_failures.retain(|&s| s != &*name);
|
||||
match res {
|
||||
Ok(()) => {
|
||||
if expected_failure {
|
||||
println!("🟠 (unexpected success)");
|
||||
errors.push((name, "unexpected success".to_string()));
|
||||
} else {
|
||||
println!("✅");
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
if expected_failure {
|
||||
println!("✅ (expected fail)");
|
||||
} else {
|
||||
println!("❌");
|
||||
errors.push((name, err));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn run_url_test(
|
||||
UrlTest {
|
||||
base,
|
||||
input,
|
||||
result,
|
||||
}: UrlTest,
|
||||
) -> Result<(), String> {
|
||||
let base = match base {
|
||||
Some(base) => {
|
||||
let base = panic::catch_unwind(|| Url::parse(&base))
|
||||
.map_err(|_| "panicked while parsing base".to_string())?
|
||||
.map_err(|e| format!("errored while parsing base: {}", e))?;
|
||||
Some(base)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let res = panic::catch_unwind(move || Url::options().base_url(base.as_ref()).parse(&input))
|
||||
.map_err(|_| "panicked while parsing input".to_string())?
|
||||
.map_err(|e| format!("errored while parsing input: {}", e));
|
||||
|
||||
match result {
|
||||
UrlTestResult::Ok(ok) => check_url_ok(res, ok),
|
||||
UrlTestResult::Fail(fail) => {
|
||||
assert!(fail.failure);
|
||||
if res.is_ok() {
|
||||
return Err("expected failure, but parsed successfully".to_string());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_url_ok(res: Result<Url, String>, ok: UrlTestOk) -> Result<(), String> {
|
||||
let url = match res {
|
||||
Ok(url) => url,
|
||||
Err(err) => {
|
||||
return Err(format!("expected success, but errored: {:?}", err));
|
||||
}
|
||||
};
|
||||
|
||||
let href = url::quirks::href(&url);
|
||||
if href != ok.href {
|
||||
return Err(format!("expected href {:?}, but got {:?}", ok.href, href));
|
||||
}
|
||||
|
||||
let protocol = url::quirks::protocol(&url);
|
||||
if protocol != ok.protocol {
|
||||
return Err(format!(
|
||||
"expected protocol {:?}, but got {:?}",
|
||||
ok.protocol, protocol
|
||||
));
|
||||
}
|
||||
|
||||
let username = url::quirks::username(&url);
|
||||
if username != ok.username {
|
||||
return Err(format!(
|
||||
"expected username {:?}, but got {:?}",
|
||||
ok.username, username
|
||||
));
|
||||
}
|
||||
|
||||
let password = url::quirks::password(&url);
|
||||
if password != ok.password {
|
||||
return Err(format!(
|
||||
"expected password {:?}, but got {:?}",
|
||||
ok.password, password
|
||||
));
|
||||
}
|
||||
|
||||
let host = url::quirks::host(&url);
|
||||
if host != ok.host {
|
||||
return Err(format!("expected host {:?}, but got {:?}", ok.host, host));
|
||||
}
|
||||
|
||||
let hostname = url::quirks::hostname(&url);
|
||||
if hostname != ok.hostname {
|
||||
return Err(format!(
|
||||
"expected hostname {:?}, but got {:?}",
|
||||
ok.hostname, hostname
|
||||
));
|
||||
}
|
||||
|
||||
let port = url::quirks::port(&url);
|
||||
if port != ok.port {
|
||||
return Err(format!("expected port {:?}, but got {:?}", ok.port, port));
|
||||
}
|
||||
|
||||
let pathname = url::quirks::pathname(&url);
|
||||
if pathname != ok.pathname {
|
||||
return Err(format!(
|
||||
"expected pathname {:?}, but got {:?}",
|
||||
ok.pathname, pathname
|
||||
));
|
||||
}
|
||||
|
||||
let search = url::quirks::search(&url);
|
||||
if search != ok.search {
|
||||
return Err(format!(
|
||||
"expected search {:?}, but got {:?}",
|
||||
ok.search, search
|
||||
));
|
||||
}
|
||||
|
||||
let hash = url::quirks::hash(&url);
|
||||
if hash != ok.hash {
|
||||
return Err(format!("expected hash {:?}, but got {:?}", ok.hash, hash));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_setter_test(
|
||||
kind: &str,
|
||||
SetterTest {
|
||||
href,
|
||||
new_value,
|
||||
expected,
|
||||
}: SetterTest,
|
||||
) -> Result<(), String> {
|
||||
let mut url = panic::catch_unwind(|| Url::parse(&href))
|
||||
.map_err(|_| "panicked while parsing href".to_string())?
|
||||
.map_err(|e| format!("errored while parsing href: {}", e))?;
|
||||
|
||||
let url = panic::catch_unwind(move || {
|
||||
match kind {
|
||||
"protocol" => {
|
||||
url::quirks::set_protocol(&mut url, &new_value).ok();
|
||||
}
|
||||
"username" => {
|
||||
url::quirks::set_username(&mut url, &new_value).ok();
|
||||
}
|
||||
"password" => {
|
||||
url::quirks::set_password(&mut url, &new_value).ok();
|
||||
}
|
||||
"host" => {
|
||||
url::quirks::set_host(&mut url, &new_value).ok();
|
||||
}
|
||||
"hostname" => {
|
||||
url::quirks::set_hostname(&mut url, &new_value).ok();
|
||||
}
|
||||
"port" => {
|
||||
url::quirks::set_port(&mut url, &new_value).ok();
|
||||
}
|
||||
"pathname" => url::quirks::set_pathname(&mut url, &new_value),
|
||||
"search" => url::quirks::set_search(&mut url, &new_value),
|
||||
"hash" => url::quirks::set_hash(&mut url, &new_value),
|
||||
_ => panic!("unknown setter kind: {:?}", kind),
|
||||
};
|
||||
url
|
||||
})
|
||||
.map_err(|_| "panicked while setting value".to_string())?;
|
||||
|
||||
if let Some(expected_href) = expected.href {
|
||||
let href = url::quirks::href(&url);
|
||||
if href != expected_href {
|
||||
return Err(format!(
|
||||
"expected href {:?}, but got {:?}",
|
||||
expected_href, href
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_protocol) = expected.protocol {
|
||||
let protocol = url::quirks::protocol(&url);
|
||||
if protocol != expected_protocol {
|
||||
return Err(format!(
|
||||
"expected protocol {:?}, but got {:?}",
|
||||
expected_protocol, protocol
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_username) = expected.username {
|
||||
let username = url::quirks::username(&url);
|
||||
if username != expected_username {
|
||||
return Err(format!(
|
||||
"expected username {:?}, but got {:?}",
|
||||
expected_username, username
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_password) = expected.password {
|
||||
let password = url::quirks::password(&url);
|
||||
if password != expected_password {
|
||||
return Err(format!(
|
||||
"expected password {:?}, but got {:?}",
|
||||
expected_password, password
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_host) = expected.host {
|
||||
let host = url::quirks::host(&url);
|
||||
if host != expected_host {
|
||||
return Err(format!(
|
||||
"expected host {:?}, but got {:?}",
|
||||
expected_host, host
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_hostname) = expected.hostname {
|
||||
let hostname = url::quirks::hostname(&url);
|
||||
if hostname != expected_hostname {
|
||||
return Err(format!(
|
||||
"expected hostname {:?}, but got {:?}",
|
||||
expected_hostname, hostname
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_port) = expected.port {
|
||||
let port = url::quirks::port(&url);
|
||||
if port != expected_port {
|
||||
return Err(format!(
|
||||
"expected port {:?}, but got {:?}",
|
||||
expected_port, port
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_pathname) = expected.pathname {
|
||||
let pathname = url::quirks::pathname(&url);
|
||||
if pathname != expected_pathname {
|
||||
return Err(format!(
|
||||
"expected pathname {:?}, but got {:?}",
|
||||
expected_pathname, pathname
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_search) = expected.search {
|
||||
let search = url::quirks::search(&url);
|
||||
if search != expected_search {
|
||||
return Err(format!(
|
||||
"expected search {:?}, but got {:?}",
|
||||
expected_search, search
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(expected_hash) = expected.hash {
|
||||
let hash = url::quirks::hash(&url);
|
||||
if hash != expected_hash {
|
||||
return Err(format!(
|
||||
"expected hash {:?}, but got {:?}",
|
||||
expected_hash, hash
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
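For orientation, here is a rough sketch of the shape of a single entry that the harness above reads from urltestdata.json and deserializes into the UrlTest struct defined earlier in this file. The concrete values are made up for illustration; the real cases come from web-platform-tests.

// Illustrative only: assumes the UrlTest / UrlTestResult definitions above
// and the serde_json dependency the harness already uses.
fn example_entry() {
    let entry = r#"{
        "input": "/path",
        "base": "http://example.org/",
        "href": "http://example.org/path",
        "protocol": "http:",
        "username": "",
        "password": "",
        "host": "example.org",
        "hostname": "example.org",
        "port": "",
        "pathname": "/path",
        "search": "",
        "hash": ""
    }"#;
    // All success fields are present, so the untagged enum resolves to Ok.
    let test: UrlTest = serde_json::from_str(entry).expect("entry should deserialize");
    assert_eq!(test.input, "/path");
    assert!(matches!(test.result, UrlTestResult::Ok(_)));
}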
@ -16,5 +16,5 @@ nserror = { path = "../../../../xpcom/rust/nserror" }
|
|||
nsstring = { path = "../../../../xpcom/rust/nsstring" }
|
||||
storage = { path = "../../../../storage/rust" }
|
||||
storage_variant = { path = "../../../../storage/variant" }
|
||||
url = "2.0"
|
||||
url = "2.4"
|
||||
xpcom = { path = "../../../../xpcom/rust/xpcom" }
|
||||
|
|
|
@ -213,7 +213,7 @@ impl<'s> Store<'s> {
|
|||
let title = String::from_utf16(&*raw_title)?;
|
||||
url.map(|url| Content::Bookmark {
|
||||
title,
|
||||
url_href: url.into_string(),
|
||||
url_href: url.into(),
|
||||
})
|
||||
}
|
||||
Kind::Folder | Kind::Livemark => {
|
||||
|
|
|
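Editorial note on the one-line change above: Url::into_string() was removed from newer releases of the url crate, so callers now convert through the From<Url> for String impl instead. A minimal sketch of the equivalent conversion (illustrative only, not part of this patch):

use url::Url;

fn main() {
    let url = Url::parse("https://example.com/tmp/foo").unwrap();
    // Pre-2.3 API: let href = url.into_string();
    // Current API: convert via Into<String> / From<Url> for String.
    let href: String = url.into();
    assert_eq!(href, "https://example.com/tmp/foo");
}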
@ -15,7 +15,7 @@ anyhow = "1"
|
|||
bytes = "1.0"
|
||||
serde_json = "1"
|
||||
uniffi = { workspace = true }
|
||||
url = "2.1"
|
||||
url = "2.4"
|
||||
|
||||
[build-dependencies]
|
||||
uniffi = { workspace = true, features = ["build"] }
|
||||
|
|
|
@ -100,8 +100,7 @@ qcms = { path = "../../../../gfx/qcms", features = ["c_bindings", "neon"], defau
|
|||
wpf-gpu-raster = { git = "https://github.com/FirefoxGraphics/wpf-gpu-raster", rev = "99979da091fd58fba8477e7fcdf5ec0727102916" }
|
||||
aa-stroke = { git = "https://github.com/FirefoxGraphics/aa-stroke", rev = "ed4206ea11703580cd1d4fc63371a527b29d8252" }
|
||||
|
||||
# Force url to stay at 2.1.0. See bug 1734538.
|
||||
url = "=2.1.0"
|
||||
url = "2.4.1"
|
||||
|
||||
# Since we're building with at least rustc 1.63, enable rust 1.57 features (use of try_reserve methods).
|
||||
fallible_collections = { version = "0.4", features = ["rust_1_57"] }
|
||||
|
|
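A closing note on the hunk above: the exact pin url = "=2.1.0" (added for bug 1734538) is replaced with an ordinary requirement, and a bare version in Cargo.toml is interpreted with caret semantics. A rough sketch of the difference using the semver crate (the crate and the specific versions here are only for illustration):

use semver::{Version, VersionReq};

fn main() {
    // "=2.1.0" matches exactly one version ...
    let pinned = VersionReq::parse("=2.1.0").unwrap();
    assert!(pinned.matches(&Version::parse("2.1.0").unwrap()));
    assert!(!pinned.matches(&Version::parse("2.4.1").unwrap()));

    // ... while a bare "2.4.1" behaves like ^2.4.1: >=2.4.1, <3.0.0.
    let caret = VersionReq::parse("2.4.1").unwrap();
    assert!(caret.matches(&Version::parse("2.4.1").unwrap()));
    assert!(caret.matches(&Version::parse("2.9.0").unwrap()));
    assert!(!caret.matches(&Version::parse("3.0.0").unwrap()));
}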