Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1610282 - Update glean-preview to include reset-data bugfix. r=chutten
The update includes:
* Upgraded glean-core dependency
  * See full Glean changelog: https://github.com/mozilla/glean/blob/v24.0.0/CHANGELOG.md
* Reset core client metrics when re-enabling upload (https://github.com/mozilla/glean/pull/620)

Updates the glean-preview dependency in toolkit/components/telemetry/fog/Cargo.toml.
The rest is updated with:
  cargo update -p gkrust-shared
  mach vendor rust

Differential Revision: https://phabricator.services.mozilla.com/D60406

--HG--
extra : moz-landing-system : lando
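For reference, a sketch of what each step in the message does (the comments describe standard mozilla-central practice and are assumptions, not part of the commit):

  cargo update -p gkrust-shared   # refresh Cargo.lock entries for gkrust-shared and its dependencies
  mach vendor rust                # re-vendor the updated crates into third_party/rust to match the lockfile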
This commit is contained in:
Parent
083ae19512
Commit
12faa0feec
File diff not shown because it is too large
Load Diff
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"5e068213602c259aafffdff7fc360c5e2711ae57e9f3bf683e7350815d715daf","LICENSE.md":"90d7e062634054e6866d3c81e6a2b3058a840e6af733e98e80bdfe1a7dec6912","build.rs":"b86fd53553e9012ea09ce6ec755418b9cfcaa57c1214cd11c26a44c99b87a424","readme.md":"8d9ee7f575a20798e09471169ff3b5e3dea6ab74c50d2128cfc77f4074e97149","src/config.rs":"e8a46d48be6615f6d35ca3156a37396752a0dc0de37f3ed4549526a94040758b","src/de/mod.rs":"029507a150cc9e008ea5ab4dfcc471bd36c46547444fb565fe63dba00f5ba8e4","src/de/read.rs":"9fac26aa120f5e346be69d46aee801d7ef03bf64d334bb951c0b646cba646cc5","src/error.rs":"ce6617bf8523392e6fc8b853b7768899a229b6b78dabc2918c0e2dd3f846aa01","src/internal.rs":"55a69c335cf15038eb76f7ba71b0828b20ee1d16adbc5e10e2087efbb74c55ea","src/lib.rs":"ff07f9a05b5b73e5268cb01e581d756647a85815b1a4d0a09302ab1185eafc42","src/ser/mod.rs":"40704ee175aeefadf948959cdb9d9d247a817d9d56ae66f8d78383bfe1ffcb44"},"package":"b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92"}
|
||||
{"files":{"Cargo.toml":"16c6c5374dd14773571dfab3254557ca0e3f7810e1fb27df6e27c2112e16c605","LICENSE.md":"90d7e062634054e6866d3c81e6a2b3058a840e6af733e98e80bdfe1a7dec6912","readme.md":"8d9ee7f575a20798e09471169ff3b5e3dea6ab74c50d2128cfc77f4074e97149","src/config.rs":"e3e6e264cdc736c442b9299d7ad39475457f0c69d2ea8fa0de14f8120c5f3023","src/de/mod.rs":"c431445d27366eaa05553fe1eb5dee320e87b7145b26fe50a56568fc83ccfe95","src/de/read.rs":"e188e291aef8c4ce41552390a28caacb26188c796e25c912d9730ad411a4abeb","src/error.rs":"ce6617bf8523392e6fc8b853b7768899a229b6b78dabc2918c0e2dd3f846aa01","src/internal.rs":"55a69c335cf15038eb76f7ba71b0828b20ee1d16adbc5e10e2087efbb74c55ea","src/lib.rs":"41258f970098e3b0421daf9fbaff34efa716039632f5d1b6409e22fe473c5775","src/ser/mod.rs":"323ca31c66188ba952faf6de111c91fe551a27ebc522c10a3cfe2e5348a74390"},"package":"5753e2a71534719bf3f4e57006c3a4f0d2c672a4b676eec84161f763eca87dbf"}
|
|
@ -12,9 +12,8 @@
|
|||
|
||||
[package]
|
||||
name = "bincode"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
authors = ["Ty Overby <ty@pre-alpha.com>", "Francesco Mazzoli <f@mazzo.li>", "David Tolnay <dtolnay@gmail.com>", "Daniel Griffen"]
|
||||
build = "build.rs"
|
||||
exclude = ["logo.png", "tests/*", "examples/*", ".gitignore", ".travis.yml"]
|
||||
publish = true
|
||||
description = "A binary serialization / deserialization strategy that uses Serde for transforming structs into bytes and vice versa!"
|
||||
|
@ -34,8 +33,6 @@ version = "0.11"
|
|||
|
||||
[dev-dependencies.serde_derive]
|
||||
version = "1.0.27"
|
||||
[build-dependencies.autocfg]
|
||||
version = "0.1.2"
|
||||
|
||||
[features]
|
||||
i128 = []
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
extern crate autocfg;
|
||||
|
||||
fn main() {
|
||||
autocfg::rerun_path(file!());
|
||||
|
||||
let ac = autocfg::new();
|
||||
ac.emit_has_type("i128");
|
||||
}
|
|
@ -95,6 +95,7 @@ enum EndianOption {
|
|||
///
|
||||
/// When a byte limit is set, bincode will return `Err` on any deserialization that goes over the limit, or any
|
||||
/// serialization that goes over the limit.
|
||||
#[derive(Clone)]
|
||||
pub struct Config {
|
||||
limit: LimitOption,
|
||||
endian: EndianOption,
|
||||
|
|
|
@ -108,30 +108,9 @@ where
|
|||
impl_nums!(f32, deserialize_f32, visit_f32, read_f32);
|
||||
impl_nums!(f64, deserialize_f64, visit_f64, read_f64);
|
||||
|
||||
#[cfg(has_i128)]
|
||||
impl_nums!(u128, deserialize_u128, visit_u128, read_u128);
|
||||
|
||||
#[cfg(has_i128)]
|
||||
impl_nums!(i128, deserialize_i128, visit_i128, read_i128);
|
||||
|
||||
serde_if_integer128! {
|
||||
#[cfg(not(has_i128))]
|
||||
fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: serde::de::Visitor<'de>
|
||||
{
|
||||
let _ = visitor;
|
||||
Err(DeError::custom("u128 is not supported. Use Rustc ≥ 1.26."))
|
||||
}
|
||||
|
||||
#[cfg(not(has_i128))]
|
||||
fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: serde::de::Visitor<'de>
|
||||
{
|
||||
let _ = visitor;
|
||||
Err(DeError::custom("i128 is not supported. Use Rustc ≥ 1.26."))
|
||||
}
|
||||
impl_nums!(u128, deserialize_u128, visit_u128, read_u128);
|
||||
impl_nums!(i128, deserialize_i128, visit_i128, read_i128);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use error::Result;
|
||||
use serde;
|
||||
use std::io;
|
||||
use std::{io, slice};
|
||||
|
||||
/// An optional Read trait for advanced Bincode usage.
|
||||
///
|
||||
|
@ -136,16 +136,32 @@ where
|
|||
R: io::Read,
|
||||
{
|
||||
fn fill_buffer(&mut self, length: usize) -> Result<()> {
|
||||
// We first reserve the space needed in our buffer.
|
||||
let current_length = self.temp_buffer.len();
|
||||
if length > current_length {
|
||||
self.temp_buffer.reserve_exact(length - current_length);
|
||||
}
|
||||
|
||||
// Then create a slice with the length as our desired length. This is
|
||||
// safe as long as we only write (no reads) to this buffer, because
|
||||
// `reserve_exact` above has allocated this space.
|
||||
let buf = unsafe {
|
||||
slice::from_raw_parts_mut(self.temp_buffer.as_mut_ptr(), length)
|
||||
};
|
||||
|
||||
// This method is assumed to properly handle slices which include
|
||||
// uninitialized bytes (as ours does). See discussion at the link below.
|
||||
// https://github.com/servo/bincode/issues/260
|
||||
self.reader.read_exact(buf)?;
|
||||
|
||||
// Only after `read_exact` successfully returns do we set the buffer
|
||||
// length. By doing this after the call to `read_exact`, we can avoid
|
||||
// exposing uninitialized memory in the case of `read_exact` returning
|
||||
// an error.
|
||||
unsafe {
|
||||
self.temp_buffer.set_len(length);
|
||||
}
|
||||
|
||||
self.reader.read_exact(&mut self.temp_buffer)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,9 +21,9 @@
|
|||
//! ### 128bit numbers
|
||||
//!
|
||||
//! Support for `i128` and `u128` is automatically enabled on Rust toolchains
|
||||
//! greater than or equal to `1.26.0`.
|
||||
//! greater than or equal to `1.26.0` and disabled for targets which do not support it
|
||||
|
||||
#![doc(html_root_url = "https://docs.rs/bincode/1.2.0")]
|
||||
#![doc(html_root_url = "https://docs.rs/bincode/1.2.1")]
|
||||
#![crate_name = "bincode"]
|
||||
#![crate_type = "rlib"]
|
||||
#![crate_type = "dylib"]
|
||||
|
|
|
@ -88,31 +88,13 @@ impl<'a, W: Write, O: Options> serde::Serializer for &'a mut Serializer<W, O> {
|
|||
self.writer.write_i64::<O::Endian>(v).map_err(Into::into)
|
||||
}
|
||||
|
||||
#[cfg(has_i128)]
|
||||
fn serialize_u128(self, v: u128) -> Result<()> {
|
||||
self.writer.write_u128::<O::Endian>(v).map_err(Into::into)
|
||||
}
|
||||
|
||||
#[cfg(has_i128)]
|
||||
fn serialize_i128(self, v: i128) -> Result<()> {
|
||||
self.writer.write_i128::<O::Endian>(v).map_err(Into::into)
|
||||
}
|
||||
|
||||
serde_if_integer128! {
|
||||
#[cfg(not(has_i128))]
|
||||
fn serialize_u128(self, v: u128) -> Result<()> {
|
||||
use serde::ser::Error;
|
||||
|
||||
let _ = v;
|
||||
Err(Error::custom("u128 is not supported. Use Rustc ≥ 1.26."))
|
||||
self.writer.write_u128::<O::Endian>(v).map_err(Into::into)
|
||||
}
|
||||
|
||||
#[cfg(not(has_i128))]
|
||||
fn serialize_i128(self, v: i128) -> Result<()> {
|
||||
use serde::ser::Error;
|
||||
|
||||
let _ = v;
|
||||
Err(Error::custom("i128 is not supported. Use Rustc ≥ 1.26."))
|
||||
self.writer.write_i128::<O::Endian>(v).map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"AUTHORS.txt":"80aa54d9642f63fc62f20f60e0550f3e596de6ea69883769313c7f07a4be8f4c","CHANGELOG.md":"b9d2b2edfb98954c22e3a34c044bbd2f542cae703d06e5cf15245a1e26b32f76","Cargo.toml":"95d58b3f9a862b6bfd497e8aa87cc14ba6a43e7f6d1818094073e611db14ce43","LICENSE.txt":"46610329ff0b38effb9cb05979ff1ef761e465fed96b2eaca39e439d00129fd7","Makefile":"d76b0b1a44e90b31f2a6f97f662d65df585b1dc88253c30c01ea38d9a097a83e","README.md":"8df7579a4ce5ed034b85b91f6f3106573443138dcc568fd76063016ad2d5cc38","appveyor.yml":"b10751e92a0299968ac5cfd65e918d99e680b6ac679362655b92a393cd22c212","ci/fix-readme.sh":"750d262640a6fdc846623a569f37954bfe7604f9bcbc8f7475db38192e1da0fb","ci/travis.sh":"48eb316d163a9c5b37e4b1d4773e2f9934359a3a1dbddc3b6ae6a58ef15856b1","src/date.rs":"c8d61716eaecf8d0e1a887a0ac9bc06d2e5bf0b47eccc61e5683bdeb0f886ff8","src/datetime.rs":"34e71d822cfa70bb6d1041e3d865fcf629ffb2e29021713bd6aee8a3a6d1410f","src/div.rs":"02e6ce9c4fcafcc7931574108dd7ec0cd28b137edb52eaea654a09ab05fbaf90","src/format/mod.rs":"e36b2bee352df7f0addec7365edfd73607ebaa903d3ddb9249f5fe3c11d9da7a","src/format/parse.rs":"8d5b39483c48771932fd75a9d9892967bd7ef6f0c88d55be6a2d35d35ba21f52","src/format/parsed.rs":"a65cbc0ba13190028ca7c4de4a830b8a64acaf375285cae3a1da1bfd6e5d32f8","src/format/scan.rs":"9f8e4ce8001caf9ec76b3eddf7aa9cc5a68606165e3bb53977350c0a03536b79","src/format/strftime.rs":"532f88654cc1531e6ebdea89039bcf2c364e97478c83824f97f1c38277f3c827","src/lib.rs":"1dae4eb3a73db8dc8fd4f5d3e431fc773104a35c9efaa7a301d73f7b898fc464","src/naive/date.rs":"2fbd7069fb576416f2111298cdd59c729e70736abe53d6e69313a4e45f8a6e3d","src/naive/datetime.rs":"5ae4ed07dc199f7f4be27ef18130de385b56dceb01cefafe5e0f0eb9ed39ce7b","src/naive/internals.rs":"db79eda586b7daad5a2645d21bda80ae92f9bee9870d93d2209a7d228e4286c7","src/naive/isoweek.rs":"75101e996e0eccc6f9b2147095d82050e6dac94a741db60f654f4267bbe96fed","src/naive/time.rs":"cfa4936b341246eb0692e0a071d93707f3545825c74aee67749442ecd2aba655","src/offset/fixed.rs":"e0e41c7081e908a8ada1c1bb67fd003f8a36510c542c5088756214e276407cb9","src/offset/local.rs":"c63a88b8ab4af289cef15d04189f9656c8dfcff77fe8417bbd1182b75184f4e6","src/offset/mod.rs":"2aeeb0fa4c657e810f78ff239d7c52f07c33a2b7bdfc8b3765f4339dcafa0088","src/offset/utc.rs":"630f9513f88353892c9f554eed35d5ec204da9b3d65e7b3c44998836ba3d2d9b","src/oldtime.rs":"42f09a5679c8326ba8f0fe068b35ed1066801903c44b2abfd93f00ef5ec62dbc","src/round.rs":"f7ef334fe4d3014b8a6421202b4a50d316d74199ac154ff553548e8c2c58aa80"},"package":"45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"}
|
||||
{"files":{"AUTHORS.txt":"80aa54d9642f63fc62f20f60e0550f3e596de6ea69883769313c7f07a4be8f4c","CHANGELOG.md":"47c77da2a9058f98fac8c91eb29938b3887c94943c24d3a0ae6daa75368d8f88","Cargo.toml":"32e663be86a0444953c611928165a364dfb44c6dffd77840506e1ae8687d40c2","LICENSE.txt":"46610329ff0b38effb9cb05979ff1ef761e465fed96b2eaca39e439d00129fd7","README.md":"d23488fcb5eaee5bcc5bbf912aa9cb253dc1d6108ba64dd3eee582f5ea7e9c37","src/date.rs":"74d7a5de252dc9ae6cda2a228ba8db325a81cca30fad577693c3759cff3dac04","src/datetime.rs":"0dcdd8f4fa97a246fbf6007aed7bf35506c3d10ed7f9a564ed7cad6e55587944","src/div.rs":"02e6ce9c4fcafcc7931574108dd7ec0cd28b137edb52eaea654a09ab05fbaf90","src/format/mod.rs":"1999f9ee2c4000b34c10170b0027e7ced0131e40714fe9070b5dd7f169bd5060","src/format/parse.rs":"2581eff06fa8c48d8689e71fc247a05c1e1ad97bcaf5d7ef5591ba4b9977e676","src/format/parsed.rs":"282dce506a6194b1b3aabbad2e97aae9c37b22280753bd85ecbcf23c3bf6be9d","src/format/scan.rs":"6964c4f9bb179bea908a79f916fb5b53492c0852c3ac29fd270f883678d95fa0","src/format/strftime.rs":"2acc27cfd4092dee2e0b79652db24eaf5671d15ecb2e1bab44c697cfe3d9bb7c","src/lib.rs":"59e45ddeed3df7dbecdc80053f7d91d06a81c157f5bf2f5b72ba18d13477511b","src/naive/date.rs":"379837c06e101d223f707dab15967e89214eb0c04dc04534b0de961d90a089da","src/naive/datetime.rs":"483d7e4af7b6b0bd4c8d6980c86ab01260792882821b40d76d610f501051e7ba","src/naive/internals.rs":"d8b1e53bb9f1a8abde2e759b2d172332aa2d7f3697d373c8dfcadde33d02f443","src/naive/isoweek.rs":"0fa12fc77cb44f0747c014c65bfc2820526f508d7e3d1ad4d041af8826f5cbda","src/naive/time.rs":"72c89226d09845aa73d481aca2de484ee3225069f61196e8509df584eabb527d","src/offset/fixed.rs":"4f248ff75733112e96e5e6815cbbca6ae0072928a328899a5110eefb32c8126a","src/offset/local.rs":"9c3135aa30658cc6b731b4437738ebb1c1ac36de5a697ded59300abe5088e895","src/offset/mod.rs":"775575113cec481b97ab33ec4719a571f1c5c83b78e8269429e5a4fbb45cc610","src/offset/utc.rs":"069a58cf994e83b35c806d74ed80120e4254d7d026cca1205fb2850e7805116b","src/oldtime.rs":"a81af067568d0dbdb2ca29adad99d829e8c0b20b7b88b2c3053cf70aecd14416","src/round.rs":"f357a87008cb5d601eae71bc6b94d7d43d0bc39986288a7d324a3d6deefab5e6","tests/wasm.rs":"c25fd76cb495e04f58617e842e10e1b86f18eab358a71608825fe5c62c9ecf11"},"package":"31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01"}
|
|
@ -6,7 +6,76 @@ This documents all notable changes to [Chrono](https://github.com/chronotope/chr
|
|||
Chrono obeys the principle of [Semantic Versioning](http://semver.org/).
|
||||
|
||||
There were/are numerous minor versions before 1.0 due to the language changes.
|
||||
Versions with only mechnical changes will be omitted from the following list.
|
||||
Versions with only mechanical changes will be omitted from the following list.
|
||||
|
||||
## 0.4.10
|
||||
|
||||
### Improvements
|
||||
|
||||
* `DateTime::parse_from_str` is more than 2x faster in some cases. (@michalsrb
|
||||
#358)
|
||||
* Significant improvements to no-std and alloc support (This should also make
|
||||
many format/serialization operations induce zero unnecessary allocations)
|
||||
(@CryZe #341)
|
||||
|
||||
### Features
|
||||
|
||||
* Functions that were accepting `Iterator` of `Item`s (for example
|
||||
`format_with_items`) now accept `Iterator` of `Borrow<Item>`, so one can
|
||||
use values or references. (@michalsrb #358)
|
||||
* Add built-in support for structs with nested `Option<Datetime>` etc fields
|
||||
(@manifest #302)
|
||||
|
||||
### Internal/doc improvements
|
||||
|
||||
* Use markdown footnotes on the `strftime` docs page (@qudlibetor #359)
|
||||
* Migrate from `try!` -> `?` (question mark) because it is now emitting
|
||||
deprecation warnings and has been stable since rustc 1.13.0
|
||||
* Deny dead code
|
||||
|
||||
## 0.4.9
|
||||
|
||||
### Fixes
|
||||
|
||||
* Make Datetime arithmatic adjust their offsets after discovering their new
|
||||
timestamps (@quodlibetor #337)
|
||||
* Put wasm-bindgen related code and dependencies behind a `wasmbind` feature
|
||||
gate. (@quodlibetor #335)
|
||||
|
||||
## 0.4.8
|
||||
|
||||
### Fixes
|
||||
|
||||
* Add '0' to single-digit days in rfc2822 date format (@wyhaya #323)
|
||||
* Correctly pad DelayedFormat (@SamokhinIlya #320)
|
||||
|
||||
### Features
|
||||
|
||||
* Support `wasm-unknown-unknown` via wasm-bindgen (in addition to
|
||||
emscripten/`wasm-unknown-emscripten`). (finished by @evq in #331, initial
|
||||
work by @jjpe #287)
|
||||
|
||||
## 0.4.7
|
||||
|
||||
### Fixes
|
||||
|
||||
* Disable libc default features so that CI continues to work on rust 1.13
|
||||
* Fix panic on negative inputs to timestamp_millis (@cmars #292)
|
||||
* Make `LocalResult` `Copy/Eq/Hash`
|
||||
|
||||
### Features
|
||||
|
||||
* Add `std::convert::From` conversions between the different timezone formats
|
||||
(@mqudsi #271)
|
||||
* Add `timestamp_nanos` methods (@jean-airoldie #308)
|
||||
* Documentation improvements
|
||||
|
||||
## 0.4.6
|
||||
|
||||
### Maintenance
|
||||
|
||||
* Doc improvements -- improve README CI verification, external links
|
||||
* winapi upgrade to 0.3
|
||||
|
||||
## 0.4.5
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
|
@ -12,8 +12,9 @@
|
|||
|
||||
[package]
|
||||
name = "chrono"
|
||||
version = "0.4.6"
|
||||
version = "0.4.10"
|
||||
authors = ["Kang Seonghoon <public+rust@mearie.org>", "Brandon W Maister <quodlibetor@gmail.com>"]
|
||||
exclude = ["/ci/*", "/.travis.yml", "/appveyor.yml", "/Makefile"]
|
||||
description = "Date and time library for Rust"
|
||||
homepage = "https://github.com/chronotope/chrono"
|
||||
documentation = "https://docs.rs/chrono/"
|
||||
|
@ -43,8 +44,9 @@ version = "0.3.20"
|
|||
optional = true
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1"
|
||||
version = "1.0.99"
|
||||
optional = true
|
||||
default-features = false
|
||||
|
||||
[dependencies.time]
|
||||
version = "0.1.39"
|
||||
|
@ -52,19 +54,37 @@ optional = true
|
|||
[dev-dependencies.bincode]
|
||||
version = "0.8.0"
|
||||
|
||||
[dev-dependencies.doc-comment]
|
||||
version = "0.3"
|
||||
|
||||
[dev-dependencies.num-iter]
|
||||
version = "0.1.35"
|
||||
default-features = false
|
||||
|
||||
[dev-dependencies.serde_derive]
|
||||
version = "1"
|
||||
default-features = false
|
||||
|
||||
[dev-dependencies.serde_json]
|
||||
version = "1"
|
||||
default-features = false
|
||||
|
||||
[features]
|
||||
clock = ["time"]
|
||||
default = ["clock"]
|
||||
alloc = []
|
||||
bench = ["std"]
|
||||
clock = ["time", "std"]
|
||||
default = ["clock", "std"]
|
||||
std = []
|
||||
wasmbind = ["wasm-bindgen", "js-sys"]
|
||||
[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.js-sys]
|
||||
version = "0.3"
|
||||
optional = true
|
||||
|
||||
[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dependencies.wasm-bindgen]
|
||||
version = "0.2"
|
||||
optional = true
|
||||
[target."cfg(all(target_arch = \"wasm32\", not(target_os = \"emscripten\")))".dev-dependencies.wasm-bindgen-test]
|
||||
version = "0.2"
|
||||
[badges.appveyor]
|
||||
repository = "chronotope/chrono"
|
||||
|
||||
|
|
|
@ -1,30 +0,0 @@
|
|||
# this Makefile is mostly for the packaging convenience.
|
||||
# casual users should use `cargo` to retrieve the appropriate version of Chrono.
|
||||
|
||||
.PHONY: all
|
||||
all:
|
||||
@echo 'Try `cargo build` instead.'
|
||||
|
||||
.PHONY: authors
|
||||
authors:
|
||||
echo 'Chrono is mainly written by Kang Seonghoon <public+rust@mearie.org>,' > AUTHORS.txt
|
||||
echo 'and also the following people (in ascending order):' >> AUTHORS.txt
|
||||
echo >> AUTHORS.txt
|
||||
git log --format='%aN <%aE>' | grep -v 'Kang Seonghoon' | sort -u >> AUTHORS.txt
|
||||
|
||||
.PHONY: readme README.md
|
||||
readme: README.md
|
||||
|
||||
README.md: src/lib.rs
|
||||
( ./ci/fix-readme.sh $< ) > $@
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
TZ=UTC0 cargo test --features 'serde rustc-serialize bincode' --lib
|
||||
TZ=ACST-9:30 cargo test --features 'serde rustc-serialize bincode' --lib
|
||||
TZ=EST4 cargo test --features 'serde rustc-serialize bincode'
|
||||
|
||||
.PHONY: doc
|
||||
doc: authors readme
|
||||
cargo doc --features 'serde rustc-serialize bincode'
|
||||
|
|
@ -82,7 +82,7 @@ nanoseconds and does not represent "nominal" components such as days or
|
|||
months.
|
||||
|
||||
Chrono does not yet natively support
|
||||
the standard [`Duration`](https://docs.rs/time/0.1.40/time/struct.Duration.html) type,
|
||||
the standard [`Duration`](https://doc.rust-lang.org/std/time/struct.Duration.html) type,
|
||||
but it will be supported in the future.
|
||||
Meanwhile you can convert between two types with
|
||||
[`Duration::from_std`](https://docs.rs/time/0.1.40/time/struct.Duration.html#method.from_std)
|
||||
|
@ -93,7 +93,7 @@ methods.
|
|||
### Date and Time
|
||||
|
||||
Chrono provides a
|
||||
[**`DateTime`**](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html)
|
||||
[**`DateTime`**](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html)
|
||||
type to represent a date and a time in a timezone.
|
||||
|
||||
For more abstract moment-in-time tracking such as internal timekeeping
|
||||
|
@ -104,15 +104,15 @@ which tracks your system clock, or
|
|||
is an opaque but monotonically-increasing representation of a moment in time.
|
||||
|
||||
`DateTime` is timezone-aware and must be constructed from
|
||||
the [**`TimeZone`**](https://docs.rs/chrono/0.4.6/chrono/offset/trait.TimeZone.html) object,
|
||||
the [**`TimeZone`**](https://docs.rs/chrono/0.4/chrono/offset/trait.TimeZone.html) object,
|
||||
which defines how the local date is converted to and back from the UTC date.
|
||||
There are three well-known `TimeZone` implementations:
|
||||
|
||||
* [**`Utc`**](https://docs.rs/chrono/0.4.6/chrono/offset/struct.Utc.html) specifies the UTC time zone. It is most efficient.
|
||||
* [**`Utc`**](https://docs.rs/chrono/0.4/chrono/offset/struct.Utc.html) specifies the UTC time zone. It is most efficient.
|
||||
|
||||
* [**`Local`**](https://docs.rs/chrono/0.4.6/chrono/offset/struct.Local.html) specifies the system local time zone.
|
||||
* [**`Local`**](https://docs.rs/chrono/0.4/chrono/offset/struct.Local.html) specifies the system local time zone.
|
||||
|
||||
* [**`FixedOffset`**](https://docs.rs/chrono/0.4.6/chrono/offset/struct.FixedOffset.html) specifies
|
||||
* [**`FixedOffset`**](https://docs.rs/chrono/0.4/chrono/offset/struct.FixedOffset.html) specifies
|
||||
an arbitrary, fixed time zone such as UTC+09:00 or UTC-10:30.
|
||||
This often results from the parsed textual date and time.
|
||||
Since it stores the most information and does not depend on the system environment,
|
||||
|
@ -120,12 +120,12 @@ There are three well-known `TimeZone` implementations:
|
|||
|
||||
`DateTime`s with different `TimeZone` types are distinct and do not mix,
|
||||
but can be converted to each other using
|
||||
the [`DateTime::with_timezone`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.with_timezone) method.
|
||||
the [`DateTime::with_timezone`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.with_timezone) method.
|
||||
|
||||
You can get the current date and time in the UTC time zone
|
||||
([`Utc::now()`](https://docs.rs/chrono/0.4.6/chrono/offset/struct.Utc.html#method.now))
|
||||
([`Utc::now()`](https://docs.rs/chrono/0.4/chrono/offset/struct.Utc.html#method.now))
|
||||
or in the local time zone
|
||||
([`Local::now()`](https://docs.rs/chrono/0.4.6/chrono/offset/struct.Local.html#method.now)).
|
||||
([`Local::now()`](https://docs.rs/chrono/0.4/chrono/offset/struct.Local.html#method.now)).
|
||||
|
||||
```rust
|
||||
use chrono::prelude::*;
|
||||
|
@ -166,24 +166,26 @@ assert_eq!(dt, fixed_dt);
|
|||
```
|
||||
|
||||
Various properties are available to the date and time, and can be altered individually.
|
||||
Most of them are defined in the traits [`Datelike`](https://docs.rs/chrono/0.4.6/chrono/trait.Datelike.html) and
|
||||
[`Timelike`](https://docs.rs/chrono/0.4.6/chrono/trait.Timelike.html) which you should `use` before.
|
||||
Most of them are defined in the traits [`Datelike`](https://docs.rs/chrono/0.4/chrono/trait.Datelike.html) and
|
||||
[`Timelike`](https://docs.rs/chrono/0.4/chrono/trait.Timelike.html) which you should `use` before.
|
||||
Addition and subtraction is also supported.
|
||||
The following illustrates most supported operations to the date and time:
|
||||
|
||||
```rust
|
||||
extern crate time;
|
||||
|
||||
use chrono::prelude::*;
|
||||
use time::Duration;
|
||||
|
||||
// assume this returned `2014-11-28T21:45:59.324310806+09:00`:
|
||||
let dt = Local::now();
|
||||
let dt = FixedOffset::east(9*3600).ymd(2014, 11, 28).and_hms_nano(21, 45, 59, 324310806);
|
||||
|
||||
// property accessors
|
||||
assert_eq!((dt.year(), dt.month(), dt.day()), (2014, 11, 28));
|
||||
assert_eq!((dt.month0(), dt.day0()), (10, 27)); // for unfortunate souls
|
||||
assert_eq!((dt.hour(), dt.minute(), dt.second()), (21, 45, 59));
|
||||
assert_eq!(dt.weekday(), Weekday::Fri);
|
||||
assert_eq!(dt.weekday().number_from_monday(), 5); // Mon=1, ..., Sat=7
|
||||
assert_eq!(dt.weekday().number_from_monday(), 5); // Mon=1, ..., Sun=7
|
||||
assert_eq!(dt.ordinal(), 332); // the day of year
|
||||
assert_eq!(dt.num_days_from_ce(), 735565); // the number of days from and including Jan 1, 1
|
||||
|
||||
|
@ -210,15 +212,15 @@ assert_eq!(Utc.ymd(1970, 1, 1).and_hms(0, 0, 0) - Duration::seconds(1_000_000_00
|
|||
|
||||
### Formatting and Parsing
|
||||
|
||||
Formatting is done via the [`format`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.format) method,
|
||||
Formatting is done via the [`format`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.format) method,
|
||||
which format is equivalent to the familiar `strftime` format.
|
||||
|
||||
See [`format::strftime`](https://docs.rs/chrono/0.4.6/chrono/format/strftime/index.html#specifiers)
|
||||
See [`format::strftime`](https://docs.rs/chrono/0.4/chrono/format/strftime/index.html#specifiers)
|
||||
documentation for full syntax and list of specifiers.
|
||||
|
||||
The default `to_string` method and `{:?}` specifier also give a reasonable representation.
|
||||
Chrono also provides [`to_rfc2822`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.to_rfc2822) and
|
||||
[`to_rfc3339`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.to_rfc3339) methods
|
||||
Chrono also provides [`to_rfc2822`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.to_rfc2822) and
|
||||
[`to_rfc3339`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.to_rfc3339) methods
|
||||
for well-known formats.
|
||||
|
||||
```rust
|
||||
|
@ -248,23 +250,23 @@ Parsing can be done with three methods:
|
|||
([`std::fmt::Debug`](https://doc.rust-lang.org/std/fmt/trait.Debug.html))
|
||||
format specifier prints, and requires the offset to be present.
|
||||
|
||||
2. [`DateTime::parse_from_str`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.parse_from_str) parses
|
||||
2. [`DateTime::parse_from_str`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.parse_from_str) parses
|
||||
a date and time with offsets and returns `DateTime<FixedOffset>`.
|
||||
This should be used when the offset is a part of input and the caller cannot guess that.
|
||||
It *cannot* be used when the offset can be missing.
|
||||
[`DateTime::parse_from_rfc2822`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.parse_from_rfc2822)
|
||||
[`DateTime::parse_from_rfc2822`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.parse_from_rfc2822)
|
||||
and
|
||||
[`DateTime::parse_from_rfc3339`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.parse_from_rfc3339)
|
||||
[`DateTime::parse_from_rfc3339`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.parse_from_rfc3339)
|
||||
are similar but for well-known formats.
|
||||
|
||||
3. [`Offset::datetime_from_str`](https://docs.rs/chrono/0.4.6/chrono/offset/trait.TimeZone.html#method.datetime_from_str) is
|
||||
3. [`Offset::datetime_from_str`](https://docs.rs/chrono/0.4/chrono/offset/trait.TimeZone.html#method.datetime_from_str) is
|
||||
similar but returns `DateTime` of given offset.
|
||||
When the explicit offset is missing from the input, it simply uses given offset.
|
||||
It issues an error when the input contains an explicit offset different
|
||||
from the current offset.
|
||||
|
||||
More detailed control over the parsing process is available via
|
||||
[`format`](https://docs.rs/chrono/0.4.6/chrono/format/index.html) module.
|
||||
[`format`](https://docs.rs/chrono/0.4/chrono/format/index.html) module.
|
||||
|
||||
```rust
|
||||
use chrono::prelude::*;
|
||||
|
@ -296,23 +298,23 @@ assert!(Utc.datetime_from_str("Fri Nov 28 12:00:09", "%a %b %e %T").is_err());
|
|||
assert!(Utc.datetime_from_str("Sat Nov 28 12:00:09 2014", "%a %b %e %T %Y").is_err());
|
||||
```
|
||||
|
||||
Again : See [`format::strftime`](https://docs.rs/chrono/0.4.6/chrono/format/strftime/index.html#specifiers)
|
||||
Again : See [`format::strftime`](https://docs.rs/chrono/0.4/chrono/format/strftime/index.html#specifiers)
|
||||
documentation for full syntax and list of specifiers.
|
||||
|
||||
### Conversion from and to EPOCH timestamps
|
||||
|
||||
Use [`Utc.timestamp(seconds, nanoseconds)`](https://docs.rs/chrono/0.4.6/chrono/offset/trait.TimeZone.html#method.timestamp)
|
||||
to construct a [`DateTime<Utc>`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html) from a UNIX timestamp
|
||||
Use [`Utc.timestamp(seconds, nanoseconds)`](https://docs.rs/chrono/0.4/chrono/offset/trait.TimeZone.html#method.timestamp)
|
||||
to construct a [`DateTime<Utc>`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html) from a UNIX timestamp
|
||||
(seconds, nanoseconds that passed since January 1st 1970).
|
||||
|
||||
Use [`DateTime.timestamp`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.timestamp) to get the timestamp (in seconds)
|
||||
from a [`DateTime`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html). Additionally, you can use
|
||||
[`DateTime.timestamp_subsec_nanos`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.timestamp_subsec_nanos)
|
||||
Use [`DateTime.timestamp`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.timestamp) to get the timestamp (in seconds)
|
||||
from a [`DateTime`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html). Additionally, you can use
|
||||
[`DateTime.timestamp_subsec_nanos`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.timestamp_subsec_nanos)
|
||||
to get the number of additional number of nanoseconds.
|
||||
|
||||
```rust
|
||||
// We need the trait in scope to use Utc::timestamp().
|
||||
use chrono::TimeZone;
|
||||
use chrono::{DateTime, TimeZone, Utc};
|
||||
|
||||
// Construct a datetime from epoch:
|
||||
let dt = Utc.timestamp(1_500_000_000, 0);
|
||||
|
@ -325,7 +327,7 @@ assert_eq!(dt.timestamp(), 1_500_000_000);
|
|||
|
||||
### Individual date
|
||||
|
||||
Chrono also provides an individual date type ([**`Date`**](https://docs.rs/chrono/0.4.6/chrono/struct.Date.html)).
|
||||
Chrono also provides an individual date type ([**`Date`**](https://docs.rs/chrono/0.4/chrono/struct.Date.html)).
|
||||
It also has time zones attached, and have to be constructed via time zones.
|
||||
Most operations available to `DateTime` are also available to `Date` whenever appropriate.
|
||||
|
||||
|
@ -344,26 +346,26 @@ assert_eq!(Utc.ymd(2014, 11, 28).and_hms_milli(7, 8, 9, 10).format("%H%M%S").to_
|
|||
|
||||
There is no timezone-aware `Time` due to the lack of usefulness and also the complexity.
|
||||
|
||||
`DateTime` has [`date`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.date) method
|
||||
`DateTime` has [`date`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.date) method
|
||||
which returns a `Date` which represents its date component.
|
||||
There is also a [`time`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.time) method,
|
||||
There is also a [`time`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.time) method,
|
||||
which simply returns a naive local time described below.
|
||||
|
||||
### Naive date and time
|
||||
|
||||
Chrono provides naive counterparts to `Date`, (non-existent) `Time` and `DateTime`
|
||||
as [**`NaiveDate`**](https://docs.rs/chrono/0.4.6/chrono/naive/struct.NaiveDate.html),
|
||||
[**`NaiveTime`**](https://docs.rs/chrono/0.4.6/chrono/naive/struct.NaiveTime.html) and
|
||||
[**`NaiveDateTime`**](https://docs.rs/chrono/0.4.6/chrono/naive/struct.NaiveDateTime.html) respectively.
|
||||
as [**`NaiveDate`**](https://docs.rs/chrono/0.4/chrono/naive/struct.NaiveDate.html),
|
||||
[**`NaiveTime`**](https://docs.rs/chrono/0.4/chrono/naive/struct.NaiveTime.html) and
|
||||
[**`NaiveDateTime`**](https://docs.rs/chrono/0.4/chrono/naive/struct.NaiveDateTime.html) respectively.
|
||||
|
||||
They have almost equivalent interfaces as their timezone-aware twins,
|
||||
but are not associated to time zones obviously and can be quite low-level.
|
||||
They are mostly useful for building blocks for higher-level types.
|
||||
|
||||
Timezone-aware `DateTime` and `Date` types have two methods returning naive versions:
|
||||
[`naive_local`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.naive_local) returns
|
||||
[`naive_local`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.naive_local) returns
|
||||
a view to the naive local time,
|
||||
and [`naive_utc`](https://docs.rs/chrono/0.4.6/chrono/struct.DateTime.html#method.naive_utc) returns
|
||||
and [`naive_utc`](https://docs.rs/chrono/0.4/chrono/struct.DateTime.html#method.naive_utc) returns
|
||||
a view to the naive UTC time.
|
||||
|
||||
## Limitations
|
||||
|
@ -375,7 +377,7 @@ Date types are limited in about +/- 262,000 years from the common epoch.
|
|||
Time types are limited in the nanosecond accuracy.
|
||||
|
||||
[Leap seconds are supported in the representation but
|
||||
Chrono doesn't try to make use of them](https://docs.rs/chrono/0.4.6/chrono/naive/struct.NaiveTime.html#leap-second-handling).
|
||||
Chrono doesn't try to make use of them](https://docs.rs/chrono/0.4/chrono/naive/struct.NaiveTime.html#leap-second-handling).
|
||||
(The main reason is that leap seconds are not really predictable.)
|
||||
Almost *every* operation over the possible leap seconds will ignore them.
|
||||
Consider using `NaiveDateTime` with the implicit TAI (International Atomic Time) scale
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
environment:
|
||||
matrix:
|
||||
- TARGET: 1.13.0-x86_64-pc-windows-gnu
|
||||
- TARGET: nightly-x86_64-pc-windows-msvc
|
||||
- TARGET: nightly-i686-pc-windows-msvc
|
||||
- TARGET: nightly-x86_64-pc-windows-gnu
|
||||
- TARGET: nightly-i686-pc-windows-gnu
|
||||
matrix:
|
||||
allow_failures:
|
||||
- channel: nightly
|
||||
install:
|
||||
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:TARGET}.exe" -FileName "rust-install.exe"
|
||||
- ps: .\rust-install.exe /VERYSILENT /NORESTART /DIR="C:\rust" | Out-Null
|
||||
- ps: $env:PATH="$env:PATH;C:\rust\bin"
|
||||
- rustc -vV
|
||||
- cargo -vV
|
||||
|
||||
build: false
|
||||
|
||||
test_script:
|
||||
- sh -c 'PATH=`rustc --print sysroot`/bin:$PATH ./ci/travis.sh'
|
|
@ -1,35 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
VERSION="$( cargo read-manifest | python -c 'import json, sys; print(json.load(sys.stdin)["version"])')"
|
||||
LIB="$1"
|
||||
|
||||
# Make the Chrono in the header a link to the docs
|
||||
awk '/^\/\/! # Chrono: / { print "[Chrono][docsrs]:", substr($0, index($0, $4))}' "$LIB"
|
||||
awk '/^\/\/! # Chrono: / { print "[Chrono][docsrs]:", substr($0, index($0, $4))}' "$LIB" | sed 's/./=/g'
|
||||
# Add all the badges
|
||||
echo '
|
||||
[![Chrono on Travis CI][travis-image]][travis]
|
||||
[![Chrono on Appveyor][appveyor-image]][appveyor]
|
||||
[![Chrono on crates.io][cratesio-image]][cratesio]
|
||||
[![Chrono on docs.rs][docsrs-image]][docsrs]
|
||||
[![Join the chat at https://gitter.im/chrono-rs/chrono][gitter-image]][gitter]
|
||||
|
||||
[travis-image]: https://travis-ci.org/chronotope/chrono.svg?branch=master
|
||||
[travis]: https://travis-ci.org/chronotope/chrono
|
||||
[appveyor-image]: https://ci.appveyor.com/api/projects/status/2ia91ofww4w31m2w/branch/master?svg=true
|
||||
[appveyor]: https://ci.appveyor.com/project/chronotope/chrono
|
||||
[cratesio-image]: https://img.shields.io/crates/v/chrono.svg
|
||||
[cratesio]: https://crates.io/crates/chrono
|
||||
[docsrs-image]: https://docs.rs/chrono/badge.svg
|
||||
[docsrs]: https://docs.rs/chrono
|
||||
[gitter-image]: https://badges.gitter.im/chrono-rs/chrono.svg
|
||||
[gitter]: https://gitter.im/chrono-rs/chrono'
|
||||
|
||||
# print the section between the header and the usage
|
||||
awk '/^\/\/! # Chrono:/,/^\/\/! ## /' "$LIB" | cut -b 5- | grep -v '^#' | \
|
||||
sed 's/](\.\//](https:\/\/docs.rs\/chrono\/'$VERSION'\/chrono\//g'
|
||||
echo
|
||||
# Replace relative doc links with links to this exact version of docs on
|
||||
# docs.rs
|
||||
awk '/^\/\/! ## /,!/^\/\/!/' "$LIB" | cut -b 5- | grep -v '^# ' | \
|
||||
sed 's/](\.\//](https:\/\/docs.rs\/chrono\/'$VERSION'\/chrono\//g' \
|
|
@ -1,100 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
# This is the script that's executed by travis, you can run it yourself to run
|
||||
# the exact same suite
|
||||
|
||||
set -e
|
||||
|
||||
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
channel() {
|
||||
if [ -n "${TRAVIS}" ]; then
|
||||
if [ "${TRAVIS_RUST_VERSION}" = "${CHANNEL}" ]; then
|
||||
pwd
|
||||
(set -x; cargo "$@")
|
||||
fi
|
||||
elif [ -n "${APPVEYOR}" ]; then
|
||||
if [ "${APPVEYOR_RUST_CHANNEL}" = "${CHANNEL}" ]; then
|
||||
pwd
|
||||
(set -x; cargo "$@")
|
||||
fi
|
||||
else
|
||||
pwd
|
||||
(set -x; cargo "+${CHANNEL}" "$@")
|
||||
fi
|
||||
}
|
||||
|
||||
build_and_test() {
|
||||
# interleave building and testing in hope that it saves time
|
||||
# also vary the local time zone to (hopefully) catch tz-dependent bugs
|
||||
# also avoid doc-testing multiple times---it takes a lot and rarely helps
|
||||
cargo clean
|
||||
channel build -v
|
||||
TZ=ACST-9:30 channel test -v --lib
|
||||
channel build -v --features rustc-serialize
|
||||
TZ=EST4 channel test -v --features rustc-serialize --lib
|
||||
channel build -v --features serde
|
||||
TZ=UTC0 channel test -v --features serde --lib
|
||||
channel build -v --features serde,rustc-serialize
|
||||
TZ=Asia/Katmandu channel test -v --features serde,rustc-serialize
|
||||
|
||||
# without default "clock" feature
|
||||
channel build -v --no-default-features
|
||||
TZ=ACST-9:30 channel test -v --no-default-features --lib
|
||||
channel build -v --no-default-features --features rustc-serialize
|
||||
TZ=EST4 channel test -v --no-default-features --features rustc-serialize --lib
|
||||
channel build -v --no-default-features --features serde
|
||||
TZ=UTC0 channel test -v --no-default-features --features serde --lib
|
||||
channel build -v --no-default-features --features serde,rustc-serialize
|
||||
TZ=Asia/Katmandu channel test -v --no-default-features --features serde,rustc-serialize --lib
|
||||
|
||||
if [[ "$CHANNEL" == stable ]]; then
|
||||
if [[ -n "$TRAVIS" ]] ; then
|
||||
check_readme
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
build_only() {
|
||||
# Rust 1.13 doesn't support custom derive, so, to avoid doctests which
|
||||
# validate that, we just build there.
|
||||
cargo clean
|
||||
channel build -v
|
||||
channel build -v --features rustc-serialize
|
||||
channel build -v --features 'serde bincode'
|
||||
channel build -v --no-default-features
|
||||
}
|
||||
|
||||
run_clippy() {
|
||||
# cached installation will not work on a later nightly
|
||||
if [ -n "${TRAVIS}" ] && ! cargo install clippy --debug --force; then
|
||||
echo "COULD NOT COMPILE CLIPPY, IGNORING CLIPPY TESTS"
|
||||
exit
|
||||
fi
|
||||
|
||||
cargo clippy --features 'serde bincode rustc-serialize' -- -Dclippy
|
||||
}
|
||||
|
||||
check_readme() {
|
||||
make readme
|
||||
(set -x; git diff --exit-code -- README.md) ; echo $?
|
||||
}
|
||||
|
||||
rustc --version
|
||||
cargo --version
|
||||
|
||||
CHANNEL=nightly
|
||||
if [ "x${CLIPPY}" = xy ] ; then
|
||||
run_clippy
|
||||
else
|
||||
build_and_test
|
||||
fi
|
||||
|
||||
CHANNEL=beta
|
||||
build_and_test
|
||||
|
||||
CHANNEL=stable
|
||||
build_and_test
|
||||
|
||||
CHANNEL=1.13.0
|
||||
build_only
|
|
@ -3,16 +3,18 @@
|
|||
|
||||
//! ISO 8601 calendar date with time zone.
|
||||
|
||||
use std::{fmt, hash};
|
||||
use std::cmp::Ordering;
|
||||
use std::ops::{Add, Sub};
|
||||
use core::borrow::Borrow;
|
||||
use core::{fmt, hash};
|
||||
use core::cmp::Ordering;
|
||||
use core::ops::{Add, Sub};
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
use {Weekday, Datelike};
|
||||
use offset::{TimeZone, Utc};
|
||||
use naive::{self, NaiveDate, NaiveTime, IsoWeek};
|
||||
use DateTime;
|
||||
use format::{Item, DelayedFormat, StrftimeItems};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use format::{DelayedFormat, Item, StrftimeItems};
|
||||
|
||||
/// ISO 8601 calendar date with time zone.
|
||||
///
|
||||
|
@ -255,15 +257,17 @@ fn map_local<Tz: TimeZone, F>(d: &Date<Tz>, mut f: F) -> Option<Date<Tz>>
|
|||
|
||||
impl<Tz: TimeZone> Date<Tz> where Tz::Offset: fmt::Display {
|
||||
/// Formats the date with the specified formatting items.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=Item<'a>> + Clone {
|
||||
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>> {
|
||||
DelayedFormat::new_with_offset(Some(self.naive_local()), None, &self.offset, items)
|
||||
}
|
||||
|
||||
/// Formats the date with the specified format string.
|
||||
/// See the [`format::strftime` module](./format/strftime/index.html)
|
||||
/// on the supported escape sequences.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
|
||||
self.format_with_items(StrftimeItems::new(fmt))
|
||||
|
|
|
@ -3,12 +3,18 @@
|
|||
|
||||
//! ISO 8601 date and time with time zone.
|
||||
|
||||
use std::{str, fmt, hash};
|
||||
use std::cmp::Ordering;
|
||||
use std::ops::{Add, Sub};
|
||||
use core::{str, fmt, hash};
|
||||
use core::cmp::Ordering;
|
||||
use core::ops::{Add, Sub};
|
||||
#[cfg(any(feature = "std", test))]
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
#[cfg(all(not(feature = "std"), feature = "alloc"))]
|
||||
use alloc::string::{String, ToString};
|
||||
#[cfg(feature = "std")]
|
||||
use std::string::ToString;
|
||||
|
||||
use {Weekday, Timelike, Datelike};
|
||||
#[cfg(feature="clock")]
|
||||
use offset::Local;
|
||||
|
@ -16,7 +22,10 @@ use offset::{TimeZone, Offset, Utc, FixedOffset};
|
|||
use naive::{NaiveTime, NaiveDateTime, IsoWeek};
|
||||
use Date;
|
||||
use format::{Item, Numeric, Pad, Fixed};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, DelayedFormat, StrftimeItems};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, StrftimeItems};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use format::DelayedFormat;
|
||||
use core::borrow::Borrow;
|
||||
|
||||
/// Specific formatting options for seconds. This may be extended in the
|
||||
/// future, so exhaustive matching in external code is not recommended.
|
||||
|
@ -202,7 +211,8 @@ impl<Tz: TimeZone> DateTime<Tz> {
|
|||
#[inline]
|
||||
pub fn checked_add_signed(self, rhs: OldDuration) -> Option<DateTime<Tz>> {
|
||||
let datetime = try_opt!(self.datetime.checked_add_signed(rhs));
|
||||
Some(DateTime { datetime: datetime, offset: self.offset })
|
||||
let tz = self.timezone();
|
||||
Some(tz.from_utc_datetime(&datetime))
|
||||
}
|
||||
|
||||
/// Subtracts given `Duration` from the current date and time.
|
||||
|
@ -211,7 +221,8 @@ impl<Tz: TimeZone> DateTime<Tz> {
|
|||
#[inline]
|
||||
pub fn checked_sub_signed(self, rhs: OldDuration) -> Option<DateTime<Tz>> {
|
||||
let datetime = try_opt!(self.datetime.checked_sub_signed(rhs));
|
||||
Some(DateTime { datetime: datetime, offset: self.offset })
|
||||
let tz = self.timezone();
|
||||
Some(tz.from_utc_datetime(&datetime))
|
||||
}
|
||||
|
||||
/// Subtracts another `DateTime` from the current date and time.
|
||||
|
@ -235,6 +246,75 @@ impl<Tz: TimeZone> DateTime<Tz> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<Utc>` instance into a `DateTime<FixedOffset>` instance.
|
||||
impl From<DateTime<Utc>> for DateTime<FixedOffset> {
|
||||
/// Convert this `DateTime<Utc>` instance into a `DateTime<FixedOffset>` instance.
|
||||
///
|
||||
/// Conversion is done via [`DateTime::with_timezone`]. Note that the converted value returned by
|
||||
/// this will be created with a fixed timezone offset of 0.
|
||||
fn from(src: DateTime<Utc>) -> Self {
|
||||
src.with_timezone(&FixedOffset::east(0))
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<Utc>` instance into a `DateTime<Local>` instance.
|
||||
#[cfg(feature="clock")]
|
||||
impl From<DateTime<Utc>> for DateTime<Local> {
|
||||
/// Convert this `DateTime<Utc>` instance into a `DateTime<Local>` instance.
|
||||
///
|
||||
/// Conversion is performed via [`DateTime::with_timezone`], accounting for the difference in timezones.
|
||||
fn from(src: DateTime<Utc>) -> Self {
|
||||
src.with_timezone(&Local)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<FixedOffset>` instance into a `DateTime<Utc>` instance.
|
||||
impl From<DateTime<FixedOffset>> for DateTime<Utc> {
|
||||
/// Convert this `DateTime<FixedOffset>` instance into a `DateTime<Utc>` instance.
|
||||
///
|
||||
/// Conversion is performed via [`DateTime::with_timezone`], accounting for the timezone
|
||||
/// difference.
|
||||
fn from(src: DateTime<FixedOffset>) -> Self {
|
||||
src.with_timezone(&Utc)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<FixedOffset>` instance into a `DateTime<Local>` instance.
|
||||
#[cfg(feature="clock")]
|
||||
impl From<DateTime<FixedOffset>> for DateTime<Local> {
|
||||
/// Convert this `DateTime<FixedOffset>` instance into a `DateTime<Local>` instance.
|
||||
///
|
||||
/// Conversion is performed via [`DateTime::with_timezone`]. Returns the equivalent value in local
|
||||
/// time.
|
||||
fn from(src: DateTime<FixedOffset>) -> Self {
|
||||
src.with_timezone(&Local)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<Local>` instance into a `DateTime<Utc>` instance.
|
||||
#[cfg(feature="clock")]
|
||||
impl From<DateTime<Local>> for DateTime<Utc> {
|
||||
/// Convert this `DateTime<Local>` instance into a `DateTime<Utc>` instance.
|
||||
///
|
||||
/// Conversion is performed via [`DateTime::with_timezone`], accounting for the difference in
|
||||
/// timezones.
|
||||
fn from(src: DateTime<Local>) -> Self {
|
||||
src.with_timezone(&Utc)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a `DateTime<Local>` instance into a `DateTime<FixedOffset>` instance.
|
||||
#[cfg(feature="clock")]
|
||||
impl From<DateTime<Local>> for DateTime<FixedOffset> {
|
||||
/// Convert this `DateTime<Local>` instance into a `DateTime<FixedOffset>` instance.
|
||||
///
|
||||
/// Conversion is performed via [`DateTime::with_timezone`]. Note that the converted value returned
|
||||
/// by this will be created with a fixed timezone offset of 0.
|
||||
fn from(src: DateTime<Local>) -> Self {
|
||||
src.with_timezone(&FixedOffset::east(0))
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps the local datetime to other datetime with given conversion function.
|
||||
fn map_local<Tz: TimeZone, F>(dt: &DateTime<Tz>, mut f: F) -> Option<DateTime<Tz>>
|
||||
where F: FnMut(NaiveDateTime) -> Option<NaiveDateTime> {
|
||||
|
@ -247,7 +327,7 @@ impl DateTime<FixedOffset> {
|
|||
pub fn parse_from_rfc2822(s: &str) -> ParseResult<DateTime<FixedOffset>> {
|
||||
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC2822)];
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_datetime()
|
||||
}
|
||||
|
||||
|
@ -259,7 +339,7 @@ impl DateTime<FixedOffset> {
|
|||
pub fn parse_from_rfc3339(s: &str) -> ParseResult<DateTime<FixedOffset>> {
|
||||
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC3339)];
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_datetime()
|
||||
}
|
||||
|
||||
|
@ -285,22 +365,24 @@ impl DateTime<FixedOffset> {
|
|||
/// ```
|
||||
pub fn parse_from_str(s: &str, fmt: &str) -> ParseResult<DateTime<FixedOffset>> {
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
|
||||
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
|
||||
parsed.to_datetime()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tz: TimeZone> DateTime<Tz> where Tz::Offset: fmt::Display {
|
||||
/// Returns an RFC 2822 date and time string such as `Tue, 1 Jul 2003 10:52:37 +0200`.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
pub fn to_rfc2822(&self) -> String {
|
||||
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC2822)];
|
||||
self.format_with_items(ITEMS.iter().cloned()).to_string()
|
||||
self.format_with_items(ITEMS.iter()).to_string()
|
||||
}
|
||||
|
||||
/// Returns an RFC 3339 and ISO 8601 date and time string such as `1996-12-19T16:39:57-08:00`.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
pub fn to_rfc3339(&self) -> String {
|
||||
const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC3339)];
|
||||
self.format_with_items(ITEMS.iter().cloned()).to_string()
|
||||
self.format_with_items(ITEMS.iter()).to_string()
|
||||
}
|
||||
|
||||
/// Return an RFC 3339 and ISO 8601 date and time string with subseconds
|
||||
|
@ -327,6 +409,7 @@ impl<Tz: TimeZone> DateTime<Tz> where Tz::Offset: fmt::Display {
|
|||
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Secs, true),
|
||||
/// "2018-01-26T10:30:09+08:00");
|
||||
/// ```
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
pub fn to_rfc3339_opts(&self, secform: SecondsFormat, use_z: bool) -> String {
|
||||
use format::Numeric::*;
|
||||
use format::Pad::Zero;
|
||||
|
@ -368,19 +451,20 @@ impl<Tz: TimeZone> DateTime<Tz> where Tz::Offset: fmt::Display {
|
|||
match ssitem {
|
||||
None =>
|
||||
self.format_with_items(
|
||||
PREFIX.iter().chain([tzitem].iter()).cloned()
|
||||
PREFIX.iter().chain([tzitem].iter())
|
||||
).to_string(),
|
||||
Some(s) =>
|
||||
self.format_with_items(
|
||||
PREFIX.iter().chain([s, tzitem].iter()).cloned()
|
||||
PREFIX.iter().chain([s, tzitem].iter())
|
||||
).to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Formats the combined date and time with the specified formatting items.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=Item<'a>> + Clone {
|
||||
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>> {
|
||||
let local = self.naive_local();
|
||||
DelayedFormat::new_with_offset(Some(local.date()), Some(local.time()), &self.offset, items)
|
||||
}
|
||||
|
@ -388,6 +472,7 @@ impl<Tz: TimeZone> DateTime<Tz> where Tz::Offset: fmt::Display {
|
|||
/// Formats the combined date and time with the specified format string.
|
||||
/// See the [`format::strftime` module](./format/strftime/index.html)
|
||||
/// on the supported escape sequences.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
|
||||
self.format_with_items(StrftimeItems::new(fmt))
|
||||
|
@ -537,24 +622,24 @@ impl str::FromStr for DateTime<FixedOffset> {
|
|||
|
||||
fn from_str(s: &str) -> ParseResult<DateTime<FixedOffset>> {
|
||||
const ITEMS: &'static [Item<'static>] = &[
|
||||
Item::Space(""), Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("T"), // XXX shouldn't this be case-insensitive?
|
||||
Item::Space(""), Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Fixed(Fixed::Nanosecond),
|
||||
Item::Space(""), Item::Fixed(Fixed::TimezoneOffsetZ),
|
||||
Item::Space(""),
|
||||
];
|
||||
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_datetime()
|
||||
}
|
||||
}
|
||||
|
@ -576,6 +661,7 @@ impl str::FromStr for DateTime<Local> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", test))]
|
||||
impl From<SystemTime> for DateTime<Utc> {
|
||||
fn from(t: SystemTime) -> DateTime<Utc> {
|
||||
let (sec, nsec) = match t.duration_since(UNIX_EPOCH) {
|
||||
|
@ -601,6 +687,7 @@ impl From<SystemTime> for DateTime<Local> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", test))]
|
||||
impl<Tz: TimeZone> From<DateTime<Tz>> for SystemTime {
|
||||
fn from(dt: DateTime<Tz>) -> SystemTime {
|
||||
use std::time::Duration;
|
||||
|
@ -616,11 +703,19 @@ impl<Tz: TimeZone> From<DateTime<Tz>> for SystemTime {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_auto_conversion() {
|
||||
let utc_dt = Utc.ymd(2018, 9, 5).and_hms(23, 58, 0);
|
||||
let cdt_dt = FixedOffset::west(5 * 60 * 60).ymd(2018, 9, 5).and_hms(18, 58, 0);
|
||||
let utc_dt2: DateTime<Utc> = cdt_dt.into();
|
||||
assert_eq!(utc_dt, utc_dt2);
|
||||
}
|
||||
|
||||
#[cfg(all(test, any(feature = "rustc-serialize", feature = "serde")))]
|
||||
fn test_encodable_json<FUtc, FFixed, E>(to_string_utc: FUtc, to_string_fixed: FFixed)
|
||||
where FUtc: Fn(&DateTime<Utc>) -> Result<String, E>,
|
||||
FFixed: Fn(&DateTime<FixedOffset>) -> Result<String, E>,
|
||||
E: ::std::fmt::Debug
|
||||
E: ::core::fmt::Debug
|
||||
{
|
||||
assert_eq!(to_string_utc(&Utc.ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),
|
||||
Some(r#""2014-07-24T12:34:06Z""#.into()));
|
||||
|
@ -638,7 +733,7 @@ fn test_decodable_json<FUtc, FFixed, FLocal, E>(utc_from_str: FUtc,
|
|||
where FUtc: Fn(&str) -> Result<DateTime<Utc>, E>,
|
||||
FFixed: Fn(&str) -> Result<DateTime<FixedOffset>, E>,
|
||||
FLocal: Fn(&str) -> Result<DateTime<Local>, E>,
|
||||
E: ::std::fmt::Debug
|
||||
E: ::core::fmt::Debug
|
||||
{
|
||||
// should check against the offset as well (the normal DateTime comparison will ignore them)
|
||||
fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {
|
||||
|
@ -675,7 +770,7 @@ fn test_decodable_json_timestamps<FUtc, FFixed, FLocal, E>(utc_from_str: FUtc,
|
|||
where FUtc: Fn(&str) -> Result<rustc_serialize::TsSeconds<Utc>, E>,
|
||||
FFixed: Fn(&str) -> Result<rustc_serialize::TsSeconds<FixedOffset>, E>,
|
||||
FLocal: Fn(&str) -> Result<rustc_serialize::TsSeconds<Local>, E>,
|
||||
E: ::std::fmt::Debug
|
||||
E: ::core::fmt::Debug
|
||||
{
|
||||
fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {
|
||||
dt.as_ref().map(|dt| (dt, dt.offset()))
|
||||
|
@ -699,8 +794,8 @@ fn test_decodable_json_timestamps<FUtc, FFixed, FLocal, E>(utc_from_str: FUtc,
|
|||
|
||||
#[cfg(feature = "rustc-serialize")]
|
||||
pub mod rustc_serialize {
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
use core::fmt;
|
||||
use core::ops::Deref;
|
||||
use super::DateTime;
|
||||
#[cfg(feature="clock")]
|
||||
use offset::Local;
|
||||
|
@ -713,7 +808,7 @@ pub mod rustc_serialize {
|
|||
}
|
||||
}
|
||||
|
||||
// try!-like function to convert a LocalResult into a serde-ish Result
|
||||
// lik? function to convert a LocalResult into a serde-ish Result
|
||||
fn from<T, D>(me: LocalResult<T>, d: &mut D) -> Result<T, D::Error>
|
||||
where D: Decoder,
|
||||
T: fmt::Display,
|
||||
|
@ -828,25 +923,38 @@ pub mod rustc_serialize {
/// documented at re-export site
#[cfg(feature = "serde")]
pub mod serde {
    use std::fmt;
    use core::fmt;
    use super::DateTime;
    #[cfg(feature="clock")]
    use offset::Local;
    use offset::{LocalResult, TimeZone, Utc, FixedOffset};
    use serdelib::{ser, de};
    use {SerdeError, ne_timestamp};

    // try!-like function to convert a LocalResult into a serde-ish Result
    #[doc(hidden)]
    #[derive(Debug)]
    pub struct SecondsTimestampVisitor;

    #[doc(hidden)]
    #[derive(Debug)]
    pub struct NanoSecondsTimestampVisitor;

    #[doc(hidden)]
    #[derive(Debug)]
    pub struct MilliSecondsTimestampVisitor;

    // lik? function to convert a LocalResult into a serde-ish Result
    fn serde_from<T, E, V>(me: LocalResult<T>, ts: &V) -> Result<T, E>
        where E: de::Error,
              V: fmt::Display,
              T: fmt::Display,
    where
        E: de::Error,
        V: fmt::Display,
        T: fmt::Display,
    {
        match me {
            LocalResult::None => Err(E::custom(
                format!("value is not a legal timestamp: {}", ts))),
                ne_timestamp(ts))),
            LocalResult::Ambiguous(min, max) => Err(E::custom(
                format!("value is an ambiguous timestamp: {}, could be either of {}, {}",
                        ts, min, max))),
                SerdeError::Ambiguous { timestamp: ts, min: min, max: max })),
            LocalResult::Single(val) => Ok(val)
        }
    }
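// A standalone sketch of the pattern `serde_from` implements: collapse a
// `LocalResult` into a plain `Result`, turning the `None` and `Ambiguous` cases
// into errors. Illustrative only; `datetime_from_secs` is not part of chrono's
// public API (assumes the chrono 0.4 API as vendored here).

use chrono::{DateTime, LocalResult, TimeZone, Utc};

fn datetime_from_secs(secs: i64) -> Result<DateTime<Utc>, String> {
    match Utc.timestamp_opt(secs, 0) {
        LocalResult::Single(dt) => Ok(dt),
        LocalResult::None => Err(format!("value is not a legal timestamp: {}", secs)),
        LocalResult::Ambiguous(min, max) => Err(format!(
            "value is an ambiguous timestamp: {}, could be either of {}, {}",
            secs, min, max
        )),
    }
}

fn main() {
    assert!(datetime_from_secs(1_431_684_000).is_ok());
}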
|
||||
|
@ -888,13 +996,13 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_nanoseconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
use offset::TimeZone;
|
||||
|
||||
use super::serde_from;
|
||||
use super::{serde_from, NanoSecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of nanoseconds since the epoch
|
||||
///
|
||||
|
@ -965,17 +1073,15 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(NanoSecondsTimestampVisitor)))
|
||||
Ok(d.deserialize_i64(NanoSecondsTimestampVisitor)?)
|
||||
}
|
||||
|
||||
struct NanoSecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for NanoSecondsTimestampVisitor {
|
||||
type Value = DateTime<Utc>;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
write!(formatter, "a unix timestamp in seconds")
|
||||
write!(formatter, "a unix timestamp in nanoseconds")
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in nanoseconds since the epoch
|
||||
|
@ -998,6 +1104,152 @@ pub mod serde {
|
|||
}
|
||||
}
|
||||
|
||||
/// Ser/de to/from optional timestamps in nanoseconds
|
||||
///
|
||||
/// Intended for use with `serde`'s `with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_nanoseconds_option;
|
||||
/// #[derive(Deserialize, Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(with = "ts_nanoseconds_option")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let time = Some(Utc.ymd(2018, 5, 17).and_hms_nano(02, 04, 59, 918355733));
|
||||
/// let my_s = S {
|
||||
/// time: time.clone(),
|
||||
/// };
|
||||
///
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1526522699918355733}"#);
|
||||
/// let my_s: S = serde_json::from_str(&as_string)?;
|
||||
/// assert_eq!(my_s.time, time);
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_nanoseconds_option {
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
|
||||
use super::{ts_nanoseconds, NanoSecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of nanoseconds since the epoch or none
|
||||
///
|
||||
/// Intended for use with `serde`s `serialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_nanoseconds_option::serialize as to_nano_tsopt;
|
||||
/// #[derive(Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(serialize_with = "to_nano_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<String, serde_json::Error> {
|
||||
/// let my_s = S {
|
||||
/// time: Some(Utc.ymd(2018, 5, 17).and_hms_nano(02, 04, 59, 918355733)),
|
||||
/// };
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1526522699918355733}"#);
|
||||
/// # Ok(as_string)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn serialize<S>(opt: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: ser::Serializer
|
||||
{
|
||||
match *opt {
|
||||
Some(ref dt) => ts_nanoseconds::serialize(dt, serializer),
|
||||
None => serializer.serialize_none(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Deserialize a `DateTime` from a nanosecond timestamp or none
|
||||
///
|
||||
/// Intended for use with `serde`s `deserialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{DateTime, Utc};
|
||||
/// use chrono::serde::ts_nanoseconds_option::deserialize as from_nano_tsopt;
|
||||
/// #[derive(Deserialize)]
|
||||
/// struct S {
|
||||
/// #[serde(deserialize_with = "from_nano_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let my_s: S = serde_json::from_str(r#"{ "time": 1526522699918355733 }"#)?;
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn deserialize<'de, D>(d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(d.deserialize_option(OptionNanoSecondsTimestampVisitor)?)
|
||||
}
|
||||
|
||||
struct OptionNanoSecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for OptionNanoSecondsTimestampVisitor {
|
||||
type Value = Option<DateTime<Utc>>;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
formatter.write_str("a unix timestamp in nanoseconds or none")
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_some<D>(self, d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
d.deserialize_i64(NanoSecondsTimestampVisitor).map(|val| Some(val))
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_none<E>(self) -> Result<Option<DateTime<Utc>>, E>
|
||||
where E: de::Error
|
||||
{
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ser/de to/from timestamps in milliseconds
|
||||
///
|
||||
/// Intended for use with `serde`s `with` attribute.
|
||||
|
@ -1035,13 +1287,13 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_milliseconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
use offset::TimeZone;
|
||||
|
||||
use super::serde_from;
|
||||
use super::{serde_from, MilliSecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of milliseconds since the epoch
|
||||
///
|
||||
|
@ -1112,11 +1364,9 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(MilliSecondsTimestampVisitor).map(|dt| dt.with_timezone(&Utc))))
|
||||
Ok(d.deserialize_i64(MilliSecondsTimestampVisitor).map(|dt| dt.with_timezone(&Utc))?)
|
||||
}
|
||||
|
||||
struct MilliSecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for MilliSecondsTimestampVisitor {
|
||||
type Value = DateTime<Utc>;
|
||||
|
||||
|
@ -1145,6 +1395,152 @@ pub mod serde {
|
|||
}
|
||||
}
|
||||
|
||||
/// Ser/de to/from optional timestamps in milliseconds
|
||||
///
|
||||
/// Intended for use with `serde`s `with` attribute.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_milliseconds_option;
|
||||
/// #[derive(Deserialize, Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(with = "ts_milliseconds_option")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let time = Some(Utc.ymd(2018, 5, 17).and_hms_milli(02, 04, 59, 918));
|
||||
/// let my_s = S {
|
||||
/// time: time.clone(),
|
||||
/// };
|
||||
///
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1526522699918}"#);
|
||||
/// let my_s: S = serde_json::from_str(&as_string)?;
|
||||
/// assert_eq!(my_s.time, time);
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_milliseconds_option {
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
|
||||
use super::{ts_milliseconds, MilliSecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of milliseconds since the epoch or none
|
||||
///
|
||||
/// Intended for use with `serde`s `serialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_milliseconds_option::serialize as to_milli_tsopt;
|
||||
/// #[derive(Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(serialize_with = "to_milli_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<String, serde_json::Error> {
|
||||
/// let my_s = S {
|
||||
/// time: Some(Utc.ymd(2018, 5, 17).and_hms_milli(02, 04, 59, 918)),
|
||||
/// };
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1526522699918}"#);
|
||||
/// # Ok(as_string)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn serialize<S>(opt: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: ser::Serializer
|
||||
{
|
||||
match *opt {
|
||||
Some(ref dt) => ts_milliseconds::serialize(dt, serializer),
|
||||
None => serializer.serialize_none(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Deserialize a `DateTime` from a millisecond timestamp or none
|
||||
///
|
||||
/// Intended for use with `serde`s `deserialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{DateTime, Utc};
|
||||
/// use chrono::serde::ts_milliseconds_option::deserialize as from_milli_tsopt;
|
||||
/// #[derive(Deserialize)]
|
||||
/// struct S {
|
||||
/// #[serde(deserialize_with = "from_milli_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let my_s: S = serde_json::from_str(r#"{ "time": 1526522699918 }"#)?;
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn deserialize<'de, D>(d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(d.deserialize_option(OptionMilliSecondsTimestampVisitor).map(|opt| opt.map(|dt| dt.with_timezone(&Utc)))?)
|
||||
}
|
||||
|
||||
struct OptionMilliSecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for OptionMilliSecondsTimestampVisitor {
|
||||
type Value = Option<DateTime<Utc>>;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
formatter.write_str("a unix timestamp in milliseconds or none")
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_some<D>(self, d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
d.deserialize_i64(MilliSecondsTimestampVisitor).map(|val| Some(val))
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_none<E>(self) -> Result<Option<DateTime<Utc>>, E>
|
||||
where E: de::Error
|
||||
{
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ser/de to/from timestamps in seconds
|
||||
///
|
||||
/// Intended for use with `serde`'s `with` attribute.
|
||||
|
@ -1182,13 +1578,13 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_seconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
use offset::TimeZone;
|
||||
|
||||
use super::serde_from;
|
||||
use super::{serde_from, SecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of seconds since the epoch
|
||||
///
|
||||
|
@ -1259,11 +1655,9 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<DateTime<Utc>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(SecondsTimestampVisitor)))
|
||||
Ok(d.deserialize_i64(SecondsTimestampVisitor)?)
|
||||
}
|
||||
|
||||
struct SecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for SecondsTimestampVisitor {
|
||||
type Value = DateTime<Utc>;
|
||||
|
||||
|
@ -1288,6 +1682,152 @@ pub mod serde {
|
|||
}
|
||||
}
|
||||
|
||||
/// Ser/de to/from optional timestamps in seconds
|
||||
///
|
||||
/// Intended for use with `serde`'s `with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_seconds_option;
|
||||
/// #[derive(Deserialize, Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(with = "ts_seconds_option")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let time = Some(Utc.ymd(2015, 5, 15).and_hms(10, 0, 0));
|
||||
/// let my_s = S {
|
||||
/// time: time.clone(),
|
||||
/// };
|
||||
///
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1431684000}"#);
|
||||
/// let my_s: S = serde_json::from_str(&as_string)?;
|
||||
/// assert_eq!(my_s.time, time);
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_seconds_option {
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use {DateTime, Utc};
|
||||
|
||||
use super::{ts_seconds, SecondsTimestampVisitor};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of seconds since the epoch or none
|
||||
///
|
||||
/// Intended for use with `serde`s `serialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{TimeZone, DateTime, Utc};
|
||||
/// use chrono::serde::ts_seconds_option::serialize as to_tsopt;
|
||||
/// #[derive(Serialize)]
|
||||
/// struct S {
|
||||
/// #[serde(serialize_with = "to_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<String, serde_json::Error> {
|
||||
/// let my_s = S {
|
||||
/// time: Some(Utc.ymd(2015, 5, 15).and_hms(10, 0, 0)),
|
||||
/// };
|
||||
/// let as_string = serde_json::to_string(&my_s)?;
|
||||
/// assert_eq!(as_string, r#"{"time":1431684000}"#);
|
||||
/// # Ok(as_string)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn serialize<S>(opt: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: ser::Serializer
|
||||
{
|
||||
match *opt {
|
||||
Some(ref dt) => ts_seconds::serialize(dt, serializer),
|
||||
None => serializer.serialize_none(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Deserialize a `DateTime` from a seconds timestamp or none
|
||||
///
|
||||
/// Intended for use with `serde`s `deserialize_with` attribute.
|
||||
///
|
||||
/// # Example:
|
||||
///
|
||||
/// ```rust
|
||||
/// # // We mark this ignored so that we can test on 1.13 (which does not
|
||||
/// # // support custom derive), and run tests with --ignored on beta and
|
||||
/// # // nightly to actually trigger these.
|
||||
/// #
|
||||
/// # #[macro_use] extern crate serde_derive;
|
||||
/// # #[macro_use] extern crate serde_json;
|
||||
/// # extern crate chrono;
|
||||
/// # use chrono::{DateTime, Utc};
|
||||
/// use chrono::serde::ts_seconds_option::deserialize as from_tsopt;
|
||||
/// #[derive(Deserialize)]
|
||||
/// struct S {
|
||||
/// #[serde(deserialize_with = "from_tsopt")]
|
||||
/// time: Option<DateTime<Utc>>
|
||||
/// }
|
||||
///
|
||||
/// # fn example() -> Result<S, serde_json::Error> {
|
||||
/// let my_s: S = serde_json::from_str(r#"{ "time": 1431684000 }"#)?;
|
||||
/// # Ok(my_s)
|
||||
/// # }
|
||||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub fn deserialize<'de, D>(d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(d.deserialize_option(OptionSecondsTimestampVisitor)?)
|
||||
}
|
||||
|
||||
struct OptionSecondsTimestampVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for OptionSecondsTimestampVisitor {
|
||||
type Value = Option<DateTime<Utc>>;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
formatter.write_str("a unix timestamp in seconds or none")
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_some<D>(self, d: D) -> Result<Option<DateTime<Utc>>, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
d.deserialize_i64(SecondsTimestampVisitor).map(|val| Some(val))
|
||||
}
|
||||
|
||||
/// Deserialize a timestamp in seconds since the epoch
|
||||
fn visit_none<E>(self) -> Result<Option<DateTime<Utc>>, E>
|
||||
where E: de::Error
|
||||
{
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tz: TimeZone> ser::Serialize for DateTime<Tz> {
|
||||
/// Serialize into a rfc3339 time string
|
||||
///
|
||||
|
@ -1324,7 +1864,7 @@ pub mod serde {
|
|||
fn visit_str<E>(self, value: &str) -> Result<DateTime<FixedOffset>, E>
|
||||
where E: de::Error
|
||||
{
|
||||
value.parse().map_err(|err| E::custom(format!("{}", err)))
|
||||
value.parse().map_err(|err: ::format::ParseError| E::custom(err))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1644,4 +2184,83 @@ mod tests {
|
|||
assert_eq!(SystemTime::from(epoch.with_timezone(&FixedOffset::east(32400))), UNIX_EPOCH);
|
||||
assert_eq!(SystemTime::from(epoch.with_timezone(&FixedOffset::west(28800))), UNIX_EPOCH);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_datetime_format_alignment() {
|
||||
let datetime = Utc.ymd(2007, 01, 02);
|
||||
|
||||
// Item::Literal
|
||||
let percent = datetime.format("%%");
|
||||
assert_eq!(" %", format!("{:>3}", percent));
|
||||
assert_eq!("% ", format!("{:<3}", percent));
|
||||
assert_eq!(" % ", format!("{:^3}", percent));
|
||||
|
||||
// Item::Numeric
|
||||
let year = datetime.format("%Y");
|
||||
assert_eq!(" 2007", format!("{:>6}", year));
|
||||
assert_eq!("2007 ", format!("{:<6}", year));
|
||||
assert_eq!(" 2007 ", format!("{:^6}", year));
|
||||
|
||||
// Item::Fixed
|
||||
let tz = datetime.format("%Z");
|
||||
assert_eq!(" UTC", format!("{:>5}", tz));
|
||||
assert_eq!("UTC ", format!("{:<5}", tz));
|
||||
assert_eq!(" UTC ", format!("{:^5}", tz));
|
||||
|
||||
// [Item::Numeric, Item::Space, Item::Literal, Item::Space, Item::Numeric]
|
||||
let ymd = datetime.format("%Y %B %d");
|
||||
let ymd_formatted = "2007 January 02";
|
||||
assert_eq!(format!(" {}", ymd_formatted), format!("{:>17}", ymd));
|
||||
assert_eq!(format!("{} ", ymd_formatted), format!("{:<17}", ymd));
|
||||
assert_eq!(format!(" {} ", ymd_formatted), format!("{:^17}", ymd));
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_datetime_parse_from_rfc2822(bh: &mut test::Bencher) {
|
||||
bh.iter(|| {
|
||||
let str = test::black_box("Wed, 18 Feb 2015 23:16:09 +0000");
|
||||
DateTime::parse_from_rfc2822(str).unwrap()
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_datetime_parse_from_rfc3339(bh: &mut test::Bencher) {
|
||||
bh.iter(|| {
|
||||
let str = test::black_box("2015-02-18T23:59:60.234567+05:00");
|
||||
DateTime::parse_from_rfc3339(str).unwrap()
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_datetime_from_str(bh: &mut test::Bencher) {
|
||||
use std::str::FromStr;
|
||||
|
||||
bh.iter(|| {
|
||||
let str = test::black_box("2019-03-30T18:46:57.193Z");
|
||||
DateTime::<Utc>::from_str(str).unwrap()
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_datetime_to_rfc2822(bh: &mut test::Bencher) {
|
||||
let pst = FixedOffset::east(8 * 60 * 60);
|
||||
let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_000);
|
||||
bh.iter(|| {
|
||||
test::black_box(dt).to_rfc2822()
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_datetime_to_rfc3339(bh: &mut test::Bencher) {
|
||||
let pst = FixedOffset::east(8 * 60 * 60);
|
||||
let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_000);
|
||||
bh.iter(|| {
|
||||
test::black_box(dt).to_rfc3339()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,20 +15,33 @@
//! currently Chrono supports [one built-in syntax closely resembling
//! C's `strftime` format](./strftime/index.html).

use std::fmt;
use std::str::FromStr;
use std::error::Error;
#![allow(ellipsis_inclusive_range_patterns)]

use {Datelike, Timelike, Weekday, ParseWeekdayError};
use core::borrow::Borrow;
use core::fmt;
use core::str::FromStr;
#[cfg(any(feature = "std", test))]
use std::error::Error;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use alloc::string::{String, ToString};

#[cfg(any(feature = "alloc", feature = "std", test))]
use {Datelike, Timelike};
use {Weekday, ParseWeekdayError};
#[cfg(any(feature = "alloc", feature = "std", test))]
use div::{div_floor, mod_floor};
#[cfg(any(feature = "alloc", feature = "std", test))]
use offset::{Offset, FixedOffset};
#[cfg(any(feature = "alloc", feature = "std", test))]
use naive::{NaiveDate, NaiveTime};

pub use self::strftime::StrftimeItems;
pub use self::parsed::Parsed;
pub use self::parse::parse;

/// An unhabitated type used for `InternalNumeric` and `InternalFixed` below.
/// An uninhabited type used for `InternalNumeric` and `InternalFixed` below.
#[derive(Clone, PartialEq, Eq)]
enum Void {}
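// A small sketch of the strftime-like syntax mentioned in the module docs above.
// Not part of the vendored file; assumes chrono 0.4 with default features.

use chrono::{TimeZone, Utc};

fn main() {
    let dt = Utc.ymd(2020, 1, 20).and_hms(18, 30, 0);
    assert_eq!(dt.format("%Y-%m-%d %H:%M:%S").to_string(), "2020-01-20 18:30:00");
    assert_eq!(dt.format("%a, %d %b %Y").to_string(), "Mon, 20 Jan 2020");
}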
|
||||
|
||||
|
@ -53,7 +66,7 @@ pub enum Pad {
///
/// The **parsing width** is the maximal width to be scanned.
/// The parser only tries to consume from one to given number of digits (greedily).
/// It also trims the preceding whitespaces if any.
/// It also trims the preceding whitespace if any.
/// It cannot parse the negative number, so some date and time cannot be formatted then
/// parsed with the same formatting items.
#[derive(Clone, PartialEq, Eq, Debug)]
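// A sketch of the parsing-width rule documented above: each numeric item consumes
// at most its parsing width in digits, so a separator-free string still splits
// into fields. Not part of the vendored file; assumes chrono 0.4.

use chrono::NaiveDate;

fn main() {
    // "%m%d%y": at most two digits each for month, day and two-digit year.
    let d = NaiveDate::parse_from_str("021519", "%m%d%y").unwrap();
    assert_eq!(d, NaiveDate::from_ymd(2019, 2, 15));
}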
|
||||
|
@ -183,13 +196,13 @@ pub enum Fixed {
|
|||
TimezoneName,
|
||||
/// Offset from the local time to UTC (`+09:00` or `-04:00` or `+00:00`).
|
||||
///
|
||||
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespaces.
|
||||
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace.
|
||||
/// The offset is limited from `-24:00` to `+24:00`,
|
||||
/// which is same to [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
|
||||
TimezoneOffsetColon,
|
||||
/// Offset from the local time to UTC (`+09:00` or `-04:00` or `Z`).
|
||||
///
|
||||
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespaces,
|
||||
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace,
|
||||
/// and `Z` can be either in upper case or in lower case.
|
||||
/// The offset is limited from `-24:00` to `+24:00`,
|
||||
/// which is same to [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
|
||||
|
@ -243,10 +256,12 @@ pub enum Item<'a> {
|
|||
/// A literally printed and parsed text.
|
||||
Literal(&'a str),
|
||||
/// Same to `Literal` but with the string owned by the item.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
OwnedLiteral(Box<str>),
|
||||
/// Whitespace. Prints literally but reads zero or more whitespace.
|
||||
Space(&'a str),
|
||||
/// Same to `Space` but with the string owned by the item.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
OwnedSpace(Box<str>),
|
||||
/// Numeric item. Can be optionally padded to the maximal length (if any) when formatting;
|
||||
/// the parser simply ignores any padded whitespace and zeroes.
|
||||
|
@ -303,13 +318,7 @@ enum ParseErrorKind {
/// Same to `Result<T, ParseError>`.
pub type ParseResult<T> = Result<T, ParseError>;

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.description().fmt(f)
    }
}

impl Error for ParseError {
impl ParseError {
    fn description(&self) -> &str {
        match self.0 {
            ParseErrorKind::OutOfRange => "input is out of range",

@ -323,6 +332,19 @@ impl Error for ParseError {
    }
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.description().fmt(f)
    }
}

#[cfg(any(feature = "std", test))]
impl Error for ParseError {
    fn description(&self) -> &str {
        self.description()
    }
}
|
||||
// to be used in this module and submodules
|
||||
const OUT_OF_RANGE: ParseError = ParseError(ParseErrorKind::OutOfRange);
|
||||
const IMPOSSIBLE: ParseError = ParseError(ParseErrorKind::Impossible);
|
||||
|
@ -334,9 +356,16 @@ const BAD_FORMAT: ParseError = ParseError(ParseErrorKind::BadFormat);

/// Tries to format given arguments with given formatting items.
/// Internally used by `DelayedFormat`.
pub fn format<'a, I>(w: &mut fmt::Formatter, date: Option<&NaiveDate>, time: Option<&NaiveTime>,
                     off: Option<&(String, FixedOffset)>, items: I) -> fmt::Result
        where I: Iterator<Item=Item<'a>> {
#[cfg(any(feature = "alloc", feature = "std", test))]
pub fn format<'a, I, B>(
    w: &mut fmt::Formatter,
    date: Option<&NaiveDate>,
    time: Option<&NaiveTime>,
    off: Option<&(String, FixedOffset)>,
    items: I,
) -> fmt::Result
    where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>>
{
    // full and abbreviated month and weekday names
    static SHORT_MONTHS: [&'static str; 12] =
        ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
|
||||
|
@ -348,12 +377,16 @@ pub fn format<'a, I>(w: &mut fmt::Formatter, date: Option<&NaiveDate>, time: Opt
|
|||
static LONG_WEEKDAYS: [&'static str; 7] =
|
||||
["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];
|
||||
|
||||
for item in items {
|
||||
match item {
|
||||
Item::Literal(s) | Item::Space(s) => try!(write!(w, "{}", s)),
|
||||
Item::OwnedLiteral(ref s) | Item::OwnedSpace(ref s) => try!(write!(w, "{}", s)),
|
||||
use core::fmt::Write;
|
||||
let mut result = String::new();
|
||||
|
||||
Item::Numeric(spec, pad) => {
|
||||
for item in items {
|
||||
match item.borrow() {
|
||||
&Item::Literal(s) | &Item::Space(s) => result.push_str(s),
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
&Item::OwnedLiteral(ref s) | &Item::OwnedSpace(ref s) => result.push_str(s),
|
||||
|
||||
&Item::Numeric(ref spec, ref pad) => {
|
||||
use self::Numeric::*;
|
||||
|
||||
let week_from_sun = |d: &NaiveDate|
|
||||
|
@ -362,31 +395,31 @@ pub fn format<'a, I>(w: &mut fmt::Formatter, date: Option<&NaiveDate>, time: Opt
|
|||
(d.ordinal() as i32 - d.weekday().num_days_from_monday() as i32 + 7) / 7;
|
||||
|
||||
let (width, v) = match spec {
|
||||
Year => (4, date.map(|d| i64::from(d.year()))),
|
||||
YearDiv100 => (2, date.map(|d| div_floor(i64::from(d.year()), 100))),
|
||||
YearMod100 => (2, date.map(|d| mod_floor(i64::from(d.year()), 100))),
|
||||
IsoYear => (4, date.map(|d| i64::from(d.iso_week().year()))),
|
||||
IsoYearDiv100 => (2, date.map(|d| div_floor(
|
||||
&Year => (4, date.map(|d| i64::from(d.year()))),
|
||||
&YearDiv100 => (2, date.map(|d| div_floor(i64::from(d.year()), 100))),
|
||||
&YearMod100 => (2, date.map(|d| mod_floor(i64::from(d.year()), 100))),
|
||||
&IsoYear => (4, date.map(|d| i64::from(d.iso_week().year()))),
|
||||
&IsoYearDiv100 => (2, date.map(|d| div_floor(
|
||||
i64::from(d.iso_week().year()), 100))),
|
||||
IsoYearMod100 => (2, date.map(|d| mod_floor(
|
||||
&IsoYearMod100 => (2, date.map(|d| mod_floor(
|
||||
i64::from(d.iso_week().year()), 100))),
|
||||
Month => (2, date.map(|d| i64::from(d.month()))),
|
||||
Day => (2, date.map(|d| i64::from(d.day()))),
|
||||
WeekFromSun => (2, date.map(|d| i64::from(week_from_sun(d)))),
|
||||
WeekFromMon => (2, date.map(|d| i64::from(week_from_mon(d)))),
|
||||
IsoWeek => (2, date.map(|d| i64::from(d.iso_week().week()))),
|
||||
NumDaysFromSun => (1, date.map(|d| i64::from(d.weekday()
|
||||
&Month => (2, date.map(|d| i64::from(d.month()))),
|
||||
&Day => (2, date.map(|d| i64::from(d.day()))),
|
||||
&WeekFromSun => (2, date.map(|d| i64::from(week_from_sun(d)))),
|
||||
&WeekFromMon => (2, date.map(|d| i64::from(week_from_mon(d)))),
|
||||
&IsoWeek => (2, date.map(|d| i64::from(d.iso_week().week()))),
|
||||
&NumDaysFromSun => (1, date.map(|d| i64::from(d.weekday()
|
||||
.num_days_from_sunday()))),
|
||||
WeekdayFromMon => (1, date.map(|d| i64::from(d.weekday()
|
||||
&WeekdayFromMon => (1, date.map(|d| i64::from(d.weekday()
|
||||
.number_from_monday()))),
|
||||
Ordinal => (3, date.map(|d| i64::from(d.ordinal()))),
|
||||
Hour => (2, time.map(|t| i64::from(t.hour()))),
|
||||
Hour12 => (2, time.map(|t| i64::from(t.hour12().1))),
|
||||
Minute => (2, time.map(|t| i64::from(t.minute()))),
|
||||
Second => (2, time.map(|t| i64::from(t.second() +
|
||||
&Ordinal => (3, date.map(|d| i64::from(d.ordinal()))),
|
||||
&Hour => (2, time.map(|t| i64::from(t.hour()))),
|
||||
&Hour12 => (2, time.map(|t| i64::from(t.hour12().1))),
|
||||
&Minute => (2, time.map(|t| i64::from(t.minute()))),
|
||||
&Second => (2, time.map(|t| i64::from(t.second() +
|
||||
t.nanosecond() / 1_000_000_000))),
|
||||
Nanosecond => (9, time.map(|t| i64::from(t.nanosecond() % 1_000_000_000))),
|
||||
Timestamp => (1, match (date, time, off) {
|
||||
&Nanosecond => (9, time.map(|t| i64::from(t.nanosecond() % 1_000_000_000))),
|
||||
&Timestamp => (1, match (date, time, off) {
|
||||
(Some(d), Some(t), None) =>
|
||||
Some(d.and_time(*t).timestamp()),
|
||||
(Some(d), Some(t), Some(&(_, off))) =>
|
||||
|
@ -395,152 +428,184 @@ pub fn format<'a, I>(w: &mut fmt::Formatter, date: Option<&NaiveDate>, time: Opt
|
|||
}),
|
||||
|
||||
// for the future expansion
|
||||
Internal(ref int) => match int._dummy {},
|
||||
&Internal(ref int) => match int._dummy {},
|
||||
};
|
||||
|
||||
|
||||
if let Some(v) = v {
|
||||
if (spec == Year || spec == IsoYear) && !(0 <= v && v < 10_000) {
|
||||
if (spec == &Year || spec == &IsoYear) && !(0 <= v && v < 10_000) {
|
||||
// non-four-digit years require an explicit sign as per ISO 8601
|
||||
match pad {
|
||||
Pad::None => try!(write!(w, "{:+}", v)),
|
||||
Pad::Zero => try!(write!(w, "{:+01$}", v, width + 1)),
|
||||
Pad::Space => try!(write!(w, "{:+1$}", v, width + 1)),
|
||||
&Pad::None => write!(result, "{:+}", v),
|
||||
&Pad::Zero => write!(result, "{:+01$}", v, width + 1),
|
||||
&Pad::Space => write!(result, "{:+1$}", v, width + 1),
|
||||
}
|
||||
} else {
|
||||
match pad {
|
||||
Pad::None => try!(write!(w, "{}", v)),
|
||||
Pad::Zero => try!(write!(w, "{:01$}", v, width)),
|
||||
Pad::Space => try!(write!(w, "{:1$}", v, width)),
|
||||
&Pad::None => write!(result, "{}", v),
|
||||
&Pad::Zero => write!(result, "{:01$}", v, width),
|
||||
&Pad::Space => write!(result, "{:1$}", v, width),
|
||||
}
|
||||
}
|
||||
}?
|
||||
} else {
|
||||
return Err(fmt::Error); // insufficient arguments for given format
|
||||
return Err(fmt::Error) // insufficient arguments for given format
|
||||
}
|
||||
},
|
||||
|
||||
Item::Fixed(spec) => {
|
||||
&Item::Fixed(ref spec) => {
|
||||
use self::Fixed::*;
|
||||
|
||||
/// Prints an offset from UTC in the format of `+HHMM` or `+HH:MM`.
|
||||
/// `Z` instead of `+00[:]00` is allowed when `allow_zulu` is true.
|
||||
fn write_local_minus_utc(w: &mut fmt::Formatter, off: FixedOffset,
|
||||
allow_zulu: bool, use_colon: bool) -> fmt::Result {
|
||||
fn write_local_minus_utc(
|
||||
result: &mut String,
|
||||
off: FixedOffset,
|
||||
allow_zulu: bool,
|
||||
use_colon: bool,
|
||||
) -> fmt::Result {
|
||||
let off = off.local_minus_utc();
|
||||
if !allow_zulu || off != 0 {
|
||||
let (sign, off) = if off < 0 {('-', -off)} else {('+', off)};
|
||||
if use_colon {
|
||||
write!(w, "{}{:02}:{:02}", sign, off / 3600, off / 60 % 60)
|
||||
write!(result, "{}{:02}:{:02}", sign, off / 3600, off / 60 % 60)
|
||||
} else {
|
||||
write!(w, "{}{:02}{:02}", sign, off / 3600, off / 60 % 60)
|
||||
write!(result, "{}{:02}{:02}", sign, off / 3600, off / 60 % 60)
|
||||
}
|
||||
} else {
|
||||
write!(w, "Z")
|
||||
result.push_str("Z");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let ret = match spec {
|
||||
ShortMonthName =>
|
||||
date.map(|d| write!(w, "{}", SHORT_MONTHS[d.month0() as usize])),
|
||||
LongMonthName =>
|
||||
date.map(|d| write!(w, "{}", LONG_MONTHS[d.month0() as usize])),
|
||||
ShortWeekdayName =>
|
||||
date.map(|d| write!(w, "{}",
|
||||
SHORT_WEEKDAYS[d.weekday().num_days_from_monday() as usize])),
|
||||
LongWeekdayName =>
|
||||
date.map(|d| write!(w, "{}",
|
||||
LONG_WEEKDAYS[d.weekday().num_days_from_monday() as usize])),
|
||||
LowerAmPm =>
|
||||
time.map(|t| write!(w, "{}", if t.hour12().0 {"pm"} else {"am"})),
|
||||
UpperAmPm =>
|
||||
time.map(|t| write!(w, "{}", if t.hour12().0 {"PM"} else {"AM"})),
|
||||
Nanosecond =>
|
||||
&ShortMonthName =>
|
||||
date.map(|d| {
|
||||
result.push_str(SHORT_MONTHS[d.month0() as usize]);
|
||||
Ok(())
|
||||
}),
|
||||
&LongMonthName =>
|
||||
date.map(|d| {
|
||||
result.push_str(LONG_MONTHS[d.month0() as usize]);
|
||||
Ok(())
|
||||
}),
|
||||
&ShortWeekdayName =>
|
||||
date.map(|d| {
|
||||
result.push_str(
|
||||
SHORT_WEEKDAYS[d.weekday().num_days_from_monday() as usize]
|
||||
);
|
||||
Ok(())
|
||||
}),
|
||||
&LongWeekdayName =>
|
||||
date.map(|d| {
|
||||
result.push_str(
|
||||
LONG_WEEKDAYS[d.weekday().num_days_from_monday() as usize]
|
||||
);
|
||||
Ok(())
|
||||
}),
|
||||
&LowerAmPm =>
|
||||
time.map(|t| {
|
||||
result.push_str(if t.hour12().0 {"pm"} else {"am"});
|
||||
Ok(())
|
||||
}),
|
||||
&UpperAmPm =>
|
||||
time.map(|t| {
|
||||
result.push_str(if t.hour12().0 {"PM"} else {"AM"});
|
||||
Ok(())
|
||||
}),
|
||||
&Nanosecond =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
if nano == 0 {
|
||||
Ok(())
|
||||
} else if nano % 1_000_000 == 0 {
|
||||
write!(w, ".{:03}", nano / 1_000_000)
|
||||
write!(result, ".{:03}", nano / 1_000_000)
|
||||
} else if nano % 1_000 == 0 {
|
||||
write!(w, ".{:06}", nano / 1_000)
|
||||
write!(result, ".{:06}", nano / 1_000)
|
||||
} else {
|
||||
write!(w, ".{:09}", nano)
|
||||
write!(result, ".{:09}", nano)
|
||||
}
|
||||
}),
|
||||
Nanosecond3 =>
|
||||
&Nanosecond3 =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, ".{:03}", nano / 1_000_000)
|
||||
write!(result, ".{:03}", nano / 1_000_000)
|
||||
}),
|
||||
Nanosecond6 =>
|
||||
&Nanosecond6 =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, ".{:06}", nano / 1_000)
|
||||
write!(result, ".{:06}", nano / 1_000)
|
||||
}),
|
||||
Nanosecond9 =>
|
||||
&Nanosecond9 =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, ".{:09}", nano)
|
||||
write!(result, ".{:09}", nano)
|
||||
}),
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond3NoDot }) =>
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond3NoDot }) =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, "{:03}", nano / 1_000_000)
|
||||
write!(result, "{:03}", nano / 1_000_000)
|
||||
}),
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond6NoDot }) =>
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond6NoDot }) =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, "{:06}", nano / 1_000)
|
||||
write!(result, "{:06}", nano / 1_000)
|
||||
}),
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond9NoDot }) =>
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond9NoDot }) =>
|
||||
time.map(|t| {
|
||||
let nano = t.nanosecond() % 1_000_000_000;
|
||||
write!(w, "{:09}", nano)
|
||||
write!(result, "{:09}", nano)
|
||||
}),
|
||||
TimezoneName =>
|
||||
off.map(|&(ref name, _)| write!(w, "{}", *name)),
|
||||
TimezoneOffsetColon =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(w, off, false, true)),
|
||||
TimezoneOffsetColonZ =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(w, off, true, true)),
|
||||
TimezoneOffset =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(w, off, false, false)),
|
||||
TimezoneOffsetZ =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(w, off, true, false)),
|
||||
Internal(InternalFixed { val: InternalInternal::TimezoneOffsetPermissive }) =>
|
||||
&TimezoneName =>
|
||||
off.map(|&(ref name, _)| {
|
||||
result.push_str(name);
|
||||
Ok(())
|
||||
}),
|
||||
&TimezoneOffsetColon =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(&mut result, off, false, true)),
|
||||
&TimezoneOffsetColonZ =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(&mut result, off, true, true)),
|
||||
&TimezoneOffset =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(&mut result, off, false, false)),
|
||||
&TimezoneOffsetZ =>
|
||||
off.map(|&(_, off)| write_local_minus_utc(&mut result, off, true, false)),
|
||||
&Internal(InternalFixed { val: InternalInternal::TimezoneOffsetPermissive }) =>
|
||||
panic!("Do not try to write %#z it is undefined"),
|
||||
RFC2822 => // same to `%a, %e %b %Y %H:%M:%S %z`
|
||||
&RFC2822 => // same to `%a, %e %b %Y %H:%M:%S %z`
|
||||
if let (Some(d), Some(t), Some(&(_, off))) = (date, time, off) {
|
||||
let sec = t.second() + t.nanosecond() / 1_000_000_000;
|
||||
try!(write!(w, "{}, {:2} {} {:04} {:02}:{:02}:{:02} ",
|
||||
SHORT_WEEKDAYS[d.weekday().num_days_from_monday() as usize],
|
||||
d.day(), SHORT_MONTHS[d.month0() as usize], d.year(),
|
||||
t.hour(), t.minute(), sec));
|
||||
Some(write_local_minus_utc(w, off, false, false))
|
||||
write!(
|
||||
result,
|
||||
"{}, {:02} {} {:04} {:02}:{:02}:{:02} ",
|
||||
SHORT_WEEKDAYS[d.weekday().num_days_from_monday() as usize],
|
||||
d.day(), SHORT_MONTHS[d.month0() as usize], d.year(),
|
||||
t.hour(), t.minute(), sec
|
||||
)?;
|
||||
Some(write_local_minus_utc(&mut result, off, false, false))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
RFC3339 => // same to `%Y-%m-%dT%H:%M:%S%.f%:z`
|
||||
&RFC3339 => // same to `%Y-%m-%dT%H:%M:%S%.f%:z`
|
||||
if let (Some(d), Some(t), Some(&(_, off))) = (date, time, off) {
|
||||
// reuse `Debug` impls which already print ISO 8601 format.
|
||||
// this is faster in this way.
|
||||
try!(write!(w, "{:?}T{:?}", d, t));
|
||||
Some(write_local_minus_utc(w, off, false, true))
|
||||
write!(result, "{:?}T{:?}", d, t)?;
|
||||
Some(write_local_minus_utc(&mut result, off, false, true))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
};
|
||||
|
||||
match ret {
|
||||
Some(ret) => try!(ret),
|
||||
Some(ret) => ret?,
|
||||
None => return Err(fmt::Error), // insufficient arguments for given format
|
||||
}
|
||||
},
|
||||
|
||||
Item::Error => return Err(fmt::Error),
|
||||
&Item::Error => return Err(fmt::Error),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
w.pad(&result)
|
||||
}
|
||||
|
||||
mod parsed;
|
||||
|
@ -553,6 +618,7 @@ pub mod strftime;
|
|||
|
||||
/// A *temporary* object which can be used as an argument to `format!` or others.
|
||||
/// This is normally constructed via `format` methods of each date and time type.
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[derive(Debug)]
|
||||
pub struct DelayedFormat<I> {
|
||||
/// The date view, if any.
|
||||
|
@ -565,7 +631,8 @@ pub struct DelayedFormat<I> {
|
|||
items: I,
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item=Item<'a>> + Clone> DelayedFormat<I> {
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
impl<'a, I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>>> DelayedFormat<I> {
|
||||
/// Makes a new `DelayedFormat` value out of local date and time.
|
||||
pub fn new(date: Option<NaiveDate>, time: Option<NaiveTime>, items: I) -> DelayedFormat<I> {
|
||||
DelayedFormat { date: date, time: time, off: None, items: items }
|
||||
|
@ -580,7 +647,8 @@ impl<'a, I: Iterator<Item=Item<'a>> + Clone> DelayedFormat<I> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item=Item<'a>> + Clone> fmt::Display for DelayedFormat<I> {
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
impl<'a, I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>>> fmt::Display for DelayedFormat<I> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
format(f, self.date.as_ref(), self.time.as_ref(), self.off.as_ref(), self.items.clone())
|
||||
}
|
||||
|
|
|
@ -4,7 +4,10 @@
|
|||
|
||||
//! Date and time parsing routines.
|
||||
|
||||
use std::usize;
|
||||
#![allow(deprecated)]
|
||||
|
||||
use core::borrow::Borrow;
|
||||
use core::usize;
|
||||
|
||||
use Weekday;
|
||||
|
||||
|
@ -30,7 +33,7 @@ fn set_weekday_with_number_from_monday(p: &mut Parsed, v: i64) -> ParseResult<()
|
|||
|
||||
fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a str, ())> {
|
||||
macro_rules! try_consume {
|
||||
($e:expr) => ({ let (s_, v) = try!($e); s = s_; v })
|
||||
($e:expr) => ({ let (s_, v) = $e?; s = s_; v })
|
||||
}
|
||||
|
||||
// an adapted RFC 2822 syntax from Section 3.3 and 4.3:
|
||||
|
@ -87,14 +90,14 @@ fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
|
|||
if let Ok((s_, weekday)) = scan::short_weekday(s) {
|
||||
if !s_.starts_with(',') { return Err(INVALID); }
|
||||
s = &s_[1..];
|
||||
try!(parsed.set_weekday(weekday));
|
||||
parsed.set_weekday(weekday)?;
|
||||
}
|
||||
|
||||
s = s.trim_left();
|
||||
try!(parsed.set_day(try_consume!(scan::number(s, 1, 2))));
|
||||
s = try!(scan::space(s)); // mandatory
|
||||
try!(parsed.set_month(1 + i64::from(try_consume!(scan::short_month0(s)))));
|
||||
s = try!(scan::space(s)); // mandatory
|
||||
parsed.set_day(try_consume!(scan::number(s, 1, 2)))?;
|
||||
s = scan::space(s)?; // mandatory
|
||||
parsed.set_month(1 + i64::from(try_consume!(scan::short_month0(s))))?;
|
||||
s = scan::space(s)?; // mandatory
|
||||
|
||||
// distinguish two- and three-digit years from four-digit years
|
||||
let prevlen = s.len();
|
||||
|
@ -106,20 +109,20 @@ fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
|
|||
(3, _) => { year += 1900; } // 112 -> 2012, 009 -> 1909
|
||||
(_, _) => {} // 1987 -> 1987, 0654 -> 0654
|
||||
}
|
||||
try!(parsed.set_year(year));
|
||||
parsed.set_year(year)?;
|
||||
|
||||
s = try!(scan::space(s)); // mandatory
|
||||
try!(parsed.set_hour(try_consume!(scan::number(s, 2, 2))));
|
||||
s = try!(scan::char(s.trim_left(), b':')).trim_left(); // *S ":" *S
|
||||
try!(parsed.set_minute(try_consume!(scan::number(s, 2, 2))));
|
||||
s = scan::space(s)?; // mandatory
|
||||
parsed.set_hour(try_consume!(scan::number(s, 2, 2)))?;
|
||||
s = scan::char(s.trim_left(), b':')?.trim_left(); // *S ":" *S
|
||||
parsed.set_minute(try_consume!(scan::number(s, 2, 2)))?;
|
||||
if let Ok(s_) = scan::char(s.trim_left(), b':') { // [ ":" *S 2DIGIT ]
|
||||
try!(parsed.set_second(try_consume!(scan::number(s_, 2, 2))));
|
||||
parsed.set_second(try_consume!(scan::number(s_, 2, 2)))?;
|
||||
}
|
||||
|
||||
s = try!(scan::space(s)); // mandatory
|
||||
s = scan::space(s)?; // mandatory
|
||||
if let Some(offset) = try_consume!(scan::timezone_offset_2822(s)) {
|
||||
// only set the offset when it is definitely known (i.e. not `-0000`)
|
||||
try!(parsed.set_offset(i64::from(offset)));
|
||||
parsed.set_offset(i64::from(offset))?;
|
||||
}
|
||||
|
||||
Ok((s, ()))
|
||||
|
@ -127,7 +130,7 @@ fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
|
|||
|
||||
fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a str, ())> {
|
||||
macro_rules! try_consume {
|
||||
($e:expr) => ({ let (s_, v) = try!($e); s = s_; v })
|
||||
($e:expr) => ({ let (s_, v) = $e?; s = s_; v })
|
||||
}
|
||||
|
||||
// an adapted RFC 3339 syntax from Section 5.6:
|
||||
|
@ -157,11 +160,11 @@ fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
|
|||
// note that this restriction is unique to RFC 3339 and not ISO 8601.
|
||||
// since this is not a typical Chrono behavior, we check it earlier.
|
||||
|
||||
try!(parsed.set_year(try_consume!(scan::number(s, 4, 4))));
|
||||
s = try!(scan::char(s, b'-'));
|
||||
try!(parsed.set_month(try_consume!(scan::number(s, 2, 2))));
|
||||
s = try!(scan::char(s, b'-'));
|
||||
try!(parsed.set_day(try_consume!(scan::number(s, 2, 2))));
|
||||
parsed.set_year(try_consume!(scan::number(s, 4, 4)))?;
|
||||
s = scan::char(s, b'-')?;
|
||||
parsed.set_month(try_consume!(scan::number(s, 2, 2)))?;
|
||||
s = scan::char(s, b'-')?;
|
||||
parsed.set_day(try_consume!(scan::number(s, 2, 2)))?;
|
||||
|
||||
s = match s.as_bytes().first() {
|
||||
Some(&b't') | Some(&b'T') => &s[1..],
|
||||
|
@ -169,19 +172,19 @@ fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
|
|||
None => return Err(TOO_SHORT),
|
||||
};
|
||||
|
||||
try!(parsed.set_hour(try_consume!(scan::number(s, 2, 2))));
|
||||
s = try!(scan::char(s, b':'));
|
||||
try!(parsed.set_minute(try_consume!(scan::number(s, 2, 2))));
|
||||
s = try!(scan::char(s, b':'));
|
||||
try!(parsed.set_second(try_consume!(scan::number(s, 2, 2))));
|
||||
parsed.set_hour(try_consume!(scan::number(s, 2, 2)))?;
|
||||
s = scan::char(s, b':')?;
|
||||
parsed.set_minute(try_consume!(scan::number(s, 2, 2)))?;
|
||||
s = scan::char(s, b':')?;
|
||||
parsed.set_second(try_consume!(scan::number(s, 2, 2)))?;
|
||||
if s.starts_with('.') {
|
||||
let nanosecond = try_consume!(scan::nanosecond(&s[1..]));
|
||||
try!(parsed.set_nanosecond(nanosecond));
|
||||
parsed.set_nanosecond(nanosecond)?;
|
||||
}
|
||||
|
||||
let offset = try_consume!(scan::timezone_offset_zulu(s, |s| scan::char(s, b':')));
|
||||
if offset <= -86_400 || offset >= 86_400 { return Err(OUT_OF_RANGE); }
|
||||
try!(parsed.set_offset(i64::from(offset)));
|
||||
parsed.set_offset(i64::from(offset))?;
|
||||
|
||||
Ok((s, ()))
|
||||
}
|
||||
|
@ -202,65 +205,71 @@ fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a st
/// so one can prepend any number of whitespace then any number of zeroes before numbers.
///
/// - (Still) obeying the intrinsic parsing width. This allows, for example, parsing `HHMMSS`.
pub fn parse<'a, I>(parsed: &mut Parsed, mut s: &str, items: I) -> ParseResult<()>
    where I: Iterator<Item=Item<'a>> {
pub fn parse<'a, I, B>(parsed: &mut Parsed, mut s: &str, items: I) -> ParseResult<()>
    where I: Iterator<Item=B>, B: Borrow<Item<'a>> {
    macro_rules! try_consume {
        ($e:expr) => ({ let (s_, v) = try!($e); s = s_; v })
        ($e:expr) => ({ let (s_, v) = $e?; s = s_; v })
    }
|
||||
for item in items {
|
||||
match item {
|
||||
Item::Literal(prefix) => {
|
||||
match item.borrow() {
|
||||
&Item::Literal(prefix) => {
|
||||
if s.len() < prefix.len() { return Err(TOO_SHORT); }
|
||||
if !s.starts_with(prefix) { return Err(INVALID); }
|
||||
s = &s[prefix.len()..];
|
||||
}
|
||||
|
||||
Item::OwnedLiteral(ref prefix) => {
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
&Item::OwnedLiteral(ref prefix) => {
|
||||
if s.len() < prefix.len() { return Err(TOO_SHORT); }
|
||||
if !s.starts_with(&prefix[..]) { return Err(INVALID); }
|
||||
s = &s[prefix.len()..];
|
||||
}
|
||||
|
||||
Item::Space(_) | Item::OwnedSpace(_) => {
|
||||
&Item::Space(_) => {
|
||||
s = s.trim_left();
|
||||
}
|
||||
|
||||
Item::Numeric(spec, _pad) => {
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
&Item::OwnedSpace(_) => {
|
||||
s = s.trim_left();
|
||||
}
|
||||
|
||||
&Item::Numeric(ref spec, ref _pad) => {
|
||||
use super::Numeric::*;
|
||||
type Setter = fn(&mut Parsed, i64) -> ParseResult<()>;
|
||||
|
||||
let (width, signed, set): (usize, bool, Setter) = match spec {
|
||||
Year => (4, true, Parsed::set_year),
|
||||
YearDiv100 => (2, false, Parsed::set_year_div_100),
|
||||
YearMod100 => (2, false, Parsed::set_year_mod_100),
|
||||
IsoYear => (4, true, Parsed::set_isoyear),
|
||||
IsoYearDiv100 => (2, false, Parsed::set_isoyear_div_100),
|
||||
IsoYearMod100 => (2, false, Parsed::set_isoyear_mod_100),
|
||||
Month => (2, false, Parsed::set_month),
|
||||
Day => (2, false, Parsed::set_day),
|
||||
WeekFromSun => (2, false, Parsed::set_week_from_sun),
|
||||
WeekFromMon => (2, false, Parsed::set_week_from_mon),
|
||||
IsoWeek => (2, false, Parsed::set_isoweek),
|
||||
NumDaysFromSun => (1, false, set_weekday_with_num_days_from_sunday),
|
||||
WeekdayFromMon => (1, false, set_weekday_with_number_from_monday),
|
||||
Ordinal => (3, false, Parsed::set_ordinal),
|
||||
Hour => (2, false, Parsed::set_hour),
|
||||
Hour12 => (2, false, Parsed::set_hour12),
|
||||
Minute => (2, false, Parsed::set_minute),
|
||||
Second => (2, false, Parsed::set_second),
|
||||
Nanosecond => (9, false, Parsed::set_nanosecond),
|
||||
Timestamp => (usize::MAX, false, Parsed::set_timestamp),
|
||||
&Year => (4, true, Parsed::set_year),
|
||||
&YearDiv100 => (2, false, Parsed::set_year_div_100),
|
||||
&YearMod100 => (2, false, Parsed::set_year_mod_100),
|
||||
&IsoYear => (4, true, Parsed::set_isoyear),
|
||||
&IsoYearDiv100 => (2, false, Parsed::set_isoyear_div_100),
|
||||
&IsoYearMod100 => (2, false, Parsed::set_isoyear_mod_100),
|
||||
&Month => (2, false, Parsed::set_month),
|
||||
&Day => (2, false, Parsed::set_day),
|
||||
&WeekFromSun => (2, false, Parsed::set_week_from_sun),
|
||||
&WeekFromMon => (2, false, Parsed::set_week_from_mon),
|
||||
&IsoWeek => (2, false, Parsed::set_isoweek),
|
||||
&NumDaysFromSun => (1, false, set_weekday_with_num_days_from_sunday),
|
||||
&WeekdayFromMon => (1, false, set_weekday_with_number_from_monday),
|
||||
&Ordinal => (3, false, Parsed::set_ordinal),
|
||||
&Hour => (2, false, Parsed::set_hour),
|
||||
&Hour12 => (2, false, Parsed::set_hour12),
|
||||
&Minute => (2, false, Parsed::set_minute),
|
||||
&Second => (2, false, Parsed::set_second),
|
||||
&Nanosecond => (9, false, Parsed::set_nanosecond),
|
||||
&Timestamp => (usize::MAX, false, Parsed::set_timestamp),
|
||||
|
||||
// for the future expansion
|
||||
Internal(ref int) => match int._dummy {},
|
||||
&Internal(ref int) => match int._dummy {},
|
||||
};
|
||||
|
||||
s = s.trim_left();
|
||||
let v = if signed {
|
||||
if s.starts_with('-') {
|
||||
let v = try_consume!(scan::number(&s[1..], 1, usize::MAX));
|
||||
try!(0i64.checked_sub(v).ok_or(OUT_OF_RANGE))
|
||||
0i64.checked_sub(v).ok_or(OUT_OF_RANGE)?
|
||||
} else if s.starts_with('+') {
|
||||
try_consume!(scan::number(&s[1..], 1, usize::MAX))
|
||||
} else {
|
||||
|
@ -270,94 +279,94 @@ pub fn parse<'a, I>(parsed: &mut Parsed, mut s: &str, items: I) -> ParseResult<(
|
|||
} else {
|
||||
try_consume!(scan::number(s, 1, width))
|
||||
};
|
||||
try!(set(parsed, v));
|
||||
set(parsed, v)?;
|
||||
}
|
||||
|
||||
Item::Fixed(spec) => {
|
||||
&Item::Fixed(ref spec) => {
|
||||
use super::Fixed::*;
|
||||
|
||||
match spec {
|
||||
ShortMonthName => {
|
||||
&ShortMonthName => {
|
||||
let month0 = try_consume!(scan::short_month0(s));
|
||||
try!(parsed.set_month(i64::from(month0) + 1));
|
||||
parsed.set_month(i64::from(month0) + 1)?;
|
||||
}
|
||||
|
||||
LongMonthName => {
|
||||
&LongMonthName => {
|
||||
let month0 = try_consume!(scan::short_or_long_month0(s));
|
||||
try!(parsed.set_month(i64::from(month0) + 1));
|
||||
parsed.set_month(i64::from(month0) + 1)?;
|
||||
}
|
||||
|
||||
ShortWeekdayName => {
|
||||
&ShortWeekdayName => {
|
||||
let weekday = try_consume!(scan::short_weekday(s));
|
||||
try!(parsed.set_weekday(weekday));
|
||||
parsed.set_weekday(weekday)?;
|
||||
}
|
||||
|
||||
LongWeekdayName => {
|
||||
&LongWeekdayName => {
|
||||
let weekday = try_consume!(scan::short_or_long_weekday(s));
|
||||
try!(parsed.set_weekday(weekday));
|
||||
parsed.set_weekday(weekday)?;
|
||||
}
|
||||
|
||||
LowerAmPm | UpperAmPm => {
|
||||
&LowerAmPm | &UpperAmPm => {
|
||||
if s.len() < 2 { return Err(TOO_SHORT); }
|
||||
let ampm = match (s.as_bytes()[0] | 32, s.as_bytes()[1] | 32) {
|
||||
(b'a',b'm') => false,
|
||||
(b'p',b'm') => true,
|
||||
_ => return Err(INVALID)
|
||||
};
|
||||
try!(parsed.set_ampm(ampm));
|
||||
parsed.set_ampm(ampm)?;
|
||||
s = &s[2..];
|
||||
}
|
||||
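A quick aside on the AM/PM branch above: ORing an ASCII letter with 32 sets the lowercase bit, so `A`/`a` and `P`/`p` compare equal after `| 32`. A minimal standalone sketch (plain Rust, not part of chrono) illustrating the trick:

```rust
fn main() {
    // ORing with 32 (0b0010_0000) maps ASCII uppercase onto lowercase
    // and leaves lowercase letters unchanged.
    assert_eq!(b'A' | 32, b'a');
    assert_eq!(b'p' | 32, b'p');

    // The same comparison the parser performs on the first two bytes.
    let s = "PM, more input";
    let ampm = match (s.as_bytes()[0] | 32, s.as_bytes()[1] | 32) {
        (b'a', b'm') => false,
        (b'p', b'm') => true,
        _ => panic!("invalid"),
    };
    assert!(ampm); // "PM" recognized regardless of case
}
```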
|
||||
Nanosecond | Nanosecond3 | Nanosecond6 | Nanosecond9 => {
|
||||
&Nanosecond | &Nanosecond3 | &Nanosecond6 | &Nanosecond9 => {
|
||||
if s.starts_with('.') {
|
||||
let nano = try_consume!(scan::nanosecond(&s[1..]));
|
||||
try!(parsed.set_nanosecond(nano));
|
||||
parsed.set_nanosecond(nano)?;
|
||||
}
|
||||
}
|
||||
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond3NoDot }) => {
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond3NoDot }) => {
|
||||
if s.len() < 3 { return Err(TOO_SHORT); }
|
||||
let nano = try_consume!(scan::nanosecond_fixed(s, 3));
|
||||
try!(parsed.set_nanosecond(nano));
|
||||
parsed.set_nanosecond(nano)?;
|
||||
}
|
||||
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond6NoDot }) => {
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond6NoDot }) => {
|
||||
if s.len() < 6 { return Err(TOO_SHORT); }
|
||||
let nano = try_consume!(scan::nanosecond_fixed(s, 6));
|
||||
try!(parsed.set_nanosecond(nano));
|
||||
parsed.set_nanosecond(nano)?;
|
||||
}
|
||||
|
||||
Internal(InternalFixed { val: InternalInternal::Nanosecond9NoDot }) => {
|
||||
&Internal(InternalFixed { val: InternalInternal::Nanosecond9NoDot }) => {
|
||||
if s.len() < 9 { return Err(TOO_SHORT); }
|
||||
let nano = try_consume!(scan::nanosecond_fixed(s, 9));
|
||||
try!(parsed.set_nanosecond(nano));
|
||||
parsed.set_nanosecond(nano)?;
|
||||
}
|
||||
|
||||
TimezoneName => return Err(BAD_FORMAT),
|
||||
&TimezoneName => return Err(BAD_FORMAT),
|
||||
|
||||
TimezoneOffsetColon | TimezoneOffset => {
|
||||
&TimezoneOffsetColon | &TimezoneOffset => {
|
||||
let offset = try_consume!(scan::timezone_offset(s.trim_left(),
|
||||
scan::colon_or_space));
|
||||
try!(parsed.set_offset(i64::from(offset)));
|
||||
parsed.set_offset(i64::from(offset))?;
|
||||
}
|
||||
|
||||
TimezoneOffsetColonZ | TimezoneOffsetZ => {
|
||||
&TimezoneOffsetColonZ | &TimezoneOffsetZ => {
|
||||
let offset = try_consume!(scan::timezone_offset_zulu(s.trim_left(),
|
||||
scan::colon_or_space));
|
||||
try!(parsed.set_offset(i64::from(offset)));
|
||||
parsed.set_offset(i64::from(offset))?;
|
||||
}
|
||||
Internal(InternalFixed { val: InternalInternal::TimezoneOffsetPermissive }) => {
|
||||
&Internal(InternalFixed { val: InternalInternal::TimezoneOffsetPermissive }) => {
|
||||
let offset = try_consume!(scan::timezone_offset_permissive(
|
||||
s.trim_left(), scan::colon_or_space));
|
||||
try!(parsed.set_offset(i64::from(offset)));
|
||||
parsed.set_offset(i64::from(offset))?;
|
||||
}
|
||||
|
||||
RFC2822 => try_consume!(parse_rfc2822(parsed, s)),
|
||||
RFC3339 => try_consume!(parse_rfc3339(parsed, s)),
|
||||
&RFC2822 => try_consume!(parse_rfc2822(parsed, s)),
|
||||
&RFC3339 => try_consume!(parse_rfc3339(parsed, s)),
|
||||
}
|
||||
}
|
||||
|
||||
Item::Error => {
|
||||
&Item::Error => {
|
||||
return Err(BAD_FORMAT);
|
||||
}
|
||||
}
|
||||
|
@ -380,7 +389,7 @@ fn test_parse() {
     // workaround for Rust issue #22255
     fn parse_all(s: &str, items: &[Item]) -> ParseResult<Parsed> {
         let mut parsed = Parsed::new();
-        try!(parse(&mut parsed, s, items.iter().cloned()));
+        parse(&mut parsed, s, items.iter())?;
         Ok(parsed)
     }
@ -671,6 +680,8 @@ fn test_rfc2822() {
|
|||
// Test data - (input, Ok(expected result after parse and format) or Err(error code))
|
||||
let testdates = [
|
||||
("Tue, 20 Jan 2015 17:35:20 -0800", Ok("Tue, 20 Jan 2015 17:35:20 -0800")), // normal case
|
||||
("Fri, 2 Jan 2015 17:35:20 -0800", Ok("Fri, 02 Jan 2015 17:35:20 -0800")), // folding whitespace
|
||||
("Fri, 02 Jan 2015 17:35:20 -0800", Ok("Fri, 02 Jan 2015 17:35:20 -0800")), // leading zero
|
||||
("20 Jan 2015 17:35:20 -0800", Ok("Tue, 20 Jan 2015 17:35:20 -0800")), // no day of week
|
||||
("20 JAN 2015 17:35:20 -0800", Ok("Tue, 20 Jan 2015 17:35:20 -0800")), // upper case month
|
||||
("Tue, 20 Jan 2015 17:35 -0800", Ok("Tue, 20 Jan 2015 17:35:00 -0800")), // no second
|
||||
|
@ -689,12 +700,12 @@ fn test_rfc2822() {

     fn rfc2822_to_datetime(date: &str) -> ParseResult<DateTime<FixedOffset>> {
         let mut parsed = Parsed::new();
-        try!(parse(&mut parsed, date, [Item::Fixed(Fixed::RFC2822)].iter().cloned()));
+        parse(&mut parsed, date, [Item::Fixed(Fixed::RFC2822)].iter())?;
         parsed.to_datetime()
     }

     fn fmt_rfc2822_datetime(dt: DateTime<FixedOffset>) -> String {
-        dt.format_with_items([Item::Fixed(Fixed::RFC2822)].iter().cloned()).to_string()
+        dt.format_with_items([Item::Fixed(Fixed::RFC2822)].iter()).to_string()
     }
|
||||
// Test against test data above
|
||||
|
@ -770,12 +781,12 @@ fn test_rfc3339() {

     fn rfc3339_to_datetime(date: &str) -> ParseResult<DateTime<FixedOffset>> {
         let mut parsed = Parsed::new();
-        try!(parse(&mut parsed, date, [Item::Fixed(Fixed::RFC3339)].iter().cloned()));
+        parse(&mut parsed, date, [Item::Fixed(Fixed::RFC3339)].iter())?;
         parsed.to_datetime()
     }

     fn fmt_rfc3339_datetime(dt: DateTime<FixedOffset>) -> String {
-        dt.format_with_items([Item::Fixed(Fixed::RFC3339)].iter().cloned()).to_string()
+        dt.format_with_items([Item::Fixed(Fixed::RFC3339)].iter()).to_string()
     }
|
||||
// Test against test data above
|
||||
@ -112,6 +112,7 @@ pub struct Parsed {
|
|||
|
||||
/// Checks if `old` is either empty or has the same value to `new` (i.e. "consistent"),
|
||||
/// and if it is empty, set `old` to `new` as well.
|
||||
#[inline]
|
||||
fn set_if_consistent<T: PartialEq>(old: &mut Option<T>, new: T) -> ParseResult<()> {
|
||||
if let Some(ref old) = *old {
|
||||
if *old == new {Ok(())} else {Err(IMPOSSIBLE)}
|
||||
|
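As a rough standalone sketch of what `set_if_consistent` does (with a simplified error type standing in for chrono's `ParseResult` and `IMPOSSIBLE`, used here only for illustration): an empty field adopts any value, while a populated field only accepts the same value again.

```rust
// Simplified stand-in; chrono returns ParseResult<()> and the IMPOSSIBLE error.
fn set_if_consistent<T: PartialEq>(old: &mut Option<T>, new: T) -> Result<(), &'static str> {
    match old {
        Some(v) if *v == new => Ok(()),       // already set to the same value: fine
        Some(_) => Err("impossible"),         // conflicting value: reject
        None => { *old = Some(new); Ok(()) }  // empty: adopt the new value
    }
}

fn main() {
    let mut month: Option<u32> = None;
    assert!(set_if_consistent(&mut month, 7).is_ok());  // first write succeeds
    assert!(set_if_consistent(&mut month, 7).is_ok());  // same value is consistent
    assert!(set_if_consistent(&mut month, 8).is_err()); // conflicting value fails
}
```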
@ -141,82 +142,97 @@ impl Parsed {
|
|||
}
|
||||
|
||||
/// Tries to set the [`year`](#structfield.year) field from given value.
|
||||
#[inline]
|
||||
pub fn set_year(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.year, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.year, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`year_div_100`](#structfield.year_div_100) field from given value.
|
||||
#[inline]
|
||||
pub fn set_year_div_100(&mut self, value: i64) -> ParseResult<()> {
|
||||
if value < 0 { return Err(OUT_OF_RANGE); }
|
||||
set_if_consistent(&mut self.year_div_100, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.year_div_100, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`year_mod_100`](#structfield.year_mod_100) field from given value.
|
||||
#[inline]
|
||||
pub fn set_year_mod_100(&mut self, value: i64) -> ParseResult<()> {
|
||||
if value < 0 { return Err(OUT_OF_RANGE); }
|
||||
set_if_consistent(&mut self.year_mod_100, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.year_mod_100, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`isoyear`](#structfield.isoyear) field from given value.
|
||||
#[inline]
|
||||
pub fn set_isoyear(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.isoyear, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.isoyear, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`isoyear_div_100`](#structfield.isoyear_div_100) field from given value.
|
||||
#[inline]
|
||||
pub fn set_isoyear_div_100(&mut self, value: i64) -> ParseResult<()> {
|
||||
if value < 0 { return Err(OUT_OF_RANGE); }
|
||||
set_if_consistent(&mut self.isoyear_div_100, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.isoyear_div_100, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`isoyear_mod_100`](#structfield.isoyear_mod_100) field from given value.
|
||||
#[inline]
|
||||
pub fn set_isoyear_mod_100(&mut self, value: i64) -> ParseResult<()> {
|
||||
if value < 0 { return Err(OUT_OF_RANGE); }
|
||||
set_if_consistent(&mut self.isoyear_mod_100, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.isoyear_mod_100, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`month`](#structfield.month) field from given value.
|
||||
#[inline]
|
||||
pub fn set_month(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.month, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.month, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`week_from_sun`](#structfield.week_from_sun) field from given value.
|
||||
#[inline]
|
||||
pub fn set_week_from_sun(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.week_from_sun, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.week_from_sun, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`week_from_mon`](#structfield.week_from_mon) field from given value.
|
||||
#[inline]
|
||||
pub fn set_week_from_mon(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.week_from_mon, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.week_from_mon, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`isoweek`](#structfield.isoweek) field from given value.
|
||||
#[inline]
|
||||
pub fn set_isoweek(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.isoweek, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.isoweek, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`weekday`](#structfield.weekday) field from given value.
|
||||
#[inline]
|
||||
pub fn set_weekday(&mut self, value: Weekday) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.weekday, value)
|
||||
}
|
||||
|
||||
/// Tries to set the [`ordinal`](#structfield.ordinal) field from given value.
|
||||
#[inline]
|
||||
pub fn set_ordinal(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.ordinal, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.ordinal, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`day`](#structfield.day) field from given value.
|
||||
#[inline]
|
||||
pub fn set_day(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.day, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.day, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`hour_div_12`](#structfield.hour_div_12) field from given value.
|
||||
/// (`false` for AM, `true` for PM)
|
||||
#[inline]
|
||||
pub fn set_ampm(&mut self, value: bool) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.hour_div_12, if value {1} else {0})
|
||||
}
|
||||
|
||||
/// Tries to set the [`hour_mod_12`](#structfield.hour_mod_12) field from
|
||||
/// given hour number in 12-hour clocks.
|
||||
#[inline]
|
||||
pub fn set_hour12(&mut self, value: i64) -> ParseResult<()> {
|
||||
if value < 1 || value > 12 { return Err(OUT_OF_RANGE); }
|
||||
set_if_consistent(&mut self.hour_mod_12, value as u32 % 12)
|
||||
|
@ -224,36 +240,42 @@ impl Parsed {
|
|||
|
||||
/// Tries to set both [`hour_div_12`](#structfield.hour_div_12) and
|
||||
/// [`hour_mod_12`](#structfield.hour_mod_12) fields from given value.
|
||||
#[inline]
|
||||
pub fn set_hour(&mut self, value: i64) -> ParseResult<()> {
|
||||
let v = try!(value.to_u32().ok_or(OUT_OF_RANGE));
|
||||
try!(set_if_consistent(&mut self.hour_div_12, v / 12));
|
||||
try!(set_if_consistent(&mut self.hour_mod_12, v % 12));
|
||||
let v = value.to_u32().ok_or(OUT_OF_RANGE)?;
|
||||
set_if_consistent(&mut self.hour_div_12, v / 12)?;
|
||||
set_if_consistent(&mut self.hour_mod_12, v % 12)?;
|
||||
Ok(())
|
||||
}
|
||||
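`set_hour` stores a 24-hour value as the pair (`hour_div_12`, `hour_mod_12`), which is what lets `%H`, `%I` and `%p` cross-check each other later. A small worked check of the split (plain integer arithmetic, independent of the chrono API):

```rust
fn main() {
    // 15:00 in 24-hour time splits into the PM half (div = 1) and 3 o'clock (mod = 3).
    let hour = 15u32;
    assert_eq!((hour / 12, hour % 12), (1, 3));

    // Midnight is (0, 0) and noon is (1, 0); 12-hour input reaches the same pair
    // via set_ampm plus set_hour12's `value as u32 % 12` above.
    assert_eq!((0u32 / 12, 0u32 % 12), (0, 0));
    assert_eq!((12u32 / 12, 12u32 % 12), (1, 0));
}
```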
|
||||
/// Tries to set the [`minute`](#structfield.minute) field from given value.
|
||||
#[inline]
|
||||
pub fn set_minute(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.minute, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.minute, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`second`](#structfield.second) field from given value.
|
||||
#[inline]
|
||||
pub fn set_second(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.second, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.second, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`nanosecond`](#structfield.nanosecond) field from given value.
|
||||
#[inline]
|
||||
pub fn set_nanosecond(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.nanosecond, try!(value.to_u32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.nanosecond, value.to_u32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Tries to set the [`timestamp`](#structfield.timestamp) field from given value.
|
||||
#[inline]
|
||||
pub fn set_timestamp(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.timestamp, value)
|
||||
}
|
||||
|
||||
/// Tries to set the [`offset`](#structfield.offset) field from given value.
|
||||
#[inline]
|
||||
pub fn set_offset(&mut self, value: i64) -> ParseResult<()> {
|
||||
set_if_consistent(&mut self.offset, try!(value.to_i32().ok_or(OUT_OF_RANGE)))
|
||||
set_if_consistent(&mut self.offset, value.to_i32().ok_or(OUT_OF_RANGE)?)
|
||||
}
|
||||
|
||||
/// Returns a parsed naive date out of given fields.
|
||||
|
@ -294,7 +316,7 @@ impl Parsed {
|
|||
(None, Some(q), Some(r @ 0...99)) => {
|
||||
if q < 0 { return Err(OUT_OF_RANGE); }
|
||||
let y = q.checked_mul(100).and_then(|v| v.checked_add(r));
|
||||
Ok(Some(try!(y.ok_or(OUT_OF_RANGE))))
|
||||
Ok(Some(y.ok_or(OUT_OF_RANGE)?))
|
||||
},
|
||||
|
||||
// we only have modulo. try to interpret a modulo as a conventional two-digit year.
|
||||
|
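The branch elided after this comment applies a two-digit-year convention. As a hedged sketch only, assuming the conventional POSIX-style pivot (values 00 to 68 read as 20xx, 69 to 99 as 19xx; the actual chrono source is authoritative for the exact cutoff), the mapping looks like:

```rust
// Hypothetical helper mirroring the conventional %y interpretation; not chrono API.
fn interpret_two_digit_year(r: i32) -> i32 {
    if r < 69 { 2000 + r } else { 1900 + r }
}

fn main() {
    assert_eq!(interpret_two_digit_year(1), 2001);
    assert_eq!(interpret_two_digit_year(68), 2068);
    assert_eq!(interpret_two_digit_year(69), 1969);
    assert_eq!(interpret_two_digit_year(99), 1999);
}
```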
@ -308,9 +330,9 @@ impl Parsed {
|
|||
}
|
||||
|
||||
let given_year =
|
||||
try!(resolve_year(self.year, self.year_div_100, self.year_mod_100));
|
||||
resolve_year(self.year, self.year_div_100, self.year_mod_100)?;
|
||||
let given_isoyear =
|
||||
try!(resolve_year(self.isoyear, self.isoyear_div_100, self.isoyear_mod_100));
|
||||
resolve_year(self.isoyear, self.isoyear_div_100, self.isoyear_mod_100)?;
|
||||
|
||||
// verify the normal year-month-day date.
|
||||
let verify_ymd = |date: NaiveDate| {
|
||||
|
@ -366,20 +388,20 @@ impl Parsed {
|
|||
let (verified, parsed_date) = match (given_year, given_isoyear, self) {
|
||||
(Some(year), _, &Parsed { month: Some(month), day: Some(day), .. }) => {
|
||||
// year, month, day
|
||||
let date = try!(NaiveDate::from_ymd_opt(year, month, day).ok_or(OUT_OF_RANGE));
|
||||
let date = NaiveDate::from_ymd_opt(year, month, day).ok_or(OUT_OF_RANGE)?;
|
||||
(verify_isoweekdate(date) && verify_ordinal(date), date)
|
||||
},
|
||||
|
||||
(Some(year), _, &Parsed { ordinal: Some(ordinal), .. }) => {
|
||||
// year, day of the year
|
||||
let date = try!(NaiveDate::from_yo_opt(year, ordinal).ok_or(OUT_OF_RANGE));
|
||||
let date = NaiveDate::from_yo_opt(year, ordinal).ok_or(OUT_OF_RANGE)?;
|
||||
(verify_ymd(date) && verify_isoweekdate(date) && verify_ordinal(date), date)
|
||||
},
|
||||
|
||||
(Some(year), _, &Parsed { week_from_sun: Some(week_from_sun),
|
||||
weekday: Some(weekday), .. }) => {
|
||||
// year, week (starting at 1st Sunday), day of the week
|
||||
let newyear = try!(NaiveDate::from_yo_opt(year, 1).ok_or(OUT_OF_RANGE));
|
||||
let newyear = NaiveDate::from_yo_opt(year, 1).ok_or(OUT_OF_RANGE)?;
|
||||
let firstweek = match newyear.weekday() {
|
||||
Weekday::Sun => 0,
|
||||
Weekday::Mon => 6,
|
||||
|
@ -394,8 +416,8 @@ impl Parsed {
|
|||
if week_from_sun > 53 { return Err(OUT_OF_RANGE); } // can it overflow?
|
||||
let ndays = firstweek + (week_from_sun as i32 - 1) * 7 +
|
||||
weekday.num_days_from_sunday() as i32;
|
||||
let date = try!(newyear.checked_add_signed(OldDuration::days(i64::from(ndays)))
|
||||
.ok_or(OUT_OF_RANGE));
|
||||
let date = newyear.checked_add_signed(OldDuration::days(i64::from(ndays)))
|
||||
.ok_or(OUT_OF_RANGE)?;
|
||||
if date.year() != year { return Err(OUT_OF_RANGE); } // early exit for correct error
|
||||
|
||||
(verify_ymd(date) && verify_isoweekdate(date) && verify_ordinal(date), date)
|
||||
|
@ -404,7 +426,7 @@ impl Parsed {
|
|||
(Some(year), _, &Parsed { week_from_mon: Some(week_from_mon),
|
||||
weekday: Some(weekday), .. }) => {
|
||||
// year, week (starting at 1st Monday), day of the week
|
||||
let newyear = try!(NaiveDate::from_yo_opt(year, 1).ok_or(OUT_OF_RANGE));
|
||||
let newyear = NaiveDate::from_yo_opt(year, 1).ok_or(OUT_OF_RANGE)?;
|
||||
let firstweek = match newyear.weekday() {
|
||||
Weekday::Sun => 1,
|
||||
Weekday::Mon => 0,
|
||||
|
@ -419,8 +441,8 @@ impl Parsed {
|
|||
if week_from_mon > 53 { return Err(OUT_OF_RANGE); } // can it overflow?
|
||||
let ndays = firstweek + (week_from_mon as i32 - 1) * 7 +
|
||||
weekday.num_days_from_monday() as i32;
|
||||
let date = try!(newyear.checked_add_signed(OldDuration::days(i64::from(ndays)))
|
||||
.ok_or(OUT_OF_RANGE));
|
||||
let date = newyear.checked_add_signed(OldDuration::days(i64::from(ndays)))
|
||||
.ok_or(OUT_OF_RANGE)?;
|
||||
if date.year() != year { return Err(OUT_OF_RANGE); } // early exit for correct error
|
||||
|
||||
(verify_ymd(date) && verify_isoweekdate(date) && verify_ordinal(date), date)
|
||||
|
@ -429,7 +451,7 @@ impl Parsed {
|
|||
(_, Some(isoyear), &Parsed { isoweek: Some(isoweek), weekday: Some(weekday), .. }) => {
|
||||
// ISO year, week, day of the week
|
||||
let date = NaiveDate::from_isoywd_opt(isoyear, isoweek, weekday);
|
||||
let date = try!(date.ok_or(OUT_OF_RANGE));
|
||||
let date = date.ok_or(OUT_OF_RANGE)?;
|
||||
(verify_ymd(date) && verify_ordinal(date), date)
|
||||
},
|
||||
|
||||
|
@ -525,9 +547,9 @@ impl Parsed {
|
|||
}
|
||||
|
||||
// reconstruct date and time fields from timestamp
|
||||
let ts = try!(timestamp.checked_add(i64::from(offset)).ok_or(OUT_OF_RANGE));
|
||||
let ts = timestamp.checked_add(i64::from(offset)).ok_or(OUT_OF_RANGE)?;
|
||||
let datetime = NaiveDateTime::from_timestamp_opt(ts, 0);
|
||||
let mut datetime = try!(datetime.ok_or(OUT_OF_RANGE));
|
||||
let mut datetime = datetime.ok_or(OUT_OF_RANGE)?;
|
||||
|
||||
// fill year, ordinal, hour, minute and second fields from timestamp.
|
||||
// if existing fields are consistent, this will allow the full date/time reconstruction.
|
||||
|
@ -544,21 +566,21 @@ impl Parsed {
|
|||
}
|
||||
// ...and we have the correct candidates for other fields.
|
||||
} else {
|
||||
try!(parsed.set_second(i64::from(datetime.second())));
|
||||
parsed.set_second(i64::from(datetime.second()))?;
|
||||
}
|
||||
try!(parsed.set_year (i64::from(datetime.year())));
|
||||
try!(parsed.set_ordinal(i64::from(datetime.ordinal()))); // more efficient than ymd
|
||||
try!(parsed.set_hour (i64::from(datetime.hour())));
|
||||
try!(parsed.set_minute (i64::from(datetime.minute())));
|
||||
parsed.set_year (i64::from(datetime.year()))?;
|
||||
parsed.set_ordinal(i64::from(datetime.ordinal()))?; // more efficient than ymd
|
||||
parsed.set_hour (i64::from(datetime.hour()))?;
|
||||
parsed.set_minute (i64::from(datetime.minute()))?;
|
||||
|
||||
// validate other fields (e.g. week) and return
|
||||
let date = try!(parsed.to_naive_date());
|
||||
let time = try!(parsed.to_naive_time());
|
||||
let date = parsed.to_naive_date()?;
|
||||
let time = parsed.to_naive_time()?;
|
||||
Ok(date.and_time(time))
|
||||
} else {
|
||||
// reproduce the previous error(s)
|
||||
try!(date);
|
||||
try!(time);
|
||||
date?;
|
||||
time?;
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
@ -575,9 +597,9 @@ impl Parsed {
|
|||
/// plus a time zone offset.
|
||||
/// Either way those fields have to be consistent to each other.
|
||||
pub fn to_datetime(&self) -> ParseResult<DateTime<FixedOffset>> {
|
||||
let offset = try!(self.offset.ok_or(NOT_ENOUGH));
|
||||
let datetime = try!(self.to_naive_datetime_with_offset(offset));
|
||||
let offset = try!(FixedOffset::east_opt(offset).ok_or(OUT_OF_RANGE));
|
||||
let offset = self.offset.ok_or(NOT_ENOUGH)?;
|
||||
let datetime = self.to_naive_datetime_with_offset(offset)?;
|
||||
let offset = FixedOffset::east_opt(offset).ok_or(OUT_OF_RANGE)?;
|
||||
match offset.from_local_datetime(&datetime) {
|
||||
LocalResult::None => Err(IMPOSSIBLE),
|
||||
LocalResult::Single(t) => Ok(t),
|
||||
|
@ -602,7 +624,7 @@ impl Parsed {
|
|||
// an empty `nanosecond` is always equal to zero, so missing nanosecond is fine.
|
||||
let nanosecond = self.nanosecond.unwrap_or(0);
|
||||
let dt = NaiveDateTime::from_timestamp_opt(timestamp, nanosecond);
|
||||
let dt = try!(dt.ok_or(OUT_OF_RANGE));
|
||||
let dt = dt.ok_or(OUT_OF_RANGE)?;
|
||||
guessed_offset = tz.offset_from_utc_datetime(&dt).fix().local_minus_utc();
|
||||
}
|
||||
|
||||
|
@ -617,7 +639,7 @@ impl Parsed {
|
|||
|
||||
// `guessed_offset` should be correct when `self.timestamp` is given.
|
||||
// it will be 0 otherwise, but this is fine as the algorithm ignores offset for that case.
|
||||
let datetime = try!(self.to_naive_datetime_with_offset(guessed_offset));
|
||||
let datetime = self.to_naive_datetime_with_offset(guessed_offset)?;
|
||||
match tz.from_local_datetime(&datetime) {
|
||||
LocalResult::None => Err(IMPOSSIBLE),
|
||||
LocalResult::Single(t) => if check_offset(&t) {Ok(t)} else {Err(IMPOSSIBLE)},
|
||||
|
@ -5,6 +5,8 @@
|
|||
* Various scanning routines for the parser.
|
||||
*/
|
||||
|
||||
#![allow(deprecated)]
|
||||
|
||||
use Weekday;
|
||||
use super::{ParseResult, TOO_SHORT, INVALID, OUT_OF_RANGE};
|
||||
|
||||
|
@ -28,23 +30,35 @@ fn equals(s: &str, pattern: &str) -> bool {
|
|||
/// The absence of digits at all is an unconditional error.
|
||||
/// More than `max` digits are consumed up to the first `max` digits.
|
||||
/// Any number that does not fit in `i64` is an error.
|
||||
#[inline]
|
||||
pub fn number(s: &str, min: usize, max: usize) -> ParseResult<(&str, i64)> {
|
||||
assert!(min <= max);
|
||||
|
||||
// limit `s` to given number of digits
|
||||
let mut window = s.as_bytes();
|
||||
if window.len() > max { window = &window[..max]; }
|
||||
|
||||
// scan digits
|
||||
let upto = window.iter().position(|&c| c < b'0' || b'9' < c)
|
||||
.unwrap_or_else(|| window.len());
|
||||
if upto < min {
|
||||
return Err(if window.is_empty() {TOO_SHORT} else {INVALID});
|
||||
// We are only interested in ascii numbers, so we can work with the `str` as bytes. We stop on
|
||||
// the first non-numeric byte, which may be another ascii character or beginning of multi-byte
|
||||
// UTF-8 character.
|
||||
let bytes = s.as_bytes();
|
||||
if bytes.len() < min {
|
||||
return Err(TOO_SHORT);
|
||||
}
|
||||
|
||||
// we can overflow here, which is the only possible cause of error from `parse`.
|
||||
let v: i64 = try!(s[..upto].parse().map_err(|_| OUT_OF_RANGE));
|
||||
Ok((&s[upto..], v))
|
||||
let mut n = 0i64;
|
||||
for (i, c) in bytes.iter().take(max).cloned().enumerate() { // cloned() = copied()
|
||||
if c < b'0' || b'9' < c {
|
||||
if i < min {
|
||||
return Err(INVALID);
|
||||
} else {
|
||||
return Ok((&s[i..], n));
|
||||
}
|
||||
}
|
||||
|
||||
n = match n.checked_mul(10).and_then(|n| n.checked_add((c - b'0') as i64)) {
|
||||
Some(n) => n,
|
||||
None => return Err(OUT_OF_RANGE),
|
||||
};
|
||||
}
|
||||
|
||||
Ok((&s[::core::cmp::min(max, bytes.len())..], n))
|
||||
}
|
||||
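The rewritten `number` above replaces the old slice-then-`str::parse` approach with a manual byte loop, so overflow is caught via `checked_mul`/`checked_add` and scanning stops cleanly at the first non-digit byte. A standalone sketch of the same idea (simplified error type, not the chrono-internal function):

```rust
// Simplified stand-in; chrono's version returns ParseResult<(&str, i64)> with its own errors.
fn scan_number(s: &str, min: usize, max: usize) -> Result<(&str, i64), &'static str> {
    assert!(min <= max);
    let bytes = s.as_bytes();
    if bytes.len() < min {
        return Err("too short");
    }
    let mut n = 0i64;
    for (i, &c) in bytes.iter().take(max).enumerate() {
        if c < b'0' || c > b'9' {
            // Stop at the first non-digit; fail only if we have not seen enough digits yet.
            return if i < min { Err("invalid") } else { Ok((&s[i..], n)) };
        }
        n = n
            .checked_mul(10)
            .and_then(|n| n.checked_add((c - b'0') as i64))
            .ok_or("out of range")?;
    }
    Ok((&s[max.min(bytes.len())..], n))
}

fn main() {
    assert_eq!(scan_number("1234-56", 1, 2), Ok(("34-56", 12))); // capped at `max` digits
    assert_eq!(scan_number("7rest", 1, 9), Ok(("rest", 7)));     // stops at first non-digit
    assert!(scan_number("x", 1, 2).is_err());                    // no digits at all is an error
}
```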
|
||||
/// Tries to consume at least one digit as a fractional second.
|
||||
|
@ -52,13 +66,13 @@ pub fn number(s: &str, min: usize, max: usize) -> ParseResult<(&str, i64)> {
 pub fn nanosecond(s: &str) -> ParseResult<(&str, i64)> {
     // record the number of digits consumed for later scaling.
     let origlen = s.len();
-    let (s, v) = try!(number(s, 1, 9));
+    let (s, v) = number(s, 1, 9)?;
     let consumed = origlen - s.len();

     // scale the number accordingly.
     static SCALE: [i64; 10] = [0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000,
                                1_000, 100, 10, 1];
-    let v = try!(v.checked_mul(SCALE[consumed]).ok_or(OUT_OF_RANGE));
+    let v = v.checked_mul(SCALE[consumed]).ok_or(OUT_OF_RANGE)?;

     // if there are more than 9 digits, skip next digits.
     let s = s.trim_left_matches(|c: char| '0' <= c && c <= '9');
|
@ -70,12 +84,12 @@ pub fn nanosecond(s: &str) -> ParseResult<(&str, i64)> {
|
|||
/// Returns the number of whole nanoseconds (0--999,999,999).
|
||||
pub fn nanosecond_fixed(s: &str, digits: usize) -> ParseResult<(&str, i64)> {
|
||||
// record the number of digits consumed for later scaling.
|
||||
let (s, v) = try!(number(s, digits, digits));
|
||||
let (s, v) = number(s, digits, digits)?;
|
||||
|
||||
// scale the number accordingly.
|
||||
static SCALE: [i64; 10] = [0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000,
|
||||
1_000, 100, 10, 1];
|
||||
let v = try!(v.checked_mul(SCALE[digits]).ok_or(OUT_OF_RANGE));
|
||||
let v = v.checked_mul(SCALE[digits]).ok_or(OUT_OF_RANGE)?;
|
||||
|
||||
Ok((s, v))
|
||||
}
|
||||
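The `SCALE` table above turns however many fractional digits were consumed into whole nanoseconds: after `consumed` digits the parsed value is in units of 10^(9 - consumed) ns. A small worked check of that arithmetic (plain Rust, independent of chrono):

```rust
fn main() {
    static SCALE: [i64; 10] = [0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000,
                               1_000, 100, 10, 1];

    // ".07" consumes 2 digits, value 7 -> 7 * 10_000_000 = 70_000_000 ns (70 ms).
    assert_eq!(7 * SCALE[2], 70_000_000);

    // ".070000" consumes 6 digits, value 70_000 -> the same 70 ms.
    assert_eq!(70_000 * SCALE[6], 70_000_000);

    // Nine digits are already nanoseconds, so the scale factor is 1.
    assert_eq!(123_456_789 * SCALE[9], 123_456_789);
}
```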
|
@ -126,7 +140,7 @@ pub fn short_or_long_month0(s: &str) -> ParseResult<(&str, u8)> {
|
|||
static LONG_MONTH_SUFFIXES: [&'static str; 12] =
|
||||
["uary", "ruary", "ch", "il", "", "e", "y", "ust", "tember", "ober", "ember", "ember"];
|
||||
|
||||
let (mut s, month0) = try!(short_month0(s));
|
||||
let (mut s, month0) = short_month0(s)?;
|
||||
|
||||
// tries to consume the suffix if possible
|
||||
let suffix = LONG_MONTH_SUFFIXES[month0 as usize];
|
||||
|
@ -144,7 +158,7 @@ pub fn short_or_long_weekday(s: &str) -> ParseResult<(&str, Weekday)> {
|
|||
static LONG_WEEKDAY_SUFFIXES: [&'static str; 7] =
|
||||
["day", "sday", "nesday", "rsday", "day", "urday", "day"];
|
||||
|
||||
let (mut s, weekday) = try!(short_weekday(s));
|
||||
let (mut s, weekday) = short_weekday(s)?;
|
||||
|
||||
// tries to consume the suffix if possible
|
||||
let suffix = LONG_WEEKDAY_SUFFIXES[weekday.num_days_from_monday() as usize];
|
||||
|
@ -211,14 +225,14 @@ fn timezone_offset_internal<F>(mut s: &str, mut consume_colon: F, allow_missing_
|
|||
s = &s[1..];
|
||||
|
||||
// hours (00--99)
|
||||
let hours = match try!(digits(s)) {
|
||||
let hours = match digits(s)? {
|
||||
(h1 @ b'0'...b'9', h2 @ b'0'...b'9') => i32::from((h1 - b'0') * 10 + (h2 - b'0')),
|
||||
_ => return Err(INVALID),
|
||||
};
|
||||
s = &s[2..];
|
||||
|
||||
// colons (and possibly other separators)
|
||||
s = try!(consume_colon(s));
|
||||
s = consume_colon(s)?;
|
||||
|
||||
// minutes (00--59)
|
||||
// if the next two items are digits then we have to add minutes
|
||||
|
@ -293,7 +307,7 @@ pub fn timezone_offset_2822(s: &str) -> ParseResult<(&str, Option<i32>)> {
|
|||
Ok((s, None)) // recommended by RFC 2822: consume but treat it as -0000
|
||||
}
|
||||
} else {
|
||||
let (s_, offset) = try!(timezone_offset(s, |s| Ok(s)));
|
||||
let (s_, offset) = timezone_offset(s, |s| Ok(s))?;
|
||||
if offset == 0 && s.starts_with('-') { // -0000 is not same to +0000
|
||||
Ok((s_, None))
|
||||
} else {
|
||||
@ -11,9 +11,9 @@ The following specifiers are available both to formatting and parsing.
|
|||
| Spec. | Example | Description |
|
||||
|-------|----------|----------------------------------------------------------------------------|
|
||||
| | | **DATE SPECIFIERS:** |
|
||||
| `%Y` | `2001` | The full proleptic Gregorian year, zero-padded to 4 digits. [1] |
|
||||
| `%C` | `20` | The proleptic Gregorian year divided by 100, zero-padded to 2 digits. [2] |
|
||||
| `%y` | `01` | The proleptic Gregorian year modulo 100, zero-padded to 2 digits. [2] |
|
||||
| `%Y` | `2001` | The full proleptic Gregorian year, zero-padded to 4 digits. [^1] |
|
||||
| `%C` | `20` | The proleptic Gregorian year divided by 100, zero-padded to 2 digits. [^2] |
|
||||
| `%y` | `01` | The proleptic Gregorian year modulo 100, zero-padded to 2 digits. [^2] |
|
||||
| | | |
|
||||
| `%m` | `07` | Month number (01--12), zero-padded to 2 digits. |
|
||||
| `%b` | `Jul` | Abbreviated month name. Always 3 letters. |
|
||||
|
@ -28,12 +28,12 @@ The following specifiers are available both to formatting and parsing.
|
|||
| `%w` | `0` | Sunday = 0, Monday = 1, ..., Saturday = 6. |
|
||||
| `%u` | `7` | Monday = 1, Tuesday = 2, ..., Sunday = 7. (ISO 8601) |
|
||||
| | | |
|
||||
| `%U` | `28` | Week number starting with Sunday (00--53), zero-padded to 2 digits. [3] |
|
||||
| `%U` | `28` | Week number starting with Sunday (00--53), zero-padded to 2 digits. [^3] |
|
||||
| `%W` | `27` | Same to `%U`, but week 1 starts with the first Monday in that year instead.|
|
||||
| | | |
|
||||
| `%G` | `2001` | Same to `%Y` but uses the year number in ISO 8601 week date. [4] |
|
||||
| `%g` | `01` | Same to `%y` but uses the year number in ISO 8601 week date. [4] |
|
||||
| `%V` | `27` | Same to `%U` but uses the week number in ISO 8601 week date (01--53). [4] |
|
||||
| `%G` | `2001` | Same to `%Y` but uses the year number in ISO 8601 week date. [^4] |
|
||||
| `%g` | `01` | Same to `%y` but uses the year number in ISO 8601 week date. [^4] |
|
||||
| `%V` | `27` | Same to `%U` but uses the week number in ISO 8601 week date (01--53). [^4] |
|
||||
| | | |
|
||||
| `%j` | `189` | Day of the year (001--366), zero-padded to 3 digits. |
|
||||
| | | |
|
||||
|
@ -52,15 +52,15 @@ The following specifiers are available both to formatting and parsing.
|
|||
| `%p` | `AM` | `AM` or `PM` in 12-hour clocks. |
|
||||
| | | |
|
||||
| `%M` | `34` | Minute number (00--59), zero-padded to 2 digits. |
|
||||
| `%S` | `60` | Second number (00--60), zero-padded to 2 digits. [5] |
|
||||
| `%f` | `026490000` | The fractional seconds (in nanoseconds) since last whole second. [8] |
|
||||
| `%.f` | `.026490`| Similar to `.%f` but left-aligned. These all consume the leading dot. [8] |
|
||||
| `%.3f`| `.026` | Similar to `.%f` but left-aligned but fixed to a length of 3. [8] |
|
||||
| `%.6f`| `.026490` | Similar to `.%f` but left-aligned but fixed to a length of 6. [8] |
|
||||
| `%.9f`| `.026490000` | Similar to `.%f` but left-aligned but fixed to a length of 9. [8] |
|
||||
| `%3f` | `026` | Similar to `%.3f` but without the leading dot. [8] |
|
||||
| `%6f` | `026490` | Similar to `%.6f` but without the leading dot. [8] |
|
||||
| `%9f` | `026490000` | Similar to `%.9f` but without the leading dot. [8] |
|
||||
| `%S` | `60` | Second number (00--60), zero-padded to 2 digits. [^5] |
|
||||
| `%f` | `026490000` | The fractional seconds (in nanoseconds) since last whole second. [^8] |
|
||||
| `%.f` | `.026490`| Similar to `.%f` but left-aligned. These all consume the leading dot. [^8] |
|
||||
| `%.3f`| `.026` | Similar to `.%f` but left-aligned but fixed to a length of 3. [^8] |
|
||||
| `%.6f`| `.026490` | Similar to `.%f` but left-aligned but fixed to a length of 6. [^8] |
|
||||
| `%.9f`| `.026490000` | Similar to `.%f` but left-aligned but fixed to a length of 9. [^8] |
|
||||
| `%3f` | `026` | Similar to `%.3f` but without the leading dot. [^8] |
|
||||
| `%6f` | `026490` | Similar to `%.6f` but without the leading dot. [^8] |
|
||||
| `%9f` | `026490000` | Similar to `%.9f` but without the leading dot. [^8] |
|
||||
| | | |
|
||||
| `%R` | `00:34` | Hour-minute format. Same to `%H:%M`. |
|
||||
| `%T` | `00:34:60` | Hour-minute-second format. Same to `%H:%M:%S`. |
|
||||
|
@ -75,9 +75,9 @@ The following specifiers are available both to formatting and parsing.
|
|||
| | | |
|
||||
| | | **DATE & TIME SPECIFIERS:** |
|
||||
|`%c`|`Sun Jul 8 00:34:60 2001`|`ctime` date & time format. Same to `%a %b %e %T %Y` sans `\n`.|
|
||||
| `%+` | `2001-07-08T00:34:60.026490+09:30` | ISO 8601 / RFC 3339 date & time format. [6] |
|
||||
| `%+` | `2001-07-08T00:34:60.026490+09:30` | ISO 8601 / RFC 3339 date & time format. [^6] |
|
||||
| | | |
|
||||
| `%s` | `994518299` | UNIX timestamp, the number of seconds since 1970-01-01 00:00 UTC. [7] |
|
||||
| `%s` | `994518299` | UNIX timestamp, the number of seconds since 1970-01-01 00:00 UTC. [^7]|
|
||||
| | | |
|
||||
| | | **SPECIAL SPECIFIERS:** |
|
||||
| `%t` | | Literal tab (`\t`). |
|
||||
|
@ -95,59 +95,62 @@ Modifier | Description
|
|||
|
||||
 Notes:

-1. `%Y`:
+[^1]: `%Y`:
    Negative years are allowed in formatting but not in parsing.

-2. `%C`, `%y`:
+[^2]: `%C`, `%y`:
    This is floor division, so 100 BCE (year number -99) will print `-1` and `99` respectively.

-3. `%U`:
+[^3]: `%U`:
    Week 1 starts with the first Sunday in that year.
    It is possible to have week 0 for days before the first Sunday.

-4. `%G`, `%g`, `%V`:
+[^4]: `%G`, `%g`, `%V`:
    Week 1 is the first week with at least 4 days in that year.
    Week 0 does not exist, so this should be used with `%G` or `%g`.

-5. `%S`:
+[^5]: `%S`:
    It accounts for leap seconds, so `60` is possible.
|
||||
6. `%+`:
|
||||
Same to `%Y-%m-%dT%H:%M:%S%.f%:z`,
|
||||
i.e. 0, 3, 6 or 9 fractional digits for seconds and colons in the time zone offset.
|
||||
[^6]: `%+`: Same as `%Y-%m-%dT%H:%M:%S%.f%:z`, i.e. 0, 3, 6 or 9 fractional
|
||||
digits for seconds and colons in the time zone offset.
|
||||
<br>
|
||||
<br>
|
||||
The typical `strftime` implementations have different (and locale-dependent)
|
||||
formats for this specifier. While Chrono's format for `%+` is far more
|
||||
stable, it is best to avoid this specifier if you want to control the exact
|
||||
output.
|
||||
|
||||
The typical `strftime` implementations have
|
||||
different (and locale-dependent) formats for this specifier.
|
||||
While Chrono's format for `%+` is far more stable,
|
||||
it is best to avoid this specifier if you want to control the exact output.
|
||||
|
||||
7. `%s`:
|
||||
[^7]: `%s`:
|
||||
This is not padded and can be negative.
|
||||
For the purpose of Chrono, it only accounts for non-leap seconds
|
||||
so it slightly differs from ISO C `strftime` behavior.
|
||||
|
||||
8. `%f`, `%.f`, `%.3f`, `%.6f`, `%.9f`, `%3f`, `%6f`, `%9f`:
|
||||
|
||||
[^8]: `%f`, `%.f`, `%.3f`, `%.6f`, `%.9f`, `%3f`, `%6f`, `%9f`:
|
||||
<br>
|
||||
The default `%f` is right-aligned and always zero-padded to 9 digits
|
||||
for the compatibility with glibc and others,
|
||||
so it always counts the number of nanoseconds since the last whole second.
|
||||
E.g. 7ms after the last second will print `007000000`,
|
||||
and parsing `7000000` will yield the same.
|
||||
|
||||
<br>
|
||||
<br>
|
||||
The variant `%.f` is left-aligned and print 0, 3, 6 or 9 fractional digits
|
||||
according to the precision.
|
||||
E.g. 70ms after the last second under `%.f` will print `.070` (note: not `.07`),
|
||||
and parsing `.07`, `.070000` etc. will yield the same.
|
||||
Note that they can print or read nothing if the fractional part is zero or
|
||||
the next character is not `.`.
|
||||
|
||||
<br>
|
||||
<br>
|
||||
The variant `%.3f`, `%.6f` and `%.9f` are left-aligned and print 3, 6 or 9 fractional digits
|
||||
according to the number preceding `f`.
|
||||
E.g. 70ms after the last second under `%.3f` will print `.070` (note: not `.07`),
|
||||
and parsing `.07`, `.070000` etc. will yield the same.
|
||||
Note that they can read nothing if the fractional part is zero or
|
||||
the next character is not `.` however will print with the specified length.
|
||||
|
||||
<br>
|
||||
<br>
|
||||
The variant `%3f`, `%6f` and `%9f` are left-aligned and print 3, 6 or 9 fractional digits
|
||||
according to the number preceding `f`, but without the leading dot.
|
||||
E.g. 70ms after the last second under `%3f` will print `070` (note: not `07`),
|
||||
|
|
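Tying the `%S` and `%.f` notes together, here is a short usage sketch written against the 0.4-era chrono API documented in this file (the fractional output follows the left-aligned `%.f` rules above):

```rust
use chrono::NaiveDateTime;

fn main() {
    // `%.f` consumes the leading dot plus the fractional digits while parsing.
    let dt = NaiveDateTime::parse_from_str("2015-09-05 23:56:04.070",
                                           "%Y-%m-%d %H:%M:%S%.f").unwrap();

    // `%.3f` prints the fraction left-aligned and fixed to three digits.
    assert_eq!(dt.format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
               "2015-09-05 23:56:04.070");
}
```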
|
@ -66,7 +66,7 @@
|
|||
//! months.
|
||||
//!
|
||||
//! Chrono does not yet natively support
|
||||
//! the standard [`Duration`](https://docs.rs/time/0.1.40/time/struct.Duration.html) type,
|
||||
//! the standard [`Duration`](https://doc.rust-lang.org/std/time/struct.Duration.html) type,
|
||||
//! but it will be supported in the future.
|
||||
//! Meanwhile you can convert between two types with
|
||||
//! [`Duration::from_std`](https://docs.rs/time/0.1.40/time/struct.Duration.html#method.from_std)
|
||||
|
@ -158,22 +158,22 @@
|
|||
//! The following illustrates most supported operations to the date and time:
|
||||
//!
|
||||
//! ```rust
|
||||
//! # extern crate chrono; extern crate time; fn main() {
|
||||
//! # extern crate chrono;
|
||||
//! extern crate time;
|
||||
//!
|
||||
//! # fn main() {
|
||||
//! use chrono::prelude::*;
|
||||
//! use time::Duration;
|
||||
//!
|
||||
//! # /* we intentionally fake the datetime...
|
||||
//! // assume this returned `2014-11-28T21:45:59.324310806+09:00`:
|
||||
//! let dt = Local::now();
|
||||
//! # */ // up to here. we now define a fixed datetime for the illustrative purpose.
|
||||
//! # let dt = FixedOffset::east(9*3600).ymd(2014, 11, 28).and_hms_nano(21, 45, 59, 324310806);
|
||||
//! let dt = FixedOffset::east(9*3600).ymd(2014, 11, 28).and_hms_nano(21, 45, 59, 324310806);
|
||||
//!
|
||||
//! // property accessors
|
||||
//! assert_eq!((dt.year(), dt.month(), dt.day()), (2014, 11, 28));
|
||||
//! assert_eq!((dt.month0(), dt.day0()), (10, 27)); // for unfortunate souls
|
||||
//! assert_eq!((dt.hour(), dt.minute(), dt.second()), (21, 45, 59));
|
||||
//! assert_eq!(dt.weekday(), Weekday::Fri);
|
||||
//! assert_eq!(dt.weekday().number_from_monday(), 5); // Mon=1, ..., Sat=7
|
||||
//! assert_eq!(dt.weekday().number_from_monday(), 5); // Mon=1, ..., Sun=7
|
||||
//! assert_eq!(dt.ordinal(), 332); // the day of year
|
||||
//! assert_eq!(dt.num_days_from_ce(), 735565); // the number of days from and including Jan 1, 1
|
||||
//!
|
||||
|
@ -302,10 +302,8 @@
|
|||
//! to get the number of additional number of nanoseconds.
|
||||
//!
|
||||
//! ```rust
|
||||
//! # use chrono::DateTime;
|
||||
//! # use chrono::Utc;
|
||||
//! // We need the trait in scope to use Utc::timestamp().
|
||||
//! use chrono::TimeZone;
|
||||
//! use chrono::{DateTime, TimeZone, Utc};
|
||||
//!
|
||||
//! // Construct a datetime from epoch:
|
||||
//! let dt = Utc.timestamp(1_500_000_000, 0);
|
||||
|
@ -385,9 +383,12 @@
|
|||
|
||||
#![doc(html_root_url = "https://docs.rs/chrono/latest/")]
|
||||
|
||||
#![cfg_attr(bench, feature(test))] // lib stability features as per RFC #507
|
||||
#![cfg_attr(feature = "bench", feature(test))] // lib stability features as per RFC #507
|
||||
#![deny(missing_docs)]
|
||||
#![deny(missing_debug_implementations)]
|
||||
#![deny(dead_code)]
|
||||
|
||||
#![cfg_attr(not(any(feature = "std", test)), no_std)]
|
||||
|
||||
// The explicit 'static lifetimes are still needed for rustc 1.13-16
|
||||
// backward compatibility, and this appeases clippy. If minimum rustc
|
||||
|
@ -405,6 +406,13 @@
|
|||
trivially_copy_pass_by_ref,
|
||||
))]
|
||||
|
||||
#[cfg(feature = "alloc")]
|
||||
extern crate alloc;
|
||||
#[cfg(any(feature = "std", test))]
|
||||
extern crate std as core;
|
||||
#[cfg(all(feature = "std", not(feature="alloc")))]
|
||||
extern crate std as alloc;
|
||||
|
||||
#[cfg(feature="clock")]
|
||||
extern crate time as oldtime;
|
||||
extern crate num_integer;
|
||||
|
@ -413,6 +421,18 @@ extern crate num_traits;
|
|||
extern crate rustc_serialize;
|
||||
#[cfg(feature = "serde")]
|
||||
extern crate serde as serdelib;
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate doc_comment;
|
||||
#[cfg(all(target_arch = "wasm32", feature="wasmbind"))]
|
||||
extern crate wasm_bindgen;
|
||||
#[cfg(all(target_arch = "wasm32", feature="wasmbind"))]
|
||||
extern crate js_sys;
|
||||
#[cfg(feature = "bench")]
|
||||
extern crate test;
|
||||
|
||||
#[cfg(test)]
|
||||
doctest!("../README.md");
|
||||
|
||||
// this reexport is to aid the transition and should not be in the prelude!
|
||||
pub use oldtime::Duration;
|
||||
|
@ -451,7 +471,7 @@ mod div;
|
|||
mod oldtime;
|
||||
pub mod offset;
|
||||
pub mod naive {
|
||||
//! Date and time types which do not concern about the timezones.
|
||||
//! Date and time types unconcerned with timezones.
|
||||
//!
|
||||
//! They are primarily building blocks for other types
|
||||
//! (e.g. [`TimeZone`](../offset/trait.TimeZone.html)),
|
||||
|
@ -503,6 +523,41 @@ pub mod serde {
|
|||
pub use super::datetime::serde::*;
|
||||
}
|
||||
|
||||
// Until rust 1.18 there is no "pub(crate)" so to share this we need it in the root
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
enum SerdeError<V: fmt::Display, D: fmt::Display> {
|
||||
NonExistent { timestamp: V },
|
||||
Ambiguous { timestamp: V, min: D, max: D },
|
||||
}
|
||||
|
||||
/// Construct a [`SerdeError::NonExistent`]
|
||||
#[cfg(feature = "serde")]
|
||||
fn ne_timestamp<T: fmt::Display>(ts: T) -> SerdeError<T, u8> {
|
||||
SerdeError::NonExistent::<T, u8> { timestamp: ts }
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
impl<V: fmt::Display, D: fmt::Display> fmt::Debug for SerdeError<V, D> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "ChronoSerdeError({})", self)
|
||||
}
|
||||
}
|
||||
|
||||
// impl<V: fmt::Display, D: fmt::Debug> core::error::Error for SerdeError<V, D> {}
|
||||
#[cfg(feature = "serde")]
|
||||
impl<V: fmt::Display, D: fmt::Display> fmt::Display for SerdeError<V, D> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
&SerdeError::NonExistent { ref timestamp } => write!(
|
||||
f, "value is not a legal timestamp: {}", timestamp),
|
||||
&SerdeError::Ambiguous { ref timestamp, ref min, ref max } => write!(
|
||||
f, "value is an ambiguous timestamp: {}, could be either of {}, {}",
|
||||
timestamp, min, max),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The day of week.
|
||||
///
|
||||
/// The order of the days of week depends on the context.
|
||||
|
@ -637,6 +692,20 @@ impl Weekday {
|
|||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Weekday {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(match *self {
|
||||
Weekday::Mon => "Mon",
|
||||
Weekday::Tue => "Tue",
|
||||
Weekday::Wed => "Wed",
|
||||
Weekday::Thu => "Thu",
|
||||
Weekday::Fri => "Fri",
|
||||
Weekday::Sat => "Sat",
|
||||
Weekday::Sun => "Sun",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Any weekday can be represented as an integer from 0 to 6, which equals to
|
||||
/// [`Weekday::num_days_from_monday`](#method.num_days_from_monday) in this implementation.
|
||||
/// Do not heavily depend on this though; use explicit methods whenever possible.
|
||||
|
@ -670,7 +739,7 @@ impl num_traits::FromPrimitive for Weekday {
|
|||
}
|
||||
}
|
||||
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
|
||||
/// An error resulting from reading `Weekday` value with `FromStr`.
|
||||
#[derive(Clone, PartialEq)]
|
||||
|
@ -689,14 +758,14 @@ impl fmt::Debug for ParseWeekdayError {
|
|||
#[cfg(feature = "serde")]
|
||||
mod weekday_serde {
|
||||
use super::Weekday;
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
impl ser::Serialize for Weekday {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: ser::Serializer
|
||||
{
|
||||
serializer.serialize_str(&format!("{:?}", self))
|
||||
serializer.collect_str(&self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -880,16 +949,18 @@ pub trait Datelike: Sized {
|
|||
/// Returns `None` when the resulting value would be invalid.
|
||||
fn with_ordinal0(&self, ordinal0: u32) -> Option<Self>;
|
||||
|
||||
/// Returns the number of days since January 1, Year 1 (aka Day 1) in the
|
||||
/// proleptic Gregorian calendar.
|
||||
/// Counts the days in the proleptic Gregorian calendar, with January 1, Year 1 (CE) as day 1.
|
||||
///
|
||||
/// # Example:
|
||||
/// # Examples
|
||||
///
|
||||
/// ~~~
|
||||
/// ```
|
||||
/// use chrono::{NaiveDate, Datelike};
|
||||
/// assert_eq!(NaiveDate::from_ymd(1970, 1, 1).num_days_from_ce(), 719163);
|
||||
///
|
||||
/// assert_eq!(NaiveDate::from_ymd(1970, 1, 1).num_days_from_ce(), 719_163);
|
||||
/// assert_eq!(NaiveDate::from_ymd(2, 1, 1).num_days_from_ce(), 366);
|
||||
/// assert_eq!(NaiveDate::from_ymd(1, 1, 1).num_days_from_ce(), 1);
|
||||
/// assert_eq!(NaiveDate::from_ymd(0, 1, 1).num_days_from_ce(), -365);
|
||||
/// ~~~
|
||||
/// ```
|
||||
fn num_days_from_ce(&self) -> i32 {
|
||||
// we know this wouldn't overflow since year is limited to 1/2^13 of i32's full range.
|
||||
let mut year = self.year() - 1;
|
||||
@ -3,8 +3,10 @@
|
|||
|
||||
//! ISO 8601 calendar date without timezone.
|
||||
|
||||
use std::{str, fmt};
|
||||
use std::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use core::borrow::Borrow;
|
||||
use core::{str, fmt};
|
||||
use core::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
use num_traits::ToPrimitive;
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
|
@ -12,7 +14,9 @@ use {Weekday, Datelike};
|
|||
use div::div_mod_floor;
|
||||
use naive::{NaiveTime, NaiveDateTime, IsoWeek};
|
||||
use format::{Item, Numeric, Pad};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, DelayedFormat, StrftimeItems};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, StrftimeItems};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use format::DelayedFormat;
|
||||
|
||||
use super::isoweek;
|
||||
use super::internals::{self, DateImpl, Of, Mdf, YearFlags};
|
||||
|
@ -330,10 +334,10 @@ impl NaiveDate {
|
|||
}
|
||||
}
|
||||
|
||||
/// Makes a new `NaiveDate` from the number of days since January 1, 1 (Day 1)
|
||||
/// in the proleptic Gregorian calendar.
|
||||
/// Makes a new `NaiveDate` from a day's number in the proleptic Gregorian calendar, with
|
||||
/// January 1, 1 being day 1.
|
||||
///
|
||||
/// Panics on the out-of-range date.
|
||||
/// Panics if the date is out of range.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
|
@ -378,10 +382,10 @@ impl NaiveDate {
|
|||
NaiveDate::from_num_days_from_ce_opt(days).expect("out-of-range date")
|
||||
}
|
||||
|
||||
/// Makes a new `NaiveDate` from the number of days since January 1, 1 (Day 1)
|
||||
/// in the proleptic Gregorian calendar.
|
||||
/// Makes a new `NaiveDate` from a day's number in the proleptic Gregorian calendar, with
|
||||
/// January 1, 1 being day 1.
|
||||
///
|
||||
/// Returns `None` on the out-of-range date.
|
||||
/// Returns `None` if the date is out of range.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
|
@ -451,7 +455,7 @@ impl NaiveDate {
|
|||
/// ~~~~
|
||||
pub fn parse_from_str(s: &str, fmt: &str) -> ParseResult<NaiveDate> {
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
|
||||
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
|
||||
parsed.to_naive_date()
|
||||
}
|
||||
|
||||
|
@ -916,9 +920,10 @@ impl NaiveDate {
|
|||
/// # let d = NaiveDate::from_ymd(2015, 9, 5);
|
||||
/// assert_eq!(format!("{}", d.format_with_items(fmt)), "2015-09-05");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=Item<'a>> + Clone {
|
||||
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>> {
|
||||
DelayedFormat::new(Some(*self), None, items)
|
||||
}
|
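The new `B: Borrow<Item<'a>>` bound is the reason so many call sites in this diff drop `.cloned()`: the iterator may now yield either owned `Item<'a>` values or `&Item<'a>` references. A sketch of both call styles, written against the API as changed here:

```rust
use chrono::NaiveDate;
use chrono::format::{Item, Numeric, Pad};

fn main() {
    let items = [
        Item::Numeric(Numeric::Year, Pad::Zero), Item::Literal("-"),
        Item::Numeric(Numeric::Month, Pad::Zero), Item::Literal("-"),
        Item::Numeric(Numeric::Day, Pad::Zero),
    ];
    let d = NaiveDate::from_ymd(2015, 9, 5);

    // Borrowed items: no .cloned() needed after this change.
    assert_eq!(d.format_with_items(items.iter()).to_string(), "2015-09-05");
    // Owned items still work, as before.
    assert_eq!(d.format_with_items(items.iter().cloned()).to_string(), "2015-09-05");
}
```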
||||
|
||||
|
@ -954,6 +959,7 @@ impl NaiveDate {
|
|||
/// assert_eq!(format!("{}", d.format("%Y-%m-%d")), "2015-09-05");
|
||||
/// assert_eq!(format!("{}", d.format("%A, %-d %B, %C%y")), "Saturday, 5 September, 2015");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
|
||||
self.format_with_items(StrftimeItems::new(fmt))
|
||||
|
@ -1387,7 +1393,7 @@ impl SubAssign<OldDuration> for NaiveDate {
|
|||
|
||||
/// Subtracts another `NaiveDate` from the current date.
|
||||
/// Returns a `Duration` of integral numbers.
|
||||
///
|
||||
///
|
||||
/// This does not overflow or underflow at all,
|
||||
/// as all possible output fits in the range of `Duration`.
|
||||
///
|
||||
|
@ -1503,16 +1509,16 @@ impl str::FromStr for NaiveDate {
|
|||
|
||||
fn from_str(s: &str) -> ParseResult<NaiveDate> {
|
||||
const ITEMS: &'static [Item<'static>] = &[
|
||||
Item::Space(""), Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Space(""),
|
||||
];
|
||||
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_naive_date()
|
||||
}
|
||||
}
|
||||
|
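The `ITEMS` slice above is what backs `str::parse::<NaiveDate>()`: strict year-month-day parsing with `-` separators and optional surrounding whitespace. A brief usage sketch against the 0.4-era API:

```rust
use chrono::NaiveDate;

fn main() {
    // FromStr goes through the ITEMS table: year '-' month '-' day.
    let d: NaiveDate = "2015-09-05".parse().unwrap();
    assert_eq!(d, NaiveDate::from_ymd(2015, 9, 5));

    // Anything that does not match the fixed layout is rejected.
    assert!("2015/09/05".parse::<NaiveDate>().is_err());
}
```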
@ -1600,7 +1606,7 @@ mod rustc_serialize {
|
|||
|
||||
#[cfg(feature = "serde")]
|
||||
mod serde {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use super::NaiveDate;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
|
@ -1629,15 +1635,23 @@ mod serde {
|
|||
impl<'de> de::Visitor<'de> for NaiveDateVisitor {
|
||||
type Value = NaiveDate;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
write!(formatter, "a formatted date string")
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", test))]
|
||||
fn visit_str<E>(self, value: &str) -> Result<NaiveDate, E>
|
||||
where E: de::Error
|
||||
{
|
||||
value.parse().map_err(|err| E::custom(format!("{}", err)))
|
||||
value.parse().map_err(E::custom)
|
||||
}
|
||||
|
||||
#[cfg(not(any(feature = "std", test)))]
|
||||
fn visit_str<E>(self, value: &str) -> Result<NaiveDate, E>
|
||||
where E: de::Error
|
||||
{
|
||||
value.parse().map_err(E::custom)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,8 +3,10 @@
|
|||
|
||||
//! ISO 8601 date and time without timezone.
|
||||
|
||||
use std::{str, fmt, hash};
|
||||
use std::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use core::borrow::Borrow;
|
||||
use core::{str, fmt, hash};
|
||||
use core::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
use num_traits::ToPrimitive;
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
|
@ -12,7 +14,9 @@ use {Weekday, Timelike, Datelike};
|
|||
use div::div_mod_floor;
|
||||
use naive::{NaiveTime, NaiveDate, IsoWeek};
|
||||
use format::{Item, Numeric, Pad, Fixed};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, DelayedFormat, StrftimeItems};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, StrftimeItems};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use format::DelayedFormat;
|
||||
|
||||
/// The tight upper bound guarantees that a duration with `|Duration| >= 2^MAX_SECS_BITS`
|
||||
/// will always overflow the addition with any date and time type.
|
||||
|
@ -206,7 +210,7 @@ impl NaiveDateTime {
|
|||
/// ~~~~
|
||||
pub fn parse_from_str(s: &str, fmt: &str) -> ParseResult<NaiveDateTime> {
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
|
||||
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
|
||||
parsed.to_naive_datetime_with_offset(0) // no offset adjustment
|
||||
}
|
||||
|
||||
|
@ -305,21 +309,33 @@ impl NaiveDateTime {
|
|||
/// Note that this does *not* account for the timezone!
|
||||
/// The true "UNIX timestamp" would count seconds since the midnight *UTC* on the epoch.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Note also that this does reduce the number of years that can be
|
||||
/// represented from ~584 Billion to ~584. (If this is a problem,
|
||||
/// please file an issue to let me know what domain needs nanosecond
|
||||
/// precision over millenia, I'm curious.)
|
||||
/// represented from ~584 Billion to ~584 years. The dates that can be
|
||||
/// represented as nanoseconds are between 1677-09-21T00:12:44.0 and
|
||||
/// 2262-04-11T23:47:16.854775804.
|
||||
///
|
||||
/// (If this is a problem, please file an issue to let me know what domain
|
||||
/// needs nanosecond precision over millenia, I'm curious.)
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ~~~~
|
||||
/// use chrono::NaiveDate;
|
||||
/// use chrono::{NaiveDate, NaiveDateTime};
|
||||
///
|
||||
/// let dt = NaiveDate::from_ymd(1970, 1, 1).and_hms_nano(0, 0, 1, 444);
|
||||
/// assert_eq!(dt.timestamp_nanos(), 1_000_000_444);
|
||||
///
|
||||
/// let dt = NaiveDate::from_ymd(2001, 9, 9).and_hms_nano(1, 46, 40, 555);
|
||||
/// assert_eq!(dt.timestamp_nanos(), 1_000_000_000_000_000_555);
|
||||
///
|
||||
/// const A_BILLION: i64 = 1_000_000_000;
|
||||
/// let nanos = dt.timestamp_nanos();
|
||||
/// assert_eq!(nanos, 1_000_000_000_000_000_555);
|
||||
/// assert_eq!(
|
||||
/// dt,
|
||||
/// NaiveDateTime::from_timestamp(nanos / A_BILLION, (nanos % A_BILLION) as u32)
|
||||
/// );
|
||||
/// ~~~~
|
||||
#[inline]
|
||||
pub fn timestamp_nanos(&self) -> i64 {
|
||||
|
@ -633,9 +649,10 @@ impl NaiveDateTime {
|
|||
/// # let dt = NaiveDate::from_ymd(2015, 9, 5).and_hms(23, 56, 4);
|
||||
/// assert_eq!(format!("{}", dt.format_with_items(fmt)), "2015-09-05 23:56:04");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=Item<'a>> + Clone {
|
||||
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>> {
|
||||
DelayedFormat::new(Some(self.date), Some(self.time), items)
|
||||
}
|
||||
|
||||
|
@ -671,6 +688,7 @@ impl NaiveDateTime {
|
|||
/// assert_eq!(format!("{}", dt.format("%Y-%m-%d %H:%M:%S")), "2015-09-05 23:56:04");
|
||||
/// assert_eq!(format!("{}", dt.format("around %l %p on %b %-d")), "around 11 PM on Sep 5");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
|
||||
self.format_with_items(StrftimeItems::new(fmt))
|
||||
|
@ -1456,22 +1474,22 @@ impl str::FromStr for NaiveDateTime {
|
|||
|
||||
fn from_str(s: &str) -> ParseResult<NaiveDateTime> {
|
||||
const ITEMS: &'static [Item<'static>] = &[
|
||||
Item::Space(""), Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Numeric(Numeric::Year, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Numeric(Numeric::Month, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("-"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Numeric(Numeric::Day, Pad::Zero),
|
||||
Item::Space(""), Item::Literal("T"), // XXX shouldn't this be case-insensitive?
|
||||
Item::Space(""), Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Fixed(Fixed::Nanosecond), Item::Space(""),
|
||||
];
|
||||
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_naive_datetime_with_offset(0)
|
||||
}
|
||||
}
|
||||
|
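And the `NaiveDateTime` counterpart: `FromStr` expects the `T`-separated layout built from the `ITEMS` table above, with an optional fraction handled by the trailing `Fixed::Nanosecond` item. A brief usage sketch against the 0.4-era API:

```rust
use chrono::{NaiveDate, NaiveDateTime};

fn main() {
    let dt: NaiveDateTime = "2015-09-18T23:56:04".parse().unwrap();
    assert_eq!(dt, NaiveDate::from_ymd(2015, 9, 18).and_hms(23, 56, 4));

    // The trailing Fixed::Nanosecond item makes a fractional part optional.
    let dt: NaiveDateTime = "2015-09-18T23:56:04.123".parse().unwrap();
    assert_eq!(dt, NaiveDate::from_ymd(2015, 9, 18).and_hms_milli(23, 56, 4, 123));
}
```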
@ -1651,7 +1669,7 @@ pub mod rustc_serialize {
|
|||
/// Tools to help serializing/deserializing `NaiveDateTime`s
|
||||
#[cfg(feature = "serde")]
|
||||
pub mod serde {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use super::{NaiveDateTime};
|
||||
use serdelib::{ser, de};
|
||||
|
||||
|
@ -1690,7 +1708,7 @@ pub mod serde {
|
|||
fn visit_str<E>(self, value: &str) -> Result<NaiveDateTime, E>
|
||||
where E: de::Error
|
||||
{
|
||||
value.parse().map_err(|err| E::custom(format!("{}", err)))
|
||||
value.parse().map_err(E::custom)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1738,10 +1756,10 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_nanoseconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use NaiveDateTime;
|
||||
use {NaiveDateTime, ne_timestamp};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of nanoseconds since the epoch
|
||||
///
|
||||
|
@ -1816,7 +1834,7 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<NaiveDateTime, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(NaiveDateTimeFromNanoSecondsVisitor)))
|
||||
Ok(d.deserialize_i64(NaiveDateTimeFromNanoSecondsVisitor)?)
|
||||
}
|
||||
|
||||
struct NaiveDateTimeFromNanoSecondsVisitor;
|
||||
|
@ -1834,7 +1852,7 @@ pub mod serde {
|
|||
{
|
||||
NaiveDateTime::from_timestamp_opt(value / 1_000_000_000,
|
||||
(value % 1_000_000_000) as u32)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<NaiveDateTime, E>
|
||||
|
@ -1842,7 +1860,7 @@ pub mod serde {
|
|||
{
|
||||
NaiveDateTime::from_timestamp_opt(value as i64 / 1_000_000_000,
|
||||
(value as i64 % 1_000_000_000) as u32)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1883,10 +1901,10 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_milliseconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use NaiveDateTime;
|
||||
use {NaiveDateTime, ne_timestamp};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of milliseconds since the epoch
|
||||
///
|
||||
|
@ -1961,7 +1979,7 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<NaiveDateTime, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(NaiveDateTimeFromMilliSecondsVisitor)))
|
||||
Ok(d.deserialize_i64(NaiveDateTimeFromMilliSecondsVisitor)?)
|
||||
}
|
||||
|
||||
struct NaiveDateTimeFromMilliSecondsVisitor;
|
||||
|
@ -1979,7 +1997,7 @@ pub mod serde {
|
|||
{
|
||||
NaiveDateTime::from_timestamp_opt(value / 1000,
|
||||
((value % 1000) * 1_000_000) as u32)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<NaiveDateTime, E>
|
||||
|
@ -1987,7 +2005,7 @@ pub mod serde {
|
|||
{
|
||||
NaiveDateTime::from_timestamp_opt((value / 1000) as i64,
|
||||
((value % 1000) * 1_000_000) as u32)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2028,10 +2046,10 @@ pub mod serde {
|
|||
/// # fn main() { example().unwrap(); }
|
||||
/// ```
|
||||
pub mod ts_seconds {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
use NaiveDateTime;
|
||||
use {NaiveDateTime, ne_timestamp};
|
||||
|
||||
/// Serialize a UTC datetime into an integer number of seconds since the epoch
|
||||
///
|
||||
|
@ -2106,7 +2124,7 @@ pub mod serde {
|
|||
pub fn deserialize<'de, D>(d: D) -> Result<NaiveDateTime, D::Error>
|
||||
where D: de::Deserializer<'de>
|
||||
{
|
||||
Ok(try!(d.deserialize_i64(NaiveDateTimeFromSecondsVisitor)))
|
||||
Ok(d.deserialize_i64(NaiveDateTimeFromSecondsVisitor)?)
|
||||
}
|
||||
|
||||
struct NaiveDateTimeFromSecondsVisitor;
|
||||
|
@ -2123,14 +2141,14 @@ pub mod serde {
|
|||
where E: de::Error
|
||||
{
|
||||
NaiveDateTime::from_timestamp_opt(value, 0)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<NaiveDateTime, E>
|
||||
where E: de::Error
|
||||
{
|
||||
NaiveDateTime::from_timestamp_opt(value as i64, 0)
|
||||
.ok_or_else(|| E::custom(format!("value is not a legal timestamp: {}", value)))
|
||||
.ok_or_else(|| E::custom(ne_timestamp(value)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
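The three helper modules above (`ts_nanoseconds`, `ts_milliseconds`, `ts_seconds`) are meant to be used through serde's `with` attribute. A hedged sketch for `ts_seconds`, assuming `serde`, `serde_derive`, and `serde_json` are available alongside chrono's `serde` feature; the `Event` struct is illustrative only:

use chrono::naive::serde::ts_seconds;
use chrono::NaiveDateTime;
use serde_derive::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Event {
    // Serialized as a plain integer number of non-leap seconds since the epoch.
    #[serde(with = "ts_seconds")]
    when: NaiveDateTime,
}

fn main() {
    let event: Event = serde_json::from_str(r#"{ "when": 1431684000 }"#).unwrap();
    assert_eq!(event.when.timestamp(), 1431684000);
    assert_eq!(serde_json::to_string(&event).unwrap(), r#"{"when":1431684000}"#);
}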
@ -2348,4 +2366,24 @@ mod tests {
|
|||
let time = base + Duration::microseconds(t);
|
||||
assert_eq!(t, time.signed_duration_since(base).num_microseconds().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nanosecond_range() {
|
||||
const A_BILLION: i64 = 1_000_000_000;
|
||||
let maximum = "2262-04-11T23:47:16.854775804";
|
||||
let parsed: NaiveDateTime = maximum.parse().unwrap();
|
||||
let nanos = parsed.timestamp_nanos();
|
||||
assert_eq!(
|
||||
parsed,
|
||||
NaiveDateTime::from_timestamp(nanos / A_BILLION, (nanos % A_BILLION) as u32)
|
||||
);
|
||||
|
||||
let minimum = "1677-09-21T00:12:44.000000000";
|
||||
let parsed: NaiveDateTime = minimum.parse().unwrap();
|
||||
let nanos = parsed.timestamp_nanos();
|
||||
assert_eq!(
|
||||
parsed,
|
||||
NaiveDateTime::from_timestamp(nanos / A_BILLION, (nanos % A_BILLION) as u32)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
#![allow(dead_code)] // some internal methods have been left for consistency
|
||||
|
||||
use std::{i32, fmt};
|
||||
use core::{i32, fmt};
|
||||
use num_traits::FromPrimitive;
|
||||
use Weekday;
|
||||
use div::{div_rem, mod_floor};
|
||||
|
@ -470,7 +470,6 @@ impl fmt::Debug for Mdf {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[cfg(test)] extern crate num_iter;
|
||||
#[cfg(bench)] extern crate test;
|
||||
|
||||
use Weekday;
|
||||
use super::{Of, Mdf};
|
||||
|
@ -517,7 +516,7 @@ mod tests {
|
|||
assert_eq!(GF.nisoweeks(), 52);
|
||||
}
|
||||
|
||||
#[cfg(bench)]
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn bench_year_flags_from_year(bh: &mut test::Bencher) {
|
||||
bh.iter(|| {
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
//! ISO 8601 week.
|
||||
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
|
||||
use super::internals::{DateImpl, Of, YearFlags};
|
||||
|
||||
|
|
|
@ -3,14 +3,18 @@
|
|||
|
||||
//! ISO 8601 time without timezone.
|
||||
|
||||
use std::{str, fmt, hash};
|
||||
use std::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use core::borrow::Borrow;
|
||||
use core::{str, fmt, hash};
|
||||
use core::ops::{Add, Sub, AddAssign, SubAssign};
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
use Timelike;
|
||||
use div::div_mod_floor;
|
||||
use format::{Item, Numeric, Pad, Fixed};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, DelayedFormat, StrftimeItems};
|
||||
use format::{parse, Parsed, ParseError, ParseResult, StrftimeItems};
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
use format::DelayedFormat;
|
||||
|
||||
/// ISO 8601 time without timezone.
|
||||
/// Allows for the nanosecond precision and optional leap second representation.
|
||||
|
@ -492,7 +496,7 @@ impl NaiveTime {
|
|||
/// ~~~~
|
||||
pub fn parse_from_str(s: &str, fmt: &str) -> ParseResult<NaiveTime> {
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
|
||||
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
|
||||
parsed.to_naive_time()
|
||||
}
|
||||
|
||||
|
@ -681,7 +685,7 @@ impl NaiveTime {
|
|||
// `rhs.frac`|========================================>|
|
||||
// | | | `self - rhs` | |
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use core::cmp::Ordering;
|
||||
|
||||
let secs = i64::from(self.secs) - i64::from(rhs.secs);
|
||||
let frac = i64::from(self.frac) - i64::from(rhs.frac);
|
||||
|
@ -723,9 +727,10 @@ impl NaiveTime {
|
|||
/// # let t = NaiveTime::from_hms(23, 56, 4);
|
||||
/// assert_eq!(format!("{}", t.format_with_items(fmt)), "23:56:04");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=Item<'a>> + Clone {
|
||||
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
|
||||
where I: Iterator<Item=B> + Clone, B: Borrow<Item<'a>> {
|
||||
DelayedFormat::new(None, Some(*self), items)
|
||||
}
|
||||
|
||||
|
@ -763,6 +768,7 @@ impl NaiveTime {
|
|||
/// assert_eq!(format!("{}", t.format("%H:%M:%S%.6f")), "23:56:04.012345");
|
||||
/// assert_eq!(format!("{}", t.format("%-I:%M %p")), "11:56 PM");
|
||||
/// ~~~~
|
||||
#[cfg(any(feature = "alloc", feature = "std", test))]
|
||||
#[inline]
|
||||
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
|
||||
self.format_with_items(StrftimeItems::new(fmt))
|
||||
|
@ -1230,7 +1236,7 @@ impl fmt::Debug for NaiveTime {
|
|||
(sec, self.frac)
|
||||
};
|
||||
|
||||
try!(write!(f, "{:02}:{:02}:{:02}", hour, min, sec));
|
||||
write!(f, "{:02}:{:02}:{:02}", hour, min, sec)?;
|
||||
if nano == 0 {
|
||||
Ok(())
|
||||
} else if nano % 1_000_000 == 0 {
|
||||
|
@ -1299,16 +1305,16 @@ impl str::FromStr for NaiveTime {
|
|||
|
||||
fn from_str(s: &str) -> ParseResult<NaiveTime> {
|
||||
const ITEMS: &'static [Item<'static>] = &[
|
||||
Item::Space(""), Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Numeric(Numeric::Hour, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Numeric(Numeric::Minute, Pad::Zero),
|
||||
Item::Space(""), Item::Literal(":"),
|
||||
Item::Space(""), Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Numeric(Numeric::Second, Pad::Zero),
|
||||
Item::Fixed(Fixed::Nanosecond), Item::Space(""),
|
||||
];
|
||||
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, ITEMS.iter().cloned()));
|
||||
parse(&mut parsed, s, ITEMS.iter())?;
|
||||
parsed.to_naive_time()
|
||||
}
|
||||
}
|
||||
|
@ -1411,7 +1417,7 @@ mod rustc_serialize {
|
|||
|
||||
#[cfg(feature = "serde")]
|
||||
mod serde {
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
use super::NaiveTime;
|
||||
use serdelib::{ser, de};
|
||||
|
||||
|
@ -1431,7 +1437,7 @@ mod serde {
|
|||
impl<'de> de::Visitor<'de> for NaiveTimeVisitor {
|
||||
type Value = NaiveTime;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result
|
||||
{
|
||||
write!(formatter, "a formatted time string")
|
||||
}
|
||||
|
@ -1439,7 +1445,7 @@ mod serde {
|
|||
fn visit_str<E>(self, value: &str) -> Result<NaiveTime, E>
|
||||
where E: de::Error
|
||||
{
|
||||
value.parse().map_err(|err| E::custom(format!("{}", err)))
|
||||
value.parse().map_err(E::custom)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
|
||||
//! The time zone which has a fixed offset from UTC.
|
||||
|
||||
use std::ops::{Add, Sub};
|
||||
use std::fmt;
|
||||
use core::ops::{Add, Sub};
|
||||
use core::fmt;
|
||||
use oldtime::Duration as OldDuration;
|
||||
|
||||
use Timelike;
|
||||
|
@ -86,11 +86,13 @@ impl FixedOffset {
|
|||
}
|
||||
|
||||
/// Returns the number of seconds to add to convert from UTC to the local time.
#[inline]
pub fn local_minus_utc(&self) -> i32 {
self.local_minus_utc
}

/// Returns the number of seconds to add to convert from the local time to UTC.
#[inline]
pub fn utc_minus_local(&self) -> i32 {
-self.local_minus_utc
}
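A minimal sketch of the two accessors added above, assuming the chrono 0.4 `FixedOffset` constructors: both values are measured in seconds and are exact negations of each other.

use chrono::FixedOffset;

fn main() {
    let tokyo = FixedOffset::east(9 * 3600); // UTC+09:00
    assert_eq!(tokyo.local_minus_utc(), 9 * 3600);  // add to UTC to get local time
    assert_eq!(tokyo.utc_minus_local(), -9 * 3600); // add to local time to get UTC
}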
|
|
@ -87,9 +87,21 @@ impl Local {
|
|||
}
|
||||
|
||||
/// Returns a `DateTime` which corresponds to the current date.
|
||||
#[cfg(not(all(target_arch = "wasm32", feature = "wasmbind")))]
|
||||
pub fn now() -> DateTime<Local> {
|
||||
tm_to_datetime(oldtime::now())
|
||||
}
|
||||
|
||||
/// Returns a `DateTime` which corresponds to the current date.
|
||||
#[cfg(all(target_arch = "wasm32", feature = "wasmbind"))]
|
||||
pub fn now() -> DateTime<Local> {
|
||||
use super::Utc;
|
||||
let now: DateTime<Utc> = super::Utc::now();
|
||||
|
||||
// Workaround missing timezone logic in `time` crate
|
||||
let offset = FixedOffset::west((js_sys::Date::new_0().get_timezone_offset() as i32) * 60);
|
||||
DateTime::from_utc(now.naive_utc(), offset)
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeZone for Local {
|
||||
|
@ -179,4 +191,3 @@ mod tests {
|
|||
"unexpected timestr {:?}", timestr);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -18,15 +18,15 @@
|
|||
//! and provides implementations for 1 and 3.
|
||||
//! An `TimeZone` instance can be reconstructed from the corresponding `Offset` instance.
|
||||
|
||||
use std::fmt;
|
||||
use core::fmt;
|
||||
|
||||
use format::{parse, ParseResult, Parsed, StrftimeItems};
|
||||
use naive::{NaiveDate, NaiveDateTime, NaiveTime};
|
||||
use Weekday;
|
||||
use naive::{NaiveDate, NaiveTime, NaiveDateTime};
|
||||
use {Date, DateTime};
|
||||
use format::{parse, Parsed, ParseResult, StrftimeItems};
|
||||
|
||||
/// The conversion result from the local time to the timezone-aware datetime types.
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
#[derive(Clone, PartialEq, Debug, Copy, Eq, Hash)]
|
||||
pub enum LocalResult<T> {
|
||||
/// Given local time representation is invalid.
|
||||
/// This can occur when, for example, the positive timezone transition.
|
||||
|
@ -41,17 +41,26 @@ pub enum LocalResult<T> {
|
|||
impl<T> LocalResult<T> {
|
||||
/// Returns `Some` only when the conversion result is unique, or `None` otherwise.
|
||||
pub fn single(self) -> Option<T> {
|
||||
match self { LocalResult::Single(t) => Some(t), _ => None }
|
||||
match self {
|
||||
LocalResult::Single(t) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `Some` for the earliest possible conversion result, or `None` if none.
|
||||
pub fn earliest(self) -> Option<T> {
|
||||
match self { LocalResult::Single(t) | LocalResult::Ambiguous(t,_) => Some(t), _ => None }
|
||||
match self {
|
||||
LocalResult::Single(t) | LocalResult::Ambiguous(t, _) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `Some` for the latest possible conversion result, or `None` if none.
|
||||
pub fn latest(self) -> Option<T> {
|
||||
match self { LocalResult::Single(t) | LocalResult::Ambiguous(_,t) => Some(t), _ => None }
|
||||
match self {
|
||||
LocalResult::Single(t) | LocalResult::Ambiguous(_, t) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps a `LocalResult<T>` into `LocalResult<U>` with given function.
|
||||
|
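A short sketch of the `single`/`earliest`/`latest` helpers reformatted above, using `Utc` (whose conversions are never ambiguous) and assuming the chrono 0.4 `TimeZone::ymd_opt` API:

use chrono::{LocalResult, TimeZone, Utc};

fn main() {
    // A representable date resolves to LocalResult::Single, so single() is Some.
    assert!(Utc.ymd_opt(2015, 9, 5).single().is_some());

    // An impossible date is LocalResult::None, so every helper yields None.
    assert_eq!(Utc.ymd_opt(2015, 13, 5), LocalResult::None);
    assert_eq!(Utc.ymd_opt(2015, 13, 5).earliest(), None);
    assert_eq!(Utc.ymd_opt(2015, 13, 5).latest(), None);
}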
@ -72,8 +81,9 @@ impl<Tz: TimeZone> LocalResult<Date<Tz>> {
|
|||
#[inline]
|
||||
pub fn and_time(self, time: NaiveTime) -> LocalResult<DateTime<Tz>> {
|
||||
match self {
|
||||
LocalResult::Single(d) => d.and_time(time)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
LocalResult::Single(d) => d
|
||||
.and_time(time)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
_ => LocalResult::None,
|
||||
}
|
||||
}
|
||||
|
@ -85,8 +95,9 @@ impl<Tz: TimeZone> LocalResult<Date<Tz>> {
|
|||
#[inline]
|
||||
pub fn and_hms_opt(self, hour: u32, min: u32, sec: u32) -> LocalResult<DateTime<Tz>> {
|
||||
match self {
|
||||
LocalResult::Single(d) => d.and_hms_opt(hour, min, sec)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
LocalResult::Single(d) => d
|
||||
.and_hms_opt(hour, min, sec)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
_ => LocalResult::None,
|
||||
}
|
||||
}
|
||||
|
@ -97,11 +108,17 @@ impl<Tz: TimeZone> LocalResult<Date<Tz>> {
|
|||
///
|
||||
/// Propagates any error. Ambiguous result would be discarded.
|
||||
#[inline]
|
||||
pub fn and_hms_milli_opt(self, hour: u32, min: u32, sec: u32,
|
||||
milli: u32) -> LocalResult<DateTime<Tz>> {
|
||||
pub fn and_hms_milli_opt(
|
||||
self,
|
||||
hour: u32,
|
||||
min: u32,
|
||||
sec: u32,
|
||||
milli: u32,
|
||||
) -> LocalResult<DateTime<Tz>> {
|
||||
match self {
|
||||
LocalResult::Single(d) => d.and_hms_milli_opt(hour, min, sec, milli)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
LocalResult::Single(d) => d
|
||||
.and_hms_milli_opt(hour, min, sec, milli)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
_ => LocalResult::None,
|
||||
}
|
||||
}
|
||||
|
@ -112,11 +129,17 @@ impl<Tz: TimeZone> LocalResult<Date<Tz>> {
|
|||
///
|
||||
/// Propagates any error. Ambiguous result would be discarded.
|
||||
#[inline]
|
||||
pub fn and_hms_micro_opt(self, hour: u32, min: u32, sec: u32,
|
||||
micro: u32) -> LocalResult<DateTime<Tz>> {
|
||||
pub fn and_hms_micro_opt(
|
||||
self,
|
||||
hour: u32,
|
||||
min: u32,
|
||||
sec: u32,
|
||||
micro: u32,
|
||||
) -> LocalResult<DateTime<Tz>> {
|
||||
match self {
|
||||
LocalResult::Single(d) => d.and_hms_micro_opt(hour, min, sec, micro)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
LocalResult::Single(d) => d
|
||||
.and_hms_micro_opt(hour, min, sec, micro)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
_ => LocalResult::None,
|
||||
}
|
||||
}
|
||||
|
@ -127,15 +150,20 @@ impl<Tz: TimeZone> LocalResult<Date<Tz>> {
|
|||
///
|
||||
/// Propagates any error. Ambiguous result would be discarded.
|
||||
#[inline]
|
||||
pub fn and_hms_nano_opt(self, hour: u32, min: u32, sec: u32,
|
||||
nano: u32) -> LocalResult<DateTime<Tz>> {
|
||||
pub fn and_hms_nano_opt(
|
||||
self,
|
||||
hour: u32,
|
||||
min: u32,
|
||||
sec: u32,
|
||||
nano: u32,
|
||||
) -> LocalResult<DateTime<Tz>> {
|
||||
match self {
|
||||
LocalResult::Single(d) => d.and_hms_nano_opt(hour, min, sec, nano)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
LocalResult::Single(d) => d
|
||||
.and_hms_nano_opt(hour, min, sec, nano)
|
||||
.map_or(LocalResult::None, LocalResult::Single),
|
||||
_ => LocalResult::None,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl<T: fmt::Debug> LocalResult<T> {
|
||||
|
@ -144,7 +172,7 @@ impl<T: fmt::Debug> LocalResult<T> {
|
|||
match self {
|
||||
LocalResult::None => panic!("No such local time"),
|
||||
LocalResult::Single(t) => t,
|
||||
LocalResult::Ambiguous(t1,t2) => {
|
||||
LocalResult::Ambiguous(t1, t2) => {
|
||||
panic!("Ambiguous local time, ranging from {:?} to {:?}", t1, t2)
|
||||
}
|
||||
}
|
||||
|
@ -345,10 +373,36 @@ pub trait TimeZone: Sized + Clone {
/// };
/// ~~~~
fn timestamp_millis_opt(&self, millis: i64) -> LocalResult<DateTime<Self>> {
let (secs, millis) = (millis / 1000, millis % 1000);
let (mut secs, mut millis) = (millis / 1000, millis % 1000);
if millis < 0 {
secs -= 1;
millis += 1000;
}
self.timestamp_opt(secs, millis as u32 * 1_000_000)
}

/// Makes a new `DateTime` from the number of non-leap nanoseconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp").
///
/// Unlike [`timestamp_millis`](#method.timestamp_millis), this never
/// panics.
///
/// # Example
///
/// ~~~~
/// use chrono::{Utc, TimeZone};
///
/// assert_eq!(Utc.timestamp_nanos(1431648000000000).timestamp(), 1431648);
/// ~~~~
fn timestamp_nanos(&self, nanos: i64) -> DateTime<Self> {
let (mut secs, mut nanos) = (nanos / 1_000_000_000, nanos % 1_000_000_000);
if nanos < 0 {
secs -= 1;
nanos += 1_000_000_000;
}
self.timestamp_opt(secs, nanos as u32).unwrap()
}
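The normalization in `timestamp_millis_opt` (and the parallel one in `timestamp_nanos`) borrows one second whenever the remainder is negative, so the subsecond part is always non-negative. A standalone sketch of just that arithmetic; the helper name is illustrative, not part of chrono:

/// Splits a possibly negative millisecond count into (whole seconds, subsecond millis).
fn split_millis(millis: i64) -> (i64, u32) {
    let (mut secs, mut millis) = (millis / 1000, millis % 1000);
    if millis < 0 {
        secs -= 1;
        millis += 1000;
    }
    (secs, millis as u32)
}

fn main() {
    assert_eq!(split_millis(1500), (1, 500));
    // -7001 ms is 8 seconds before the epoch plus 999 ms, i.e. 1969-12-31 23:59:52.999 UTC,
    // matching the test_negative_millis expectations further down in this diff.
    assert_eq!(split_millis(-7001), (-8, 999));
    assert_eq!(split_millis(-1), (-1, 999));
}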
||||
/// Parses a string with the specified format string and
|
||||
/// returns a `DateTime` with the current offset.
|
||||
/// See the [`format::strftime` module](../format/strftime/index.html)
|
||||
|
@ -361,7 +415,7 @@ pub trait TimeZone: Sized + Clone {
|
|||
/// with parsed `FixedOffset`.
|
||||
fn datetime_from_str(&self, s: &str, fmt: &str) -> ParseResult<DateTime<Self>> {
|
||||
let mut parsed = Parsed::new();
|
||||
try!(parse(&mut parsed, s, StrftimeItems::new(fmt)));
|
||||
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
|
||||
parsed.to_datetime_with_timezone(self)
|
||||
}
|
||||
|
||||
|
@ -384,9 +438,8 @@ pub trait TimeZone: Sized + Clone {
|
|||
|
||||
/// Converts the local `NaiveDateTime` to the timezone-aware `DateTime` if possible.
|
||||
fn from_local_datetime(&self, local: &NaiveDateTime) -> LocalResult<DateTime<Self>> {
|
||||
self.offset_from_local_datetime(local).map(|offset| {
|
||||
DateTime::from_utc(*local - offset.fix(), offset)
|
||||
})
|
||||
self.offset_from_local_datetime(local)
|
||||
.map(|offset| DateTime::from_utc(*local - offset.fix(), offset))
|
||||
}
|
||||
|
||||
/// Creates the offset for given UTC `NaiveDate`. This cannot fail.
|
||||
|
@ -408,12 +461,72 @@ pub trait TimeZone: Sized + Clone {
|
|||
}
|
||||
}
|
||||
|
||||
mod utc;
|
||||
mod fixed;
|
||||
#[cfg(feature="clock")]
|
||||
#[cfg(feature = "clock")]
|
||||
mod local;
|
||||
mod utc;
|
||||
|
||||
pub use self::utc::Utc;
|
||||
pub use self::fixed::FixedOffset;
|
||||
#[cfg(feature="clock")]
|
||||
#[cfg(feature = "clock")]
|
||||
pub use self::local::Local;
|
||||
pub use self::utc::Utc;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_negative_millis() {
|
||||
let dt = Utc.timestamp_millis(-1000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59 UTC");
|
||||
let dt = Utc.timestamp_millis(-7000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:53 UTC");
|
||||
let dt = Utc.timestamp_millis(-7001);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:52.999 UTC");
|
||||
let dt = Utc.timestamp_millis(-7003);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:52.997 UTC");
|
||||
let dt = Utc.timestamp_millis(-999);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.001 UTC");
|
||||
let dt = Utc.timestamp_millis(-1);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.999 UTC");
|
||||
let dt = Utc.timestamp_millis(-60000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:00 UTC");
|
||||
let dt = Utc.timestamp_millis(-3600000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:00:00 UTC");
|
||||
|
||||
for (millis, expected) in &[
|
||||
(-7000, "1969-12-31 23:59:53 UTC"),
|
||||
(-7001, "1969-12-31 23:59:52.999 UTC"),
|
||||
(-7003, "1969-12-31 23:59:52.997 UTC"),
|
||||
] {
|
||||
match Utc.timestamp_millis_opt(*millis) {
|
||||
LocalResult::Single(dt) => {
|
||||
assert_eq!(dt.to_string(), *expected);
|
||||
},
|
||||
e => panic!("Got {:?} instead of an okay answer", e),
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_nanos() {
|
||||
let dt = Utc.timestamp_nanos(-1_000_000_000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59 UTC");
|
||||
let dt = Utc.timestamp_nanos(-999_999_999);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.000000001 UTC");
|
||||
let dt = Utc.timestamp_nanos(-1);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.999999999 UTC");
|
||||
let dt = Utc.timestamp_nanos(-60_000_000_000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:59:00 UTC");
|
||||
let dt = Utc.timestamp_nanos(-3_600_000_000_000);
|
||||
assert_eq!(dt.to_string(), "1969-12-31 23:00:00 UTC");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nanos_never_panics() {
|
||||
Utc.timestamp_nanos(i64::max_value());
|
||||
Utc.timestamp_nanos(i64::default());
|
||||
Utc.timestamp_nanos(i64::min_value());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
|
||||
//! The UTC (Coordinated Universal Time) time zone.
|
||||
|
||||
use std::fmt;
|
||||
#[cfg(feature="clock")]
|
||||
use core::fmt;
|
||||
#[cfg(all(feature="clock", not(all(target_arch = "wasm32", feature = "wasmbind"))))]
|
||||
use oldtime;
|
||||
|
||||
use naive::{NaiveDate, NaiveDateTime};
|
||||
|
@ -38,11 +38,23 @@ impl Utc {
|
|||
pub fn today() -> Date<Utc> { Utc::now().date() }
|
||||
|
||||
/// Returns a `DateTime` which corresponds to the current date.
|
||||
#[cfg(not(all(target_arch = "wasm32", feature = "wasmbind")))]
|
||||
pub fn now() -> DateTime<Utc> {
|
||||
let spec = oldtime::get_time();
|
||||
let naive = NaiveDateTime::from_timestamp(spec.sec, spec.nsec as u32);
|
||||
DateTime::from_utc(naive, Utc)
|
||||
}
|
||||
|
||||
/// Returns a `DateTime` which corresponds to the current date.
|
||||
#[cfg(all(target_arch = "wasm32", feature = "wasmbind"))]
|
||||
pub fn now() -> DateTime<Utc> {
|
||||
let now = js_sys::Date::new_0();
|
||||
let millisecs_since_unix_epoch: u64 = now.get_time() as u64;
|
||||
let secs = millisecs_since_unix_epoch / 1000;
|
||||
let nanos = 1_000_000 * (millisecs_since_unix_epoch - 1000 * secs);
|
||||
let naive = NaiveDateTime::from_timestamp(secs as i64, nanos as u32);
|
||||
DateTime::from_utc(naive, Utc)
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeZone for Utc {
|
||||
|
@ -72,4 +84,3 @@ impl fmt::Debug for Utc {
|
|||
impl fmt::Display for Utc {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "UTC") }
|
||||
}
|
||||
|
||||
|
|
|
@ -10,10 +10,11 @@
|
|||
|
||||
//! Temporal quantification
|
||||
|
||||
use std::{fmt, i64};
|
||||
use core::{fmt, i64};
|
||||
#[cfg(any(feature = "std", test))]
|
||||
use std::error::Error;
|
||||
use std::ops::{Add, Sub, Mul, Div, Neg};
|
||||
use std::time::Duration as StdDuration;
|
||||
use core::ops::{Add, Sub, Mul, Div, Neg};
|
||||
use core::time::Duration as StdDuration;
|
||||
|
||||
/// The number of nanoseconds in a microsecond.
|
||||
const NANOS_PER_MICRO: i32 = 1000;
|
||||
|
@ -363,20 +364,20 @@ impl fmt::Display for Duration {
|
|||
let hasdate = days != 0;
|
||||
let hastime = (secs != 0 || abs.nanos != 0) || !hasdate;
|
||||
|
||||
try!(write!(f, "{}P", sign));
|
||||
write!(f, "{}P", sign)?;
|
||||
|
||||
if hasdate {
|
||||
try!(write!(f, "{}D", days));
|
||||
write!(f, "{}D", days)?;
|
||||
}
|
||||
if hastime {
|
||||
if abs.nanos == 0 {
|
||||
try!(write!(f, "T{}S", secs));
|
||||
write!(f, "T{}S", secs)?;
|
||||
} else if abs.nanos % NANOS_PER_MILLI == 0 {
|
||||
try!(write!(f, "T{}.{:03}S", secs, abs.nanos / NANOS_PER_MILLI));
|
||||
write!(f, "T{}.{:03}S", secs, abs.nanos / NANOS_PER_MILLI)?;
|
||||
} else if abs.nanos % NANOS_PER_MICRO == 0 {
|
||||
try!(write!(f, "T{}.{:06}S", secs, abs.nanos / NANOS_PER_MICRO));
|
||||
write!(f, "T{}.{:06}S", secs, abs.nanos / NANOS_PER_MICRO)?;
|
||||
} else {
|
||||
try!(write!(f, "T{}.{:09}S", secs, abs.nanos));
|
||||
write!(f, "T{}.{:09}S", secs, abs.nanos)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -392,15 +393,22 @@ impl fmt::Display for Duration {
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct OutOfRangeError(());
|
||||
|
||||
impl OutOfRangeError {
|
||||
fn description(&self) -> &str {
|
||||
"Source duration value is out of range for the target type"
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for OutOfRangeError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.description())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", test))]
|
||||
impl Error for OutOfRangeError {
|
||||
fn description(&self) -> &str {
|
||||
"Source duration value is out of range for the target type"
|
||||
self.description()
|
||||
}
|
||||
}
|
||||
|
||||
|
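The refactor above keeps a single message string, exposes it through `Display` unconditionally, and gates the `std::error::Error` impl on the `std` feature. A hedged sketch of where that error actually surfaces, assuming chrono 0.4's re-exported `Duration::from_std`:

use chrono::Duration;
use std::time::Duration as StdDuration;

fn main() {
    // A std Duration this large cannot be represented by chrono's Duration,
    // so from_std returns an out-of-range error instead of panicking.
    let too_big = StdDuration::from_secs(u64::max_value());
    match Duration::from_std(too_big) {
        Ok(d) => println!("converted: {}", d),
        Err(e) => println!("conversion failed: {}", e), // prints the Display message above
    }
}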
|
|
@ -2,7 +2,7 @@
|
|||
// See README.md and LICENSE.txt for details.
|
||||
|
||||
use Timelike;
|
||||
use std::ops::{Add, Sub};
|
||||
use core::ops::{Add, Sub};
|
||||
use oldtime::Duration;
|
||||
|
||||
/// Extension trait for subsecond rounding or truncation to a maximum number
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
#[cfg(all(target_arch = "wasm32", feature = "wasmbind"))]
|
||||
mod test {
|
||||
extern crate chrono;
|
||||
extern crate wasm_bindgen_test;
|
||||
|
||||
use self::chrono::prelude::*;
|
||||
use self::wasm_bindgen_test::*;
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn now() {
|
||||
let utc: DateTime<Utc> = Utc::now();
|
||||
let local: DateTime<Local> = Local::now();
|
||||
|
||||
// Ensure time fetched is correct
|
||||
let actual = Utc.datetime_from_str(env!("NOW"), "%s").unwrap();
|
||||
assert!(utc - actual < chrono::Duration::minutes(5));
|
||||
|
||||
// Ensure offset retrieved when getting local time is correct
|
||||
let expected_offset = match env!("TZ") {
|
||||
"ACST-9:30" => FixedOffset::east(19 * 30 * 60),
|
||||
"Asia/Katmandu" => FixedOffset::east(23 * 15 * 60), // No DST thankfully
|
||||
"EST4" => FixedOffset::east(-4 * 60 * 60),
|
||||
"UTC0" => FixedOffset::east(0),
|
||||
_ => panic!("unexpected TZ"),
|
||||
};
|
||||
assert_eq!(&expected_offset, local.offset());
|
||||
}
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@ -1,61 +1,33 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"gimli 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"intervaltree 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
version = "0.3.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.4.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "atty"
|
||||
version = "0.2.13"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "0.1.7"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "backtrace"
|
||||
version = "0.3.40"
|
||||
version = "0.3.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"addr2line 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"findshlibs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"goblin 0.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
|
@ -64,26 +36,17 @@ name = "backtrace-sys"
|
|||
version = "0.1.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bincode"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -91,16 +54,6 @@ name = "bitflags"
|
|||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "blake2b_simd"
|
||||
version = "0.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.3.2"
|
||||
|
@ -116,7 +69,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.48"
|
||||
version = "1.0.50"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
|
@ -129,71 +82,19 @@ name = "chrono"
|
|||
version = "0.4.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cloudabi"
|
||||
version = "0.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color-backtrace"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"term 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "constant_time_eq"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.1.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs"
|
||||
version = "2.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs-sys"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -201,10 +102,10 @@ name = "env_logger"
|
|||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -212,7 +113,7 @@ name = "failure"
|
|||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"backtrace 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
|
@ -221,17 +122,12 @@ name = "failure_derive"
|
|||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fallible-iterator"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "ffi-support"
|
||||
version = "0.3.5"
|
||||
|
@ -243,72 +139,43 @@ dependencies = [
|
|||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "findshlibs"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fuchsia-cprng"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.1.13"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glean-core"
|
||||
version = "22.0.0"
|
||||
version = "24.0.0"
|
||||
dependencies = [
|
||||
"bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bincode 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"color-backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ffi-support 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"iso8601 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rkv 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "goblin"
|
||||
version = "0.0.24"
|
||||
name = "hermit-abi"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"scroll 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -316,7 +183,7 @@ name = "humantime"
|
|||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -329,14 +196,6 @@ dependencies = [
|
|||
"unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "intervaltree"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iso8601"
|
||||
version = "0.3.0"
|
||||
|
@ -355,11 +214,6 @@ name = "lazy_static"
|
|||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "lazycell"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.66"
|
||||
|
@ -381,7 +235,7 @@ name = "lmdb-rkv-sys"
|
|||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
@ -399,28 +253,9 @@ name = "matches"
|
|||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "maybe-uninit"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memmap"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodrop"
|
||||
version = "0.1.14"
|
||||
version = "2.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
|
@ -428,25 +263,25 @@ name = "nom"
|
|||
version = "4.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-integer"
|
||||
version = "0.1.41"
|
||||
version = "0.1.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.10"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -459,7 +294,7 @@ name = "ordered-float"
|
|||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -472,11 +307,6 @@ name = "pkg-config"
|
|||
version = "0.3.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "plain"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.6"
|
||||
|
@ -484,15 +314,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "0.4.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.6"
|
||||
version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -500,31 +322,23 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "quick-error"
|
||||
version = "1.2.2"
|
||||
version = "1.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.7.2"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -540,25 +354,12 @@ dependencies = [
|
|||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -569,54 +370,22 @@ dependencies = [
|
|||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_os"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rdrand"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.1.56"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.3.1"
|
||||
version = "1.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.12"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
|
@ -633,95 +402,45 @@ version = "0.10.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bincode 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lmdb-rkv 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-argon2"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"blake2b_simd 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-demangle"
|
||||
version = "0.1.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "scroll"
|
||||
version = "0.9.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"scroll_derive 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scroll_derive"
|
||||
version = "0.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver-parser"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.103"
|
||||
version = "1.0.104"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.103"
|
||||
version = "1.0.104"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -731,43 +450,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
dependencies = [
|
||||
"itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.1.1"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "0.15.44"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
@ -777,9 +473,9 @@ name = "synstructure"
|
|||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
|
@ -790,27 +486,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
dependencies = [
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "term"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.0.5"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -836,14 +523,9 @@ name = "unicode-normalization"
|
|||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
|
@ -851,7 +533,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.1.0"
|
||||
version = "2.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -869,7 +551,7 @@ name = "uuid"
|
|||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -879,7 +561,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.7.0"
|
||||
version = "0.9.0+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
|
@ -898,7 +580,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -909,126 +591,77 @@ name = "winapi-x86_64-pc-windows-gnu"
|
|||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "wincolor"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[metadata]
|
||||
"checksum addr2line 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "95b06ae5a8a3bae54910c9029a52f83203ce2001c71b10b1faae3a337fee4ab5"
|
||||
"checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee"
|
||||
"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9"
|
||||
"checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8"
|
||||
"checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90"
|
||||
"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
|
||||
"checksum backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)" = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea"
|
||||
"checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
|
||||
"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
|
||||
"checksum backtrace 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)" = "a4ed64ae6d9ebfd9893193c4b2532b1292ec97bd8271c9d7d0fa90cd78a34cba"
|
||||
"checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491"
|
||||
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
|
||||
"checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92"
|
||||
"checksum bincode 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5753e2a71534719bf3f4e57006c3a4f0d2c672a4b676eec84161f763eca87dbf"
|
||||
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
|
||||
"checksum blake2b_simd 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b83b7baab1e671718d78204225800d6b170e648188ac7dc992e9d6bddf87d0c0"
|
||||
"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
|
||||
"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
|
||||
"checksum cc 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "f52a465a666ca3d838ebbf08b241383421412fe7ebb463527bba275526d89f76"
|
||||
"checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd"
|
||||
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
||||
"checksum chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01"
|
||||
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
|
||||
"checksum color-backtrace 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "57d105df24e2165b813e8b1ef3a2bac2f8099ca6d81264b56314ed0dd86b6ef0"
|
||||
"checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120"
|
||||
"checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
|
||||
"checksum ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd8ce37ad4184ab2ce004c33bf6379185d3b1c95801cab51026bd271bf68eedc"
|
||||
"checksum dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3"
|
||||
"checksum dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afa0b23de8fd801745c471deffa6e12d248f962c9fd4b4c33787b055599bde7b"
|
||||
"checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
|
||||
"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9"
|
||||
"checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08"
|
||||
"checksum fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
|
||||
"checksum ffi-support 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "efee06d8ac3e85a6e9759a0ed2682235a70832ebe10953849b92cdced8688660"
|
||||
"checksum findshlibs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1260d61e4fe2a6ab845ffdc426a0bd68ffb240b91cf0ec5a8d1170cec535bd8"
|
||||
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
|
||||
"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407"
|
||||
"checksum gimli 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "162d18ae5f2e3b90a993d202f1ba17a5633c2484426f8bcae201f86194bacd00"
|
||||
"checksum goblin 0.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "e3fa261d919c1ae9d1e4533c4a2f99e10938603c4208d56c05bec7a872b661b0"
|
||||
"checksum getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
|
||||
"checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772"
|
||||
"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
|
||||
"checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
|
||||
"checksum intervaltree 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "af39074dd8d5eff756ddea3d8f34c7ae287d4dadb6f29fb1b67ca6b3f5036482"
|
||||
"checksum iso8601 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43e86914a73535f3f541a765adea0a9fafcf53fa6adb73662c4988fd9233766f"
|
||||
"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
|
||||
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
|
||||
"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
|
||||
"checksum lmdb-rkv 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "605061e5465304475be2041f19967a900175ea1b6d8f47fbab84a84fb8c48452"
|
||||
"checksum lmdb-rkv-sys 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7982ba0460e939e26a52ee12c8075deab0ebd44ed21881f656841b70e021b7c8"
|
||||
"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
|
||||
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
|
||||
"checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
|
||||
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
|
||||
"checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
|
||||
"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
|
||||
"checksum memchr 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3197e20c7edb283f87c071ddfc7a2cca8f8e0b888c242959846a6fce03c72223"
|
||||
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
|
||||
"checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
|
||||
"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
|
||||
"checksum num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "3f6ea62e9d81a77cd3ee9a2a5b9b609447857f3d358704331e4ef39eb247fcba"
|
||||
"checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096"
|
||||
"checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed"
|
||||
"checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518"
|
||||
"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
|
||||
"checksum pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677"
|
||||
"checksum plain 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
|
||||
"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
|
||||
"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
|
||||
"checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
|
||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
||||
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
|
||||
"checksum proc-macro2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "0319972dcae462681daf4da1adeeaa066e3ebd29c69be96c6abb1259d2ee2bcc"
|
||||
"checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
|
||||
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
|
||||
"checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
|
||||
"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
|
||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
|
||||
"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
|
||||
"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
|
||||
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
|
||||
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
|
||||
"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
|
||||
"checksum redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecedbca3bf205f8d8f5c2b44d83cd0690e39ee84b951ed649e9f1841132b66d"
|
||||
"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd"
|
||||
"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
|
||||
"checksum regex 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b5508c1941e4e7cb19965abef075d35a9a8b5cdf0846f30b4050e9b55dc55e87"
|
||||
"checksum regex-syntax 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e734e891f5b408a29efbf8309e656876276f49ab6a6ac208600b4419bd893d90"
|
||||
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
|
||||
"checksum rkv 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9aab7c645d32e977e186448b0a5c2c3139a91a7f630cfd8a8c314d1d145e78bf"
|
||||
"checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf"
|
||||
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
|
||||
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
"checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8"
|
||||
"checksum scroll 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2f84d114ef17fd144153d608fba7c446b0145d038985e7a8cc5d08bb0ce20383"
|
||||
"checksum scroll_derive 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1aa96c45e7f5a91cb7fabe7b279f02fea7126239fc40b732316e8b6a2d0fcb"
|
||||
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
|
||||
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
|
||||
"checksum serde 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "1217f97ab8e8904b57dd22eb61cde455fa7446a9c1cf43966066da047c1f3702"
|
||||
"checksum serde_derive 1.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "a8c6faef9a2e64b0064f48570289b4bf8823b7581f1d6157c1b52152306651d0"
|
||||
"checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
|
||||
"checksum serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
|
||||
"checksum serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)" = "48c575e0cc52bdd09b47f330f646cf59afc586e9c4e3ccd6fc1f625b8ea1dad7"
|
||||
"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
|
||||
"checksum smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecf3b85f68e8abaa7555aa5abdb1153079387e60b718283d732f03897fcfc86"
|
||||
"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
|
||||
"checksum syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)" = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
|
||||
"checksum syn 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "dff0acdb207ae2fe6d5976617f887eb1e35a2ba52c13c7234c790960cdad9238"
|
||||
"checksum smallvec 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44e59e0c9fa00817912ae6e4e6e3c4fe04455e75699d06eedc7d85917ed8e8f4"
|
||||
"checksum syn 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1e4ff033220a41d1a57d8125eab57bf5263783dfdcc18688b1dacc6ce9651ef8"
|
||||
"checksum synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545"
|
||||
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
|
||||
"checksum term 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c0863a3345e70f61d613eab32ee046ccd1bcc5f9105fe402c61fcd0c13eeb8b5"
|
||||
"checksum termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "96d6098003bde162e4277c70665bd87c326f5a0c3f3fbfb285787fa482d54e6e"
|
||||
"checksum termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f"
|
||||
"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f"
|
||||
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
|
||||
"checksum unicode-normalization 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b561e267b2326bb4cebfc0ef9e68355c7abe6c6f522aeac2f5bf95d56c59bdcf"
|
||||
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
||||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
|
||||
"checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb"
|
||||
"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a"
|
||||
"checksum uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
|
||||
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
|
||||
"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d"
|
||||
"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
|
||||
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
|
||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
"checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9"
|
||||
"checksum winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfbf554c6ad11084fb7517daca16cfdcaccbdadba4fc336f032a8b12c2ad80"
|
||||
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
"checksum wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96f5016b18804d24db43cebf3c77269e7569b8954a8464501c216cc5e070eaa9"
|
||||
|
|
|
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "glean-core"
version = "22.0.0"
version = "24.0.0"
authors = ["Jan-Erik Rediger <jrediger@mozilla.com>", "The Glean Team <glean-team@mozilla.com>"]
include = ["README.md", "LICENSE", "src/**/*", "examples/**/*", "tests/**/*", "Cargo.toml"]
description = "A modern Telemetry library"

@@ -22,15 +22,12 @@ keywords = ["telemetry"]
license = "MPL-2.0"
repository = "https://github.com/mozilla/glean"
[dependencies.bincode]
version = "1.1.3"
version = "1.2.1"

[dependencies.chrono]
version = "0.4.6"
version = "0.4.10"
features = ["serde"]

[dependencies.failure]
version = "0.1.5"

[dependencies.ffi-support]
version = "0.3.5"

@@ -38,13 +35,13 @@ version = "0.3.5"
version = "1.4.0"

[dependencies.log]
version = "0.4.6"
version = "0.4.8"

[dependencies.once_cell]
version = "1.2.0"

[dependencies.regex]
version = "1.3.0"
version = "1.3.3"
features = ["std"]
default-features = false

@@ -52,20 +49,17 @@ default-features = false
version = "0.10.2"

[dependencies.serde]
version = "1.0.102"
version = "1.0.104"
features = ["derive"]

[dependencies.serde_json]
version = "1.0.41"
version = "1.0.44"

[dependencies.uuid]
version = "0.8.1"
features = ["v4"]
[dev-dependencies.color-backtrace]
version = "0.2.3"

[dev-dependencies.ctor]
version = "0.1.9"
version = "0.1.12"

[dev-dependencies.env_logger]
version = "0.7.1"

@@ -76,7 +70,7 @@ default-features = false
version = "0.3"

[dev-dependencies.tempfile]
version = "3.0.7"
version = "3.1.0"
[badges.circle-ci]
branch = "master"
repository = "mozilla/glean"

@@ -44,7 +44,7 @@ let call_counter: CounterMetric = CounterMetric::new(CommonMetricData {

call_counter.add(&glean, 1);

glean.send_ping(&ping).unwrap();
glean.submit_ping(&ping).unwrap();
```
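The README change above is the `send_ping` to `submit_ping` rename that ships with this Glean release. As a hedged aside (not part of the vendored README), a caller that prefers not to `unwrap()` could handle the returned `Result` instead, assuming `glean` and `ping` are set up exactly as in the snippet:

```rust
// Hypothetical caller-side handling of the renamed API; `glean` and `ping`
// are assumed to be constructed as in the README example above.
if let Err(e) = glean.submit_ping(&ping) {
    eprintln!("failed to submit ping: {:?}", e);
}
```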

## License
|
|
@@ -7,7 +7,6 @@ use tempfile::Builder;

fn main() {
    env_logger::init();
    color_backtrace::install();

    let mut args = env::args().skip(1);

@ -1,100 +0,0 @@
|
|||
// This Source Code Form is subject to the terms of the Mozilla Public
|
||||
// License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
//! A module containing glean-core code for supporting data migration
|
||||
//! (i.e. sequence numbers) from glean-ac. This is a temporary module
|
||||
//! planned to be removed in 2020, after the transition from glean-ac
|
||||
//! is complete.
|
||||
|
||||
use crate::util::truncate_string_at_boundary;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::Glean;
|
||||
use super::PingMaker;
|
||||
|
||||
const GLEAN_AC_SEQUENCE_SUFFIX: &str = "_seq";
|
||||
|
||||
/// Stores the sequence numbers from glean-ac in glean-core.
|
||||
pub(super) fn migrate_sequence_numbers(glean: &Glean, seq_numbers: HashMap<String, i32>) {
|
||||
let ping_maker = PingMaker::new();
|
||||
|
||||
for (store_name_with_suffix, next_seq) in seq_numbers.into_iter() {
|
||||
// Note: glean-ac stores the sequence numbers as '<ping_name>_seq',
|
||||
// glean-core requires '<ping_name>#sequence'.
|
||||
if !store_name_with_suffix.ends_with(GLEAN_AC_SEQUENCE_SUFFIX) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Negative or 0 counters are definitively not worth importing.
|
||||
if next_seq <= 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let truncated_len = store_name_with_suffix
|
||||
.len()
|
||||
.saturating_sub(GLEAN_AC_SEQUENCE_SUFFIX.len());
|
||||
let store_name = truncate_string_at_boundary(store_name_with_suffix, truncated_len);
|
||||
|
||||
ping_maker.set_ping_seq(glean, &store_name, next_seq);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::tests::new_glean;
|
||||
|
||||
#[test]
|
||||
fn invalid_storage_names_must_not_be_migrated() {
|
||||
let (glean, _) = new_glean();
|
||||
|
||||
let mut ac_seq_numbers = HashMap::new();
|
||||
ac_seq_numbers.insert(String::from("control_seq"), 3);
|
||||
ac_seq_numbers.insert(String::from("ignored_seq-lol"), 85);
|
||||
|
||||
let ping_maker = PingMaker::new();
|
||||
migrate_sequence_numbers(&glean, ac_seq_numbers);
|
||||
|
||||
assert_eq!(3, ping_maker.get_ping_seq(&glean, "control"));
|
||||
// The next one should not have been migrated, so we expect
|
||||
// it to start from 0 instead of 85.
|
||||
assert_eq!(0, ping_maker.get_ping_seq(&glean, "ignored"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_sequence_numbers_must_not_be_migrated() {
|
||||
let (glean, _) = new_glean();
|
||||
|
||||
let mut ac_seq_numbers = HashMap::new();
|
||||
ac_seq_numbers.insert(String::from("control_seq"), 3);
|
||||
ac_seq_numbers.insert(String::from("ignored_seq"), -85);
|
||||
|
||||
let ping_maker = PingMaker::new();
|
||||
migrate_sequence_numbers(&glean, ac_seq_numbers);
|
||||
|
||||
assert_eq!(3, ping_maker.get_ping_seq(&glean, "control"));
|
||||
// The next one should not have been migrated, so we expect
|
||||
// it to start from 0 instead of 85.
|
||||
assert_eq!(0, ping_maker.get_ping_seq(&glean, "ignored"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_sequence_numbers_must_be_migrated() {
|
||||
let (glean, _) = new_glean();
|
||||
|
||||
let mut ac_seq_numbers = HashMap::new();
|
||||
ac_seq_numbers.insert(String::from("custom_seq"), 3);
|
||||
ac_seq_numbers.insert(String::from("other_seq"), 7);
|
||||
ac_seq_numbers.insert(String::from("ignored_seq-lol"), 85);
|
||||
|
||||
let ping_maker = PingMaker::new();
|
||||
migrate_sequence_numbers(&glean, ac_seq_numbers);
|
||||
|
||||
assert_eq!(3, ping_maker.get_ping_seq(&glean, "custom"));
|
||||
assert_eq!(7, ping_maker.get_ping_seq(&glean, "other"));
|
||||
// The next one should not have been migrated, so we expect
|
||||
// it to start from 0 instead of 85.
|
||||
assert_eq!(0, ping_maker.get_ping_seq(&glean, "ignored"));
|
||||
}
|
||||
}
|
|
@@ -5,6 +5,7 @@
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;
use std::fs;
use std::str;
use std::sync::RwLock;

use rkv::{Rkv, SingleStore, StoreOptions};

@@ -18,15 +19,9 @@ use crate::Result;
#[derive(Debug)]
pub struct Database {
    rkv: Rkv,
    // Metrics with 'application' lifetime only live as long
    // as the application lives: they don't need to be persisted
    // to disk using rkv. Store them in a map.
    app_lifetime_data: RwLock<BTreeMap<String, Metric>>,
    // If the `delay_ping_lifetime_io` Glean config option is `true`,
    // we will save metrics with 'ping' lifetime data in a map temporarily
    // so as to persist them to disk using rkv in bulk on shutdown,
    // or after a given interval, instead of everytime a new metric
    // is created / updated.
    // so as to persist them to disk using rkv in bulk on demand.
    ping_lifetime_data: Option<RwLock<BTreeMap<String, Metric>>>,
}

@@ -35,16 +30,22 @@ impl Database {
    ///
    /// This opens the underlying rkv store and creates
    /// the underlying directory structure.
    ///
    /// It also loads any Lifetime::Ping data that might be
    /// persisted, in case `delay_ping_lifetime_io` is set.
    pub fn new(data_path: &str, delay_ping_lifetime_io: bool) -> Result<Self> {
        Ok(Self {
        let db = Self {
            rkv: Self::open_rkv(data_path)?,
            app_lifetime_data: RwLock::new(BTreeMap::new()),
            ping_lifetime_data: if delay_ping_lifetime_io {
                Some(RwLock::new(BTreeMap::new()))
            } else {
                None
            },
        })
        };

        db.load_ping_lifetime_data();

        Ok(db)
    }

    /// Creates the storage directories and inits rkv.

@@ -78,6 +79,40 @@ impl Database {
        }
    }

    /// Loads Lifetime::Ping data from rkv to memory,
    /// if `delay_ping_lifetime_io` is set to true.
    ///
    /// Does nothing if it isn't set, or if there is no data to load.
    fn load_ping_lifetime_data(&self) {
        if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
            let mut data = ping_lifetime_data
                .write()
                .expect("Can't read ping lifetime data");

            let store: SingleStore = unwrap_or!(
                self.rkv
                    .open_single(Lifetime::Ping.as_str(), StoreOptions::create()),
                return
            );

            let reader = unwrap_or!(self.rkv.read(), return);
            let mut iter = unwrap_or!(store.iter_start(&reader), return);

            while let Some(Ok((metric_name, value))) = iter.next() {
                let metric_name = match str::from_utf8(metric_name) {
                    Ok(metric_name) => metric_name.to_string(),
                    _ => continue,
                };
                let metric: Metric = match value.expect("Value missing in iteration") {
                    rkv::Value::Blob(blob) => unwrap_or!(bincode::deserialize(blob), continue),
                    _ => continue,
                };

                data.insert(metric_name, metric);
            }
        }
    }

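`load_ping_lifetime_data` above relies on bincode (bumped to 1.2.1 in this update) to turn stored `rkv::Value::Blob` bytes back into `Metric` values. A minimal, self-contained sketch of that serialize/deserialize round-trip, using a stand-in serde type rather than glean-core's real `Metric` enum (illustrative only, not code from the patch):

```rust
use serde::{Deserialize, Serialize};

// Stand-in for glean-core's `Metric` enum, just to show the round-trip.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum SampleMetric {
    String(String),
    Counter(i32),
}

fn main() {
    let metric = SampleMetric::Counter(7);

    // Serialize to the byte blob that would be stored in rkv...
    let encoded: Vec<u8> = bincode::serialize(&metric).expect("serialization failed");

    // ...and deserialize it back, as when loading ping-lifetime data.
    let decoded: SampleMetric = bincode::deserialize(&encoded).expect("deserialization failed");
    assert_eq!(metric, decoded);
}
```

The same `bincode::serialize` call is what `persist_ping_lifetime_data` further down uses when flushing the in-memory map to rkv.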
|
||||
/// Iterates with the provided transaction function over the requested data
|
||||
/// from the given storage.
|
||||
///
|
||||
|
@ -112,23 +147,7 @@ impl Database {
|
|||
let iter_start = Self::get_storage_key(storage_name, metric_key);
|
||||
let len = iter_start.len();
|
||||
|
||||
// Lifetime::Application data is not persisted to disk
|
||||
if lifetime == Lifetime::Application {
|
||||
let data = self
|
||||
.app_lifetime_data
|
||||
.read()
|
||||
.expect("Can't read app lifetime data");
|
||||
for (key, value) in data.iter() {
|
||||
if key.starts_with(&iter_start) {
|
||||
let key = &key[len..];
|
||||
transaction_fn(key.as_bytes(), value);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Lifetime::Ping data is not persisted to disk if
|
||||
// Lifetime::Ping data is not immediately persisted to disk if
|
||||
// Glean has `delay_ping_lifetime_io` set to true
|
||||
if lifetime == Lifetime::Ping {
|
||||
if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
|
||||
|
@ -189,15 +208,6 @@ impl Database {
|
|||
) -> bool {
|
||||
let key = Self::get_storage_key(storage_name, Some(metric_identifier));
|
||||
|
||||
// Lifetime::Application data is not persisted to disk
|
||||
if lifetime == Lifetime::Application {
|
||||
return self
|
||||
.app_lifetime_data
|
||||
.read()
|
||||
.map(|data| data.contains_key(&key))
|
||||
.unwrap_or(false);
|
||||
}
|
||||
|
||||
// Lifetime::Ping data is not persisted to disk if
|
||||
// Glean has `delay_ping_lifetime_io` set to true
|
||||
if lifetime == Lifetime::Ping {
|
||||
|
@ -224,16 +234,11 @@ impl Database {
|
|||
///
|
||||
/// ## Panics
|
||||
///
|
||||
/// * This function will panic for `Lifetime::Application`.
|
||||
/// * This function will **not** panic on database errors.
|
||||
pub fn write_with_store<F>(&self, store_name: Lifetime, mut transaction_fn: F) -> Result<()>
|
||||
where
|
||||
F: FnMut(rkv::Writer, SingleStore) -> Result<()>,
|
||||
{
|
||||
if store_name == Lifetime::Application {
|
||||
panic!("Can't write with store for application-lifetime data");
|
||||
}
|
||||
|
||||
let store: SingleStore = self
|
||||
.rkv
|
||||
.open_single(store_name.as_str(), StoreOptions::create())?;
|
||||
|
@ -273,16 +278,7 @@ impl Database {
|
|||
) -> Result<()> {
|
||||
let final_key = Self::get_storage_key(storage_name, Some(key));
|
||||
|
||||
if lifetime == Lifetime::Application {
|
||||
let mut data = self
|
||||
.app_lifetime_data
|
||||
.write()
|
||||
.expect("Can't read app lifetime data");
|
||||
data.insert(final_key, metric.clone());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Lifetime::Ping data is not persisted to disk if
|
||||
// Lifetime::Ping data is not immediately persisted to disk if
|
||||
// Glean has `delay_ping_lifetime_io` set to true
|
||||
if lifetime == Lifetime::Ping {
|
||||
if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
|
||||
|
@ -346,24 +342,6 @@ impl Database {
|
|||
{
|
||||
let final_key = Self::get_storage_key(storage_name, Some(key));
|
||||
|
||||
if lifetime == Lifetime::Application {
|
||||
let mut data = self
|
||||
.app_lifetime_data
|
||||
.write()
|
||||
.expect("Can't access app lifetime data as writable");
|
||||
let entry = data.entry(final_key);
|
||||
match entry {
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(transform(None));
|
||||
}
|
||||
Entry::Occupied(mut entry) => {
|
||||
let old_value = entry.get().clone();
|
||||
entry.insert(transform(Some(old_value)));
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Lifetime::Ping data is not persisted to disk if
|
||||
// Glean has `delay_ping_lifetime_io` set to true
|
||||
if lifetime == Lifetime::Ping {
|
||||
|
@ -423,14 +401,13 @@ impl Database {
|
|||
///
|
||||
/// * This function will **not** panic on database errors.
|
||||
pub fn clear_ping_lifetime_storage(&self, storage_name: &str) -> Result<()> {
|
||||
// Lifetime::Ping might have data saved to `ping_lifetime_data`
|
||||
// Lifetime::Ping data will be saved to `ping_lifetime_data`
|
||||
// in case `delay_ping_lifetime_io` is set to true
|
||||
if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
|
||||
ping_lifetime_data
|
||||
.write()
|
||||
.expect("Can't access ping lifetime data as writable")
|
||||
.clear();
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.write_with_store(Lifetime::Ping, |mut writer, store| {
|
||||
|
@ -486,15 +463,6 @@ impl Database {
|
|||
) -> Result<()> {
|
||||
let final_key = Self::get_storage_key(storage_name, Some(metric_name));
|
||||
|
||||
if lifetime == Lifetime::Application {
|
||||
let mut data = self
|
||||
.app_lifetime_data
|
||||
.write()
|
||||
.expect("Can't access app lifetime data as writable");
|
||||
data.remove(&final_key);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Lifetime::Ping data is not persisted to disk if
|
||||
// Glean has `delay_ping_lifetime_io` set to true
|
||||
if lifetime == Lifetime::Ping {
|
||||
|
@ -503,17 +471,41 @@ impl Database {
|
|||
.write()
|
||||
.expect("Can't access app lifetime data as writable");
|
||||
data.remove(&final_key);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
        self.write_with_store(lifetime, |mut writer, store| {
            store.delete(&mut writer, final_key.clone())?;
            if let Err(e) = store.delete(&mut writer, final_key.clone()) {
                if self.ping_lifetime_data.is_some() {
                    // If ping_lifetime_data exists, it might be
                    // that data is in memory, but not yet in rkv.
                    return Ok(());
                }
                return Err(e.into());
            }
            writer.commit()?;
            Ok(())
        })
    }

    /// Clears all the metrics in the database, for the provided lifetime.
    ///
    /// Errors are logged.
    ///
    /// ## Panics
    ///
    /// * This function will **not** panic on database errors.
    pub fn clear_lifetime(&self, lifetime: Lifetime) {
        let res = self.write_with_store(lifetime, |mut writer, store| {
            store.clear(&mut writer)?;
            writer.commit()?;
            Ok(())
        });
        if let Err(e) = res {
            log::error!("Could not clear store for lifetime {:?}: {:?}", lifetime, e);
        }
    }

    /// Clears all metrics in the database.
    ///
    /// Errors are logged.

@@ -522,28 +514,45 @@
    ///
    /// * This function will **not** panic on database errors.
    pub fn clear_all(&self) {
        for lifetime in [Lifetime::User, Lifetime::Ping].iter() {
            let res = self.write_with_store(*lifetime, |mut writer, store| {
                store.clear(&mut writer)?;
                writer.commit()?;
                Ok(())
            });
            if let Err(e) = res {
                log::error!("Could not clear store for lifetime {:?}: {:?}", lifetime, e);
            }
        }

        self.app_lifetime_data
            .write()
            .expect("Can't access app lifetime data as writable")
            .clear();

        if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
            ping_lifetime_data
                .write()
                .expect("Can't access ping lifetime data as writable")
                .clear();
        }

        for lifetime in [Lifetime::User, Lifetime::Ping, Lifetime::Application].iter() {
            self.clear_lifetime(*lifetime);
        }
    }

    /// Persist ping_lifetime_data to disk.
    ///
    /// Does nothing in case there is nothing to persist.
    ///
    /// ## Panics
    ///
    /// * This function will **not** panic on database errors.
    pub fn persist_ping_lifetime_data(&self) -> Result<()> {
        if let Some(ping_lifetime_data) = &self.ping_lifetime_data {
            let data = ping_lifetime_data
                .read()
                .expect("Can't read ping lifetime data");

            self.write_with_store(Lifetime::Ping, |mut writer, store| {
                for (key, value) in data.iter() {
                    let encoded =
                        bincode::serialize(&value).expect("IMPOSSIBLE: Serializing metric failed");
                    // There is no need for `get_storage_key` here because
                    // the key is already formatted from when it was saved
                    // to ping_lifetime_data.
                    store.put(&mut writer, &key, &rkv::Value::Blob(&encoded))?;
                }
                writer.commit()?;
                Ok(())
            })?;
        }
        Ok(())
    }
}

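Taken together, the new `load_ping_lifetime_data` / `persist_ping_lifetime_data` pair means that with `delay_ping_lifetime_io` enabled, ping-lifetime metrics stay in the in-memory map until explicitly flushed to rkv in bulk. A condensed sketch of that flow, mirroring the `test_delayed_ping_lifetime_persistence` test further down; the module paths and crate-internal visibility are assumptions, not code from this patch:

```rust
use crate::database::Database; // assumed module path
use crate::metrics::Metric;    // assumed module path
use crate::Lifetime;

fn delayed_persistence_flow(data_path: &str) -> crate::Result<()> {
    // `true` turns on delay_ping_lifetime_io: ping-lifetime data is kept in memory.
    let db = Database::new(data_path, true)?;

    // This lands in `ping_lifetime_data`, not in rkv yet.
    db.record_per_lifetime(
        Lifetime::Ping,
        "store1",
        "category.metric_name",
        &Metric::String("value".into()),
    )?;

    // Flushing on demand (e.g. at shutdown) writes the whole map to rkv at once.
    db.persist_ping_lifetime_data()?;
    Ok(())
}
```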
|
||||
|
@ -773,7 +782,7 @@ mod test {
|
|||
*lifetime,
|
||||
test_storage,
|
||||
&format!("{}_{}", metric_id_pattern, value),
|
||||
&Metric::String(value.to_string()),
|
||||
&Metric::String((*value).to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
@ -812,51 +821,188 @@ mod test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_deferred_ping_lifetime_collection() {
|
||||
fn test_delayed_ping_lifetime_persistence() {
|
||||
// Init the database in a temporary directory.
|
||||
let dir = tempdir().unwrap();
|
||||
let str_dir = dir.path().display().to_string();
|
||||
let db = Database::new(&str_dir, true).unwrap();
|
||||
let test_storage = "test-storage";
|
||||
|
||||
assert!(db.ping_lifetime_data.is_some());
|
||||
|
||||
// Attempt to record a known value.
|
||||
let test_value = "test-value";
|
||||
let test_storage = "test-storage1";
|
||||
let test_metric_id = "telemetry_test.test_name";
|
||||
let test_value1 = "test-value1";
|
||||
let test_metric_id1 = "telemetry_test.test_name1";
|
||||
db.record_per_lifetime(
|
||||
Lifetime::Ping,
|
||||
test_storage,
|
||||
test_metric_id,
|
||||
&Metric::String(test_value.to_string()),
|
||||
test_metric_id1,
|
||||
&Metric::String(test_value1.to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Verify that the data is correctly recorded.
|
||||
let mut found_metrics = 0;
|
||||
let mut snapshotter = |metric_name: &[u8], metric: &Metric| {
|
||||
found_metrics += 1;
|
||||
let metric_id = String::from_utf8_lossy(metric_name).into_owned();
|
||||
assert_eq!(test_metric_id, metric_id);
|
||||
match metric {
|
||||
Metric::String(s) => assert_eq!(test_value, s),
|
||||
_ => panic!("Unexpected data found"),
|
||||
}
|
||||
};
|
||||
// Attempt to persist data.
|
||||
db.persist_ping_lifetime_data().unwrap();
|
||||
|
||||
db.iter_store_from(Lifetime::Ping, test_storage, None, &mut snapshotter);
|
||||
assert_eq!(1, found_metrics, "We only expect 1 Lifetime.Ping metric.");
|
||||
// Attempt to record another known value.
|
||||
let test_value2 = "test-value2";
|
||||
let test_metric_id2 = "telemetry_test.test_name2";
|
||||
db.record_per_lifetime(
|
||||
Lifetime::Ping,
|
||||
test_storage,
|
||||
test_metric_id2,
|
||||
&Metric::String(test_value2.to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Make sure data was **not** persisted with rkv.
|
||||
let store: SingleStore = unwrap_or!(
|
||||
db.rkv
|
||||
.open_single(Lifetime::Ping.as_str(), StoreOptions::create()),
|
||||
panic!()
|
||||
);
|
||||
let reader = unwrap_or!(db.rkv.read(), panic!());
|
||||
assert!(store
|
||||
.get(&reader, &test_metric_id)
|
||||
.unwrap_or(None)
|
||||
.is_none());
|
||||
{
|
||||
// At this stage we expect `test_value1` to be persisted and in memory,
|
||||
// since it was recorded before calling `persist_ping_lifetime_data`,
|
||||
// and `test_value2` to be only in memory, since it was recorded after.
|
||||
let store: SingleStore = db
|
||||
.rkv
|
||||
.open_single(Lifetime::Ping.as_str(), StoreOptions::create())
|
||||
.unwrap();
|
||||
let reader = db.rkv.read().unwrap();
|
||||
|
||||
// Verify that test_value1 is in rkv.
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id1))
|
||||
.unwrap_or(None)
|
||||
.is_some());
|
||||
// Verify that test_value2 is **not** in rkv.
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id2))
|
||||
.unwrap_or(None)
|
||||
.is_none());
|
||||
|
||||
let data = match &db.ping_lifetime_data {
|
||||
Some(ping_lifetime_data) => ping_lifetime_data,
|
||||
None => panic!("Expected `ping_lifetime_data` to exist here!"),
|
||||
};
|
||||
let data = data.read().unwrap();
|
||||
// Verify that test_value1 is also in memory.
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id1))
|
||||
.is_some());
|
||||
// Verify that test_value2 is in memory.
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id2))
|
||||
.is_some());
|
||||
}
|
||||
|
||||
// Attempt to persist data again.
|
||||
db.persist_ping_lifetime_data().unwrap();
|
||||
|
||||
{
|
||||
// At this stage we expect `test_value1` and `test_value2` to
|
||||
// be persisted, since both were created before a call to
|
||||
// `persist_ping_lifetime_data`.
|
||||
let store: SingleStore = db
|
||||
.rkv
|
||||
.open_single(Lifetime::Ping.as_str(), StoreOptions::create())
|
||||
.unwrap();
|
||||
let reader = db.rkv.read().unwrap();
|
||||
|
||||
// Verify that test_value1 is in rkv.
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id1))
|
||||
.unwrap_or(None)
|
||||
.is_some());
|
||||
// Verify that test_value2 is also in rkv.
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id2))
|
||||
.unwrap_or(None)
|
||||
.is_some());
|
||||
|
||||
let data = match &db.ping_lifetime_data {
|
||||
Some(ping_lifetime_data) => ping_lifetime_data,
|
||||
None => panic!("Expected `ping_lifetime_data` to exist here!"),
|
||||
};
|
||||
let data = data.read().unwrap();
|
||||
// Verify that test_value1 is also in memory.
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id1))
|
||||
.is_some());
|
||||
// Verify that test_value2 is also in memory.
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id2))
|
||||
.is_some());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_ping_lifetime_data_from_memory() {
|
||||
// Init the database in a temporary directory.
|
||||
let dir = tempdir().unwrap();
|
||||
let str_dir = dir.path().display().to_string();
|
||||
|
||||
let test_storage = "test-storage";
|
||||
let test_value = "test-value";
|
||||
let test_metric_id = "telemetry_test.test_name";
|
||||
|
||||
{
|
||||
let db = Database::new(&str_dir, true).unwrap();
|
||||
|
||||
// Attempt to record a known value.
|
||||
db.record_per_lifetime(
|
||||
Lifetime::Ping,
|
||||
test_storage,
|
||||
test_metric_id,
|
||||
&Metric::String(test_value.to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Verify that test_value is in memory.
|
||||
let data = match &db.ping_lifetime_data {
|
||||
Some(ping_lifetime_data) => ping_lifetime_data,
|
||||
None => panic!("Expected `ping_lifetime_data` to exist here!"),
|
||||
};
|
||||
let data = data.read().unwrap();
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id))
|
||||
.is_some());
|
||||
|
||||
// Attempt to persist data.
|
||||
db.persist_ping_lifetime_data().unwrap();
|
||||
|
||||
// Verify that test_value is now in rkv.
|
||||
let store: SingleStore = db
|
||||
.rkv
|
||||
.open_single(Lifetime::Ping.as_str(), StoreOptions::create())
|
||||
.unwrap();
|
||||
let reader = db.rkv.read().unwrap();
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id))
|
||||
.unwrap_or(None)
|
||||
.is_some());
|
||||
}
|
||||
|
||||
// Now create a new instance of the db and check if data was
|
||||
// correctly loaded from rkv to memory.
|
||||
{
|
||||
let db = Database::new(&str_dir, true).unwrap();
|
||||
|
||||
// Verify that test_value is in memory.
|
||||
let data = match &db.ping_lifetime_data {
|
||||
Some(ping_lifetime_data) => ping_lifetime_data,
|
||||
None => panic!("Expected `ping_lifetime_data` to exist here!"),
|
||||
};
|
||||
let data = data.read().unwrap();
|
||||
assert!(data
|
||||
.get(&format!("{}#{}", test_storage, test_metric_id))
|
||||
.is_some());
|
||||
|
||||
// Verify that test_value is also in rkv.
|
||||
let store: SingleStore = db
|
||||
.rkv
|
||||
.open_single(Lifetime::Ping.as_str(), StoreOptions::create())
|
||||
.unwrap();
|
||||
let reader = db.rkv.read().unwrap();
|
||||
assert!(store
|
||||
.get(&reader, format!("{}#{}", test_storage, test_metric_id))
|
||||
.unwrap_or(None)
|
||||
.is_some());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,8 +3,6 @@ use std::fmt::{self, Display};
|
|||
use std::io;
|
||||
use std::result;
|
||||
|
||||
use failure::{self, Backtrace, Context, Fail};
|
||||
|
||||
use ffi_support::{handle_map::HandleError, ExternError};
|
||||
|
||||
use rkv::error::StoreError;
|
||||
|
@@ -20,53 +18,44 @@ pub type Result<T> = result::Result<T, Error>;

/// A list enumerating the categories of errors in this crate.
///
/// [`Error`]: https://doc.rust-lang.org/stable/std/error/trait.Error.html
///
/// This list is intended to grow over time and it is not recommended to
/// exhaustively match against it.
///
/// It is used with the [`Error`] struct.
///
/// [`Error`]: std.struct.Error.html
#[derive(Debug, Fail)]
#[derive(Debug)]
pub enum ErrorKind {
|
||||
/// Lifetime conversion failed
|
||||
#[fail(display = "Lifetime conversion from {} failed", _0)]
|
||||
Lifetime(i32),
|
||||
|
||||
/// FFI-Support error
|
||||
#[fail(display = "Invalid handle: {}", _0)]
|
||||
Handle(HandleError),
|
||||
|
||||
/// IO error
|
||||
#[fail(display = "An I/O error occurred: {}", _0)]
|
||||
IoError(io::Error),
|
||||
|
||||
/// IO error
|
||||
#[fail(display = "An Rkv error occurred: {}", _0)]
|
||||
Rkv(StoreError),
|
||||
|
||||
/// JSON error
|
||||
#[fail(display = "A JSON error occurred: {}", _0)]
|
||||
Json(serde_json::error::Error),
|
||||
|
||||
/// TimeUnit conversion failed
|
||||
#[fail(display = "TimeUnit conversion from {} failed", _0)]
|
||||
TimeUnit(i32),
|
||||
|
||||
/// MemoryUnit conversion failed
|
||||
#[fail(display = "MemoryUnit conversion from {} failed", _0)]
|
||||
MemoryUnit(i32),
|
||||
|
||||
/// HistogramType conversion failed
|
||||
#[fail(display = "HistogramType conversion from {} failed", _0)]
|
||||
HistogramType(i32),
|
||||
|
||||
/// OsString conversion failed
|
||||
#[fail(display = "OsString conversion from {:?} failed", _0)]
|
||||
OsString(OsString),
|
||||
|
||||
/// Unknown error
|
||||
#[fail(display = "Invalid UTF-8 byte sequence in string.")]
|
||||
Utf8Error,
|
||||
|
||||
#[doc(hidden)]
|
||||
__NonExhaustive,
|
||||
}
|
||||
|
||||
/// A specialized [`Error`] type for this crate's operations.
|
||||
|
@ -74,60 +63,51 @@ pub enum ErrorKind {
|
|||
/// [`Error`]: https://doc.rust-lang.org/stable/std/error/trait.Error.html
|
||||
#[derive(Debug)]
|
||||
pub struct Error {
|
||||
inner: Context<ErrorKind>,
|
||||
kind: ErrorKind,
|
||||
}
|
||||
|
||||
impl Error {
|
||||
/// Access the [`ErrorKind`] member.
|
||||
///
|
||||
/// [`ErrorKind`]: enum.ErrorKind.html
|
||||
pub fn kind(&self) -> &ErrorKind {
|
||||
&*self.inner.get_context()
|
||||
}
|
||||
|
||||
/// Return a new UTF-8 error
|
||||
///
|
||||
/// This is exposed in order to expose conversion errors on the FFI layer.
|
||||
pub fn utf8_error() -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::Utf8Error),
|
||||
kind: ErrorKind::Utf8Error,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Fail for Error {
|
||||
fn cause(&self) -> Option<&dyn Fail> {
|
||||
self.inner.cause()
|
||||
}
|
||||
|
||||
fn backtrace(&self) -> Option<&Backtrace> {
|
||||
self.inner.backtrace()
|
||||
}
|
||||
}
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
Display::fmt(&self.inner, f)
|
||||
use ErrorKind::*;
|
||||
match &self.kind {
|
||||
Lifetime(l) => write!(f, "Lifetime conversion from {} failed", l),
|
||||
Handle(e) => write!(f, "Invalid handle: {}", e),
|
||||
IoError(e) => write!(f, "An I/O error occurred: {}", e),
|
||||
Rkv(e) => write!(f, "An Rkv error occurred: {}", e),
|
||||
Json(e) => write!(f, "A JSON error occurred: {}", e),
|
||||
TimeUnit(t) => write!(f, "TimeUnit conversion from {} failed", t),
|
||||
MemoryUnit(m) => write!(f, "MemoryUnit conversion from {} failed", m),
|
||||
HistogramType(h) => write!(f, "HistogramType conversion from {} failed", h),
|
||||
OsString(s) => write!(f, "OsString conversion from {:?} failed", s),
|
||||
Utf8Error => write!(f, "Invalid UTF-8 byte sequence in string."),
|
||||
__NonExhaustive => write!(f, "Unknown error"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ErrorKind> for Error {
|
||||
fn from(kind: ErrorKind) -> Error {
|
||||
let inner = Context::new(kind);
|
||||
Error { inner }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Context<ErrorKind>> for Error {
|
||||
fn from(inner: Context<ErrorKind>) -> Error {
|
||||
Error { inner }
|
||||
Error { kind }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HandleError> for Error {
|
||||
fn from(error: HandleError) -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::Handle(error)),
|
||||
kind: ErrorKind::Handle(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,7 +115,7 @@ impl From<HandleError> for Error {
|
|||
impl From<io::Error> for Error {
|
||||
fn from(error: io::Error) -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::IoError(error)),
|
||||
kind: ErrorKind::IoError(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -143,7 +123,7 @@ impl From<io::Error> for Error {
|
|||
impl From<StoreError> for Error {
|
||||
fn from(error: StoreError) -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::Rkv(error)),
|
||||
kind: ErrorKind::Rkv(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -157,7 +137,7 @@ impl From<Error> for ExternError {
|
|||
impl From<serde_json::error::Error> for Error {
|
||||
fn from(error: serde_json::error::Error) -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::Json(error)),
|
||||
kind: ErrorKind::Json(error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -165,7 +145,7 @@ impl From<serde_json::error::Error> for Error {
|
|||
impl From<OsString> for Error {
|
||||
fn from(error: OsString) -> Error {
|
||||
Error {
|
||||
inner: Context::new(ErrorKind::OsString(error)),
|
||||
kind: ErrorKind::OsString(error),
|
||||
}
|
||||
}
|
||||
}
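This change drops the `failure` crate: the error now stores its `ErrorKind` directly and implements `Display` and `std::error::Error` by hand, with `From` conversions feeding the kind. A compressed sketch of the same pattern, using hypothetical `MyError`/`MyErrorKind` names rather than the glean-core types:

use std::fmt::{self, Display};
use std::io;

#[derive(Debug)]
enum MyErrorKind {
    Io(io::Error),
    Utf8,
}

#[derive(Debug)]
struct MyError {
    kind: MyErrorKind,
}

impl MyError {
    fn kind(&self) -> &MyErrorKind {
        &self.kind
    }
}

impl Display for MyError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.kind {
            MyErrorKind::Io(e) => write!(f, "An I/O error occurred: {}", e),
            MyErrorKind::Utf8 => write!(f, "Invalid UTF-8 byte sequence in string."),
        }
    }
}

// No backtrace/cause plumbing needed: Debug + Display are enough for the blanket impl.
impl std::error::Error for MyError {}

impl From<io::Error> for MyError {
    fn from(error: io::Error) -> MyError {
        MyError { kind: MyErrorKind::Io(error) }
    }
}

fn main() {
    let err: MyError = io::Error::new(io::ErrorKind::Other, "disk gone").into();
    println!("{} (kind: {:?})", err, err.kind());
}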
|
||||
|
|
|
@ -23,6 +23,9 @@ use crate::Glean;
|
|||
use crate::Lifetime;
|
||||
|
||||
/// The possible error types for metric recording.
|
||||
/// Note: the cases in this enum must be kept in sync with the ones
|
||||
/// in the platform-specific code (e.g. ErrorType.kt) and with the
|
||||
/// metrics in the registry files.
|
||||
#[derive(Debug)]
|
||||
pub enum ErrorType {
|
||||
/// For when the value to be recorded does not match the metric-specific restrictions
|
||||
|
@ -31,6 +34,8 @@ pub enum ErrorType {
|
|||
InvalidLabel,
|
||||
/// For when the metric caught an invalid state while recording
|
||||
InvalidState,
|
||||
/// For when the value to be recorded overflows the metric-specific upper range
|
||||
InvalidOverflow,
|
||||
}
|
||||
|
||||
impl ErrorType {
|
||||
|
@ -40,6 +45,7 @@ impl ErrorType {
|
|||
ErrorType::InvalidValue => "invalid_value",
|
||||
ErrorType::InvalidLabel => "invalid_label",
|
||||
ErrorType::InvalidState => "invalid_state",
|
||||
ErrorType::InvalidOverflow => "invalid_overflow",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -52,6 +58,7 @@ impl TryFrom<i32> for ErrorType {
|
|||
0 => Ok(ErrorType::InvalidValue),
|
||||
1 => Ok(ErrorType::InvalidLabel),
|
||||
2 => Ok(ErrorType::InvalidState),
|
||||
4 => Ok(ErrorType::InvalidOverflow),
|
||||
e => Err(ErrorKind::Lifetime(e).into()),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ use crate::Glean;
|
|||
use crate::Result;
|
||||
|
||||
/// Represents the data for a single event.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
|
||||
pub struct RecordedEventData {
|
||||
pub timestamp: u64,
|
||||
pub category: String,
|
||||
|
@ -140,7 +140,7 @@ impl EventDatabase {
|
|||
|
||||
let mut ping_sent = false;
|
||||
for store_name in store_names {
|
||||
if let Err(err) = glean.send_ping_by_name(&store_name) {
|
||||
if let Err(err) = glean.submit_ping_by_name(&store_name) {
|
||||
log::error!(
|
||||
"Error flushing existing events to the '{}' ping: {}",
|
||||
store_name,
|
||||
|
@ -183,7 +183,7 @@ impl EventDatabase {
|
|||
let event_json = serde_json::to_string(&event).unwrap(); // safe unwrap, event can always be serialized
|
||||
|
||||
// Store the event in memory and on disk to each of the stores.
|
||||
let mut stores_to_send: Vec<&str> = Vec::new();
|
||||
let mut stores_to_submit: Vec<&str> = Vec::new();
|
||||
{
|
||||
let mut db = self.event_stores.write().unwrap(); // safe unwrap, only error case is poisoning
|
||||
for store_name in meta.send_in_pings.iter() {
|
||||
|
@ -191,15 +191,15 @@ impl EventDatabase {
|
|||
store.push(event.clone());
|
||||
self.write_event_to_disk(store_name, &event_json);
|
||||
if store.len() == glean.get_max_events() {
|
||||
stores_to_send.push(&store_name);
|
||||
stores_to_submit.push(&store_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If any of the event stores reached maximum size, send the pings
|
||||
// If any of the event stores reached maximum size, submit the pings
|
||||
// containing those events immediately.
|
||||
for store_name in stores_to_send {
|
||||
if let Err(err) = glean.send_ping_by_name(store_name) {
|
||||
for store_name in stores_to_submit {
|
||||
if let Err(err) = glean.submit_ping_by_name(store_name) {
|
||||
log::error!(
|
||||
"Got more than {} events, but could not send {} ping: {}",
|
||||
glean.get_max_events(),
|
||||
|
@ -364,4 +364,76 @@ mod test {
|
|||
assert_eq!(1, events.len());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stable_serialization() {
|
||||
let event_empty = RecordedEventData {
|
||||
timestamp: 2,
|
||||
category: "cat".to_string(),
|
||||
name: "name".to_string(),
|
||||
extra: None,
|
||||
};
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("a key".to_string(), "a value".to_string());
|
||||
let event_data = RecordedEventData {
|
||||
timestamp: 2,
|
||||
category: "cat".to_string(),
|
||||
name: "name".to_string(),
|
||||
extra: Some(data),
|
||||
};
|
||||
|
||||
let event_empty_json = ::serde_json::to_string_pretty(&event_empty).unwrap();
|
||||
let event_data_json = ::serde_json::to_string_pretty(&event_data).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
event_empty,
|
||||
serde_json::from_str(&event_empty_json).unwrap()
|
||||
);
|
||||
assert_eq!(event_data, serde_json::from_str(&event_data_json).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_existing_data() {
|
||||
let event_empty_json = r#"
|
||||
{
|
||||
"timestamp": 2,
|
||||
"category": "cat",
|
||||
"name": "name"
|
||||
}
|
||||
"#;
|
||||
|
||||
let event_data_json = r#"
|
||||
{
|
||||
"timestamp": 2,
|
||||
"category": "cat",
|
||||
"name": "name",
|
||||
"extra": {
|
||||
"a key": "a value"
|
||||
}
|
||||
}
|
||||
"#;
|
||||
|
||||
let event_empty = RecordedEventData {
|
||||
timestamp: 2,
|
||||
category: "cat".to_string(),
|
||||
name: "name".to_string(),
|
||||
extra: None,
|
||||
};
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("a key".to_string(), "a value".to_string());
|
||||
let event_data = RecordedEventData {
|
||||
timestamp: 2,
|
||||
category: "cat".to_string(),
|
||||
name: "name".to_string(),
|
||||
extra: Some(data),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
event_empty,
|
||||
serde_json::from_str(&event_empty_json).unwrap()
|
||||
);
|
||||
assert_eq!(event_data, serde_json::from_str(&event_data_json).unwrap());
|
||||
}
|
||||
}
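Earlier in this file the send-to-submit rename also covers the capacity check: each recorded event is appended to every target store and written to disk, and any store that reaches `get_max_events()` has its ping submitted right away. A hedged sketch of that bookkeeping, with illustrative names rather than glean-core's API:

use std::collections::HashMap;

// Returns the stores that just hit `max_events` so their pings can be submitted.
fn record_event(
    stores: &mut HashMap<String, Vec<String>>,
    send_in_pings: &[String],
    event_json: &str,
    max_events: usize,
) -> Vec<String> {
    let mut stores_to_submit = Vec::new();
    for store_name in send_in_pings {
        let store = stores.entry(store_name.clone()).or_default();
        store.push(event_json.to_string());
        if store.len() == max_events {
            stores_to_submit.push(store_name.clone());
        }
    }
    stores_to_submit
}

fn main() {
    let mut stores = HashMap::new();
    let pings = vec!["events".to_string()];
    for i in 0..500 {
        let full = record_event(&mut stores, &pings, &format!("{{\"n\":{}}}", i), 500);
        if !full.is_empty() {
            // In glean-core this is where `submit_ping_by_name` is called
            // and the in-memory store is cleared.
            println!("would submit: {:?}", full);
        }
    }
}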
|
||||
|
|
|
@@ -24,7 +24,7 @@ impl InternalPings {
            baseline: PingType::new("baseline", true, false),
            metrics: PingType::new("metrics", true, false),
            events: PingType::new("events", true, false),
            deletion_request: PingType::new("deletion_request", true, true),
            deletion_request: PingType::new("deletion-request", true, true),
        }
    }
}
|
||||
|
|
|
@ -19,8 +19,10 @@ use chrono::{DateTime, FixedOffset};
|
|||
use lazy_static::lazy_static;
|
||||
use uuid::Uuid;
|
||||
|
||||
// This needs to be included first, and the space below prevents rustfmt from
|
||||
// alphabetizing it.
|
||||
mod macros;
|
||||
pub mod ac_migration;
|
||||
|
||||
mod common_metric_data;
|
||||
mod database;
|
||||
mod error;
|
||||
|
@ -34,7 +36,6 @@ pub mod ping;
|
|||
pub mod storage;
|
||||
mod util;
|
||||
|
||||
use crate::ac_migration::migrate_sequence_numbers;
|
||||
pub use crate::common_metric_data::{CommonMetricData, Lifetime};
|
||||
use crate::database::Database;
|
||||
pub use crate::error::{Error, Result};
|
||||
|
@ -100,7 +101,7 @@ pub struct Configuration {
|
|||
///
|
||||
/// call_counter.add(&glean, 1);
|
||||
///
|
||||
/// glean.send_ping(&ping).unwrap();
|
||||
/// glean.submit_ping(&ping).unwrap();
|
||||
/// ```
|
||||
///
|
||||
/// ## Note
|
||||
|
@ -119,6 +120,7 @@ pub struct Glean {
|
|||
ping_registry: HashMap<String, PingType>,
|
||||
start_time: DateTime<FixedOffset>,
|
||||
max_events: usize,
|
||||
is_first_run: bool,
|
||||
}
|
||||
|
||||
impl Glean {
|
||||
|
@ -147,44 +149,12 @@ impl Glean {
|
|||
ping_registry: HashMap::new(),
|
||||
start_time: local_now_with_offset(),
|
||||
max_events: cfg.max_events.unwrap_or(DEFAULT_MAX_EVENTS),
|
||||
is_first_run: false,
|
||||
};
|
||||
glean.on_change_upload_enabled(cfg.upload_enabled);
|
||||
Ok(glean)
|
||||
}
|
||||
|
||||
/// Create and initialize a new Glean object.
|
||||
///
|
||||
/// This will attempt to delete any previously existing database and
|
||||
/// then create the necessary directories and files in `data_path`.
|
||||
/// This will also initialize the core metrics.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `cfg` - an instance of the Glean `Configuration`.
|
||||
/// * `new_sequence_nums` - a map of ("<pingName>_seq", sequence number)
|
||||
/// used to initialize Glean with sequence numbers imported from glean-ac.
|
||||
pub fn with_sequence_numbers(
|
||||
cfg: Configuration,
|
||||
new_sequence_nums: HashMap<String, i32>,
|
||||
) -> Result<Self> {
|
||||
log::info!("Creating new Glean (migrating data)");
|
||||
|
||||
// Delete the database directory, if it exists. Bail out if there's
|
||||
// errors, as I'm not sure what else could be done if we can't even
|
||||
// delete a directory we own.
|
||||
let db_path = Path::new(&cfg.data_path).join("db");
|
||||
if db_path.exists() {
|
||||
std::fs::remove_dir_all(db_path)?;
|
||||
}
|
||||
|
||||
let glean = Self::new(cfg)?;
|
||||
|
||||
// Set sequence numbers coming through the FFI.
|
||||
migrate_sequence_numbers(&glean, new_sequence_nums);
|
||||
|
||||
Ok(glean)
|
||||
}
|
||||
|
||||
/// For tests make it easy to create a Glean object using only the required configuration.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn with_options(
|
||||
|
@ -224,6 +194,10 @@ impl Glean {
|
|||
.is_none()
|
||||
{
|
||||
self.core_metrics.first_run_date.set(self, None);
|
||||
// The `first_run_date` field is generated on the very first run
|
||||
// and persisted across upload toggling. We can assume that, the only
|
||||
// time it is set, that's indeed our "first run".
|
||||
self.is_first_run = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -235,7 +209,7 @@ impl Glean {
|
|||
/// # Return value
|
||||
///
|
||||
/// `true` if at least one ping was generated, `false` otherwise.
|
||||
pub fn on_ready_to_send_pings(&self) -> bool {
|
||||
pub fn on_ready_to_submit_pings(&self) -> bool {
|
||||
self.event_data_store.flush_pending_events_on_startup(&self)
|
||||
}
|
||||
|
||||
|
@ -261,9 +235,9 @@ impl Glean {
|
|||
pub fn set_upload_enabled(&mut self, flag: bool) -> bool {
|
||||
log::info!("Upload enabled: {:?}", flag);
|
||||
|
||||
// When upload is disabled, send a deletion-request ping
|
||||
// When upload is disabled, submit a deletion-request ping
|
||||
if !flag {
|
||||
if let Err(err) = self.internal_pings.deletion_request.send(self) {
|
||||
if let Err(err) = self.internal_pings.deletion_request.submit(self) {
|
||||
log::error!("Failed to send deletion-request ping on optout: {}", err);
|
||||
}
|
||||
}
|
||||
|
@ -408,7 +382,7 @@ impl Glean {
|
|||
)
|
||||
}
|
||||
|
||||
/// Send a ping.
|
||||
/// Collect and submit a ping for eventual uploading.
|
||||
///
|
||||
/// The ping content is assembled as soon as possible, but upload is not
|
||||
/// guaranteed to happen immediately, as that depends on the upload
|
||||
|
@ -418,7 +392,12 @@ impl Glean {
|
|||
///
|
||||
/// Returns true if a ping was assembled and queued, false otherwise.
|
||||
/// Returns an error if collecting or writing the ping to disk failed.
|
||||
pub fn send_ping(&self, ping: &PingType) -> Result<bool> {
|
||||
pub fn submit_ping(&self, ping: &PingType) -> Result<bool> {
|
||||
if !self.is_upload_enabled() {
|
||||
log::error!("Glean must be enabled before sending pings.");
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let ping_maker = PingMaker::new();
|
||||
let doc_id = Uuid::new_v4().to_string();
|
||||
let url_path = self.make_path(&ping.name, &doc_id);
|
||||
|
@ -451,26 +430,26 @@ impl Glean {
|
|||
}
|
||||
}
|
||||
|
||||
/// Send a list of pings by name.
|
||||
/// Collect and submit a ping for eventual uploading by name.
|
||||
///
|
||||
/// See `send_ping` for detailed information.
|
||||
/// See `submit_ping` for detailed information.
|
||||
///
|
||||
/// Returns true if at least one ping was assembled and queued, false otherwise.
|
||||
pub fn send_pings_by_name(&self, ping_names: &[String]) -> bool {
|
||||
pub fn submit_pings_by_name(&self, ping_names: &[String]) -> bool {
|
||||
// TODO: 1553813: glean-ac collects and stores pings in parallel and then joins them all before queueing the worker.
|
||||
// This here is writing them out sequentially.
|
||||
|
||||
let mut result = false;
|
||||
|
||||
for ping_name in ping_names {
|
||||
if let Ok(true) = self.send_ping_by_name(ping_name) {
|
||||
if let Ok(true) = self.submit_ping_by_name(ping_name) {
|
||||
result = true;
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Send a ping by name.
|
||||
/// Collect and submit a ping by name for eventual uploading.
|
||||
///
|
||||
/// The ping content is assembled as soon as possible, but upload is not
|
||||
/// guaranteed to happen immediately, as that depends on the upload
|
||||
|
@ -480,13 +459,13 @@ impl Glean {
|
|||
///
|
||||
/// Returns true if a ping was assembled and queued, false otherwise.
|
||||
/// Returns an error if collecting or writing the ping to disk failed.
|
||||
pub fn send_ping_by_name(&self, ping_name: &str) -> Result<bool> {
|
||||
pub fn submit_ping_by_name(&self, ping_name: &str) -> Result<bool> {
|
||||
match self.get_ping_by_name(ping_name) {
|
||||
None => {
|
||||
log::error!("Attempted to send unknown ping '{}'", ping_name);
|
||||
log::error!("Attempted to submit unknown ping '{}'", ping_name);
|
||||
Ok(false)
|
||||
}
|
||||
Some(ping) => self.send_ping(ping),
|
||||
Some(ping) => self.submit_ping(ping),
|
||||
}
|
||||
}
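The renamed `submit_ping` / `submit_ping_by_name` / `submit_pings_by_name` family only assembles a ping and queues it on disk; the actual upload happens later, elsewhere. A usage sketch pieced together from the calls visible in this diff; exact `Configuration` fields and paths may differ slightly in the real crate:

use glean_core::metrics::{CounterMetric, PingType};
use glean_core::{CommonMetricData, Glean, Lifetime};

fn main() -> glean_core::Result<()> {
    let cfg = glean_core::Configuration {
        data_path: "/tmp/glean".into(),
        application_id: "org.example.app".into(),
        upload_enabled: true,
        max_events: None,
        delay_ping_lifetime_io: false,
    };
    let mut glean = Glean::new(cfg)?;

    // A custom ping that includes the client id and is sent even when empty.
    let ping = PingType::new("prototype", true, true);
    glean.register_ping_type(&ping);

    let counter = CounterMetric::new(CommonMetricData {
        name: "calls".into(),
        category: "example".into(),
        send_in_pings: vec!["prototype".into()],
        disabled: false,
        lifetime: Lifetime::Ping,
        ..Default::default()
    });
    counter.add(&glean, 1);

    // Collect the ping and queue it on disk; upload is handled separately.
    let queued = glean.submit_ping_by_name("prototype")?;
    assert!(queued);
    Ok(())
}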
|
||||
|
||||
|
@ -543,6 +522,28 @@ impl Glean {
|
|||
metric.set_inactive(&self);
|
||||
}
|
||||
|
||||
/// Persist Lifetime::Ping data that might be in memory
|
||||
/// in case `delay_ping_lifetime_io` is set or was set
|
||||
/// at a previous time.
|
||||
///
|
||||
/// If there is no data to persist, this function does nothing.
|
||||
pub fn persist_ping_lifetime_data(&self) -> Result<()> {
|
||||
self.data_store.persist_ping_lifetime_data()
|
||||
}
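`persist_ping_lifetime_data` is the explicit flush point for the `delay_ping_lifetime_io` mode; embedders are expected to call it at a safe moment such as idle or shutdown. A minimal sketch, assuming the `log` crate as used elsewhere in this file:

// Hedged sketch: flush buffered ping-lifetime data at an idle point.
fn on_idle(glean: &glean_core::Glean) {
    if let Err(e) = glean.persist_ping_lifetime_data() {
        log::error!("Failed to persist ping-lifetime data: {}", e);
    }
}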
|
||||
|
||||
/// ** This is not meant to be used directly.**
|
||||
///
|
||||
/// Clear all the metrics that have `Lifetime::Application`.
|
||||
pub fn clear_application_lifetime_metrics(&self) {
|
||||
log::debug!("Clearing Lifetime::Application metrics");
|
||||
self.data_store.clear_lifetime(Lifetime::Application);
|
||||
}
|
||||
|
||||
/// Return whether or not this is the first run on this profile.
|
||||
pub fn is_first_run(&self) -> bool {
|
||||
self.is_first_run
|
||||
}
|
||||
|
||||
/// **Test-only API (exported for FFI purposes).**
|
||||
///
|
||||
/// Check if an experiment is currently active.
|
||||
|
|
|
@ -43,9 +43,9 @@ fn experiment_id_and_branch_get_truncated_if_too_long() {
|
|||
glean.set_experiment_active(very_long_id.clone(), very_long_branch_id.clone(), None);
|
||||
|
||||
// Generate the expected id and branch strings.
|
||||
let mut expected_id = very_long_id.clone();
|
||||
let mut expected_id = very_long_id;
|
||||
expected_id.truncate(100);
|
||||
let mut expected_branch_id = very_long_branch_id.clone();
|
||||
let mut expected_branch_id = very_long_branch_id;
|
||||
expected_branch_id.truncate(100);
|
||||
|
||||
assert!(
|
||||
|
@ -54,7 +54,7 @@ fn experiment_id_and_branch_get_truncated_if_too_long() {
|
|||
);
|
||||
|
||||
// Make sure the branch id was truncated as well.
|
||||
let experiment_data = glean.test_get_experiment_data_as_json(expected_id.clone());
|
||||
let experiment_data = glean.test_get_experiment_data_as_json(expected_id);
|
||||
assert!(
|
||||
!experiment_data.is_none(),
|
||||
"Experiment data must be available"
|
||||
|
@ -84,7 +84,7 @@ fn limits_on_experiments_extras_are_applied_correctly() {
|
|||
}
|
||||
|
||||
// Mark the experiment as active.
|
||||
glean.set_experiment_active(experiment_id.clone(), branch_id.clone(), Some(extras));
|
||||
glean.set_experiment_active(experiment_id.clone(), branch_id, Some(extras));
|
||||
|
||||
// Make sure it is active
|
||||
assert!(
|
||||
|
@ -93,7 +93,7 @@ fn limits_on_experiments_extras_are_applied_correctly() {
|
|||
);
|
||||
|
||||
// Get the data
|
||||
let experiment_data = glean.test_get_experiment_data_as_json(experiment_id.clone());
|
||||
let experiment_data = glean.test_get_experiment_data_as_json(experiment_id);
|
||||
assert!(
|
||||
!experiment_data.is_none(),
|
||||
"Experiment data must be available"
|
||||
|
@ -135,11 +135,7 @@ fn experiments_status_is_correctly_toggled() {
|
|||
.collect();
|
||||
|
||||
// Activate an experiment.
|
||||
glean.set_experiment_active(
|
||||
experiment_id.clone(),
|
||||
branch_id.clone(),
|
||||
Some(extra.clone()),
|
||||
);
|
||||
glean.set_experiment_active(experiment_id.clone(), branch_id, Some(extra.clone()));
|
||||
|
||||
// Check that the experiment is marked as active.
|
||||
assert!(
|
||||
|
@ -156,12 +152,12 @@ fn experiments_status_is_correctly_toggled() {
|
|||
|
||||
let parsed_data: RecordedExperimentData =
|
||||
::serde_json::from_str(&experiment_data.unwrap()).unwrap();
|
||||
assert_eq!(parsed_data.extra.unwrap(), extra.clone());
|
||||
assert_eq!(parsed_data.extra.unwrap(), extra);
|
||||
|
||||
// Disable the experiment and check that is no longer available.
|
||||
glean.set_experiment_inactive(experiment_id.clone());
|
||||
assert!(
|
||||
!glean.test_is_experiment_active(experiment_id.clone()),
|
||||
!glean.test_is_experiment_active(experiment_id),
|
||||
"The experiment must not be available any more."
|
||||
);
|
||||
}
|
||||
|
@ -334,27 +330,6 @@ fn disabling_when_already_disabled_is_a_noop() {
|
|||
assert!(!glean.set_upload_enabled(false));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glean_inits_with_migration_when_no_db_dir_exists() {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
let tmpname = dir.path().display().to_string();
|
||||
|
||||
let cfg = Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.to_string(),
|
||||
upload_enabled: false,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
|
||||
let mut ac_seq_numbers = HashMap::new();
|
||||
ac_seq_numbers.insert(String::from("custom_seq"), 3);
|
||||
|
||||
let mut glean = Glean::with_sequence_numbers(cfg, ac_seq_numbers).unwrap();
|
||||
|
||||
assert!(!glean.set_upload_enabled(false));
|
||||
}
|
||||
|
||||
// Test that the enum variants keep a stable discriminant when serialized.
|
||||
// Discriminant values are taken from a stable ordering from v20.0.0.
|
||||
// New metrics after that should be added in order.
|
||||
|
@ -418,3 +393,20 @@ fn correct_order() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_first_run() {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
let tmpname = dir.path().display().to_string();
|
||||
{
|
||||
let glean = Glean::with_options(&tmpname, GLOBAL_APPLICATION_ID, true).unwrap();
|
||||
// Check that this is indeed the first run.
|
||||
assert!(glean.is_first_run());
|
||||
}
|
||||
|
||||
{
|
||||
// Other runs must be not marked as "first run".
|
||||
let glean = Glean::with_options(&tmpname, GLOBAL_APPLICATION_ID, true).unwrap();
|
||||
assert!(!glean.is_first_run());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,10 +31,9 @@ const MAX_EXPERIMENT_VALUE_LEN: usize = MAX_EXPERIMENTS_IDS_LEN;
|
|||
const MAX_EXPERIMENTS_EXTRAS_SIZE: usize = 20;
|
||||
|
||||
/// The data for a single experiment.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
|
||||
pub struct RecordedExperimentData {
|
||||
pub branch: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub extra: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
|
@ -218,3 +217,60 @@ impl ExperimentMetric {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn stable_serialization() {
|
||||
let experiment_empty = RecordedExperimentData {
|
||||
branch: "branch".into(),
|
||||
extra: None,
|
||||
};
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("a key".to_string(), "a value".to_string());
|
||||
let experiment_data = RecordedExperimentData {
|
||||
branch: "branch".into(),
|
||||
extra: Some(data),
|
||||
};
|
||||
|
||||
let experiment_empty_bin = bincode::serialize(&experiment_empty).unwrap();
|
||||
let experiment_data_bin = bincode::serialize(&experiment_data).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
experiment_empty,
|
||||
bincode::deserialize(&experiment_empty_bin).unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
experiment_data,
|
||||
bincode::deserialize(&experiment_data_bin).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[rustfmt::skip] // Let's not add newlines unnecessarily
|
||||
fn deserialize_old_encoding() {
|
||||
// generated by `bincode::serialize` as of Glean commit ac27fceb7c0d5a7288d7d569e8c5c5399a53afb2
|
||||
// empty was generated from: `RecordedExperimentData { branch: "branch".into(), extra: None, }`
|
||||
let empty_bin = vec![6, 0, 0, 0, 0, 0, 0, 0, 98, 114, 97, 110, 99, 104];
|
||||
// data was generated from: RecordedExperimentData { branch: "branch".into(), extra: Some({"a key": "a value"}), };
|
||||
let data_bin = vec![6, 0, 0, 0, 0, 0, 0, 0, 98, 114, 97, 110, 99, 104,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0,
|
||||
97, 32, 107, 101, 121, 7, 0, 0, 0, 0, 0, 0, 0, 97,
|
||||
32, 118, 97, 108, 117, 101];
|
||||
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("a key".to_string(), "a value".to_string());
|
||||
let experiment_data = RecordedExperimentData { branch: "branch".into(), extra: Some(data), };
|
||||
|
||||
// We can't actually decode old experiment data.
|
||||
// Luckily Glean did store experiments in the database before commit ac27fceb7c0d5a7288d7d569e8c5c5399a53afb2.
|
||||
let experiment_empty: Result<RecordedExperimentData, _> = bincode::deserialize(&empty_bin);
|
||||
assert!(experiment_empty.is_err());
|
||||
|
||||
assert_eq!(experiment_data, bincode::deserialize(&data_bin).unwrap());
|
||||
}
|
||||
}
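These tests pin the bincode byte layout of `RecordedExperimentData`, since experiment data round-trips through the database as raw bytes and must keep deserializing across releases. A small self-contained round-trip of the same shape; `Sample` is a hypothetical struct, and the byte count assumes bincode 1.x defaults (u64 little-endian length prefixes and a one-byte `Option` tag):

use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
struct Sample {
    branch: String,
    extra: Option<HashMap<String, String>>,
}

fn main() {
    let sample = Sample { branch: "branch".into(), extra: None };
    let bytes = bincode::serialize(&sample).unwrap();
    // 8-byte length prefix + "branch" + 1-byte None tag (no skip_serializing_if here).
    assert_eq!(bytes.len(), 8 + 6 + 1);
    let back: Sample = bincode::deserialize(&bytes).unwrap();
    assert_eq!(sample, back);
}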
|
||||
|
|
|
@ -36,7 +36,7 @@ impl PingType {
|
|||
}
|
||||
}
|
||||
|
||||
/// Send the ping.
|
||||
/// Submit the ping for eventual uploading
|
||||
///
|
||||
/// ## Arguments
|
||||
///
|
||||
|
@ -44,8 +44,8 @@ impl PingType {
|
|||
///
|
||||
/// ## Return value
|
||||
///
|
||||
/// See [`Glean#send_ping`](../struct.Glean.html#method.send_ping) for details.
|
||||
pub fn send(&self, glean: &Glean) -> Result<bool> {
|
||||
glean.send_ping(self)
|
||||
/// See [`Glean#submit_ping`](../struct.Glean.html#method.submit_ping) for details.
|
||||
pub fn submit(&self, glean: &Glean) -> Result<bool> {
|
||||
glean.submit_ping(self)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -171,7 +171,7 @@ impl TimingDistributionMetric {
|
|||
|
||||
if duration > MAX_SAMPLE_TIME {
|
||||
let msg = "Sample is longer than 10 minutes";
|
||||
record_error(glean, &self.meta, ErrorType::InvalidValue, msg, None);
|
||||
record_error(glean, &self.meta, ErrorType::InvalidOverflow, msg, None);
|
||||
duration = MAX_SAMPLE_TIME;
|
||||
}
|
||||
|
||||
|
@ -226,10 +226,11 @@ impl TimingDistributionMetric {
|
|||
/// ## Notes
|
||||
///
|
||||
/// Discards any negative value in `samples` and report an `ErrorType::InvalidValue`
|
||||
/// for each of them.
|
||||
/// for each of them. Reports an `ErrorType::InvalidOverflow` error for samples that
|
||||
/// are longer than `MAX_SAMPLE_TIME`.
|
||||
pub fn accumulate_samples_signed(&mut self, glean: &Glean, samples: Vec<i64>) {
|
||||
let mut num_negative_samples = 0;
|
||||
let mut num_too_log_samples = 0;
|
||||
let mut num_too_long_samples = 0;
|
||||
|
||||
glean.storage().record_with(glean, &self.meta, |old_value| {
|
||||
let mut hist = match old_value {
|
||||
|
@ -244,7 +245,7 @@ impl TimingDistributionMetric {
|
|||
let sample = sample as u64;
|
||||
let mut sample = self.time_unit.as_nanos(sample);
|
||||
if sample > MAX_SAMPLE_TIME {
|
||||
num_too_log_samples += 1;
|
||||
num_too_long_samples += 1;
|
||||
sample = MAX_SAMPLE_TIME;
|
||||
}
|
||||
|
||||
|
@ -265,17 +266,17 @@ impl TimingDistributionMetric {
|
|||
);
|
||||
}
|
||||
|
||||
if num_too_log_samples > 0 {
|
||||
if num_too_long_samples > 0 {
|
||||
let msg = format!(
|
||||
"Accumulated {} samples longer than 10 minutes",
|
||||
num_too_log_samples
|
||||
num_too_long_samples
|
||||
);
|
||||
record_error(
|
||||
glean,
|
||||
&self.meta,
|
||||
ErrorType::InvalidValue,
|
||||
ErrorType::InvalidOverflow,
|
||||
msg,
|
||||
num_too_log_samples,
|
||||
num_too_long_samples,
|
||||
);
|
||||
}
|
||||
}
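`accumulate_samples_signed` drops negative samples (reporting `InvalidValue`) and clamps samples above the ten-minute cap (now reporting `InvalidOverflow`), recording one error per batch with the offending count. A standalone sketch of that validation step; the constant and function names here are illustrative:

const MAX_SAMPLE_TIME: u64 = 1000 * 1000 * 1000 * 60 * 10; // 10 minutes in nanoseconds

// Returns the accepted (clamped) samples plus the counts to report as errors.
fn validate_samples(samples: &[i64]) -> (Vec<u64>, usize, usize) {
    let mut accepted = Vec::new();
    let mut num_negative = 0;
    let mut num_too_long = 0;
    for &sample in samples {
        if sample < 0 {
            num_negative += 1;
            continue;
        }
        let mut sample = sample as u64;
        if sample > MAX_SAMPLE_TIME {
            num_too_long += 1;
            sample = MAX_SAMPLE_TIME;
        }
        accepted.push(sample);
    }
    (accepted, num_negative, num_too_long)
}

fn main() {
    let (ok, neg, long) = validate_samples(&[-5, 42, i64::MAX]);
    assert_eq!(ok, vec![42, MAX_SAMPLE_TIME]);
    assert_eq!((neg, long), (1, 1));
}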
|
||||
|
|
|
@ -48,29 +48,6 @@ impl PingMaker {
|
|||
Self
|
||||
}
|
||||
|
||||
/// Set the next ping sequence number to the provided one.
|
||||
///
|
||||
/// This function stores the next sequence number (the one that
|
||||
/// will be returned next time `get_ping_seq` is called) in the
|
||||
/// glean-core store. The main purpose of this function is to allow
|
||||
/// overriding sequence numbers with migration data coming from
|
||||
/// glean-ac.
|
||||
pub(super) fn set_ping_seq(&self, glean: &Glean, storage_name: &str, next_seq: i32) {
|
||||
// Sequence numbers are stored as a counter under a name that includes the storage name
|
||||
let seq = CounterMetric::new(CommonMetricData {
|
||||
name: format!("{}#sequence", storage_name),
|
||||
// We don't need a category, the name is already unique
|
||||
category: "".into(),
|
||||
send_in_pings: vec![INTERNAL_STORAGE.into()],
|
||||
lifetime: Lifetime::User,
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
// It's safe to add `next_seq` because the glean-ac migration code
|
||||
// clears the glean-core database before the migration starts.
|
||||
seq.add(glean, next_seq);
|
||||
}
|
||||
|
||||
/// Get, and then increment, the sequence number for a given ping.
|
||||
///
|
||||
/// This is crate-internal exclusively for enabling the migration tests.
|
||||
|
@ -244,9 +221,9 @@ impl PingMaker {
|
|||
/// The directory will be created inside the `data_path`.
|
||||
/// The `pings` directory (and its parents) is created if it does not exist.
|
||||
fn get_pings_dir(&self, data_path: &Path, ping_type: Option<&str>) -> std::io::Result<PathBuf> {
|
||||
// Use a special directory for deletion_request pings
|
||||
// Use a special directory for deletion-request pings
|
||||
let pings_dir = match ping_type {
|
||||
Some(ping_type) if ping_type == "deletion_request" => {
|
||||
Some(ping_type) if ping_type == "deletion-request" => {
|
||||
data_path.join("deletion_request")
|
||||
}
|
||||
_ => data_path.join("pending_pings"),
|
||||
|
@ -338,17 +315,4 @@ mod test {
|
|||
assert_eq!(0, ping_maker.get_ping_seq(&glean, "custom"));
|
||||
assert_eq!(1, ping_maker.get_ping_seq(&glean, "custom"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn set_ping_seq_must_correctly_set_sequence_numbers() {
|
||||
let (glean, _) = new_glean();
|
||||
let ping_maker = PingMaker::new();
|
||||
|
||||
ping_maker.set_ping_seq(&glean, "custom", 3);
|
||||
assert_eq!(3, ping_maker.get_ping_seq(&glean, "custom"));
|
||||
|
||||
ping_maker.set_ping_seq(&glean, "other", 7);
|
||||
assert_eq!(7, ping_maker.get_ping_seq(&glean, "other"));
|
||||
assert_eq!(8, ping_maker.get_ping_seq(&glean, "other"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,24 +9,20 @@ use serde_json::json;
|
|||
|
||||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// SKIPPED from glean-ac: string deserializer should correctly parse integers
|
||||
// This test doesn't really apply to rkv
|
||||
|
||||
#[test]
|
||||
fn boolean_serializer_should_correctly_serialize_boolean() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = BooleanMetric::new(CommonMetricData {
|
||||
name: "boolean_metric".into(),
|
||||
|
@ -51,7 +47,7 @@ fn boolean_serializer_should_correctly_serialize_boolean() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _t) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -64,7 +60,7 @@ fn boolean_serializer_should_correctly_serialize_boolean() {
|
|||
|
||||
#[test]
|
||||
fn set_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = BooleanMetric::new(CommonMetricData {
|
||||
|
|
|
@ -45,8 +45,11 @@ pub const GLOBAL_APPLICATION_ID: &str = "org.mozilla.glean.test.app";
|
|||
|
||||
// Create a new instance of Glean with a temporary directory.
|
||||
// We need to keep the `TempDir` alive, so that it's not deleted before we stop using it.
|
||||
pub fn new_glean() -> (Glean, tempfile::TempDir) {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
pub fn new_glean(tempdir: Option<tempfile::TempDir>) -> (Glean, tempfile::TempDir) {
|
||||
let dir = match tempdir {
|
||||
Some(tempdir) => tempdir,
|
||||
None => tempfile::tempdir().unwrap(),
|
||||
};
|
||||
let tmpname = dir.path().display().to_string();
|
||||
|
||||
let cfg = glean_core::Configuration {
|
||||
|
@ -91,7 +94,7 @@ pub fn get_queued_pings(data_path: &Path) -> Result<Vec<(String, JsonValue)>> {
|
|||
get_pings(&data_path.join("pending_pings"))
|
||||
}
|
||||
|
||||
/// Get a vector of the currently queued `deletion_request` pings.
|
||||
/// Get a vector of the currently queued `deletion-request` pings.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
|
@ -99,7 +102,7 @@ pub fn get_queued_pings(data_path: &Path) -> Result<Vec<(String, JsonValue)>> {
|
|||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A vector of all queued `deletion_request` pings. Each entry is a pair `(url, json_data)`,
|
||||
/// A vector of all queued `deletion-request` pings. Each entry is a pair `(url, json_data)`,
|
||||
/// where `url` is the endpoint the ping will go to, and `json_data` is the JSON payload.
|
||||
pub fn get_deletion_pings(data_path: &Path) -> Result<Vec<(String, JsonValue)>> {
|
||||
get_pings(&data_path.join("deletion_request"))
|
||||
|
|
|
@ -10,7 +10,7 @@ use serde_json::json;
|
|||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, ErrorType};
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// Tests ported from glean-ac
|
||||
|
||||
|
@ -19,17 +19,13 @@ use glean_core::{CommonMetricData, Glean, Lifetime};
|
|||
|
||||
#[test]
|
||||
fn counter_serializer_should_correctly_serialize_counters() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = CounterMetric::new(CommonMetricData {
|
||||
name: "counter_metric".into(),
|
||||
|
@ -54,7 +50,7 @@ fn counter_serializer_should_correctly_serialize_counters() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg).unwrap();
|
||||
let (glean, _t) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -67,7 +63,7 @@ fn counter_serializer_should_correctly_serialize_counters() {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = CounterMetric::new(CommonMetricData {
|
||||
|
@ -98,7 +94,7 @@ fn set_value_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn counters_must_not_increment_when_passed_zero_or_negative() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = CounterMetric::new(CommonMetricData {
|
||||
name: "counter_metric".into(),
|
||||
|
@ -135,7 +131,7 @@ fn counters_must_not_increment_when_passed_zero_or_negative() {
|
|||
|
||||
#[test]
|
||||
fn transformation_works() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let counter: CounterMetric = CounterMetric::new(CommonMetricData {
|
||||
name: "transformation".into(),
|
||||
|
@ -162,7 +158,7 @@ fn transformation_works() {
|
|||
|
||||
#[test]
|
||||
fn saturates_at_boundary() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let counter: CounterMetric = CounterMetric::new(CommonMetricData {
|
||||
name: "transformation".into(),
|
||||
|
|
|
@ -10,7 +10,7 @@ use serde_json::json;
|
|||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, ErrorType};
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// Tests ported from glean-ac
|
||||
|
||||
|
@ -19,18 +19,11 @@ mod linear {
|
|||
|
||||
#[test]
|
||||
fn serializer_should_correctly_serialize_custom_distribution() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
tempdir = dir;
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -59,7 +52,7 @@ mod linear {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -73,7 +66,7 @@ mod linear {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
|
@ -114,7 +107,7 @@ mod linear {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_stores_memory_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -162,7 +155,7 @@ mod linear {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_handles_negative_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -211,7 +204,7 @@ mod linear {
|
|||
|
||||
#[test]
|
||||
fn json_snapshotting_works() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
name: "distribution".into(),
|
||||
|
@ -239,18 +232,11 @@ mod exponential {
|
|||
|
||||
#[test]
|
||||
fn serializer_should_correctly_serialize_custom_distribution() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
tempdir = dir;
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -279,7 +265,7 @@ mod exponential {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -293,7 +279,7 @@ mod exponential {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
|
@ -334,7 +320,7 @@ mod exponential {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_stores_memory_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -382,7 +368,7 @@ mod exponential {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_handles_negative_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -431,7 +417,7 @@ mod exponential {
|
|||
|
||||
#[test]
|
||||
fn json_snapshotting_works() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let metric = CustomDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
name: "distribution".into(),
|
||||
|
|
|
@ -10,7 +10,7 @@ use serde_json::json;
|
|||
|
||||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// SKIPPED from glean-ac: datetime deserializer should correctly parse integers
|
||||
// This test doesn't really apply to rkv
|
||||
|
@ -18,17 +18,13 @@ use glean_core::{CommonMetricData, Glean, Lifetime};
|
|||
#[test]
|
||||
fn datetime_serializer_should_correctly_serialize_datetime() {
|
||||
let expected_value = "1983-04-13T12:09+00:00";
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = DatetimeMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -60,7 +56,7 @@ fn datetime_serializer_should_correctly_serialize_datetime() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -73,7 +69,7 @@ fn datetime_serializer_should_correctly_serialize_datetime() {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = DatetimeMetric::new(
|
||||
|
@ -112,7 +108,7 @@ fn set_value_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn test_that_truncation_works() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
// `1985-07-03T12:09:14.000560274+01:00`
|
||||
let high_res_datetime = FixedOffset::east(3600)
|
||||
|
|
|
@ -15,7 +15,7 @@ use glean_core::{CommonMetricData, Lifetime};
|
|||
fn record_properly_records_without_optional_arguments() {
|
||||
let store_names = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = EventMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -42,7 +42,7 @@ fn record_properly_records_without_optional_arguments() {
|
|||
|
||||
#[test]
|
||||
fn record_properly_records_with_optional_arguments() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let store_names = vec!["store1".into(), "store2".into()];
|
||||
|
||||
|
@ -83,7 +83,7 @@ fn record_properly_records_with_optional_arguments() {
|
|||
|
||||
#[test]
|
||||
fn snapshot_returns_none_if_nothing_is_recorded_in_the_store() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
assert!(glean
|
||||
.event_storage()
|
||||
|
@ -93,7 +93,7 @@ fn snapshot_returns_none_if_nothing_is_recorded_in_the_store() {
|
|||
|
||||
#[test]
|
||||
fn snapshot_correctly_clears_the_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let store_names = vec!["store1".into(), "store2".into()];
|
||||
|
||||
|
@ -101,7 +101,7 @@ fn snapshot_correctly_clears_the_stores() {
|
|||
CommonMetricData {
|
||||
name: "test_event_clear".into(),
|
||||
category: "telemetry".into(),
|
||||
send_in_pings: store_names.clone(),
|
||||
send_in_pings: store_names,
|
||||
disabled: false,
|
||||
lifetime: Lifetime::Ping,
|
||||
..Default::default()
|
||||
|
@ -147,7 +147,7 @@ fn snapshot_correctly_clears_the_stores() {
|
|||
|
||||
#[test]
|
||||
fn test_sending_of_event_ping_when_it_fills_up() {
|
||||
let (mut glean, _t) = new_glean();
|
||||
let (mut glean, _t) = new_glean(None);
|
||||
|
||||
let store_names: Vec<String> = vec!["events".into()];
|
||||
|
||||
|
@ -159,7 +159,7 @@ fn test_sending_of_event_ping_when_it_fills_up() {
|
|||
CommonMetricData {
|
||||
name: "click".into(),
|
||||
category: "ui".into(),
|
||||
send_in_pings: store_names.clone(),
|
||||
send_in_pings: store_names,
|
||||
disabled: false,
|
||||
lifetime: Lifetime::Ping,
|
||||
..Default::default()
|
||||
|
@ -199,7 +199,7 @@ fn test_sending_of_event_ping_when_it_fills_up() {
|
|||
|
||||
#[test]
|
||||
fn extra_keys_must_be_recorded_and_truncated_if_needed() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let store_names: Vec<String> = vec!["store1".into()];
|
||||
|
||||
|
@ -207,7 +207,7 @@ fn extra_keys_must_be_recorded_and_truncated_if_needed() {
|
|||
CommonMetricData {
|
||||
name: "testEvent".into(),
|
||||
category: "ui".into(),
|
||||
send_in_pings: store_names.clone(),
|
||||
send_in_pings: store_names,
|
||||
disabled: false,
|
||||
lifetime: Lifetime::Ping,
|
||||
..Default::default()
|
||||
|
@ -240,7 +240,7 @@ fn extra_keys_must_be_recorded_and_truncated_if_needed() {
|
|||
|
||||
#[test]
|
||||
fn snapshot_sorts_the_timestamps() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = EventMetric::new(
|
||||
CommonMetricData {
|
||||
|
|
|
@ -9,12 +9,11 @@ use serde_json::json;
|
|||
|
||||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::Glean;
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
#[test]
|
||||
fn can_create_labeled_counter_metric() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -46,7 +45,7 @@ fn can_create_labeled_counter_metric() {
|
|||
|
||||
#[test]
|
||||
fn can_create_labeled_string_metric() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
StringMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -78,7 +77,7 @@ fn can_create_labeled_string_metric() {
|
|||
|
||||
#[test]
|
||||
fn can_create_labeled_bool_metric() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
BooleanMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -110,7 +109,7 @@ fn can_create_labeled_bool_metric() {
|
|||
|
||||
#[test]
|
||||
fn can_use_multiple_labels() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -148,7 +147,7 @@ fn can_use_multiple_labels() {
|
|||
|
||||
#[test]
|
||||
fn labels_are_checked_against_static_list() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -193,7 +192,7 @@ fn labels_are_checked_against_static_list() {
|
|||
|
||||
#[test]
|
||||
fn dynamic_labels_too_long() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -228,7 +227,7 @@ fn dynamic_labels_too_long() {
|
|||
|
||||
#[test]
|
||||
fn dynamic_labels_regex_mimsatch() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -274,7 +273,7 @@ fn dynamic_labels_regex_mimsatch() {
|
|||
|
||||
#[test]
|
||||
fn dynamic_labels_regex_allowed() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
name: "labeled_metric".into(),
|
||||
|
@ -327,16 +326,10 @@ fn dynamic_labels_regex_allowed() {
|
|||
|
||||
#[test]
|
||||
fn seen_labels_get_reloaded_from_disk() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
tempdir = dir;
|
||||
|
||||
let mut labeled = LabeledMetric::new(
|
||||
CounterMetric::new(CommonMetricData {
|
||||
|
@ -376,7 +369,7 @@ fn seen_labels_get_reloaded_from_disk() {
|
|||
|
||||
// Force a reload
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
|
||||
// Try to store another label
|
||||
labeled.get("new_label").add(&glean, 40);
|
||||
|
|
|
@ -56,7 +56,7 @@ fn serializer_should_correctly_serialize_memory_distribution() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let glean = Glean::new(cfg).unwrap();
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -70,7 +70,7 @@ fn serializer_should_correctly_serialize_memory_distribution() {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = MemoryDistributionMetric::new(
|
||||
|
@ -108,7 +108,7 @@ fn set_value_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_stores_memory_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = MemoryDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -155,7 +155,7 @@ fn the_accumulate_samples_api_correctly_stores_memory_values() {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_handles_negative_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = MemoryDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
|
|
@ -10,7 +10,7 @@ use glean_core::CommonMetricData;
|
|||
|
||||
#[test]
|
||||
fn stores_strings() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let metric = StringMetric::new(CommonMetricData::new("local", "string", "baseline"));
|
||||
|
||||
assert_eq!(None, metric.test_get_value(&glean, "baseline"));
|
||||
|
@ -24,7 +24,7 @@ fn stores_strings() {
|
|||
|
||||
#[test]
|
||||
fn stores_counters() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let metric = CounterMetric::new(CommonMetricData::new("local", "counter", "baseline"));
|
||||
|
||||
assert_eq!(None, metric.test_get_value(&glean, "baseline"));
|
||||
|
|
|
@ -10,7 +10,7 @@ use glean_core::CommonMetricData;
|
|||
|
||||
#[test]
|
||||
fn write_ping_to_disk() {
|
||||
let (mut glean, _temp) = new_glean();
|
||||
let (mut glean, _temp) = new_glean(None);
|
||||
|
||||
let ping = PingType::new("metrics", true, false);
|
||||
glean.register_ping_type(&ping);
|
||||
|
@ -24,14 +24,14 @@ fn write_ping_to_disk() {
|
|||
});
|
||||
counter.add(&glean, 1);
|
||||
|
||||
assert!(ping.send(&glean).unwrap());
|
||||
assert!(ping.submit(&glean).unwrap());
|
||||
|
||||
assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn disabling_upload_clears_pending_pings() {
|
||||
let (mut glean, _) = new_glean();
|
||||
let (mut glean, _) = new_glean(None);
|
||||
|
||||
let ping = PingType::new("metrics", true, false);
|
||||
glean.register_ping_type(&ping);
|
||||
|
@ -45,7 +45,7 @@ fn disabling_upload_clears_pending_pings() {
|
|||
});
|
||||
|
||||
counter.add(&glean, 1);
|
||||
assert!(ping.send(&glean).unwrap());
|
||||
assert!(ping.submit(&glean).unwrap());
|
||||
assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
// At this point no deletion_request ping should exist
|
||||
// (that is: it's directory should not exist at all)
|
||||
|
@ -60,13 +60,13 @@ fn disabling_upload_clears_pending_pings() {
|
|||
assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
|
||||
counter.add(&glean, 1);
|
||||
assert!(ping.send(&glean).unwrap());
|
||||
assert!(ping.submit(&glean).unwrap());
|
||||
assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_pings_with_flag_are_sent() {
|
||||
let (mut glean, _) = new_glean();
|
||||
let (mut glean, _) = new_glean(None);
|
||||
|
||||
let ping1 = PingType::new("custom-ping1", true, true);
|
||||
glean.register_ping_type(&ping1);
|
||||
|
@ -76,10 +76,10 @@ fn empty_pings_with_flag_are_sent() {
|
|||
// No data is stored in either of the custom pings
|
||||
|
||||
// Sending this should succeed.
|
||||
assert_eq!(true, ping1.send(&glean).unwrap());
|
||||
assert_eq!(true, ping1.submit(&glean).unwrap());
|
||||
assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
|
||||
// Sending this should fail.
|
||||
assert_eq!(false, ping2.send(&glean).unwrap());
|
||||
assert_eq!(false, ping2.submit(&glean).unwrap());
|
||||
assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
|
||||
}
|
||||
|
|
|
@@ -156,7 +156,7 @@ fn seq_number_must_be_sequential() {
 
 #[test]
 fn test_clear_pending_pings() {
-    let (mut glean, _) = new_glean();
+    let (mut glean, _) = new_glean(None);
     let ping_maker = PingMaker::new();
     let ping_type = PingType::new("store1", true, false);
     glean.register_ping_type(&ping_type);
@@ -172,7 +172,7 @@ fn test_clear_pending_pings() {
     });
     metric.set(&glean, true);
 
-    assert!(glean.send_ping(&ping_type).is_ok());
+    assert!(glean.submit_ping(&ping_type).is_ok());
     assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
 
     assert!(ping_maker
@@ -180,3 +180,25 @@ fn test_clear_pending_pings() {
         .is_ok());
     assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
 }
+
+#[test]
+fn test_no_pings_submitted_if_upload_disabled() {
+    // Regression test, bug 1603571
+
+    let (mut glean, _) = new_glean(None);
+    let ping_type = PingType::new("store1", true, true);
+    glean.register_ping_type(&ping_type);
+
+    assert!(glean.submit_ping(&ping_type).is_ok());
+    assert_eq!(1, get_queued_pings(glean.get_data_path()).unwrap().len());
+
+    // Disable upload, then try to submit
+    glean.set_upload_enabled(false);
+
+    assert!(glean.submit_ping(&ping_type).is_ok());
+    assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
+
+    // Test again through the direct call
+    assert!(ping_type.submit(&glean).is_ok());
+    assert_eq!(0, get_queued_pings(glean.get_data_path()).unwrap().len());
+}
|
|
|
@ -10,7 +10,7 @@ use serde_json::json;
|
|||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, ErrorType};
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// Tests ported from glean-ac
|
||||
|
||||
|
@ -19,17 +19,13 @@ use glean_core::{CommonMetricData, Glean, Lifetime};
|
|||
|
||||
#[test]
|
||||
fn quantity_serializer_should_correctly_serialize_quantities() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = QuantityMetric::new(CommonMetricData {
|
||||
name: "quantity_metric".into(),
|
||||
|
@ -54,7 +50,7 @@ fn quantity_serializer_should_correctly_serialize_quantities() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -67,7 +63,7 @@ fn quantity_serializer_should_correctly_serialize_quantities() {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = QuantityMetric::new(CommonMetricData {
|
||||
|
@ -98,7 +94,7 @@ fn set_value_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn quantities_must_not_set_when_passed_negative() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = QuantityMetric::new(CommonMetricData {
|
||||
name: "quantity_metric".into(),
|
||||
|
|
|
@ -13,7 +13,7 @@ use glean_core::{CommonMetricData, Lifetime};
|
|||
|
||||
#[test]
|
||||
fn snapshot_returns_none_if_nothing_is_recorded_in_the_store() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
assert!(StorageManager
|
||||
.snapshot(glean.storage(), "unknown_store", true)
|
||||
.is_none())
|
||||
|
@ -21,7 +21,7 @@ fn snapshot_returns_none_if_nothing_is_recorded_in_the_store() {
|
|||
|
||||
#[test]
|
||||
fn can_snapshot() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let local_metric = StringMetric::new(CommonMetricData {
|
||||
name: "can_snapshot_local_metric".into(),
|
||||
|
@ -39,13 +39,13 @@ fn can_snapshot() {
|
|||
|
||||
#[test]
|
||||
fn snapshot_correctly_clears_the_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = CounterMetric::new(CommonMetricData {
|
||||
name: "metric".into(),
|
||||
category: "telemetry".into(),
|
||||
send_in_pings: store_names.clone(),
|
||||
send_in_pings: store_names,
|
||||
disabled: false,
|
||||
lifetime: Lifetime::Ping,
|
||||
..Default::default()
|
||||
|
@ -71,7 +71,7 @@ fn storage_is_thread_safe() {
|
|||
use std::sync::{Arc, Barrier, Mutex};
|
||||
use std::thread;
|
||||
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let glean = Arc::new(Mutex::new(glean));
|
||||
|
||||
let threadsafe_metric = CounterMetric::new(CommonMetricData {
|
||||
|
|
|
@ -10,24 +10,20 @@ use serde_json::json;
|
|||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, ErrorType};
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// SKIPPED from glean-ac: string deserializer should correctly parse integers
|
||||
// This test doesn't really apply to rkv
|
||||
|
||||
#[test]
|
||||
fn string_serializer_should_correctly_serialize_strings() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = StringMetric::new(CommonMetricData {
|
||||
name: "string_metric".into(),
|
||||
|
@ -52,7 +48,7 @@ fn string_serializer_should_correctly_serialize_strings() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -65,7 +61,7 @@ fn string_serializer_should_correctly_serialize_strings() {
|
|||
|
||||
#[test]
|
||||
fn set_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = StringMetric::new(CommonMetricData {
|
||||
|
@ -97,7 +93,7 @@ fn set_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn long_string_values_are_truncated() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = StringMetric::new(CommonMetricData {
|
||||
name: "string_metric".into(),
|
||||
|
|
|
@ -9,11 +9,11 @@ use serde_json::json;
|
|||
|
||||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, CommonMetricData, ErrorType, Glean, Lifetime};
|
||||
use glean_core::{test_get_num_recorded_errors, CommonMetricData, ErrorType, Lifetime};
|
||||
|
||||
#[test]
|
||||
fn list_can_store_multiple_items() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let list: StringListMetric = StringListMetric::new(CommonMetricData {
|
||||
name: "list".into(),
|
||||
|
@ -43,17 +43,13 @@ fn list_can_store_multiple_items() {
|
|||
|
||||
#[test]
|
||||
fn stringlist_serializer_should_correctly_serialize_stringlists() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
name: "string_list_metric".into(),
|
||||
|
@ -67,7 +63,7 @@ fn stringlist_serializer_should_correctly_serialize_stringlists() {
|
|||
}
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
|
@ -81,7 +77,7 @@ fn stringlist_serializer_should_correctly_serialize_stringlists() {
|
|||
|
||||
#[test]
|
||||
fn set_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
|
@ -109,7 +105,7 @@ fn set_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn long_string_values_are_truncated() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
name: "string_list_metric".into(),
|
||||
|
@ -152,7 +148,7 @@ fn long_string_values_are_truncated() {
|
|||
|
||||
#[test]
|
||||
fn disabled_string_lists_dont_record() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
name: "string_list_metric".into(),
|
||||
|
@ -181,7 +177,7 @@ fn disabled_string_lists_dont_record() {
|
|||
|
||||
#[test]
|
||||
fn string_lists_dont_exceed_max_items() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
name: "string_list_metric".into(),
|
||||
|
@ -230,7 +226,7 @@ fn string_lists_dont_exceed_max_items() {
|
|||
|
||||
#[test]
|
||||
fn set_does_not_record_error_when_receiving_empty_list() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = StringListMetric::new(CommonMetricData {
|
||||
name: "string_list_metric".into(),
|
||||
|
|
|
@ -12,25 +12,21 @@ use serde_json::json;
|
|||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{test_get_num_recorded_errors, ErrorType};
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
// Tests ported from glean-ac
|
||||
|
||||
#[test]
|
||||
fn serializer_should_correctly_serialize_timespans() {
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
let duration = 60;
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -56,7 +52,7 @@ fn serializer_should_correctly_serialize_timespans() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -70,7 +66,7 @@ fn serializer_should_correctly_serialize_timespans() {
|
|||
|
||||
#[test]
|
||||
fn single_elapsed_time_must_be_recorded() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -100,7 +96,7 @@ fn single_elapsed_time_must_be_recorded() {
|
|||
|
||||
#[test]
|
||||
fn second_timer_run_is_skipped() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -142,7 +138,7 @@ fn second_timer_run_is_skipped() {
|
|||
|
||||
#[test]
|
||||
fn recorded_time_conforms_to_resolution() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut ns_metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -188,7 +184,7 @@ fn recorded_time_conforms_to_resolution() {
|
|||
|
||||
#[test]
|
||||
fn cancel_does_not_store() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -210,7 +206,7 @@ fn cancel_does_not_store() {
|
|||
|
||||
#[test]
|
||||
fn nothing_stored_before_stop() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -236,7 +232,7 @@ fn nothing_stored_before_stop() {
|
|||
|
||||
#[test]
|
||||
fn set_raw_time() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -259,7 +255,7 @@ fn set_raw_time() {
|
|||
|
||||
#[test]
|
||||
fn set_raw_time_does_nothing_when_timer_running() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -291,7 +287,7 @@ fn set_raw_time_does_nothing_when_timer_running() {
|
|||
|
||||
#[test]
|
||||
fn timespan_is_not_tracked_across_upload_toggle() {
|
||||
let (mut glean, _t) = new_glean();
|
||||
let (mut glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimespanMetric::new(
|
||||
CommonMetricData {
|
||||
|
|
|
@ -59,7 +59,7 @@ fn serializer_should_correctly_serialize_timing_distribution() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let glean = Glean::new(cfg).unwrap();
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -73,7 +73,7 @@ fn serializer_should_correctly_serialize_timing_distribution() {
|
|||
|
||||
#[test]
|
||||
fn set_value_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
|
||||
let duration = 1;
|
||||
|
@ -111,7 +111,7 @@ fn set_value_properly_sets_the_value_in_all_stores() {
|
|||
|
||||
#[test]
|
||||
fn timing_distributions_must_not_accumulate_negative_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let duration = 60;
|
||||
let time_unit = TimeUnit::Nanosecond;
|
||||
|
@ -149,7 +149,7 @@ fn timing_distributions_must_not_accumulate_negative_values() {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_stores_timing_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimingDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -196,7 +196,7 @@ fn the_accumulate_samples_api_correctly_stores_timing_values() {
|
|||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_handles_negative_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimingDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
@ -238,9 +238,56 @@ fn the_accumulate_samples_api_correctly_handles_negative_values() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn the_accumulate_samples_api_correctly_handles_overflowing_values() {
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimingDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
name: "distribution".into(),
|
||||
category: "telemetry".into(),
|
||||
send_in_pings: vec!["store1".into()],
|
||||
disabled: false,
|
||||
lifetime: Lifetime::Ping,
|
||||
..Default::default()
|
||||
},
|
||||
TimeUnit::Nanosecond,
|
||||
);
|
||||
|
||||
// The MAX_SAMPLE_TIME is the same from `metrics/timing_distribution.rs`.
|
||||
const MAX_SAMPLE_TIME: u64 = 1000 * 1000 * 1000 * 60 * 10;
|
||||
let overflowing_val = MAX_SAMPLE_TIME as i64 + 1;
|
||||
// Accumulate the samples.
|
||||
metric.accumulate_samples_signed(&glean, [overflowing_val, 1, 2, 3].to_vec());
|
||||
|
||||
let val = metric
|
||||
.test_get_value(&glean, "store1")
|
||||
.expect("Value should be stored");
|
||||
|
||||
// Overflowing values are truncated to MAX_SAMPLE_TIME and recorded.
|
||||
assert_eq!(val.sum(), MAX_SAMPLE_TIME + 6);
|
||||
assert_eq!(val.count(), 4);
|
||||
|
||||
// We should get a sample in each of the first 3 buckets.
|
||||
assert_eq!(1, val.values()[&1]);
|
||||
assert_eq!(1, val.values()[&2]);
|
||||
assert_eq!(1, val.values()[&3]);
|
||||
|
||||
// 1 error should be reported.
|
||||
assert_eq!(
|
||||
Ok(1),
|
||||
test_get_num_recorded_errors(
|
||||
&glean,
|
||||
metric.meta(),
|
||||
ErrorType::InvalidOverflow,
|
||||
Some("store1")
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn large_nanoseconds_values() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
|
||||
let mut metric = TimingDistributionMetric::new(
|
||||
CommonMetricData {
|
||||
|
|
|
@ -9,11 +9,11 @@ use serde_json::json;
|
|||
|
||||
use glean_core::metrics::*;
|
||||
use glean_core::storage::StorageManager;
|
||||
use glean_core::{CommonMetricData, Glean, Lifetime};
|
||||
use glean_core::{CommonMetricData, Lifetime};
|
||||
|
||||
#[test]
|
||||
fn uuid_is_generated_and_stored() {
|
||||
let (mut glean, _t) = new_glean();
|
||||
let (mut glean, _t) = new_glean(None);
|
||||
|
||||
let uuid: UuidMetric = UuidMetric::new(CommonMetricData {
|
||||
name: "uuid".into(),
|
||||
|
@ -41,17 +41,13 @@ fn uuid_is_generated_and_stored() {
|
|||
fn uuid_serializer_should_correctly_serialize_uuids() {
|
||||
let value = uuid::Uuid::new_v4();
|
||||
|
||||
let (_t, tmpname) = tempdir();
|
||||
let cfg = glean_core::Configuration {
|
||||
data_path: tmpname,
|
||||
application_id: GLOBAL_APPLICATION_ID.into(),
|
||||
upload_enabled: true,
|
||||
max_events: None,
|
||||
delay_ping_lifetime_io: false,
|
||||
};
|
||||
let (mut tempdir, _) = tempdir();
|
||||
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
// We give tempdir to the `new_glean` function...
|
||||
let (glean, dir) = new_glean(Some(tempdir));
|
||||
// And then we get it back once that function returns.
|
||||
tempdir = dir;
|
||||
|
||||
let metric = UuidMetric::new(CommonMetricData {
|
||||
name: "uuid_metric".into(),
|
||||
|
@ -76,7 +72,7 @@ fn uuid_serializer_should_correctly_serialize_uuids() {
|
|||
// Make a new Glean instance here, which should force reloading of the data from disk
|
||||
// so we can ensure it persisted, because it has User lifetime
|
||||
{
|
||||
let glean = Glean::new(cfg.clone()).unwrap();
|
||||
let (glean, _) = new_glean(Some(tempdir));
|
||||
let snapshot = StorageManager
|
||||
.snapshot_as_json(glean.storage(), "store1", true)
|
||||
.unwrap();
|
||||
|
@ -89,7 +85,7 @@ fn uuid_serializer_should_correctly_serialize_uuids() {
|
|||
|
||||
#[test]
|
||||
fn set_properly_sets_the_value_in_all_stores() {
|
||||
let (glean, _t) = new_glean();
|
||||
let (glean, _t) = new_glean(None);
|
||||
let store_names: Vec<String> = vec!["store1".into(), "store2".into()];
|
||||
let value = uuid::Uuid::new_v4();
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"CHANGELOG.md":"9adfab85300058ddcaf7c7805150bb64effb1d4d367ff6e5bc28d9e3ded226be","Cargo.toml":"a847452858135864ba7778ba219b734d3663a4a97494a81cc2d9d0d167146bd5","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"f234c25515132b7205fcf05545f1d5661bba6f2c189aca848290c7e32b832952","src/configuration.rs":"16e3ec9be802ac37b39d2ad4d1a4702d7e5b462071ef7f0265aea84de520641f","src/core_metrics.rs":"0490dfefaccdbb36ead40af02fa4aa75ebb98056884f2eef4340fe27a24f6cfd","src/lib.rs":"5764c3d3760dd071729ffd4de18d51668db76707f370b4214269c19bb98e890b","src/metrics/mod.rs":"3f0dc73758bc5836362742b4ab424032f4f398151c6e85ea879a4e7641614347","src/metrics/ping.rs":"27f5153b33060b817304e11ec58cb17184a6264f29fc45cd7042464c3a9263b5","src/system.rs":"7dbe5007bdaa3d4547c992757c26b7543e059f179078f06b946adad0a0bb4e34"},"package":"cb3bf34b889307680056e1493552dc6385becaea073ed99743a5e1a340ed2342"}
|
||||
{"files":{"CHANGELOG.md":"82c0aee7d1e503b71a31115208e303f6e689a4da91c98a95c8827bdc81080bf4","Cargo.toml":"b4dccc5d6540f08052daec134025c0510b8ea87f7d3a12d893616b56ba619efa","LICENSE":"1f256ecad192880510e84ad60474eab7589218784b9a50bc7ceee34c2b91f1d5","README.md":"f234c25515132b7205fcf05545f1d5661bba6f2c189aca848290c7e32b832952","src/configuration.rs":"16e3ec9be802ac37b39d2ad4d1a4702d7e5b462071ef7f0265aea84de520641f","src/core_metrics.rs":"0490dfefaccdbb36ead40af02fa4aa75ebb98056884f2eef4340fe27a24f6cfd","src/lib.rs":"f8dc2468495e9c145bad61adad365e76cb99df3968c64f70ae2a77ec28010506","src/metrics/mod.rs":"3f0dc73758bc5836362742b4ab424032f4f398151c6e85ea879a4e7641614347","src/metrics/ping.rs":"27f5153b33060b817304e11ec58cb17184a6264f29fc45cd7042464c3a9263b5","src/system.rs":"7dbe5007bdaa3d4547c992757c26b7543e059f179078f06b946adad0a0bb4e34","src/test.rs":"c9870591227a849eaf52f914abdd310ec5e01ce6ddf9946a3ac3990b601f8fb6"},"package":"182218f42395f369a1a5372d334699458a606aa557dd2e23998ba25704964e2c"}
|
|
@@ -1,3 +1,9 @@
+# v0.0.5 (2020-01-15)
+
+* Upgraded Glean dependency
+* See [full Glean changelog](https://github.com/mozilla/glean/blob/v24.0.0/CHANGELOG.md)
+* Reset core client metrics when re-enabling upload ([#620](https://github.com/mozilla/glean/pull/620))
+
 # v0.0.4 (2019-12-20)
 
 * Set target architecture in `client_info` ([#603](https://github.com/mozilla/glean/pull/603))
|
|
|
@@ -13,7 +13,7 @@
 [package]
 edition = "2018"
 name = "glean-preview"
-version = "0.0.4"
+version = "0.0.5"
 authors = ["Jan-Erik Rediger <jrediger@mozilla.com>", "The Glean Team <glean-team@mozilla.com>"]
 include = ["README.md", "LICENSE", "CHANGELOG.md", "src/**/*", "tests/**/*", "Cargo.toml"]
 description = "Nice Glean SDK Rust API"
@@ -22,7 +22,7 @@ keywords = ["telemetry", "glean"]
 license = "MPL-2.0"
 repository = "https://github.com/mozilla/glean"
 [dependencies.glean-core]
-version = "22.0.0"
+version = "24.0.0"
 
 [dependencies.lazy_static]
 version = "1.4.0"
@@ -38,7 +38,7 @@ default-features = false
 version = "0.4.8"
 
 [dev-dependencies.tempfile]
-version = "3.0.7"
+version = "3.1.0"
 [badges.circle-ci]
 branch = "master"
 repository = "mozilla/glean"
|
|
|
@@ -50,17 +50,24 @@ mod core_metrics;
 pub mod metrics;
 mod system;
 
-static GLEAN: OnceCell<Mutex<Glean>> = OnceCell::new();
+#[derive(Debug)]
+struct GleanWrapper {
+    instance: Glean,
+    channel: Option<String>,
+    client_info: ClientInfoMetrics,
+}
+
+static GLEAN: OnceCell<Mutex<GleanWrapper>> = OnceCell::new();
 
 /// Get a reference to the global Glean object.
 ///
 /// Panics if no global Glean object was set.
-fn global_glean() -> &'static Mutex<Glean> {
+fn global_glean() -> &'static Mutex<GleanWrapper> {
     GLEAN.get().unwrap()
 }
 
 /// Set or replace the global Glean object.
-fn setup_glean(glean: Glean) -> Result<()> {
+fn setup_glean(glean: GleanWrapper) -> Result<()> {
     if GLEAN.get().is_none() {
         GLEAN.set(Mutex::new(glean)).unwrap();
     } else {
@@ -75,7 +82,15 @@ where
     F: Fn(&Glean) -> R,
 {
     let lock = global_glean().lock().unwrap();
-    f(&lock)
+    f(&lock.instance)
 }
 
+fn with_glean_wrapper_mut<F, R>(f: F) -> R
+where
+    F: Fn(&mut GleanWrapper) -> R,
+{
+    let mut lock = global_glean().lock().unwrap();
+    f(&mut lock)
+}
+
 fn with_glean_mut<F, R>(f: F) -> R
@@ -83,38 +98,49 @@ where
     F: Fn(&mut Glean) -> R,
 {
     let mut lock = global_glean().lock().unwrap();
-    f(&mut lock)
+    f(&mut lock.instance)
 }
 
 /// Create and initialize a new Glean object.
 ///
 /// See `glean_core::Glean::new`.
 pub fn initialize(cfg: Configuration, client_info: ClientInfoMetrics) -> Result<()> {
-    let channel = cfg.channel;
-    let cfg = glean_core::Configuration {
+    let core_cfg = glean_core::Configuration {
         upload_enabled: cfg.upload_enabled,
-        data_path: cfg.data_path,
-        application_id: cfg.application_id,
+        data_path: cfg.data_path.clone(),
+        application_id: cfg.application_id.clone(),
         max_events: cfg.max_events,
         delay_ping_lifetime_io: cfg.delay_ping_lifetime_io,
     };
-    let glean = Glean::new(cfg)?;
+    let glean = Glean::new(core_cfg)?;
 
     // First initialize core metrics
-    initialize_core_metrics(&glean, client_info, channel);
+    initialize_core_metrics(&glean, &client_info, cfg.channel.clone());
 
     // Now make this the global object available to others.
-    setup_glean(glean)?;
+    let wrapper = GleanWrapper {
+        instance: glean,
+        channel: cfg.channel,
+        client_info,
+    };
+    setup_glean(wrapper)?;
 
     Ok(())
 }
 
-fn initialize_core_metrics(glean: &Glean, client_info: ClientInfoMetrics, channel: Option<String>) {
+fn initialize_core_metrics(
+    glean: &Glean,
+    client_info: &ClientInfoMetrics,
+    channel: Option<String>,
+) {
     let core_metrics = core_metrics::InternalMetrics::new();
 
-    core_metrics.app_build.set(glean, client_info.app_build);
+    core_metrics
+        .app_build
+        .set(glean, &client_info.app_build[..]);
     core_metrics
         .app_display_version
-        .set(glean, client_info.app_display_version);
+        .set(glean, &client_info.app_display_version[..]);
     if let Some(app_channel) = channel {
         core_metrics.app_channel.set(glean, app_channel);
     }
@@ -132,10 +158,18 @@ fn initialize_core_metrics(glean: &Glean, client_info: ClientInfoMetrics, channe
 /// Set whether upload is enabled or not.
 ///
 /// See `glean_core::Glean.set_upload_enabled`.
-pub fn set_upload_enabled(flag: bool) -> bool {
-    with_glean_mut(|glean| {
-        glean.set_upload_enabled(flag);
-        glean.is_upload_enabled()
+pub fn set_upload_enabled(enabled: bool) -> bool {
+    with_glean_wrapper_mut(|glean| {
+        let old_enabled = glean.instance.is_upload_enabled();
+        glean.instance.set_upload_enabled(enabled);
+
+        if !old_enabled && enabled {
+            // If uploading is being re-enabled, we have to restore the
+            // application-lifetime metrics.
+            initialize_core_metrics(&glean.instance, &glean.client_info, glean.channel.clone());
+        }
+
+        enabled
     })
 }
 
@@ -181,5 +215,8 @@ pub fn submit_ping_by_name(ping: &str) -> bool {
 ///
 /// Returns true if at least one ping was assembled and queued, false otherwise.
 pub fn submit_pings_by_name(pings: &[String]) -> bool {
-    with_glean(|glean| glean.send_pings_by_name(pings))
+    with_glean(|glean| glean.submit_pings_by_name(pings))
 }
+
+#[cfg(test)]
+mod test;
|
|
|
@@ -0,0 +1,77 @@
+use super::*;
+
+const GLOBAL_APPLICATION_ID: &str = "org.mozilla.fogotype.test";
+
+// Create a new instance of Glean with a temporary directory.
+// We need to keep the `TempDir` alive, so that it's not deleted before we stop using it.
+fn new_glean() -> tempfile::TempDir {
+    let dir = tempfile::tempdir().unwrap();
+    let tmpname = dir.path().display().to_string();
+
+    let cfg = Configuration {
+        data_path: tmpname,
+        application_id: GLOBAL_APPLICATION_ID.into(),
+        upload_enabled: true,
+        max_events: None,
+        delay_ping_lifetime_io: false,
+        channel: Some("testing".into()),
+    };
+
+    initialize(cfg, ClientInfoMetrics::unknown()).unwrap();
+    dir
+}
+
+#[test]
+fn it_initializes() {
+    env_logger::try_init().ok();
+    let _ = new_glean();
+}
+
+#[test]
+fn it_toggles_upload() {
+    env_logger::try_init().ok();
+
+    let _t = new_glean();
+
+    assert!(crate::is_upload_enabled());
+    crate::set_upload_enabled(false);
+    assert!(!crate::is_upload_enabled());
+}
+
+#[test]
+fn client_info_reset_after_toggle() {
+    env_logger::try_init().ok();
+
+    let _t = new_glean();
+
+    assert!(crate::is_upload_enabled());
+
+    // Metrics are identified by category.name, so it's safe to recreate the objects here.
+    let core_metrics = core_metrics::InternalMetrics::new();
+
+    // At start we should have a value.
+    with_glean(|glean| {
+        assert!(core_metrics
+            .os
+            .test_get_value(glean, "glean_client_info")
+            .is_some());
+    });
+
+    // Disabling upload clears everything.
+    crate::set_upload_enabled(false);
+    with_glean(|glean| {
+        assert!(!core_metrics
+            .os
+            .test_get_value(glean, "glean_client_info")
+            .is_some());
+    });
+
+    // Re-enabling upload should reset the values.
+    crate::set_upload_enabled(true);
+    with_glean(|glean| {
+        assert!(core_metrics
+            .os
+            .test_get_value(glean, "glean_client_info")
+            .is_some());
+    });
+}
|
@ -1 +1 @@
|
|||
{"files":{"CHANGELOG.md":"817c711701c9925f2494c5a2e09c679efd4026e121395b0167e722eb827eb2bf","Cargo.toml":"86f094aa5d1cea5b682852ec8a80a3cb954fc3d5267bd201e2e956d89b17f4aa","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"ebfbd97d184c4474295e36e30e45779bb40bc7c07610734058dfa272a922927e","appveyor.yml":"d5376a881aef6dc3bed6a6b51f1f1c3801b88fc77a12895f6953190256e965bc","src/lib.rs":"43acb02c1d3440235e464b6ab51e52824a7d604390e278d098054e76e686b798","src/macros.rs":"728a5309f35c84895ff1935c712f57464e2239dddb09b6fd84ff289a66c34b46","src/serde.rs":"a5ae50d6f5ac538a5ca57ee58031a2582afa6989c741574584756632a4131ba7","tests/filters.rs":"6aee024d4594b4fde772e35c5d3318e5aa5d9aa4eaeafb04e4c7c922be4ac837"},"package":"c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"}
|
||||
{"files":{"CHANGELOG.md":"4a384a648f3c5f9deed5463bbcdb92cf95dfe86d755767c0c21140d0539c1243","Cargo.toml":"cde739d8e087cc2515bc06d8ce327648606575e6dd51b74fb0ffd6a329381ba1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"0a6436eede658249802b63f1e693b28c9f80a2da2697bb80a2677e6c253c3a7d","build.rs":"16a49ad8a5b11a610382f904c72435ce3201b0807c584128c7e61fbb59c76b09","src/kv/error.rs":"296f6af53062a4526bb75eb4dbf955c75aed3b6332c260dc5bd7f0bc68c2d8c7","src/kv/key.rs":"68077f9ad149b28ccdeacd9e95c44c292b68d7fa0823aac5e022e2f3df6120e6","src/kv/mod.rs":"3397573933689b11ca5ad526193166c1e4f665e5692c38cd8fdb63a5aa61f7bf","src/kv/source.rs":"3783ac96b54e24fe6525f9e3cec91425800267f842704d947fea27ee344025a2","src/kv/value/impls.rs":"c8d4cb4e746e7b5a6e1f1993145c2b5309ac556f7ffc757fb80bb10b89bfa40d","src/kv/value/internal.rs":"b7e7d94d018d0207dfb30350a2ce916dc124c48c2e056e7637c159132c707a59","src/kv/value/mod.rs":"a4732af6fb9b3fad8ddf9dd5f9da90689eb9023984ff1f455f871ed32fde4aef","src/kv/value/test.rs":"99e0b87dd8822e7f148a3acfd1dd050718e2aee3ecd9314849076bf043a0a3e9","src/lib.rs":"c594c4a71bff2118e120c9c64e1e2e866dc76489af007965b6c67e57bf1a1797","src/macros.rs":"68cbe1a49a3ac846d6b80d291f427a70dbad0f57dac8beecac7449723293bd99","src/serde.rs":"4677851fba9d43adcddeb94c416af9e06eb70d4fb86e2840256e1edfc4638fef","tests/filters.rs":"90cae6653ba6dc3a462bab2a0b96cb66d5d21882492118bfd6999c0baff1b0e5"},"package":"14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"}
|
|
@ -2,6 +2,26 @@
|
|||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.4.8] - 2019-07-28
|
||||
|
||||
### New
|
||||
|
||||
* Support attempting to get `Record` fields as static strings.
|
||||
|
||||
## [0.4.7] - 2019-07-06
|
||||
|
||||
### New
|
||||
|
||||
* Support for embedded environments with thread-unsafe initialization.
|
||||
* Initial unstable support for capturing structured data under the `kv_unstable`
|
||||
feature gate. This new API doesn't affect existing users and may change in future
|
||||
patches (so those changes may not appear in the changelog until it stabilizes).
|
||||
|
||||
### Improved
|
||||
|
||||
* Docs for using `log` with the 2018 edition.
|
||||
* Error messages for macros missing arguments.
|
||||
|
||||
## [0.4.6] - 2018-10-27
|
||||
|
||||
### Improved
|
||||
|
@ -112,7 +132,9 @@ version using log 0.4.x to avoid losing module and file information.
|
|||
|
||||
Look at the [release tags] for information about older releases.
|
||||
|
||||
[Unreleased]: https://github.com/rust-lang-nursery/log/compare/0.4.6...HEAD
|
||||
[Unreleased]: https://github.com/rust-lang-nursery/log/compare/0.4.8...HEAD
|
||||
[0.4.8]: https://github.com/rust-lang-nursery/log/compare/0.4.7...0.4.8
|
||||
[0.4.7]: https://github.com/rust-lang-nursery/log/compare/0.4.6...0.4.7
|
||||
[0.4.6]: https://github.com/rust-lang-nursery/log/compare/0.4.5...0.4.6
|
||||
[0.4.5]: https://github.com/rust-lang-nursery/log/compare/0.4.4...0.4.5
|
||||
[0.4.4]: https://github.com/rust-lang-nursery/log/compare/0.4.3...0.4.4
|
||||
|
|
|
@ -12,18 +12,19 @@
|
|||
|
||||
[package]
|
||||
name = "log"
|
||||
version = "0.4.6"
|
||||
version = "0.4.8"
|
||||
authors = ["The Rust Project Developers"]
|
||||
build = "build.rs"
|
||||
exclude = ["rfcs/**/*", "/.travis.yml", "/appveyor.yml"]
|
||||
description = "A lightweight logging facade for Rust\n"
|
||||
homepage = "https://github.com/rust-lang/log"
|
||||
documentation = "https://docs.rs/log"
|
||||
readme = "README.md"
|
||||
keywords = ["logging"]
|
||||
categories = ["development-tools::debugging"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/rust-lang/log"
|
||||
[package.metadata.docs.rs]
|
||||
features = ["std", "serde"]
|
||||
features = ["std", "serde", "kv_unstable_sval"]
|
||||
|
||||
[[test]]
|
||||
name = "filters"
|
||||
|
@ -35,10 +36,21 @@ version = "0.1.2"
|
|||
version = "1.0"
|
||||
optional = true
|
||||
default-features = false
|
||||
|
||||
[dependencies.sval]
|
||||
version = "0.4.2"
|
||||
optional = true
|
||||
default-features = false
|
||||
[dev-dependencies.serde_test]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.sval]
|
||||
version = "0.4.2"
|
||||
features = ["test"]
|
||||
|
||||
[features]
|
||||
kv_unstable = []
|
||||
kv_unstable_sval = ["kv_unstable", "sval/fmt"]
|
||||
max_level_debug = []
|
||||
max_level_error = []
|
||||
max_level_info = []
|
||||
|
|
|
@ -3,7 +3,7 @@ log
|
|||
|
||||
A Rust library providing a lightweight logging *facade*.
|
||||
|
||||
[![Build Status](https://travis-ci.org/rust-lang-nursery/log.svg?branch=master)](https://travis-ci.org/rust-lang-nursery/log)
|
||||
[![Build Status](https://travis-ci.com/rust-lang-nursery/log.svg?branch=master)](https://travis-ci.com/rust-lang-nursery/log)
|
||||
[![Build status](https://ci.appveyor.com/api/projects/status/nopdjmmjt45xcrki?svg=true)](https://ci.appveyor.com/project/alexcrichton/log)
|
||||
[![Latest version](https://img.shields.io/crates/v/log.svg)](https://crates.io/crates/log)
|
||||
[![Documentation](https://docs.rs/log/badge.svg)](https://docs.rs/log)
|
||||
|
@ -16,6 +16,13 @@ logging implementation. Libraries can use the logging API provided by this
|
|||
crate, and the consumer of those libraries can choose the logging
|
||||
implementation that is most suitable for its use case.
|
||||
|
||||
|
||||
## Minimum supported `rustc`
|
||||
|
||||
`1.16.0+`
|
||||
|
||||
This version is explicitly tested in CI and may be bumped in any release as needed. Maintaining compatibility with older compilers is a priority though, so the bar for bumping the minimum supported version is set very high. Any changes to the supported minimum version will be called out in the release notes.
|
||||
|
||||
## Usage
|
||||
|
||||
## In libraries
|
||||
|
@ -29,8 +36,7 @@ log = "0.4"
|
|||
```
|
||||
|
||||
```rust
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
use log::{info, trace, warn};
|
||||
|
||||
pub fn shave_the_yak(yak: &mut Yak) {
|
||||
trace!("Commencing yak shaving");
|
||||
|
@ -50,19 +56,9 @@ pub fn shave_the_yak(yak: &mut Yak) {
|
|||
}
|
||||
```
|
||||
|
||||
If you use Rust 2018, you can use instead the following code to import the crate macros:
|
||||
|
||||
```rust
|
||||
use log::{info, trace, warn};
|
||||
|
||||
pub fn shave_the_yak(yak: &mut Yak) {
|
||||
// …
|
||||
}
|
||||
```
|
||||
|
||||
## In executables
|
||||
|
||||
In order to produce log output executables have to use a logger implementation compatible with the facade.
|
||||
In order to produce log output, executables have to use a logger implementation compatible with the facade.
|
||||
There are many available implementations to chose from, here are some of the most popular ones:
|
||||
|
||||
* Simple minimal loggers:
|
||||
|
@ -79,6 +75,8 @@ There are many available implementations to chose from, here are some of the mos
|
|||
* [`syslog`](https://docs.rs/syslog/*/syslog/)
|
||||
* [`slog-stdlog`](https://docs.rs/slog-stdlog/*/slog_stdlog/)
|
||||
* [`android_log`](https://docs.rs/android_log/*/android_log/)
|
||||
* For WebAssembly binaries:
|
||||
* [`console_log`](https://docs.rs/console_log/*/console_log/)
|
||||
|
||||
Executables should choose a logger implementation and initialize it early in the
|
||||
runtime of the program. Logger implementations will typically include a
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
environment:
|
||||
matrix:
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
- TARGET: i686-pc-windows-gnu
|
||||
install:
|
||||
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-nightly-${env:TARGET}.exe"
|
||||
- rust-nightly-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
|
||||
- SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin
|
||||
- SET PATH=%PATH%;C:\MinGW\bin
|
||||
- rustc -V
|
||||
- cargo -V
|
||||
|
||||
build: false
|
||||
|
||||
test_script:
|
||||
- cargo test --verbose
|
||||
- cargo test --verbose --features serde
|
||||
- cargo test --verbose --features std
|
|
@@ -0,0 +1,14 @@
+//! This build script detects target platforms that lack proper support for
+//! atomics and sets `cfg` flags accordingly.
+
+use std::env;
+
+fn main() {
+    let target = env::var("TARGET").unwrap();
+
+    if !target.starts_with("thumbv6") {
+        println!("cargo:rustc-cfg=atomic_cas");
+    }
+
+    println!("cargo:rerun-if-changed=build.rs");
+}
|
@ -0,0 +1,88 @@
|
|||
use std::fmt;
|
||||
|
||||
/// An error encountered while working with structured data.
|
||||
#[derive(Debug)]
|
||||
pub struct Error {
|
||||
inner: Inner
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum Inner {
|
||||
#[cfg(feature = "std")]
|
||||
Boxed(std_support::BoxedError),
|
||||
Msg(&'static str),
|
||||
Fmt,
|
||||
}
|
||||
|
||||
impl Error {
|
||||
/// Create an error from a message.
|
||||
pub fn msg(msg: &'static str) -> Self {
|
||||
Error {
|
||||
inner: Inner::Msg(msg),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
use self::Inner::*;
|
||||
match &self.inner {
|
||||
#[cfg(feature = "std")]
|
||||
&Boxed(ref err) => err.fmt(f),
|
||||
&Msg(ref msg) => msg.fmt(f),
|
||||
&Fmt => fmt::Error.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<fmt::Error> for Error {
|
||||
fn from(_: fmt::Error) -> Self {
|
||||
Error {
|
||||
inner: Inner::Fmt,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Error> for fmt::Error {
|
||||
fn from(_: Error) -> Self {
|
||||
fmt::Error
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod std_support {
|
||||
use super::*;
|
||||
use std::{error, io};
|
||||
|
||||
pub(super) type BoxedError = Box<error::Error + Send + Sync>;
|
||||
|
||||
impl Error {
|
||||
/// Create an error from a standard error type.
|
||||
pub fn boxed<E>(err: E) -> Self
|
||||
where
|
||||
E: Into<BoxedError>,
|
||||
{
|
||||
Error {
|
||||
inner: Inner::Boxed(err.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl error::Error for Error {
|
||||
fn description(&self) -> &str {
|
||||
"key values error"
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for Error {
|
||||
fn from(err: io::Error) -> Self {
|
||||
Error::boxed(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Error> for io::Error {
|
||||
fn from(err: Error) -> Self {
|
||||
io::Error::new(io::ErrorKind::Other, err)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,143 @@
|
|||
//! Structured keys.
|
||||
|
||||
use std::fmt;
|
||||
use std::cmp;
|
||||
use std::hash;
|
||||
use std::borrow::Borrow;
|
||||
|
||||
/// A type that can be converted into a [`Key`](struct.Key.html).
|
||||
pub trait ToKey {
|
||||
/// Perform the conversion.
|
||||
fn to_key(&self) -> Key;
|
||||
}
|
||||
|
||||
impl<'a, T> ToKey for &'a T
|
||||
where
|
||||
T: ToKey + ?Sized,
|
||||
{
|
||||
fn to_key(&self) -> Key {
|
||||
(**self).to_key()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> ToKey for Key<'k> {
|
||||
fn to_key(&self) -> Key {
|
||||
Key {
|
||||
key: self.key,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToKey for str {
|
||||
fn to_key(&self) -> Key {
|
||||
Key::from_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
/// A key in a structured key-value pair.
|
||||
#[derive(Clone)]
|
||||
pub struct Key<'k> {
|
||||
key: &'k str,
|
||||
}
|
||||
|
||||
impl<'k> Key<'k> {
|
||||
/// Get a key from a borrowed string.
|
||||
pub fn from_str(key: &'k str) -> Self {
|
||||
Key {
|
||||
key: key,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a borrowed string from this key.
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.key
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> fmt::Debug for Key<'k> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.key.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> fmt::Display for Key<'k> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.key.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> hash::Hash for Key<'k> {
|
||||
fn hash<H>(&self, state: &mut H)
|
||||
where
|
||||
H: hash::Hasher,
|
||||
{
|
||||
self.as_str().hash(state)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k, 'ko> PartialEq<Key<'ko>> for Key<'k> {
|
||||
fn eq(&self, other: &Key<'ko>) -> bool {
|
||||
self.as_str().eq(other.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> Eq for Key<'k> {}
|
||||
|
||||
impl<'k, 'ko> PartialOrd<Key<'ko>> for Key<'k> {
|
||||
fn partial_cmp(&self, other: &Key<'ko>) -> Option<cmp::Ordering> {
|
||||
self.as_str().partial_cmp(other.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> Ord for Key<'k> {
|
||||
fn cmp(&self, other: &Self) -> cmp::Ordering {
|
||||
self.as_str().cmp(other.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> AsRef<str> for Key<'k> {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> Borrow<str> for Key<'k> {
|
||||
fn borrow(&self) -> &str {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'k> From<&'k str> for Key<'k> {
|
||||
fn from(s: &'k str) -> Self {
|
||||
Key::from_str(s)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod std_support {
|
||||
use super::*;
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
impl ToKey for String {
|
||||
fn to_key(&self) -> Key {
|
||||
Key::from_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ToKey for Cow<'a, str> {
|
||||
fn to_key(&self) -> Key {
|
||||
Key::from_str(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn key_from_string() {
|
||||
assert_eq!("a key", Key::from_str("a key").as_str());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
//! **UNSTABLE:** Structured key-value pairs.
|
||||
//!
|
||||
//! This module is unstable and breaking changes may be made
|
||||
//! at any time. See [the tracking issue](https://github.com/rust-lang-nursery/log/issues/328)
|
||||
//! for more details.
|
||||
//!
|
||||
//! Add the `kv_unstable` feature to your `Cargo.toml` to enable
|
||||
//! this module:
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies.log]
|
||||
//! features = ["kv_unstable"]
|
||||
//! ```
|
||||
|
||||
mod error;
|
||||
mod source;
|
||||
mod key;
|
||||
pub mod value;
|
||||
|
||||
pub use self::error::Error;
|
||||
pub use self::source::{Source, Visitor};
|
||||
pub use self::key::{Key, ToKey};
|
||||
pub use self::value::{Value, ToValue};
|
|
@ -0,0 +1,396 @@
|
|||
//! Sources for key-value pairs.
|
||||
|
||||
use std::fmt;
|
||||
use kv::{Error, Key, ToKey, Value, ToValue};
|
||||
|
||||
/// A source of key-value pairs.
|
||||
///
|
||||
/// The source may be a single pair, a set of pairs, or a filter over a set of pairs.
|
||||
/// Use the [`Visitor`](trait.Visitor.html) trait to inspect the structured data
|
||||
/// in a source.
|
||||
pub trait Source {
|
||||
/// Visit key-value pairs.
|
||||
///
|
||||
/// A source doesn't have to guarantee any ordering or uniqueness of key-value pairs.
|
||||
/// If the given visitor returns an error then the source may early-return with it,
|
||||
/// even if there are more key-value pairs.
|
||||
///
|
||||
/// # Implementation notes
|
||||
///
|
||||
/// A source should yield the same key-value pairs to a subsequent visitor unless
|
||||
/// that visitor itself fails.
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error>;
|
||||
|
||||
/// Get the value for a given key.
|
||||
///
|
||||
/// If the key appears multiple times in the source then which key is returned
|
||||
/// is implementation specific.
|
||||
///
|
||||
/// # Implementation notes
|
||||
///
|
||||
/// A source that can provide a more efficient implementation of this method
|
||||
/// should override it.
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
struct Get<'k, 'v> {
|
||||
key: Key<'k>,
|
||||
found: Option<Value<'v>>,
|
||||
}
|
||||
|
||||
impl<'k, 'kvs> Visitor<'kvs> for Get<'k, 'kvs> {
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
if self.key == key {
|
||||
self.found = Some(value);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let mut get = Get {
|
||||
key,
|
||||
found: None,
|
||||
};
|
||||
|
||||
let _ = self.visit(&mut get);
|
||||
get.found
|
||||
}
|
||||
|
||||
/// Count the number of key-value pairs that can be visited.
|
||||
///
|
||||
/// # Implementation notes
|
||||
///
|
||||
/// A source that knows the number of key-value pairs upfront may provide a more
|
||||
/// efficient implementation.
|
||||
///
|
||||
/// A subsequent call to `visit` should yield the same number of key-value pairs
|
||||
/// to the visitor, unless that visitor fails part way through.
|
||||
fn count(&self) -> usize {
|
||||
struct Count(usize);
|
||||
|
||||
impl<'kvs> Visitor<'kvs> for Count {
|
||||
fn visit_pair(&mut self, _: Key<'kvs>, _: Value<'kvs>) -> Result<(), Error> {
|
||||
self.0 += 1;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let mut count = Count(0);
|
||||
let _ = self.visit(&mut count);
|
||||
count.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T> Source for &'a T
|
||||
where
|
||||
T: Source + ?Sized,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
Source::visit(&**self, visitor)
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
Source::get(&**self, key)
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
Source::count(&**self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Source for (K, V)
|
||||
where
|
||||
K: ToKey,
|
||||
V: ToValue,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
visitor.visit_pair(self.0.to_key(), self.1.to_value())
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
if self.0.to_key() == key {
|
||||
Some(self.1.to_value())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
1
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Source for [S]
|
||||
where
|
||||
S: Source,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
for source in self {
|
||||
source.visit(visitor)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
self.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Source for Option<S>
|
||||
where
|
||||
S: Source,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
if let Some(ref source) = *self {
|
||||
source.visit(visitor)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
self.as_ref().map(Source::count).unwrap_or(0)
|
||||
}
|
||||
}
|
||||
|
||||
/// A visitor for the key-value pairs in a [`Source`](trait.Source.html).
|
||||
pub trait Visitor<'kvs> {
|
||||
/// Visit a key-value pair.
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
impl<'a, 'kvs, T> Visitor<'kvs> for &'a mut T
|
||||
where
|
||||
T: Visitor<'kvs> + ?Sized,
|
||||
{
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
(**self).visit_pair(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, 'kvs> Visitor<'kvs> for fmt::DebugMap<'a, 'b> {
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
self.entry(&key, &value);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, 'kvs> Visitor<'kvs> for fmt::DebugList<'a, 'b> {
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
self.entry(&(key, value));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, 'kvs> Visitor<'kvs> for fmt::DebugSet<'a, 'b> {
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
self.entry(&(key, value));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b: 'a, 'kvs> Visitor<'kvs> for fmt::DebugTuple<'a, 'b> {
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
self.field(&key);
|
||||
self.field(&value);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod std_support {
|
||||
use super::*;
|
||||
use std::borrow::Borrow;
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use std::hash::{BuildHasher, Hash};
|
||||
|
||||
impl<S> Source for Box<S>
|
||||
where
|
||||
S: Source + ?Sized,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
Source::visit(&**self, visitor)
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
Source::get(&**self, key)
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
Source::count(&**self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Source for Vec<S>
|
||||
where
|
||||
S: Source,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
Source::visit(&**self, visitor)
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
Source::get(&**self, key)
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
Source::count(&**self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'kvs, V> Visitor<'kvs> for Box<V>
|
||||
where
|
||||
V: Visitor<'kvs> + ?Sized,
|
||||
{
|
||||
fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
|
||||
(**self).visit_pair(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V, S> Source for HashMap<K, V, S>
|
||||
where
|
||||
K: ToKey + Borrow<str> + Eq + Hash,
|
||||
V: ToValue,
|
||||
S: BuildHasher,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
for (key, value) in self {
|
||||
visitor.visit_pair(key.to_key(), value.to_value())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
HashMap::get(self, key.as_str()).map(|v| v.to_value())
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
self.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Source for BTreeMap<K, V>
|
||||
where
|
||||
K: ToKey + Borrow<str> + Ord,
|
||||
V: ToValue,
|
||||
{
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
for (key, value) in self {
|
||||
visitor.visit_pair(key.to_key(), value.to_value())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get<'v>(&'v self, key: Key) -> Option<Value<'v>> {
|
||||
BTreeMap::get(self, key.as_str()).map(|v| v.to_value())
|
||||
}
|
||||
|
||||
fn count(&self) -> usize {
|
||||
self.len()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use kv::value::test::Token;
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
|
||||
#[test]
|
||||
fn count() {
|
||||
assert_eq!(1, Source::count(&Box::new(("a", 1))));
|
||||
assert_eq!(2, Source::count(&vec![("a", 1), ("b", 2)]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get() {
|
||||
let source = vec![("a", 1), ("b", 2), ("a", 1)];
|
||||
assert_eq!(
|
||||
Token::I64(1),
|
||||
Source::get(&source, Key::from_str("a")).unwrap().to_token()
|
||||
);
|
||||
|
||||
let source = Box::new(Option::None::<(&str, i32)>);
|
||||
assert!(Source::get(&source, Key::from_str("a")).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hash_map() {
|
||||
let mut map = HashMap::new();
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
|
||||
assert_eq!(2, Source::count(&map));
|
||||
assert_eq!(
|
||||
Token::I64(1),
|
||||
Source::get(&map, Key::from_str("a")).unwrap().to_token()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn btree_map() {
|
||||
let mut map = BTreeMap::new();
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
|
||||
assert_eq!(2, Source::count(&map));
|
||||
assert_eq!(
|
||||
Token::I64(1),
|
||||
Source::get(&map, Key::from_str("a")).unwrap().to_token()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use kv::value::test::Token;
|
||||
|
||||
#[test]
|
||||
fn source_is_object_safe() {
|
||||
fn _check(_: &Source) {}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn visitor_is_object_safe() {
|
||||
fn _check(_: &Visitor) {}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn count() {
|
||||
struct OnePair {
|
||||
key: &'static str,
|
||||
value: i32,
|
||||
}
|
||||
|
||||
impl Source for OnePair {
|
||||
fn visit<'kvs>(&'kvs self, visitor: &mut Visitor<'kvs>) -> Result<(), Error> {
|
||||
visitor.visit_pair(self.key.to_key(), self.value.to_value())
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(1, Source::count(&("a", 1)));
|
||||
assert_eq!(2, Source::count(&[("a", 1), ("b", 2)] as &[_]));
|
||||
assert_eq!(0, Source::count(&Option::None::<(&str, i32)>));
|
||||
assert_eq!(1, Source::count(&OnePair { key: "a", value: 1 }));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get() {
|
||||
let source = &[("a", 1), ("b", 2), ("a", 1)] as &[_];
|
||||
assert_eq!(
|
||||
Token::I64(1),
|
||||
Source::get(source, Key::from_str("a")).unwrap().to_token()
|
||||
);
|
||||
assert_eq!(
|
||||
Token::I64(2),
|
||||
Source::get(source, Key::from_str("b")).unwrap().to_token()
|
||||
);
|
||||
assert!(Source::get(&source, Key::from_str("c")).is_none());
|
||||
|
||||
let source = Option::None::<(&str, i32)>;
|
||||
assert!(Source::get(&source, Key::from_str("a")).is_none());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,269 @@
|
|||
use std::fmt;
|
||||
|
||||
use super::{ToValue, Value, Primitive};
|
||||
|
||||
impl ToValue for usize {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<usize> for Value<'v> {
|
||||
fn from(value: usize) -> Self {
|
||||
Value::from_primitive(Primitive::Unsigned(value as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for isize {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<isize> for Value<'v> {
|
||||
fn from(value: isize) -> Self {
|
||||
Value::from_primitive(Primitive::Signed(value as i64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for u8 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<u8> for Value<'v> {
|
||||
fn from(value: u8) -> Self {
|
||||
Value::from_primitive(Primitive::Unsigned(value as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for u16 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<u16> for Value<'v> {
|
||||
fn from(value: u16) -> Self {
|
||||
Value::from_primitive(Primitive::Unsigned(value as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for u32 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<u32> for Value<'v> {
|
||||
fn from(value: u32) -> Self {
|
||||
Value::from_primitive(Primitive::Unsigned(value as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for u64 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<u64> for Value<'v> {
|
||||
fn from(value: u64) -> Self {
|
||||
Value::from_primitive(Primitive::Unsigned(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for i8 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<i8> for Value<'v> {
|
||||
fn from(value: i8) -> Self {
|
||||
Value::from_primitive(Primitive::Signed(value as i64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for i16 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<i16> for Value<'v> {
|
||||
fn from(value: i16) -> Self {
|
||||
Value::from_primitive(Primitive::Signed(value as i64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for i32 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<i32> for Value<'v> {
|
||||
fn from(value: i32) -> Self {
|
||||
Value::from_primitive(Primitive::Signed(value as i64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for i64 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<i64> for Value<'v> {
|
||||
fn from(value: i64) -> Self {
|
||||
Value::from_primitive(Primitive::Signed(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for f32 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<f32> for Value<'v> {
|
||||
fn from(value: f32) -> Self {
|
||||
Value::from_primitive(Primitive::Float(value as f64))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for f64 {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<f64> for Value<'v> {
|
||||
fn from(value: f64) -> Self {
|
||||
Value::from_primitive(Primitive::Float(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for bool {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<bool> for Value<'v> {
|
||||
fn from(value: bool) -> Self {
|
||||
Value::from_primitive(Primitive::Bool(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for char {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<char> for Value<'v> {
|
||||
fn from(value: char) -> Self {
|
||||
Value::from_primitive(Primitive::Char(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> ToValue for &'v str {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> From<&'v str> for Value<'v> {
|
||||
fn from(value: &'v str) -> Self {
|
||||
Value::from_primitive(Primitive::Str(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for () {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from_primitive(Primitive::None)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ToValue for Option<T>
|
||||
where
|
||||
T: ToValue,
|
||||
{
|
||||
fn to_value(&self) -> Value {
|
||||
match *self {
|
||||
Some(ref value) => value.to_value(),
|
||||
None => Value::from_primitive(Primitive::None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> ToValue for fmt::Arguments<'v> {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from_debug(self)
|
||||
}
|
||||
}
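// Illustrative sketch (not part of the vendored source, assumes a `std` build):
// the `From` impls above mean a primitive converts into a `Value` either
// explicitly or through `ToValue`, and both routes render the same way.
// The `_primitive_value_sketch` function name is hypothetical.
fn _primitive_value_sketch() {
    let explicit: Value = 42u64.into();
    let via_trait = 42u64.to_value();

    assert_eq!(explicit.to_string(), "42");
    assert_eq!(via_trait.to_string(), "42");
}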
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
mod std_support {
|
||||
use super::*;
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
impl<T> ToValue for Box<T>
|
||||
where
|
||||
T: ToValue + ?Sized,
|
||||
{
|
||||
fn to_value(&self) -> Value {
|
||||
(**self).to_value()
|
||||
}
|
||||
}
|
||||
|
||||
impl ToValue for String {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from_primitive(Primitive::Str(&*self))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> ToValue for Cow<'v, str> {
|
||||
fn to_value(&self) -> Value {
|
||||
Value::from_primitive(Primitive::Str(&*self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use kv::value::test::Token;
|
||||
|
||||
#[test]
|
||||
fn test_to_value_display() {
|
||||
assert_eq!(42u64.to_value().to_string(), "42");
|
||||
assert_eq!(42i64.to_value().to_string(), "42");
|
||||
assert_eq!(42.01f64.to_value().to_string(), "42.01");
|
||||
assert_eq!(true.to_value().to_string(), "true");
|
||||
assert_eq!('a'.to_value().to_string(), "'a'");
|
||||
assert_eq!(format_args!("a {}", "value").to_value().to_string(), "a value");
|
||||
assert_eq!("a loong string".to_value().to_string(), "\"a loong string\"");
|
||||
assert_eq!(Some(true).to_value().to_string(), "true");
|
||||
assert_eq!(().to_value().to_string(), "None");
|
||||
assert_eq!(Option::None::<bool>.to_value().to_string(), "None");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_value_structured() {
|
||||
assert_eq!(42u64.to_value().to_token(), Token::U64(42));
|
||||
assert_eq!(42i64.to_value().to_token(), Token::I64(42));
|
||||
assert_eq!(42.01f64.to_value().to_token(), Token::F64(42.01));
|
||||
assert_eq!(true.to_value().to_token(), Token::Bool(true));
|
||||
assert_eq!('a'.to_value().to_token(), Token::Char('a'));
|
||||
assert_eq!(format_args!("a {}", "value").to_value().to_token(), Token::Str("a value".into()));
|
||||
assert_eq!("a loong string".to_value().to_token(), Token::Str("a loong string".into()));
|
||||
assert_eq!(Some(true).to_value().to_token(), Token::Bool(true));
|
||||
assert_eq!(().to_value().to_token(), Token::None);
|
||||
assert_eq!(Option::None::<bool>.to_value().to_token(), Token::None);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,264 @@
|
|||
use std::fmt;
|
||||
|
||||
use super::{Fill, Slot, Error};
|
||||
use kv;
|
||||
|
||||
// `Visitor` is an internal API for visiting the structure of a value.
|
||||
// It's not intended to be public (at this stage).
|
||||
|
||||
/// A container for a structured value for a specific kind of visitor.
|
||||
#[derive(Clone, Copy)]
|
||||
pub(super) enum Inner<'v> {
|
||||
/// A simple primitive value that can be copied without allocating.
|
||||
Primitive(Primitive<'v>),
|
||||
/// A value that can be filled.
|
||||
Fill(&'v Fill),
|
||||
/// A debuggable value.
|
||||
Debug(&'v fmt::Debug),
|
||||
/// A displayable value.
|
||||
Display(&'v fmt::Display),
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
/// A structured value from `sval`.
|
||||
Sval(&'v sval_support::Value),
|
||||
}
|
||||
|
||||
impl<'v> Inner<'v> {
|
||||
pub(super) fn visit(&self, visitor: &mut Visitor) -> Result<(), Error> {
|
||||
match *self {
|
||||
Inner::Primitive(value) => match value {
|
||||
Primitive::Signed(value) => visitor.i64(value),
|
||||
Primitive::Unsigned(value) => visitor.u64(value),
|
||||
Primitive::Float(value) => visitor.f64(value),
|
||||
Primitive::Bool(value) => visitor.bool(value),
|
||||
Primitive::Char(value) => visitor.char(value),
|
||||
Primitive::Str(value) => visitor.str(value),
|
||||
Primitive::None => visitor.none(),
|
||||
},
|
||||
Inner::Fill(value) => value.fill(&mut Slot::new(visitor)),
|
||||
Inner::Debug(value) => visitor.debug(value),
|
||||
Inner::Display(value) => visitor.display(value),
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
Inner::Sval(value) => visitor.sval(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The internal serialization contract.
|
||||
pub(super) trait Visitor {
|
||||
fn debug(&mut self, v: &fmt::Debug) -> Result<(), Error>;
|
||||
fn display(&mut self, v: &fmt::Display) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{}", v))
|
||||
}
|
||||
|
||||
fn u64(&mut self, v: u64) -> Result<(), Error>;
|
||||
fn i64(&mut self, v: i64) -> Result<(), Error>;
|
||||
fn f64(&mut self, v: f64) -> Result<(), Error>;
|
||||
fn bool(&mut self, v: bool) -> Result<(), Error>;
|
||||
fn char(&mut self, v: char) -> Result<(), Error>;
|
||||
fn str(&mut self, v: &str) -> Result<(), Error>;
|
||||
fn none(&mut self) -> Result<(), Error>;
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
fn sval(&mut self, v: &sval_support::Value) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(super) enum Primitive<'v> {
|
||||
Signed(i64),
|
||||
Unsigned(u64),
|
||||
Float(f64),
|
||||
Bool(bool),
|
||||
Char(char),
|
||||
Str(&'v str),
|
||||
None,
|
||||
}
|
||||
|
||||
mod fmt_support {
|
||||
use super::*;
|
||||
|
||||
impl<'v> kv::Value<'v> {
|
||||
/// Get a value from a debuggable type.
|
||||
pub fn from_debug<T>(value: &'v T) -> Self
|
||||
where
|
||||
T: fmt::Debug,
|
||||
{
|
||||
kv::Value {
|
||||
inner: Inner::Debug(value),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a value from a displayable type.
|
||||
pub fn from_display<T>(value: &'v T) -> Self
|
||||
where
|
||||
T: fmt::Display,
|
||||
{
|
||||
kv::Value {
|
||||
inner: Inner::Display(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> fmt::Debug for kv::Value<'v> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.visit(&mut FmtVisitor(f))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> fmt::Display for kv::Value<'v> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.visit(&mut FmtVisitor(f))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct FmtVisitor<'a, 'b: 'a>(&'a mut fmt::Formatter<'b>);
|
||||
|
||||
impl<'a, 'b: 'a> Visitor for FmtVisitor<'a, 'b> {
|
||||
fn debug(&mut self, v: &fmt::Debug) -> Result<(), Error> {
|
||||
v.fmt(self.0)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn u64(&mut self, v: u64) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn i64(&mut self, v: i64) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn f64(&mut self, v: f64) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn bool(&mut self, v: bool) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn char(&mut self, v: char) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn str(&mut self, v: &str) -> Result<(), Error> {
|
||||
self.debug(&format_args!("{:?}", v))
|
||||
}
|
||||
|
||||
fn none(&mut self) -> Result<(), Error> {
|
||||
self.debug(&format_args!("None"))
|
||||
}
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
fn sval(&mut self, v: &sval_support::Value) -> Result<(), Error> {
|
||||
sval_support::fmt(self.0, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
pub(super) mod sval_support {
|
||||
use super::*;
|
||||
|
||||
extern crate sval;
|
||||
|
||||
impl<'v> kv::Value<'v> {
|
||||
/// Get a value from a structured type.
|
||||
pub fn from_sval<T>(value: &'v T) -> Self
|
||||
where
|
||||
T: sval::Value,
|
||||
{
|
||||
kv::Value {
|
||||
inner: Inner::Sval(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> sval::Value for kv::Value<'v> {
|
||||
fn stream(&self, s: &mut sval::value::Stream) -> sval::value::Result {
|
||||
self.visit(&mut SvalVisitor(s)).map_err(Error::into_sval)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub(in kv::value) use self::sval::Value;
|
||||
|
||||
pub(super) fn fmt(f: &mut fmt::Formatter, v: &sval::Value) -> Result<(), Error> {
|
||||
sval::fmt::debug(f, v)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl Error {
|
||||
fn from_sval(_: sval::value::Error) -> Self {
|
||||
Error::msg("`sval` serialization failed")
|
||||
}
|
||||
|
||||
fn into_sval(self) -> sval::value::Error {
|
||||
sval::value::Error::msg("`sval` serialization failed")
|
||||
}
|
||||
}
|
||||
|
||||
struct SvalVisitor<'a, 'b: 'a>(&'a mut sval::value::Stream<'b>);
|
||||
|
||||
impl<'a, 'b: 'a> Visitor for SvalVisitor<'a, 'b> {
|
||||
fn debug(&mut self, v: &fmt::Debug) -> Result<(), Error> {
|
||||
self.0.fmt(format_args!("{:?}", v)).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn u64(&mut self, v: u64) -> Result<(), Error> {
|
||||
self.0.u64(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn i64(&mut self, v: i64) -> Result<(), Error> {
|
||||
self.0.i64(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn f64(&mut self, v: f64) -> Result<(), Error> {
|
||||
self.0.f64(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn bool(&mut self, v: bool) -> Result<(), Error> {
|
||||
self.0.bool(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn char(&mut self, v: char) -> Result<(), Error> {
|
||||
self.0.char(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn str(&mut self, v: &str) -> Result<(), Error> {
|
||||
self.0.str(v).map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn none(&mut self) -> Result<(), Error> {
|
||||
self.0.none().map_err(Error::from_sval)
|
||||
}
|
||||
|
||||
fn sval(&mut self, v: &sval::Value) -> Result<(), Error> {
|
||||
self.0.any(v).map_err(Error::from_sval)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use kv::value::test::Token;
|
||||
|
||||
#[test]
|
||||
fn test_from_sval() {
|
||||
assert_eq!(kv::Value::from_sval(&42u64).to_token(), Token::Sval);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sval_structured() {
|
||||
let value = kv::Value::from(42u64);
|
||||
let expected = vec![sval::test::Token::Unsigned(42)];
|
||||
|
||||
assert_eq!(sval::test::tokens(value), expected);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,155 @@
|
|||
//! Structured values.
|
||||
|
||||
use std::fmt;
|
||||
|
||||
mod internal;
|
||||
mod impls;
|
||||
|
||||
#[cfg(test)]
|
||||
pub(in kv) mod test;
|
||||
|
||||
pub use kv::Error;
|
||||
|
||||
use self::internal::{Inner, Visitor, Primitive};
|
||||
|
||||
/// A type that can be converted into a [`Value`](struct.Value.html).
|
||||
pub trait ToValue {
|
||||
/// Perform the conversion.
|
||||
fn to_value(&self) -> Value;
|
||||
}
|
||||
|
||||
impl<'a, T> ToValue for &'a T
|
||||
where
|
||||
T: ToValue + ?Sized,
|
||||
{
|
||||
fn to_value(&self) -> Value {
|
||||
(**self).to_value()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> ToValue for Value<'v> {
|
||||
fn to_value(&self) -> Value {
|
||||
Value {
|
||||
inner: self.inner,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A type that requires extra work to convert into a [`Value`](struct.Value.html).
|
||||
///
|
||||
/// This trait is a more advanced initialization API than [`ToValue`](trait.ToValue.html).
|
||||
/// It's intended for erased values coming from other logging frameworks that may need
|
||||
/// to perform extra work to determine the concrete type to use.
|
||||
pub trait Fill {
|
||||
/// Fill a value.
|
||||
fn fill(&self, slot: &mut Slot) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
impl<'a, T> Fill for &'a T
|
||||
where
|
||||
T: Fill + ?Sized,
|
||||
{
|
||||
fn fill(&self, slot: &mut Slot) -> Result<(), Error> {
|
||||
(**self).fill(slot)
|
||||
}
|
||||
}
|
||||
|
||||
/// A value slot to fill using the [`Fill`](trait.Fill.html) trait.
|
||||
pub struct Slot<'a> {
|
||||
filled: bool,
|
||||
visitor: &'a mut Visitor,
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for Slot<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.debug_struct("Slot").finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Slot<'a> {
|
||||
fn new(visitor: &'a mut Visitor) -> Self {
|
||||
Slot {
|
||||
visitor,
|
||||
filled: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Fill the slot with a value.
|
||||
///
|
||||
/// The given value doesn't need to satisfy any particular lifetime constraints.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Calling `fill` more than once will panic.
|
||||
pub fn fill(&mut self, value: Value) -> Result<(), Error> {
|
||||
assert!(!self.filled, "the slot has already been filled");
|
||||
self.filled = true;
|
||||
|
||||
value.visit(self.visitor)
|
||||
}
|
||||
}
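// Illustrative sketch (not part of the vendored source): a hypothetical
// `ElapsedMillis` wrapper defers producing its `Value` until the slot is
// visited, which is the pattern `Fill` is intended for when bridging values
// in from another logging framework.
struct ElapsedMillis(u64);

impl Fill for ElapsedMillis {
    fn fill(&self, slot: &mut Slot) -> Result<(), Error> {
        // A slot may only be filled once; a second call would panic.
        slot.fill(Value::from(self.0))
    }
}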
|
||||
|
||||
/// A value in a structured key-value pair.
|
||||
pub struct Value<'v> {
|
||||
inner: Inner<'v>,
|
||||
}
|
||||
|
||||
impl<'v> Value<'v> {
|
||||
/// Get a value from an internal `Primitive`.
|
||||
fn from_primitive(value: Primitive<'v>) -> Self {
|
||||
Value {
|
||||
inner: Inner::Primitive(value),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a value from a fillable slot.
|
||||
pub fn from_fill<T>(value: &'v T) -> Self
|
||||
where
|
||||
T: Fill,
|
||||
{
|
||||
Value {
|
||||
inner: Inner::Fill(value),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit(&self, visitor: &mut Visitor) -> Result<(), Error> {
|
||||
self.inner.visit(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn fill_value() {
|
||||
struct TestFill;
|
||||
|
||||
impl Fill for TestFill {
|
||||
fn fill(&self, slot: &mut Slot) -> Result<(), Error> {
|
||||
let dbg: &fmt::Debug = &1;
|
||||
|
||||
slot.fill(Value::from_debug(&dbg))
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!("1", Value::from_fill(&TestFill).to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn fill_multiple_times_panics() {
|
||||
struct BadFill;
|
||||
|
||||
impl Fill for BadFill {
|
||||
fn fill(&self, slot: &mut Slot) -> Result<(), Error> {
|
||||
slot.fill(42.into())?;
|
||||
slot.fill(6789.into())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let _ = Value::from_fill(&BadFill).to_string();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,81 @@
|
|||
// Test support for inspecting Values
|
||||
|
||||
use std::fmt;
|
||||
use std::str;
|
||||
|
||||
use super::{Value, Error};
|
||||
use super::internal;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub(in kv) enum Token {
|
||||
U64(u64),
|
||||
I64(i64),
|
||||
F64(f64),
|
||||
Char(char),
|
||||
Bool(bool),
|
||||
Str(String),
|
||||
None,
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
Sval,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl<'v> Value<'v> {
|
||||
pub(in kv) fn to_token(&self) -> Token {
|
||||
struct TestVisitor(Option<Token>);
|
||||
|
||||
impl internal::Visitor for TestVisitor {
|
||||
fn debug(&mut self, v: &fmt::Debug) -> Result<(), Error> {
|
||||
self.0 = Some(Token::Str(format!("{:?}", v)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn u64(&mut self, v: u64) -> Result<(), Error> {
|
||||
self.0 = Some(Token::U64(v));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn i64(&mut self, v: i64) -> Result<(), Error> {
|
||||
self.0 = Some(Token::I64(v));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn f64(&mut self, v: f64) -> Result<(), Error> {
|
||||
self.0 = Some(Token::F64(v));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn bool(&mut self, v: bool) -> Result<(), Error> {
|
||||
self.0 = Some(Token::Bool(v));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn char(&mut self, v: char) -> Result<(), Error> {
|
||||
self.0 = Some(Token::Char(v));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn str(&mut self, v: &str) -> Result<(), Error> {
|
||||
self.0 = Some(Token::Str(v.into()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn none(&mut self) -> Result<(), Error> {
|
||||
self.0 = Some(Token::None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(feature = "kv_unstable_sval")]
|
||||
fn sval(&mut self, _: &internal::sval_support::Value) -> Result<(), Error> {
|
||||
self.0 = Some(Token::Sval);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let mut visitor = TestVisitor(None);
|
||||
self.visit(&mut visitor).unwrap();
|
||||
|
||||
visitor.0.unwrap()
|
||||
}
|
||||
}
|
|
@ -48,14 +48,12 @@
|
|||
//!
|
||||
//! ### Examples
|
||||
//!
|
||||
//! ```rust
|
||||
//! # #![allow(unstable)]
|
||||
//! #[macro_use]
|
||||
//! extern crate log;
|
||||
//!
|
||||
//! ```edition2018
|
||||
//! # #[derive(Debug)] pub struct Yak(String);
|
||||
//! # impl Yak { fn shave(&mut self, _: u32) {} }
|
||||
//! # fn find_a_razor() -> Result<u32, u32> { Ok(1) }
|
||||
//! use log::{info, warn};
|
||||
//!
|
||||
//! pub fn shave_the_yak(yak: &mut Yak) {
|
||||
//! info!(target: "yak_events", "Commencing yak shaving for {:?}", yak);
|
||||
//!
|
||||
|
@ -115,9 +113,7 @@
|
|||
//! logs all messages at the [`Error`][level_link], [`Warn`][level_link] or
|
||||
//! [`Info`][level_link] levels to stdout:
|
||||
//!
|
||||
//! ```rust
|
||||
//! extern crate log;
|
||||
//!
|
||||
//! ```edition2018
|
||||
//! use log::{Record, Level, Metadata};
|
||||
//!
|
||||
//! struct SimpleLogger;
|
||||
|
@ -150,8 +146,7 @@
|
|||
//! provide a function that wraps a call to [`set_logger`] and
|
||||
//! [`set_max_level`], handling initialization of the logger:
|
||||
//!
|
||||
//! ```rust
|
||||
//! # extern crate log;
|
||||
//! ```edition2018
|
||||
//! # use log::{Level, Metadata};
|
||||
//! # struct SimpleLogger;
|
||||
//! # impl log::Log for SimpleLogger {
|
||||
|
@ -181,8 +176,7 @@
|
|||
//! identical to `set_logger` except that it takes a `Box<Log>` rather than a
|
||||
//! `&'static Log`:
|
||||
//!
|
||||
//! ```rust
|
||||
//! # extern crate log;
|
||||
//! ```edition2018
|
||||
//! # use log::{Level, LevelFilter, Log, SetLoggerError, Metadata};
|
||||
//! # struct SimpleLogger;
|
||||
//! # impl log::Log for SimpleLogger {
|
||||
|
@ -221,6 +215,9 @@
|
|||
//! These features control the value of the `STATIC_MAX_LEVEL` constant. The logging macros check
|
||||
//! this value before logging a message. By default, no levels are disabled.
|
||||
//!
|
||||
//! Libraries should avoid using the max level features because they're global and can't be changed
|
||||
//! once they're set.
|
||||
//!
|
||||
//! For example, a crate can disable trace level logs in debug builds and trace, debug, and info
|
||||
//! level logs in release builds with the following configuration:
|
||||
//!
|
||||
|
@ -270,17 +267,17 @@
|
|||
#![doc(
|
||||
html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
|
||||
html_favicon_url = "https://www.rust-lang.org/favicon.ico",
|
||||
html_root_url = "https://docs.rs/log/0.4.6"
|
||||
html_root_url = "https://docs.rs/log/0.4.8"
|
||||
)]
|
||||
#![warn(missing_docs)]
|
||||
#![deny(missing_debug_implementations)]
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
|
||||
// When compiled for the rustc compiler itself we want to make sure that this is
|
||||
// an unstable crate
|
||||
#![cfg_attr(rustbuild, feature(staged_api, rustc_private))]
|
||||
#![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))]
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
#[cfg(all(not(feature = "std"), not(test)))]
|
||||
extern crate core as std;
|
||||
|
||||
#[macro_use]
|
||||
|
@ -292,15 +289,26 @@ use std::error;
|
|||
use std::fmt;
|
||||
use std::mem;
|
||||
use std::str::FromStr;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
// FIXME: ATOMIC_USIZE_INIT was deprecated in rust 1.34. Silence the
|
||||
// deprecation warning until our MSRV >= 1.24, where we can use the
|
||||
// replacement const fn `AtomicUsize::new`
|
||||
#[allow(deprecated)]
|
||||
use std::sync::atomic::ATOMIC_USIZE_INIT;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
mod serde;
|
||||
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
pub mod kv;
|
||||
|
||||
// The LOGGER static holds a pointer to the global logger. It is protected by
|
||||
// the STATE static which determines whether LOGGER has been initialized yet.
|
||||
static mut LOGGER: &'static Log = &NopLogger;
|
||||
|
||||
#[allow(deprecated)]
|
||||
static STATE: AtomicUsize = ATOMIC_USIZE_INIT;
|
||||
|
||||
// There are three different states that we care about: the logger's
|
||||
|
@ -310,6 +318,7 @@ const UNINITIALIZED: usize = 0;
|
|||
const INITIALIZING: usize = 1;
|
||||
const INITIALIZED: usize = 2;
|
||||
|
||||
#[allow(deprecated)]
|
||||
static MAX_LOG_LEVEL_FILTER: AtomicUsize = ATOMIC_USIZE_INIT;
|
||||
|
||||
static LOG_LEVEL_NAMES: [&'static str; 6] = ["OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
|
||||
|
@ -629,7 +638,7 @@ impl FromStr for LevelFilter {
|
|||
|
||||
impl fmt::Display for LevelFilter {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(fmt, "{}", LOG_LEVEL_NAMES[*self as usize])
|
||||
fmt.pad(LOG_LEVEL_NAMES[*self as usize])
|
||||
}
|
||||
}
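// Illustrative sketch (not part of the vendored source): switching from
// `write!` to `Formatter::pad` makes width and alignment flags take effect,
// for example:
//
//     assert_eq!(format!("{:<7}", LevelFilter::Info), "INFO   ");
//     assert_eq!(format!("{:>7}", LevelFilter::Info), "   INFO");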
|
||||
|
||||
|
@ -660,6 +669,22 @@ impl LevelFilter {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
|
||||
enum MaybeStaticStr<'a> {
|
||||
Static(&'static str),
|
||||
Borrowed(&'a str),
|
||||
}
|
||||
|
||||
impl<'a> MaybeStaticStr<'a> {
|
||||
#[inline]
|
||||
fn get(&self) -> &'a str {
|
||||
match *self {
|
||||
MaybeStaticStr::Static(s) => s,
|
||||
MaybeStaticStr::Borrowed(s) => s,
|
||||
}
|
||||
}
|
||||
}
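// Illustrative sketch (not part of the vendored source): remembering whether
// the string was `'static` is what lets `Record::module_path_static` and
// `Record::file_static` hand back the longer lifetime when it is available.
//
//     // `static_only` is a hypothetical consumer that insists on `'static`.
//     fn static_only(_: &'static str) {}
//
//     let path = MaybeStaticStr::Static(module_path!());
//     if let MaybeStaticStr::Static(s) = path {
//         static_only(s);
//     }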
|
||||
|
||||
/// The "payload" of a log message.
|
||||
///
|
||||
/// # Use
|
||||
|
@ -678,8 +703,7 @@ impl LevelFilter {
|
|||
/// The following example shows a simple logger that displays the level,
|
||||
/// module path, and message of any `Record` that is passed to it.
|
||||
///
|
||||
/// ```rust
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// struct SimpleLogger;
|
||||
///
|
||||
/// impl log::Log for SimpleLogger {
|
||||
|
@ -710,9 +734,28 @@ impl LevelFilter {
|
|||
pub struct Record<'a> {
|
||||
metadata: Metadata<'a>,
|
||||
args: fmt::Arguments<'a>,
|
||||
module_path: Option<&'a str>,
|
||||
file: Option<&'a str>,
|
||||
module_path: Option<MaybeStaticStr<'a>>,
|
||||
file: Option<MaybeStaticStr<'a>>,
|
||||
line: Option<u32>,
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
key_values: KeyValues<'a>,
|
||||
}
|
||||
|
||||
// This wrapper type is only needed so we can
|
||||
// `#[derive(Debug)]` on `Record`. It also
|
||||
// provides a useful `Debug` implementation for
|
||||
// the underlying `Source`.
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
#[derive(Clone)]
|
||||
struct KeyValues<'a>(&'a kv::Source);
|
||||
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
impl<'a> fmt::Debug for KeyValues<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut visitor = f.debug_map();
|
||||
self.0.visit(&mut visitor)?;
|
||||
visitor.finish()
|
||||
}
|
||||
}
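// Illustrative sketch (not part of the vendored source): because the wrapper
// renders the erased `Source` through `debug_map`, a record's key-values show
// up as a map inside the record's `Debug` output, e.g.:
//
//     let kvs: &[(&str, i32)] = &[("a", 1), ("b", 2)];
//     let record = Record::builder().key_values(&kvs).build();
//     // `{:?}` on `record` now includes something like `{"a": 1, "b": 2}`.
//     println!("{:?}", record);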
|
||||
|
||||
impl<'a> Record<'a> {
|
||||
|
@ -749,13 +792,31 @@ impl<'a> Record<'a> {
|
|||
/// The module path of the message.
|
||||
#[inline]
|
||||
pub fn module_path(&self) -> Option<&'a str> {
|
||||
self.module_path
|
||||
self.module_path.map(|s| s.get())
|
||||
}
|
||||
|
||||
/// The module path of the message, if it is a `'static` string.
|
||||
#[inline]
|
||||
pub fn module_path_static(&self) -> Option<&'static str> {
|
||||
match self.module_path {
|
||||
Some(MaybeStaticStr::Static(s)) => Some(s),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// The source file containing the message.
|
||||
#[inline]
|
||||
pub fn file(&self) -> Option<&'a str> {
|
||||
self.file
|
||||
self.file.map(|s| s.get())
|
||||
}
|
||||
|
||||
/// The source file containing the message, if it is a `'static` string.
|
||||
#[inline]
|
||||
pub fn file_static(&self) -> Option<&'static str> {
|
||||
match self.file {
|
||||
Some(MaybeStaticStr::Static(s)) => Some(s),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// The line containing the message.
|
||||
|
@ -763,6 +824,32 @@ impl<'a> Record<'a> {
|
|||
pub fn line(&self) -> Option<u32> {
|
||||
self.line
|
||||
}
|
||||
|
||||
/// The structured key-value pairs associated with the message.
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
#[inline]
|
||||
pub fn key_values(&self) -> &kv::Source {
|
||||
self.key_values.0
|
||||
}
|
||||
|
||||
/// Create a new [`Builder`](struct.Builder.html) based on this record.
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
#[inline]
|
||||
pub fn to_builder(&self) -> RecordBuilder {
|
||||
RecordBuilder {
|
||||
record: Record {
|
||||
metadata: Metadata {
|
||||
level: self.metadata.level,
|
||||
target: self.metadata.target,
|
||||
},
|
||||
args: self.args,
|
||||
module_path: self.module_path,
|
||||
file: self.file,
|
||||
line: self.line,
|
||||
key_values: self.key_values.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Builder for [`Record`](struct.Record.html).
|
||||
|
@ -774,7 +861,7 @@ impl<'a> Record<'a> {
|
|||
/// # Examples
|
||||
///
|
||||
///
|
||||
/// ```rust
|
||||
/// ```edition2018
|
||||
/// use log::{Level, Record};
|
||||
///
|
||||
/// let record = Record::builder()
|
||||
|
@ -789,7 +876,7 @@ impl<'a> Record<'a> {
|
|||
///
|
||||
/// Alternatively, use [`MetadataBuilder`](struct.MetadataBuilder.html):
|
||||
///
|
||||
/// ```rust
|
||||
/// ```edition2018
|
||||
/// use log::{Record, Level, MetadataBuilder};
|
||||
///
|
||||
/// let error_metadata = MetadataBuilder::new()
|
||||
|
@ -825,7 +912,20 @@ impl<'a> RecordBuilder<'a> {
|
|||
/// [`Metadata::builder().build()`]: struct.MetadataBuilder.html#method.build
|
||||
#[inline]
|
||||
pub fn new() -> RecordBuilder<'a> {
|
||||
RecordBuilder {
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
return RecordBuilder {
|
||||
record: Record {
|
||||
args: format_args!(""),
|
||||
metadata: Metadata::builder().build(),
|
||||
module_path: None,
|
||||
file: None,
|
||||
line: None,
|
||||
key_values: KeyValues(&Option::None::<(kv::Key, kv::Value)>),
|
||||
},
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "kv_unstable"))]
|
||||
return RecordBuilder {
|
||||
record: Record {
|
||||
args: format_args!(""),
|
||||
metadata: Metadata::builder().build(),
|
||||
|
@ -833,7 +933,7 @@ impl<'a> RecordBuilder<'a> {
|
|||
file: None,
|
||||
line: None,
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Set [`args`](struct.Record.html#method.args).
|
||||
|
@ -867,14 +967,28 @@ impl<'a> RecordBuilder<'a> {
|
|||
/// Set [`module_path`](struct.Record.html#method.module_path)
|
||||
#[inline]
|
||||
pub fn module_path(&mut self, path: Option<&'a str>) -> &mut RecordBuilder<'a> {
|
||||
self.record.module_path = path;
|
||||
self.record.module_path = path.map(MaybeStaticStr::Borrowed);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set [`module_path`](struct.Record.html#method.module_path) to a `'static` string
|
||||
#[inline]
|
||||
pub fn module_path_static(&mut self, path: Option<&'static str>) -> &mut RecordBuilder<'a> {
|
||||
self.record.module_path = path.map(MaybeStaticStr::Static);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set [`file`](struct.Record.html#method.file)
|
||||
#[inline]
|
||||
pub fn file(&mut self, file: Option<&'a str>) -> &mut RecordBuilder<'a> {
|
||||
self.record.file = file;
|
||||
self.record.file = file.map(MaybeStaticStr::Borrowed);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set [`file`](struct.Record.html#method.file) to a `'static` string.
|
||||
#[inline]
|
||||
pub fn file_static(&mut self, file: Option<&'static str>) -> &mut RecordBuilder<'a> {
|
||||
self.record.file = file.map(MaybeStaticStr::Static);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -885,6 +999,14 @@ impl<'a> RecordBuilder<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
/// Set [`key_values`](struct.Record.html#method.key_values)
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
#[inline]
|
||||
pub fn key_values(&mut self, kvs: &'a kv::Source) -> &mut RecordBuilder<'a> {
|
||||
self.record.key_values = KeyValues(kvs);
|
||||
self
|
||||
}
|
||||
|
||||
/// Invoke the builder and return a `Record`
|
||||
#[inline]
|
||||
pub fn build(&self) -> Record<'a> {
|
||||
|
@ -910,10 +1032,7 @@ impl<'a> RecordBuilder<'a> {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// #
|
||||
/// ```edition2018
|
||||
/// use log::{Record, Level, Metadata};
|
||||
///
|
||||
/// struct MyLogger;
|
||||
|
@ -967,7 +1086,7 @@ impl<'a> Metadata<'a> {
|
|||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// ```edition2018
|
||||
/// let target = "myApp";
|
||||
/// use log::{Level, MetadataBuilder};
|
||||
/// let metadata = MetadataBuilder::new()
|
||||
|
@ -1090,7 +1209,7 @@ pub fn max_level() -> LevelFilter {
|
|||
/// An error is returned if a logger has already been set.
|
||||
///
|
||||
/// [`set_logger`]: fn.set_logger.html
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(all(feature = "std", atomic_cas))]
|
||||
pub fn set_boxed_logger(logger: Box<Log>) -> Result<(), SetLoggerError> {
|
||||
set_logger_inner(|| unsafe { &*Box::into_raw(logger) })
|
||||
}
|
||||
|
@ -1104,17 +1223,21 @@ pub fn set_boxed_logger(logger: Box<Log>) -> Result<(), SetLoggerError> {
|
|||
/// implementations should provide an initialization method that installs the
|
||||
/// logger internally.
|
||||
///
|
||||
/// # Availability
|
||||
///
|
||||
/// This method is available even when the `std` feature is disabled. However,
|
||||
/// it is currently unavailable on `thumbv6` targets, which lack support for
|
||||
/// some atomic operations which are used by this function. Even on those
|
||||
/// targets, [`set_logger_racy`] will be available.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// An error is returned if a logger has already been set.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// #
|
||||
/// use log::{Record, Level, Metadata, LevelFilter};
|
||||
/// ```edition2018
|
||||
/// use log::{error, info, warn, Record, Level, Metadata, LevelFilter};
|
||||
///
|
||||
/// static MY_LOGGER: MyLogger = MyLogger;
|
||||
///
|
||||
|
@ -1142,10 +1265,14 @@ pub fn set_boxed_logger(logger: Box<Log>) -> Result<(), SetLoggerError> {
|
|||
/// error!("oops");
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// [`set_logger_racy`]: fn.set_logger_racy.html
|
||||
#[cfg(atomic_cas)]
|
||||
pub fn set_logger(logger: &'static Log) -> Result<(), SetLoggerError> {
|
||||
set_logger_inner(|| logger)
|
||||
}
|
||||
|
||||
#[cfg(atomic_cas)]
|
||||
fn set_logger_inner<F>(make_logger: F) -> Result<(), SetLoggerError>
|
||||
where
|
||||
F: FnOnce() -> &'static Log,
|
||||
|
@ -1166,6 +1293,40 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// A thread-unsafe version of [`set_logger`].
|
||||
///
|
||||
/// This function is available on all platforms, even those that do not have
|
||||
/// support for atomics that is needed by [`set_logger`].
|
||||
///
|
||||
/// In almost all cases, [`set_logger`] should be preferred.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// This function is only safe to call when no other logger initialization
|
||||
/// function is called while this function still executes.
|
||||
///
|
||||
/// This can be upheld by (for example) making sure that **there are no other
|
||||
/// threads**, and (on embedded) that **interrupts are disabled**.
|
||||
///
|
||||
/// It is safe to use other logging functions while this function runs
|
||||
/// (including all logging macros).
|
||||
///
|
||||
/// [`set_logger`]: fn.set_logger.html
|
||||
pub unsafe fn set_logger_racy(logger: &'static Log) -> Result<(), SetLoggerError> {
|
||||
match STATE.load(Ordering::SeqCst) {
|
||||
UNINITIALIZED => {
|
||||
LOGGER = logger;
|
||||
STATE.store(INITIALIZED, Ordering::SeqCst);
|
||||
Ok(())
|
||||
}
|
||||
INITIALIZING => {
|
||||
// This is just plain UB, since we were racing another initialization function
|
||||
unreachable!("set_logger_racy must not be used with other initialization functions")
|
||||
}
|
||||
_ => Err(SetLoggerError(())),
|
||||
}
|
||||
}
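// Illustrative sketch (not part of the vendored source): on a target without
// the required atomics, the racy variant can still be used while the program
// is single-threaded, e.g. first thing in `main` before any threads exist.
// `MyLogger` is a hypothetical `Log` implementation.
//
//     static MY_LOGGER: MyLogger = MyLogger;
//
//     fn init() {
//         // Safety: called once, before any other logger initialization
//         // function can run concurrently.
//         unsafe { log::set_logger_racy(&MY_LOGGER).unwrap() };
//         log::set_max_level(log::LevelFilter::Info);
//     }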
|
||||
|
||||
/// The type returned by [`set_logger`] if [`set_logger`] has already been called.
|
||||
///
|
||||
/// [`set_logger`]: fn.set_logger.html
|
||||
|
@ -1227,15 +1388,15 @@ pub fn logger() -> &'static Log {
|
|||
pub fn __private_api_log(
|
||||
args: fmt::Arguments,
|
||||
level: Level,
|
||||
&(target, module_path, file, line): &(&str, &str, &str, u32),
|
||||
&(target, module_path, file, line): &(&str, &'static str, &'static str, u32),
|
||||
) {
|
||||
logger().log(
|
||||
&Record::builder()
|
||||
.args(args)
|
||||
.level(level)
|
||||
.target(target)
|
||||
.module_path(Some(module_path))
|
||||
.file(Some(file))
|
||||
.module_path_static(Some(module_path))
|
||||
.file_static(Some(file))
|
||||
.line(Some(line))
|
||||
.build(),
|
||||
);
|
||||
|
@ -1466,4 +1627,42 @@ mod tests {
|
|||
assert_eq!(record_test.file(), Some("bar"));
|
||||
assert_eq!(record_test.line(), Some(30));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "kv_unstable")]
|
||||
fn test_record_key_values_builder() {
|
||||
use super::Record;
|
||||
use kv::{self, Visitor};
|
||||
|
||||
struct TestVisitor {
|
||||
seen_pairs: usize,
|
||||
}
|
||||
|
||||
impl<'kvs> Visitor<'kvs> for TestVisitor {
|
||||
fn visit_pair(
|
||||
&mut self,
|
||||
_: kv::Key<'kvs>,
|
||||
_: kv::Value<'kvs>
|
||||
) -> Result<(), kv::Error> {
|
||||
self.seen_pairs += 1;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let kvs: &[(&str, i32)] = &[
|
||||
("a", 1),
|
||||
("b", 2)
|
||||
];
|
||||
let record_test = Record::builder()
|
||||
.key_values(&kvs)
|
||||
.build();
|
||||
|
||||
let mut visitor = TestVisitor {
|
||||
seen_pairs: 0,
|
||||
};
|
||||
|
||||
record_test.key_values().visit(&mut visitor).unwrap();
|
||||
|
||||
assert_eq!(2, visitor.seen_pairs);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,10 +15,8 @@
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// use log::Level;
|
||||
/// ```edition2018
|
||||
/// use log::{log, Level};
|
||||
///
|
||||
/// # fn main() {
|
||||
/// let data = (42, "Forty-two");
|
||||
|
@ -48,9 +46,9 @@ macro_rules! log {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::error;
|
||||
///
|
||||
/// # fn main() {
|
||||
/// let (err_info, port) = ("No connection", 22);
|
||||
///
|
||||
|
@ -60,11 +58,11 @@ macro_rules! log {
|
|||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! error {
|
||||
(target: $target:expr, $($arg:tt)*) => (
|
||||
log!(target: $target, $crate::Level::Error, $($arg)*);
|
||||
(target: $target:expr, $($arg:tt)+) => (
|
||||
log!(target: $target, $crate::Level::Error, $($arg)+);
|
||||
);
|
||||
($($arg:tt)*) => (
|
||||
log!($crate::Level::Error, $($arg)*);
|
||||
($($arg:tt)+) => (
|
||||
log!($crate::Level::Error, $($arg)+);
|
||||
)
|
||||
}
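// Illustrative sketch (not part of the vendored source): with the `+`
// repetition the macro now requires at least one argument token, so these
// still expand while a bare `error!()` no longer matches either arm.
//
//     error!("hang up: {}", err_info);
//     error!(target: "net", "hang up on port {}: {}", port, err_info);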
|
||||
|
||||
|
@ -72,9 +70,9 @@ macro_rules! error {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::warn;
|
||||
///
|
||||
/// # fn main() {
|
||||
/// let warn_description = "Invalid Input";
|
||||
///
|
||||
|
@ -84,11 +82,11 @@ macro_rules! error {
|
|||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! warn {
|
||||
(target: $target:expr, $($arg:tt)*) => (
|
||||
log!(target: $target, $crate::Level::Warn, $($arg)*);
|
||||
(target: $target:expr, $($arg:tt)+) => (
|
||||
log!(target: $target, $crate::Level::Warn, $($arg)+);
|
||||
);
|
||||
($($arg:tt)*) => (
|
||||
log!($crate::Level::Warn, $($arg)*);
|
||||
($($arg:tt)+) => (
|
||||
log!($crate::Level::Warn, $($arg)+);
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -96,9 +94,9 @@ macro_rules! warn {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::info;
|
||||
///
|
||||
/// # fn main() {
|
||||
/// # struct Connection { port: u32, speed: f32 }
|
||||
/// let conn_info = Connection { port: 40, speed: 3.20 };
|
||||
|
@ -110,11 +108,11 @@ macro_rules! warn {
|
|||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! info {
|
||||
(target: $target:expr, $($arg:tt)*) => (
|
||||
log!(target: $target, $crate::Level::Info, $($arg)*);
|
||||
(target: $target:expr, $($arg:tt)+) => (
|
||||
log!(target: $target, $crate::Level::Info, $($arg)+);
|
||||
);
|
||||
($($arg:tt)*) => (
|
||||
log!($crate::Level::Info, $($arg)*);
|
||||
($($arg:tt)+) => (
|
||||
log!($crate::Level::Info, $($arg)+);
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -122,9 +120,9 @@ macro_rules! info {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::debug;
|
||||
///
|
||||
/// # fn main() {
|
||||
/// # struct Position { x: f32, y: f32 }
|
||||
/// let pos = Position { x: 3.234, y: -1.223 };
|
||||
|
@ -135,11 +133,11 @@ macro_rules! info {
|
|||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! debug {
|
||||
(target: $target:expr, $($arg:tt)*) => (
|
||||
log!(target: $target, $crate::Level::Debug, $($arg)*);
|
||||
(target: $target:expr, $($arg:tt)+) => (
|
||||
log!(target: $target, $crate::Level::Debug, $($arg)+);
|
||||
);
|
||||
($($arg:tt)*) => (
|
||||
log!($crate::Level::Debug, $($arg)*);
|
||||
($($arg:tt)+) => (
|
||||
log!($crate::Level::Debug, $($arg)+);
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -147,9 +145,9 @@ macro_rules! debug {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::trace;
|
||||
///
|
||||
/// # fn main() {
|
||||
/// # struct Position { x: f32, y: f32 }
|
||||
/// let pos = Position { x: 3.234, y: -1.223 };
|
||||
|
@ -162,11 +160,11 @@ macro_rules! debug {
|
|||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! trace {
|
||||
(target: $target:expr, $($arg:tt)*) => (
|
||||
log!(target: $target, $crate::Level::Trace, $($arg)*);
|
||||
(target: $target:expr, $($arg:tt)+) => (
|
||||
log!(target: $target, $crate::Level::Trace, $($arg)+);
|
||||
);
|
||||
($($arg:tt)*) => (
|
||||
log!($crate::Level::Trace, $($arg)*);
|
||||
($($arg:tt)+) => (
|
||||
log!($crate::Level::Trace, $($arg)+);
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -178,10 +176,9 @@ macro_rules! trace {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// # #[macro_use]
|
||||
/// # extern crate log;
|
||||
/// ```edition2018
|
||||
/// use log::Level::Debug;
|
||||
/// use log::{debug, log_enabled};
|
||||
///
|
||||
/// # fn foo() {
|
||||
/// if log_enabled!(Debug) {
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
#![cfg(feature = "serde")]
|
||||
|
||||
extern crate serde;
|
||||
use self::serde::de::{
|
||||
Deserialize, DeserializeSeed, Deserializer, EnumAccess, Error, Unexpected, VariantAccess,
|
||||
Visitor,
|
||||
};
|
||||
use self::serde::ser::{Serialize, Serializer};
|
||||
use self::serde::de::{Deserialize, DeserializeSeed, Deserializer, Visitor, EnumAccess,
|
||||
Unexpected, VariantAccess, Error};
|
||||
|
||||
use {Level, LevelFilter, LOG_LEVEL_NAMES};
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
use log::{Level, LevelFilter, Log, Metadata, Record};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use log::{Level, LevelFilter, Log, Record, Metadata};
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
use log::set_boxed_logger;
|
||||
|
@ -30,7 +30,9 @@ impl Log for Logger {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
let me = Arc::new(State { last_log: Mutex::new(None) });
|
||||
let me = Arc::new(State {
|
||||
last_log: Mutex::new(None),
|
||||
});
|
||||
let a = me.clone();
|
||||
set_boxed_logger(Box::new(Logger(me))).unwrap();
|
||||
|
||||
|
@ -56,7 +58,11 @@ fn test(a: &State, filter: LevelFilter) {
|
|||
last(&a, t(Level::Trace, filter));
|
||||
|
||||
fn t(lvl: Level, filter: LevelFilter) -> Option<Level> {
|
||||
if lvl <= filter { Some(lvl) } else { None }
|
||||
if lvl <= filter {
|
||||
Some(lvl)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,3 +1,31 @@
|
|||
1.3.3 (2020-01-09)
|
||||
==================
|
||||
This is a small maintenance release that upgrades the dependency on
|
||||
`thread_local` from `0.3` to `1.0`. The minimum supported Rust version remains
|
||||
at Rust 1.28.
|
||||
|
||||
|
||||
1.3.2 (2020-01-09)
|
||||
==================
|
||||
This is a small maintenance release with some house cleaning and bug fixes.
|
||||
|
||||
New features:
|
||||
|
||||
* [FEATURE #631](https://github.com/rust-lang/regex/issues/631):
|
||||
Add a `Match::range` method and a `From<Match> for Range` impl.
|
||||
|
||||
Bug fixes:
|
||||
|
||||
* [BUG #521](https://github.com/rust-lang/regex/issues/521):
|
||||
Corrects `/-/.splitn("a", 2)` to return `["a"]` instead of `["a", ""]`.
|
||||
* [BUG #594](https://github.com/rust-lang/regex/pull/594):
|
||||
Improve error reporting when writing `\p\`.
|
||||
* [BUG #627](https://github.com/rust-lang/regex/issues/627):
|
||||
Corrects `/-/.split("a-")` to return `["a", ""]` instead of `["a"]`.
|
||||
* [BUG #633](https://github.com/rust-lang/regex/pull/633):
|
||||
Squash deprecation warnings for the `std::error::Error::description` method.
|
||||
|
||||
|
||||
1.3.1 (2019-09-04)
|
||||
==================
|
||||
This is a maintenance release with no changes in order to try to work-around
|
||||
|
|
|
@ -4,236 +4,234 @@
|
|||
name = "aho-corasick"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
|
||||
dependencies = [
|
||||
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "0.1.6"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.1.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
|
||||
|
||||
[[package]]
|
||||
name = "cloudabi"
|
||||
version = "0.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
|
||||
dependencies = [
|
||||
"bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "doc-comment"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
|
||||
|
||||
[[package]]
|
||||
name = "fuchsia-cprng"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.62"
|
||||
version = "0.2.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
|
||||
|
||||
[[package]]
|
||||
name = "quickcheck"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c35d9c36a562f37eca96e79f66d5fd56eefbc22560dacc4a864cabd2d277456"
|
||||
dependencies = [
|
||||
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand",
|
||||
"rand_core 0.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.6.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
dependencies = [
|
||||
"autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"autocfg",
|
||||
"libc",
|
||||
"rand_chacha",
|
||||
"rand_core 0.4.2",
|
||||
"rand_hc",
|
||||
"rand_isaac",
|
||||
"rand_jitter",
|
||||
"rand_os",
|
||||
"rand_pcg",
|
||||
"rand_xorshift",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
||||
dependencies = [
|
||||
"autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"autocfg",
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
dependencies = [
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
|
||||
|
||||
[[package]]
|
||||
name = "rand_hc"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_isaac"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_jitter"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
|
||||
dependencies = [
|
||||
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc",
|
||||
"rand_core 0.4.2",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_os"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
|
||||
dependencies = [
|
||||
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cloudabi",
|
||||
"fuchsia-cprng",
|
||||
"libc",
|
||||
"rand_core 0.4.2",
|
||||
"rdrand",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_pcg"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
|
||||
dependencies = [
|
||||
"autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"autocfg",
|
||||
"rand_core 0.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_xorshift"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rdrand"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.3.1"
|
||||
version = "1.3.3"
|
||||
dependencies = [
|
||||
"aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quickcheck 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"aho-corasick",
|
||||
"doc-comment",
|
||||
"lazy_static",
|
||||
"memchr",
|
||||
"quickcheck",
|
||||
"rand",
|
||||
"regex-syntax",
|
||||
"thread_local",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.12"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e734e891f5b408a29efbf8309e656876276f49ab6a6ac208600b4419bd893d90"
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "0.3.6"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88ddf1ad580c7e3d1efff877d972bcc93f995556b9087a5a259630985c88ceab"
|
||||
dependencies = [
|
||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[metadata]
|
||||
"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
|
||||
"checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"
|
||||
"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd"
|
||||
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
|
||||
"checksum doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
|
||||
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
|
||||
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba"
|
||||
"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e"
|
||||
"checksum quickcheck 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9c35d9c36a562f37eca96e79f66d5fd56eefbc22560dacc4a864cabd2d277456"
|
||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
|
||||
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
|
||||
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
|
||||
"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
|
||||
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
|
||||
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
|
||||
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
|
||||
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
|
||||
"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
|
||||
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
|
||||
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
|
||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
|
|
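Reading note for the lockfile hunk above (inferred from the hunk itself, not stated in the commit message): each `[[package]]` block gains an inline `checksum = "…"` field, the `dependencies` entries shrink from the long `"name version (registry+…)"` form to bare crate names (a version is kept only where two versions coexist, as with `rand_core 0.3.1` and `rand_core 0.4.2`), and the trailing `[metadata]` checksum table is dropped. This is consistent with the newer Cargo.lock layout that current `cargo update` / `mach vendor rust` runs emit. The hunks below then adjust the vendored regex crate's Cargo.toml and source to match the bumped regex 1.3.3 and thread_local 1.0 versions.
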
@@ -12,7 +12,7 @@

[package]
name = "regex"
version = "1.3.1"
version = "1.3.3"
authors = ["The Rust Project Developers"]
exclude = ["/.travis.yml", "/appveyor.yml", "/ci/*", "/scripts/*"]
autotests = false
@@ -21,17 +21,17 @@ homepage = "https://github.com/rust-lang/regex"
documentation = "https://docs.rs/regex"
readme = "README.md"
categories = ["text-processing"]
license = "MIT/Apache-2.0"
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/regex"
[profile.test]
debug = true

[profile.bench]
debug = true

[profile.release]
debug = true

[profile.test]
debug = true

[lib]
doctest = false
bench = false
@@ -84,7 +84,7 @@ version = "0.6.12"
default-features = false

[dependencies.thread_local]
version = "0.3.6"
version = "1"
optional = true
[dev-dependencies.doc-comment]
version = "0.3"

@@ -26,7 +26,7 @@ mod imp {
    }

    pub fn get_or(&self, create: impl FnOnce() -> T) -> CachedGuard<T> {
        CachedGuard(self.0.get_or(|| Box::new(create())))
        CachedGuard(self.0.get_or(|| create()))
    }
}
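The source hunk above is the code-level consequence of the thread_local 0.3 → 1.0 bump recorded earlier in this diff: in thread_local 1.0, `ThreadLocal::get_or` takes a closure that returns the value itself rather than a `Box`, so the wrapper no longer boxes. A minimal sketch of that call-shape change, assuming only the published thread_local 1.0 API (the `CachedGuard`/`imp` wrapper in the hunk is regex-internal and not reproduced here):

```rust
// Minimal sketch, assuming the thread_local 1.0 API pinned by this update.
// It only illustrates the get_or() signature change shown in the hunk above.
use thread_local::ThreadLocal;

fn main() {
    // One lazily initialized Vec per calling thread.
    let tls: ThreadLocal<Vec<u32>> = ThreadLocal::new();

    // thread_local 0.3.x required the closure to return Box<T>:
    //     tls.get_or(|| Box::new(Vec::new()));
    // thread_local 1.0 takes a closure returning T directly, which is why
    // the Box::new() call disappears in the diff above.
    let per_thread = tls.get_or(|| Vec::new());
    assert_eq!(per_thread.len(), 0);
}
```

The manifest change (`thread_local` → `"1"`), the lockfile bump to 1.0.0, and this call-site adjustment land together, so the vendored crate keeps compiling against the newer dependency.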
Some files were not shown because too many files changed in this diff.