Bug 1833151 - [css-nesting] Update cssparser again. r=tlouw,supply-chain-reviewers,zrhoffman

This changes the cssparser setup to:

 * Avoid having to do copies of the ParsingContext all over the place,
   which is useful because I plan to stash more nesting state in there.

 * Use the new RuleBodyParser which allows parsing qualified rules,
   declarations, and so on. Though we still don't use this anywhere.

The next step is to join NestedRuleParser and PropertyDeclarationParser,
so that we can parse declarations in a lot of the nested rules as well.

Differential Revision: https://phabricator.services.mozilla.com/D178053
This commit is contained in:
Emilio Cobos Álvarez 2023-05-16 18:02:52 +00:00
Родитель a9fa81b9b0
Коммит 3abb2e9d89
36 изменённых файлов: 778 добавлений и 792 удалений

Просмотреть файл

@ -105,6 +105,11 @@ git = "https://github.com/mozilla/uniffi-rs.git"
rev = "bc7ff8977bf38d0fdd1a458810b14f434d4dc4de"
replace-with = "vendored-sources"
[source."git+https://github.com/servo/rust-cssparser?rev=6ce91afdf292c4290118843e7421e146f0a4c48b"]
git = "https://github.com/servo/rust-cssparser"
rev = "6ce91afdf292c4290118843e7421e146f0a4c48b"
replace-with = "vendored-sources"
# Take advantage of the fact that cargo will treat lines starting with #
# as comments to add preprocessing directives. This file can thus be copied

6
Cargo.lock сгенерированный
Просмотреть файл

@ -1017,8 +1017,7 @@ dependencies = [
[[package]]
name = "cssparser"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c18bf42b9ab91d1d7ce0192f5b432c9ab6a5728206f25ab6cb0eb61fea25bccc"
source = "git+https://github.com/servo/rust-cssparser?rev=6ce91afdf292c4290118843e7421e146f0a4c48b#6ce91afdf292c4290118843e7421e146f0a4c48b"
dependencies = [
"cssparser-macros",
"dtoa-short",
@ -1033,8 +1032,7 @@ dependencies = [
[[package]]
name = "cssparser-macros"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e"
source = "git+https://github.com/servo/rust-cssparser?rev=6ce91afdf292c4290118843e7421e146f0a4c48b#6ce91afdf292c4290118843e7421e146f0a4c48b"
dependencies = [
"quote",
"syn",

Просмотреть файл

@ -152,6 +152,10 @@ moz_asserts = { path = "mozglue/static/rust/moz_asserts" }
# Workaround for https://github.com/rust-lang/cargo/issues/11232
rure = { path = "third_party/rust/rure" }
# To-be-published changes.
cssparser = { git = "https://github.com/servo/rust-cssparser", rev = "6ce91afdf292c4290118843e7421e146f0a4c48b" }
cssparser-macros = { git = "https://github.com/servo/rust-cssparser", rev = "6ce91afdf292c4290118843e7421e146f0a4c48b" }
# Other overrides
chardetng = { git = "https://github.com/hsivonen/chardetng", rev="3484d3e3ebdc8931493aa5df4d7ee9360a90e76b" }
chardetng_c = { git = "https://github.com/hsivonen/chardetng_c", rev="ed8a4c6f900a90d4dbc1d64b856e61490a1c3570" }

Просмотреть файл

@ -13,7 +13,7 @@ use crate::str::CssStringWriter;
use crate::values::specified::Integer;
use crate::values::CustomIdent;
use crate::Atom;
use cssparser::{AtRuleParser, DeclarationListParser, DeclarationParser, QualifiedRuleParser};
use cssparser::{AtRuleParser, RuleBodyParser, RuleBodyItemParser, DeclarationParser, QualifiedRuleParser};
use cssparser::{CowRcStr, Parser, SourceLocation, Token};
use selectors::parser::SelectorParseErrorKind;
use std::fmt::{self, Write};
@ -86,11 +86,11 @@ pub fn parse_counter_style_body<'i, 't>(
let start = input.current_source_location();
let mut rule = CounterStyleRuleData::empty(name, location);
{
let parser = CounterStyleRuleParser {
context: context,
let mut parser = CounterStyleRuleParser {
context,
rule: &mut rule,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(declaration) = iter.next() {
if let Err((error, slice)) = declaration {
let location = error.location;
@ -159,6 +159,11 @@ impl<'a, 'b, 'i> QualifiedRuleParser<'i> for CounterStyleRuleParser<'a, 'b> {
type Error = StyleParseErrorKind<'i>;
}
// Configure RuleBodyParser for @counter-style bodies: accept descriptor
// declarations only; qualified (selector-style) rules are rejected.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>> for CounterStyleRuleParser<'a, 'b> {
fn parse_qualified(&self) -> bool { false }
fn parse_declarations(&self) -> bool { true }
}
macro_rules! checker {
($self:ident._($value:ident)) => {};
($self:ident. $checker:ident($value:ident)) => {
@ -219,15 +224,17 @@ macro_rules! counter_style_descriptors {
type Declaration = ();
type Error = StyleParseErrorKind<'i>;
fn parse_value<'t>(&mut self, name: CowRcStr<'i>, input: &mut Parser<'i, 't>)
-> Result<(), ParseError<'i>> {
fn parse_value<'t>(
&mut self,
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
) -> Result<(), ParseError<'i>> {
match_ignore_ascii_case! { &*name,
$(
$name => {
// DeclarationParser also calls parse_entirely
// so we'd normally not need to,
// but in this case we do because we set the value as a side effect
// rather than returning it.
// DeclarationParser also calls parse_entirely so we'd normally not
// need to, but in this case we do because we set the value as a side
// effect rather than returning it.
let value = input.parse_entirely(|i| Parse::parse(self.context, i))?;
self.rule.$ident = Some(value)
},

Просмотреть файл

@ -25,8 +25,8 @@ use crate::values::specified::{Angle, NonNegativePercentage};
#[cfg(feature = "gecko")]
use cssparser::UnicodeRange;
use cssparser::{
AtRuleParser, CowRcStr, DeclarationListParser, DeclarationParser, Parser, QualifiedRuleParser,
SourceLocation,
AtRuleParser, CowRcStr, RuleBodyParser, RuleBodyItemParser, DeclarationParser, Parser,
QualifiedRuleParser, SourceLocation,
};
use selectors::parser::SelectorParseErrorKind;
use std::fmt::{self, Write};
@ -465,11 +465,11 @@ pub fn parse_font_face_block(
) -> FontFaceRuleData {
let mut rule = FontFaceRuleData::empty(location);
{
let parser = FontFaceRuleParser {
context: context,
let mut parser = FontFaceRuleParser {
context,
rule: &mut rule,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(declaration) = iter.next() {
if let Err((error, slice)) = declaration {
let location = error.location;
@ -549,6 +549,11 @@ impl<'a, 'b, 'i> QualifiedRuleParser<'i> for FontFaceRuleParser<'a, 'b> {
type Error = StyleParseErrorKind<'i>;
}
// Configure RuleBodyParser for @font-face bodies: descriptor declarations
// only; nested qualified rules are not allowed.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>> for FontFaceRuleParser<'a, 'b> {
fn parse_qualified(&self) -> bool { false }
fn parse_declarations(&self) -> bool { true }
}
impl Parse for Source {
fn parse<'i, 't>(
context: &ParserContext,

Просмотреть файл

@ -9,6 +9,7 @@ use crate::error_reporting::{ContextualParseError, ParseErrorReporter};
use crate::stylesheets::{CssRuleType, Namespaces, Origin, UrlExtraData};
use crate::use_counters::UseCounters;
use cssparser::{Parser, SourceLocation, UnicodeRange};
use std::borrow::Cow;
use style_traits::{OneOrMoreSeparated, ParseError, ParsingMode, Separator};
/// Asserts that all ParsingMode flags have a matching ParsingMode value in gecko.
@ -53,7 +54,7 @@ pub struct ParserContext<'a> {
/// The active error reporter, or none if error reporting is disabled.
error_reporter: Option<&'a dyn ParseErrorReporter>,
/// The currently active namespaces.
pub namespaces: Option<&'a Namespaces>,
pub namespaces: Cow<'a, Namespaces>,
/// The use counters we want to record while parsing style rules, if any.
pub use_counters: Option<&'a UseCounters>,
}
@ -67,6 +68,7 @@ impl<'a> ParserContext<'a> {
rule_type: Option<CssRuleType>,
parsing_mode: ParsingMode,
quirks_mode: QuirksMode,
namespaces: Cow<'a, Namespaces>,
error_reporter: Option<&'a dyn ParseErrorReporter>,
use_counters: Option<&'a UseCounters>,
) -> Self {
@ -77,29 +79,17 @@ impl<'a> ParserContext<'a> {
parsing_mode,
quirks_mode,
error_reporter,
namespaces: None,
namespaces,
use_counters,
}
}
/// Create a parser context based on a previous context, but with a modified
/// rule type.
#[inline]
pub fn new_with_rule_type(
context: &'a ParserContext,
rule_type: CssRuleType,
namespaces: &'a Namespaces,
) -> ParserContext<'a> {
Self {
stylesheet_origin: context.stylesheet_origin,
url_data: context.url_data,
rule_type: Some(rule_type),
parsing_mode: context.parsing_mode,
quirks_mode: context.quirks_mode,
namespaces: Some(namespaces),
error_reporter: context.error_reporter,
use_counters: context.use_counters,
}
/// Temporarily sets the rule_type and executes the callback function, returning its result.
///
/// This replaces the old `new_with_rule_type` constructor: instead of copying
/// the whole context for the nested rule, the context is mutated in place for
/// the duration of `cb` and the previous rule type is restored afterwards.
pub fn nest_for_rule<R>(&mut self, rule_type: CssRuleType, cb: impl FnOnce(&mut Self) -> R) -> R {
// Swap in the nested rule type, remembering whatever was active before.
let old_rule_type = std::mem::replace(&mut self.rule_type, Some(rule_type));
let r = cb(self);
// Restore the outer rule type so the context can keep being used by the caller.
// NOTE(review): the old value is not restored if `cb` panics — acceptable as
// long as the context is not reused across unwinds; confirm with callers.
self.rule_type = old_rule_type;
r
}
/// Whether we're in a @page rule.

Просмотреть файл

@ -26,8 +26,8 @@ use crate::str::{CssString, CssStringWriter};
use crate::stylesheets::{layer_rule::LayerOrder, CssRuleType, Origin, UrlExtraData};
use crate::values::computed::Context;
use cssparser::{
parse_important, AtRuleParser, CowRcStr, DeclarationListParser, DeclarationParser, Delimiter,
ParseErrorKind, Parser, ParserInput, QualifiedRuleParser,
parse_important, AtRuleParser, CowRcStr, DeclarationParser, Delimiter, ParseErrorKind, Parser,
ParserInput, QualifiedRuleParser, RuleBodyItemParser, RuleBodyParser,
};
use itertools::Itertools;
use selectors::SelectorList;
@ -582,8 +582,9 @@ impl PropertyDeclarationBlock {
.all_shorthand
.declarations()
.any(|decl| {
!self.contains(decl.id()) ||
self.declarations
!self.contains(decl.id())
|| self
.declarations
.iter()
.enumerate()
.find(|&(_, ref d)| d.id() == decl.id())
@ -625,9 +626,9 @@ impl PropertyDeclarationBlock {
}
return DeclarationUpdate::UpdateInPlace { pos };
}
if !needs_append &&
id.logical_group() == Some(logical_group) &&
id.is_logical() != longhand_id.is_logical()
if !needs_append
&& id.logical_group() == Some(logical_group)
&& id.is_logical() != longhand_id.is_logical()
{
needs_append = true;
}
@ -1292,6 +1293,7 @@ pub fn parse_style_attribute(
Some(rule_type),
ParsingMode::DEFAULT,
quirks_mode,
/* namespaces = */ Default::default(),
error_reporter,
None,
);
@ -1322,6 +1324,7 @@ pub fn parse_one_declaration_into(
Some(rule_type),
parsing_mode,
quirks_mode,
/* namespaces = */ Default::default(),
error_reporter,
None,
);
@ -1411,6 +1414,12 @@ impl<'a, 'b, 'i> DeclarationParser<'i> for PropertyDeclarationParser<'a, 'b> {
}
}
// Configure RuleBodyParser for property declaration blocks: declarations are
// parsed (with Importance); qualified rules are rejected for now.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, Importance, StyleParseErrorKind<'i>> for PropertyDeclarationParser<'a, 'b> {
fn parse_declarations(&self) -> bool { true }
// TODO(emilio): Nesting. Once CSS nesting lands here this should return true
// so that nested style rules can appear among declarations.
fn parse_qualified(&self) -> bool { false }
}
type SmallParseErrorVec<'i> = SmallVec<[(ParseError<'i>, &'i str, Option<PropertyId>); 2]>;
fn alias_of_known_property(name: &str) -> Option<PropertyId> {
@ -1498,12 +1507,12 @@ pub fn parse_property_declaration_list(
) -> PropertyDeclarationBlock {
let mut declarations = SourcePropertyDeclaration::new();
let mut block = PropertyDeclarationBlock::new();
let parser = PropertyDeclarationParser {
let mut parser = PropertyDeclarationParser {
context,
last_parsed_property_id: None,
declarations: &mut declarations,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut iter = RuleBodyParser::new(input, &mut parser);
let mut errors = SmallParseErrorVec::new();
while let Some(declaration) = iter.next() {
match declaration {

Просмотреть файл

@ -1758,6 +1758,7 @@ impl UnparsedValue {
None,
ParsingMode::DEFAULT,
quirks_mode,
/* namespaces = */ Default::default(),
None,
None,
);

Просмотреть файл

@ -19,8 +19,8 @@ use crate::values::computed::font::FamilyName;
use crate::values::serialize_atom_identifier;
use crate::Atom;
use cssparser::{
AtRuleParser, BasicParseErrorKind, CowRcStr, DeclarationListParser, DeclarationParser, Parser,
ParserState, QualifiedRuleParser, RuleListParser, SourceLocation, Token,
AtRuleParser, BasicParseErrorKind, CowRcStr, RuleBodyParser, RuleBodyItemParser, Parser,
ParserState, QualifiedRuleParser, DeclarationParser, SourceLocation, Token,
};
use std::fmt::{self, Write};
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
@ -215,13 +215,21 @@ where
let value = input.parse_entirely(|i| T::parse(self.context, i))?;
let new = FFVDeclaration {
name: Atom::from(&*name),
value: value,
value,
};
update_or_push(&mut self.declarations, new);
Ok(())
}
}
// Configure RuleBodyParser for the inner blocks of @font-feature-values
// (e.g. @stylistic { ... }): declarations only, no qualified rules.
impl<'a, 'b, 'i, T> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>> for FFVDeclarationsParser<'a, 'b, T>
where
T: Parse,
{
fn parse_declarations(&self) -> bool { true }
fn parse_qualified(&self) -> bool { false }
}
macro_rules! font_feature_values_blocks {
(
blocks = [
@ -265,18 +273,16 @@ macro_rules! font_feature_values_blocks {
location: SourceLocation,
) -> Self {
let mut rule = FontFeatureValuesRule::new(family_names, location);
{
let mut iter = RuleListParser::new_for_nested_rule(input, FontFeatureValuesRuleParser {
context: context,
rule: &mut rule,
});
while let Some(result) = iter.next() {
if let Err((error, slice)) = result {
let location = error.location;
let error = ContextualParseError::UnsupportedRule(slice, error);
context.log_css_error(location, error);
}
let mut parser = FontFeatureValuesRuleParser {
context,
rule: &mut rule,
};
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(result) = iter.next() {
if let Err((error, slice)) = result {
let location = error.location;
let error = ContextualParseError::UnsupportedRule(slice, error);
context.log_css_error(location, error);
}
}
rule
@ -348,9 +354,8 @@ macro_rules! font_feature_values_blocks {
/// Updates with new value if same `ident` exists, otherwise pushes to the vector.
fn update_or_push<T>(vec: &mut Vec<FFVDeclaration<T>>, element: FFVDeclaration<T>) {
let position = vec.iter().position(|ref val| val.name == element.name);
if let Some(index) = position {
vec[index].value = element.value;
if let Some(item) = vec.iter_mut().find(|item| item.name == element.name) {
item.value = element.value;
} else {
vec.push(element);
}
@ -409,12 +414,12 @@ macro_rules! font_feature_values_blocks {
match prelude {
$(
BlockType::$ident_camel => {
let parser = FFVDeclarationsParser {
let mut parser = FFVDeclarationsParser {
context: &self.context,
declarations: &mut self.rule.$ident,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(declaration) = iter.next() {
if let Err((error, slice)) = declaration {
let location = error.location;
@ -431,6 +436,16 @@ macro_rules! font_feature_values_blocks {
Ok(())
}
}
// Marker impl: the @font-feature-values rule body itself produces no
// declarations (see parse_declarations below), but RuleBodyParser requires
// the trait to be implemented.
impl<'a, 'i> DeclarationParser<'i> for FontFeatureValuesRuleParser<'a> {
type Declaration = ();
type Error = StyleParseErrorKind<'i>;
}
// Configure RuleBodyParser for the top level of @font-feature-values: only
// nested qualified blocks (@stylistic, @swash, ...) are parsed here, not
// bare declarations — the inverse of the inner FFVDeclarationsParser.
impl<'a, 'i> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>> for FontFeatureValuesRuleParser<'a> {
fn parse_declarations(&self) -> bool { false }
fn parse_qualified(&self) -> bool { true }
}
}
}

Просмотреть файл

@ -7,26 +7,27 @@
//! [font-palette-values]: https://drafts.csswg.org/css-fonts/#font-palette-values
use crate::error_reporting::ContextualParseError;
use crate::parser::{Parse, ParserContext};
use crate::gecko_bindings::bindings::Gecko_AppendPaletteValueHashEntry;
use crate::gecko_bindings::bindings::{Gecko_SetFontPaletteBase, Gecko_SetFontPaletteOverride};
use crate::gecko_bindings::structs::gfx::FontPaletteValueSet;
use crate::gecko_bindings::structs::gfx::FontPaletteValueSet_PaletteValues_kLight;
use crate::gecko_bindings::structs::gfx::FontPaletteValueSet_PaletteValues_kDark;
use crate::gecko_bindings::structs::gfx::FontPaletteValueSet_PaletteValues_kLight;
use crate::parser::{Parse, ParserContext};
use crate::shared_lock::{SharedRwLockReadGuard, ToCssWithGuard};
use crate::str::CssStringWriter;
use crate::stylesheets::font_feature_values_rule::parse_family_name_list;
use crate::values::computed::font::FamilyName;
use crate::values::specified::Color as SpecifiedColor;
use crate::values::specified::NonNegativeInteger;
use crate::values::DashedIdent;
use cssparser::{AtRuleParser, CowRcStr};
use cssparser::{DeclarationParser, DeclarationListParser, Parser};
use cssparser::{QualifiedRuleParser, SourceLocation};
use std::fmt::{self, Write};
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
use style_traits::{Comma, OneOrMoreSeparated};
use cssparser::{
AtRuleParser, CowRcStr, DeclarationParser, Parser, QualifiedRuleParser, RuleBodyItemParser,
RuleBodyParser, SourceLocation,
};
use selectors::parser::SelectorParseErrorKind;
use crate::stylesheets::font_feature_values_rule::parse_family_name_list;
use std::fmt::{self, Write};
use style_traits::{Comma, OneOrMoreSeparated};
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
#[allow(missing_docs)]
#[derive(Clone, Debug, MallocSizeOf, PartialEq, ToShmem)]
@ -118,18 +119,17 @@ impl FontPaletteValuesRule {
location: SourceLocation,
) -> Self {
let mut rule = FontPaletteValuesRule::new(name, location);
{
let parser = FontPaletteValuesDeclarationParser {
context: context,
rule: &mut rule,
};
let mut iter = DeclarationListParser::new(input, parser);
while let Some(declaration) = iter.next() {
if let Err((error, slice)) = declaration {
let location = error.location;
let error = ContextualParseError::UnsupportedFontPaletteValuesDescriptor(slice, error);
context.log_css_error(location, error);
}
let mut parser = FontPaletteValuesDeclarationParser {
context,
rule: &mut rule,
};
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(declaration) = iter.next() {
if let Err((error, slice)) = declaration {
let location = error.location;
let error =
ContextualParseError::UnsupportedFontPaletteValuesDescriptor(slice, error);
context.log_css_error(location, error);
}
}
rule
@ -163,19 +163,18 @@ impl FontPaletteValuesRule {
for ref family in self.family_names.iter() {
let family = family.name.to_ascii_lowercase();
let palette_values = unsafe {
Gecko_AppendPaletteValueHashEntry(
dest,
family.as_ptr(),
self.name.0.as_ptr()
)
Gecko_AppendPaletteValueHashEntry(dest, family.as_ptr(), self.name.0.as_ptr())
};
if let Some(base_palette) = &self.base_palette {
unsafe {
Gecko_SetFontPaletteBase(palette_values, match &base_palette {
FontPaletteBase::Light => FontPaletteValueSet_PaletteValues_kLight,
FontPaletteBase::Dark => FontPaletteValueSet_PaletteValues_kDark,
FontPaletteBase::Index(i) => i.0.value() as i32,
});
Gecko_SetFontPaletteBase(
palette_values,
match &base_palette {
FontPaletteBase::Light => FontPaletteValueSet_PaletteValues_kLight,
FontPaletteBase::Dark => FontPaletteValueSet_PaletteValues_kDark,
FontPaletteBase::Index(i) => i.0.value() as i32,
},
);
}
}
for c in &self.override_colors {
@ -229,13 +228,13 @@ fn parse_override_colors<'i, 't>(
}
impl<'a, 'b, 'i> DeclarationParser<'i> for FontPaletteValuesDeclarationParser<'a> {
type Declaration = ();
type Error = StyleParseErrorKind<'i>;
type Declaration = ();
type Error = StyleParseErrorKind<'i>;
fn parse_value<'t>(
&mut self,
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
fn parse_value<'t>(
&mut self,
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
) -> Result<(), ParseError<'i>> {
match_ignore_ascii_case! { &*name,
"font-family" => {
@ -252,3 +251,14 @@ impl<'a, 'b, 'i> DeclarationParser<'i> for FontPaletteValuesDeclarationParser<'a
Ok(())
}
}
// Configure RuleBodyParser for @font-palette-values bodies: descriptor
// declarations only; qualified rules are rejected.
impl<'a, 'i> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>>
for FontPaletteValuesDeclarationParser<'a>
{
fn parse_declarations(&self) -> bool {
true
}
fn parse_qualified(&self) -> bool {
false
}
}

Просмотреть файл

@ -13,7 +13,7 @@ use crate::shared_lock::{
};
use crate::str::CssStringWriter;
use crate::stylesheets::{
layer_rule::LayerName, stylesheet::Namespaces, supports_rule::SupportsCondition, CssRule,
layer_rule::LayerName, supports_rule::SupportsCondition, CssRule,
CssRuleType, StylesheetInDocument,
};
use crate::values::CssUrl;
@ -212,8 +212,7 @@ impl ImportRule {
/// whole import rule or parse the media query list or what not.
pub fn parse_layer_and_supports<'i, 't>(
input: &mut Parser<'i, 't>,
context: &ParserContext,
namespaces: &Namespaces,
context: &mut ParserContext,
) -> (ImportLayer, Option<ImportSupportsCondition>) {
let layer = if input
.try_parse(|input| input.expect_ident_matching("layer"))
@ -237,9 +236,9 @@ impl ImportRule {
input
.try_parse(SupportsCondition::parse_for_import)
.map(|condition| {
let eval_context =
ParserContext::new_with_rule_type(context, CssRuleType::Style, namespaces);
let enabled = condition.eval(&eval_context, namespaces);
let enabled = context.nest_for_rule(CssRuleType::Style, |context| {
condition.eval(context)
});
ImportSupportsCondition { condition, enabled }
})
.ok()

Просмотреть файл

@ -19,11 +19,12 @@ use crate::stylesheets::rule_parser::VendorPrefix;
use crate::stylesheets::{CssRuleType, StylesheetContents};
use crate::values::{serialize_percentage, KeyframesName};
use cssparser::{
parse_one_rule, DeclarationListParser, DeclarationParser, ParserState, SourceLocation, Token,
parse_one_rule, AtRuleParser, CowRcStr, DeclarationParser, Parser, ParserInput, ParserState,
QualifiedRuleParser, RuleBodyItemParser, RuleBodyParser, SourceLocation, Token,
};
use cssparser::{AtRuleParser, CowRcStr, Parser, ParserInput, QualifiedRuleParser, RuleListParser};
use servo_arc::Arc;
use std::fmt::{self, Write};
use std::borrow::Cow;
use style_traits::{CssWriter, ParseError, ParsingMode, StyleParseErrorKind, ToCss};
/// A [`@keyframes`][keyframes] rule.
@ -217,16 +218,16 @@ impl Keyframe {
Some(CssRuleType::Keyframe),
ParsingMode::DEFAULT,
parent_stylesheet_contents.quirks_mode,
Cow::Borrowed(&*namespaces),
None,
None,
);
context.namespaces = Some(&*namespaces);
let mut input = ParserInput::new(css);
let mut input = Parser::new(&mut input);
let mut declarations = SourcePropertyDeclaration::new();
let mut rule_parser = KeyframeListParser {
context: &context,
context: &mut context,
shared_lock: &lock,
declarations: &mut declarations,
};
@ -526,43 +527,39 @@ impl KeyframesAnimation {
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
struct KeyframeListParser<'a, 'b> {
context: &'a mut ParserContext<'b>,
shared_lock: &'a SharedRwLock,
declarations: &'a mut SourcePropertyDeclaration,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(
context: &ParserContext,
pub fn parse_keyframe_list<'a>(
context: &mut ParserContext<'a>,
input: &mut Parser,
shared_lock: &SharedRwLock,
) -> Vec<Arc<Locked<Keyframe>>> {
debug_assert!(
context.namespaces.is_some(),
"Parsing a keyframe list from a context without namespaces?"
);
let mut declarations = SourcePropertyDeclaration::new();
RuleListParser::new_for_nested_rule(
input,
KeyframeListParser {
context,
shared_lock,
declarations: &mut declarations,
},
)
.filter_map(Result::ok)
.collect()
let mut parser = KeyframeListParser {
context,
shared_lock,
declarations: &mut declarations,
};
RuleBodyParser::new(input, &mut parser).filter_map(Result::ok) .collect()
}
impl<'a, 'i> AtRuleParser<'i> for KeyframeListParser<'a> {
impl<'a, 'b, 'i> AtRuleParser<'i> for KeyframeListParser<'a, 'b> {
type Prelude = ();
type AtRule = Arc<Locked<Keyframe>>;
type Error = StyleParseErrorKind<'i>;
}
impl<'a, 'i> QualifiedRuleParser<'i> for KeyframeListParser<'a> {
impl<'a, 'b, 'i> DeclarationParser<'i> for KeyframeListParser<'a, 'b> {
type Declaration = Arc<Locked<Keyframe>>;
type Error = StyleParseErrorKind<'i>;
}
impl<'a, 'b, 'i> QualifiedRuleParser<'i> for KeyframeListParser<'a, 'b> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<Locked<Keyframe>>;
type Error = StyleParseErrorKind<'i>;
@ -589,33 +586,30 @@ impl<'a, 'i> QualifiedRuleParser<'i> for KeyframeListParser<'a> {
start: &ParserState,
input: &mut Parser<'i, 't>,
) -> Result<Self::QualifiedRule, ParseError<'i>> {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::Keyframe,
self.context.namespaces.unwrap(),
);
let parser = KeyframeDeclarationParser {
context: &context,
declarations: self.declarations,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(()) => {
block.extend(iter.parser.declarations.drain(), Importance::Normal);
},
Err((error, slice)) => {
iter.parser.declarations.clear();
let location = error.location;
let error =
ContextualParseError::UnsupportedKeyframePropertyDeclaration(slice, error);
context.log_css_error(location, error);
},
let declarations = &mut self.declarations;
self.context.nest_for_rule(CssRuleType::Keyframe, |context| {
let mut parser = KeyframeDeclarationParser {
context: &context,
declarations,
};
let mut iter = RuleBodyParser::new(input, &mut parser);
while let Some(declaration) = iter.next() {
match declaration {
Ok(()) => {
block.extend(iter.parser.declarations.drain(), Importance::Normal);
},
Err((error, slice)) => {
iter.parser.declarations.clear();
let location = error.location;
let error =
ContextualParseError::UnsupportedKeyframePropertyDeclaration(slice, error);
context.log_css_error(location, error);
},
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
});
Ok(Arc::new(self.shared_lock.wrap(Keyframe {
selector,
block: Arc::new(self.shared_lock.wrap(block)),
@ -624,6 +618,11 @@ impl<'a, 'i> QualifiedRuleParser<'i> for KeyframeListParser<'a> {
}
}
// Configure RuleBodyParser for a @keyframes body: each item is a qualified
// rule (a keyframe selector plus its block); bare declarations are rejected.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, Arc<Locked<Keyframe>>, StyleParseErrorKind<'i>> for KeyframeListParser<'a, 'b> {
fn parse_qualified(&self) -> bool { true }
fn parse_declarations(&self) -> bool { false }
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
declarations: &'a mut SourcePropertyDeclaration,
@ -668,3 +667,8 @@ impl<'a, 'b, 'i> DeclarationParser<'i> for KeyframeDeclarationParser<'a, 'b> {
Ok(())
}
}
// Configure RuleBodyParser for the body of an individual keyframe: property
// declarations only; nested qualified rules are not allowed.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, (), StyleParseErrorKind<'i>> for KeyframeDeclarationParser<'a, 'b> {
fn parse_qualified(&self) -> bool { false }
fn parse_declarations(&self) -> bool { true }
}

Просмотреть файл

@ -39,6 +39,7 @@ use cssparser::{parse_one_rule, Parser, ParserInput};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use servo_arc::Arc;
use std::borrow::Cow;
use std::fmt;
#[cfg(feature = "gecko")]
use std::mem::{self, ManuallyDrop};
@ -401,12 +402,14 @@ impl CssRule {
allow_import_rules: AllowImportRules,
) -> Result<Self, RulesMutateError> {
let url_data = parent_stylesheet_contents.url_data.read();
let namespaces = parent_stylesheet_contents.namespaces.read();
let context = ParserContext::new(
parent_stylesheet_contents.origin,
&url_data,
None,
ParsingMode::DEFAULT,
parent_stylesheet_contents.quirks_mode,
Cow::Borrowed(&*namespaces),
None,
None,
);
@ -414,8 +417,6 @@ impl CssRule {
let mut input = ParserInput::new(css);
let mut input = Parser::new(&mut input);
let mut guard = parent_stylesheet_contents.namespaces.write();
// nested rules are in the body state
let mut rule_parser = TopLevelRuleParser {
context,
@ -423,7 +424,6 @@ impl CssRule {
loader,
state,
dom_error: None,
namespaces: &mut *guard,
insert_rule_context: Some(insert_rule_context),
allow_import_rules,
};

Просмотреть файл

@ -19,7 +19,6 @@ use crate::stylesheets::font_feature_values_rule::parse_family_name_list;
use crate::stylesheets::import_rule::{ImportRule, ImportLayer, ImportSupportsCondition};
use crate::stylesheets::keyframes_rule::parse_keyframe_list;
use crate::stylesheets::layer_rule::{LayerBlockRule, LayerName, LayerStatementRule};
use crate::stylesheets::stylesheet::Namespaces;
use crate::stylesheets::supports_rule::SupportsCondition;
use crate::stylesheets::{
viewport_rule, AllowImportRules, CorsMode, CssRule, CssRuleType, CssRules, DocumentRule,
@ -31,8 +30,8 @@ use crate::values::computed::font::FamilyName;
use crate::values::{CssUrl, CustomIdent, DashedIdent, KeyframesName};
use crate::{Namespace, Prefix};
use cssparser::{
AtRuleParser, BasicParseError, BasicParseErrorKind, CowRcStr, Parser, ParserState,
QualifiedRuleParser, RuleListParser, SourcePosition,
AtRuleParser, BasicParseError, BasicParseErrorKind, CowRcStr, DeclarationParser, Parser,
ParserState, QualifiedRuleParser, RuleBodyParser, RuleBodyItemParser, SourcePosition,
};
use selectors::SelectorList;
use servo_arc::Arc;
@ -83,9 +82,6 @@ pub struct TopLevelRuleParser<'a> {
/// A reference to a stylesheet loader if applicable, for `@import` rules.
pub loader: Option<&'a dyn StylesheetLoader>,
/// The top-level parser context.
///
/// This won't contain any namespaces, and only nested parsers created with
/// `ParserContext::new_with_rule_type` will.
pub context: ParserContext<'a>,
/// The current state of the parser.
pub state: State,
@ -93,22 +89,17 @@ pub struct TopLevelRuleParser<'a> {
/// place (e.g. an @import rule was found while in the `Body` state). Reset
/// to `false` when `take_had_hierarchy_error` is called.
pub dom_error: Option<RulesMutateError>,
/// The namespace map we use for parsing. Needs to start as `Some()`, and
/// will be taken out after parsing namespace rules, and that reference will
/// be moved to `ParserContext`.
pub namespaces: &'a mut Namespaces,
/// The info we need insert a rule in a list.
pub insert_rule_context: Option<InsertRuleContext<'a>>,
/// Whether @import rules will be allowed.
pub allow_import_rules: AllowImportRules,
}
impl<'b> TopLevelRuleParser<'b> {
fn nested<'a: 'b>(&'a self) -> NestedRuleParser<'a, 'b> {
impl<'a> TopLevelRuleParser<'a> {
fn nested<'b>(&'b mut self) -> NestedRuleParser<'b, 'a> {
NestedRuleParser {
shared_lock: self.shared_lock,
context: &self.context,
namespaces: &self.namespaces,
context: &mut self.context,
}
}
@ -241,7 +232,7 @@ impl<'a, 'i> AtRuleParser<'i> for TopLevelRuleParser<'a> {
let url_string = input.expect_url_or_string()?.as_ref().to_owned();
let url = CssUrl::parse_from_string(url_string, &self.context, CorsMode::None);
let (layer, supports) = ImportRule::parse_layer_and_supports(input, &self.context, self.namespaces);
let (layer, supports) = ImportRule::parse_layer_and_supports(input, &mut self.context);
let media = MediaList::parse(&self.context, input);
let media = Arc::new(self.shared_lock.wrap(media));
@ -334,11 +325,12 @@ impl<'a, 'i> AtRuleParser<'i> for TopLevelRuleParser<'a> {
CssRule::Import(import_rule)
},
AtRulePrelude::Namespace(prefix, url) => {
let namespaces = self.context.namespaces.to_mut();
let prefix = if let Some(prefix) = prefix {
self.namespaces.prefixes.insert(prefix.clone(), url.clone());
namespaces.prefixes.insert(prefix.clone(), url.clone());
Some(prefix)
} else {
self.namespaces.default = Some(url.clone());
namespaces.default = Some(url.clone());
None
};
@ -398,40 +390,40 @@ impl<'a, 'i> QualifiedRuleParser<'i> for TopLevelRuleParser<'a> {
}
}
#[derive(Clone)] // shallow, relatively cheap .clone
struct NestedRuleParser<'a, 'b: 'a> {
shared_lock: &'a SharedRwLock,
context: &'a ParserContext<'b>,
namespaces: &'a Namespaces,
context: &'a mut ParserContext<'b>,
}
impl<'a, 'b> NestedRuleParser<'a, 'b> {
// Temporarily switches the parsing context's rule type while running `cb`,
// restoring the previous value afterwards. Mirrors
// ParserContext::nest_for_rule but operates through the borrowed context.
fn nest_for_rule<R>(&mut self, rule_type: CssRuleType, cb: impl FnOnce(&mut Self) -> R) -> R {
// take() + assignment is equivalent to a mem::replace of rule_type.
let old_rule_type = self.context.rule_type.take();
self.context.rule_type = Some(rule_type);
let r = cb(self);
// Restore the outer rule type (None if we were at the top level).
// NOTE(review): not restored if `cb` panics — confirm that is acceptable.
self.context.rule_type = old_rule_type;
r
}
fn parse_nested_rules(
&mut self,
input: &mut Parser,
rule_type: CssRuleType,
) -> Arc<Locked<CssRules>> {
let context = ParserContext::new_with_rule_type(self.context, rule_type, self.namespaces);
let nested_parser = NestedRuleParser {
shared_lock: self.shared_lock,
context: &context,
namespaces: self.namespaces,
};
let mut iter = RuleListParser::new_for_nested_rule(input, nested_parser);
let mut rules = Vec::new();
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
self.context.log_css_error(location, error);
},
self.nest_for_rule(rule_type, |parser| {
let mut iter = RuleBodyParser::new(input, parser);
let mut rules = Vec::new();
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(location, error);
},
}
}
}
CssRules::new(rules, self.shared_lock)
CssRules::new(rules, iter.parser.shared_lock)
})
}
}
@ -524,61 +516,45 @@ impl<'a, 'b, 'i> AtRuleParser<'i> for NestedRuleParser<'a, 'b> {
) -> Result<CssRule, ParseError<'i>> {
match prelude {
AtRulePrelude::FontFace => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::FontFace,
self.namespaces,
);
Ok(CssRule::FontFace(Arc::new(self.shared_lock.wrap(
parse_font_face_block(&context, input, start.source_location()).into(),
))))
self.nest_for_rule(CssRuleType::FontFace, |p| {
Ok(CssRule::FontFace(Arc::new(p.shared_lock.wrap(
parse_font_face_block(&p.context, input, start.source_location()).into(),
))))
})
},
AtRulePrelude::FontFeatureValues(family_names) => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::FontFeatureValues,
self.namespaces,
);
Ok(CssRule::FontFeatureValues(Arc::new(self.shared_lock.wrap(
FontFeatureValuesRule::parse(
&context,
input,
family_names,
start.source_location(),
),
))))
self.nest_for_rule(CssRuleType::FontFeatureValues, |p| {
Ok(CssRule::FontFeatureValues(Arc::new(p.shared_lock.wrap(
FontFeatureValuesRule::parse(
&p.context,
input,
family_names,
start.source_location(),
),
))))
})
},
AtRulePrelude::FontPaletteValues(name) => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::FontPaletteValues,
self.namespaces,
);
Ok(CssRule::FontPaletteValues(Arc::new(self.shared_lock.wrap(
FontPaletteValuesRule::parse(
&context,
input,
name,
start.source_location(),
),
))))
self.nest_for_rule(CssRuleType::FontPaletteValues, |p| {
Ok(CssRule::FontPaletteValues(Arc::new(p.shared_lock.wrap(
FontPaletteValuesRule::parse(
&p.context,
input,
name,
start.source_location(),
),
))))
})
},
AtRulePrelude::CounterStyle(name) => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::CounterStyle,
self.namespaces,
);
Ok(CssRule::CounterStyle(Arc::new(
self.shared_lock.wrap(
parse_counter_style_body(name, &context, input, start.source_location())?
.into(),
),
)))
self.nest_for_rule(CssRuleType::CounterStyle, |p| {
Ok(CssRule::CounterStyle(Arc::new(
p.shared_lock.wrap(
parse_counter_style_body(name, &p.context, input, start.source_location())?
.into(),
),
)))
})
},
AtRulePrelude::Media(media_queries) => {
Ok(CssRule::Media(Arc::new(self.shared_lock.wrap(MediaRule {
@ -588,13 +564,9 @@ impl<'a, 'b, 'i> AtRuleParser<'i> for NestedRuleParser<'a, 'b> {
}))))
},
AtRulePrelude::Supports(condition) => {
let eval_context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::Style,
self.namespaces,
);
let enabled = condition.eval(&eval_context, self.namespaces);
let enabled = self.nest_for_rule(CssRuleType::Style, |p| {
condition.eval(&p.context)
});
Ok(CssRule::Supports(Arc::new(self.shared_lock.wrap(
SupportsRule {
condition,
@ -605,40 +577,28 @@ impl<'a, 'b, 'i> AtRuleParser<'i> for NestedRuleParser<'a, 'b> {
))))
},
AtRulePrelude::Viewport => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::Viewport,
self.namespaces,
);
Ok(CssRule::Viewport(Arc::new(
self.shared_lock.wrap(ViewportRule::parse(&context, input)?),
)))
self.nest_for_rule(CssRuleType::Viewport, |p| {
Ok(CssRule::Viewport(Arc::new(
p.shared_lock.wrap(ViewportRule::parse(&p.context, input)?),
)))
})
},
AtRulePrelude::Keyframes(name, vendor_prefix) => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::Keyframes,
self.namespaces,
);
Ok(CssRule::Keyframes(Arc::new(self.shared_lock.wrap(
KeyframesRule {
name,
keyframes: parse_keyframe_list(&context, input, self.shared_lock),
vendor_prefix,
source_location: start.source_location(),
},
))))
self.nest_for_rule(CssRuleType::Keyframe, |p| {
Ok(CssRule::Keyframes(Arc::new(p.shared_lock.wrap(
KeyframesRule {
name,
keyframes: parse_keyframe_list(&mut p.context, input, p.shared_lock),
vendor_prefix,
source_location: start.source_location(),
},
))))
})
},
AtRulePrelude::Page(selectors) => {
let context = ParserContext::new_with_rule_type(
self.context,
CssRuleType::Page,
self.namespaces,
);
let declarations = parse_property_declaration_list(&context, input, None);
let declarations = self.nest_for_rule(CssRuleType::Page, |p| {
parse_property_declaration_list(&p.context, input, None)
});
Ok(CssRule::Page(Arc::new(self.shared_lock.wrap(PageRule {
selectors,
block: Arc::new(self.shared_lock.wrap(declarations)),
@ -751,7 +711,7 @@ impl<'a, 'b, 'i> QualifiedRuleParser<'i> for NestedRuleParser<'a, 'b> {
) -> Result<Self::Prelude, ParseError<'i>> {
let selector_parser = SelectorParser {
stylesheet_origin: self.context.stylesheet_origin,
namespaces: self.namespaces,
namespaces: &self.context.namespaces,
url_data: self.context.url_data,
for_supports_rule: false,
};
@ -768,10 +728,9 @@ impl<'a, 'b, 'i> QualifiedRuleParser<'i> for NestedRuleParser<'a, 'b> {
start: &ParserState,
input: &mut Parser<'i, 't>,
) -> Result<CssRule, ParseError<'i>> {
let context =
ParserContext::new_with_rule_type(self.context, CssRuleType::Style, self.namespaces);
let declarations = parse_property_declaration_list(&context, input, Some(&selectors));
let declarations = self.nest_for_rule(CssRuleType::Style, |p| {
parse_property_declaration_list(&p.context, input, Some(&selectors))
});
let block = Arc::new(self.shared_lock.wrap(declarations));
Ok(CssRule::Style(Arc::new(self.shared_lock.wrap(StyleRule {
selectors,
@ -781,3 +740,14 @@ impl<'a, 'b, 'i> QualifiedRuleParser<'i> for NestedRuleParser<'a, 'b> {
}))))
}
}
/// Declaration-parsing hooks for `NestedRuleParser`, required so it can be
/// used with `RuleBodyParser`. NOTE(review): `parse_declarations()` on this
/// parser currently returns false, so these associated types are not yet
/// exercised; they exist in preparation for CSS nesting support.
impl<'a, 'b, 'i> DeclarationParser<'i> for NestedRuleParser<'a, 'b> {
    // A parsed declaration would surface as a CssRule in the rule body.
    type Declaration = CssRule;
    type Error = StyleParseErrorKind<'i>;
}
/// Configures `RuleBodyParser` for nested rule bodies: qualified (selector-
/// prefixed) rules are parsed, while bare declarations are not accepted yet.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, CssRule, StyleParseErrorKind<'i>> for NestedRuleParser<'a, 'b> {
    // Nested rule lists may contain qualified (style) rules.
    fn parse_qualified(&self) -> bool { true }
    // TODO: Nesting.
    fn parse_declarations(&self) -> bool { false }
}

Просмотреть файл

@ -15,13 +15,12 @@ use crate::stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIter
use crate::stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use crate::use_counters::UseCounters;
use crate::{Namespace, Prefix};
use cssparser::{Parser, ParserInput, RuleListParser};
use cssparser::{Parser, ParserInput, StyleSheetParser};
use fxhash::FxHashMap;
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use parking_lot::RwLock;
use servo_arc::Arc;
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
@ -86,12 +85,10 @@ impl StylesheetContents {
allow_import_rules: AllowImportRules,
sanitization_data: Option<&mut SanitizationData>,
) -> Arc<Self> {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
let (namespaces, rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
@ -106,7 +103,7 @@ impl StylesheetContents {
rules: CssRules::new(rules, &shared_lock),
origin,
url_data: RwLock::new(url_data),
namespaces,
namespaces: RwLock::new(namespaces),
quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
@ -415,14 +412,11 @@ impl Stylesheet {
line_number_offset: u32,
allow_import_rules: AllowImportRules,
) {
let namespaces = RwLock::new(Namespaces::default());
// FIXME: Consider adding use counters to Servo?
let (rules, source_map_url, source_url) = Self::parse_rules(
let (namespaces, rules, source_map_url, source_url) = Self::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
@ -434,10 +428,7 @@ impl Stylesheet {
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
*existing.contents.namespaces.write() = namespaces;
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
@ -450,7 +441,6 @@ impl Stylesheet {
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&dyn StylesheetLoader>,
error_reporter: Option<&dyn ParseErrorReporter>,
@ -459,7 +449,7 @@ impl Stylesheet {
use_counters: Option<&UseCounters>,
allow_import_rules: AllowImportRules,
mut sanitization_data: Option<&mut SanitizationData>,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
) -> (Namespaces, Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
@ -470,23 +460,23 @@ impl Stylesheet {
None,
ParsingMode::DEFAULT,
quirks_mode,
/* namespaces = */ Default::default(),
error_reporter,
use_counters,
);
let rule_parser = TopLevelRuleParser {
let mut rule_parser = TopLevelRuleParser {
shared_lock,
loader: stylesheet_loader,
context,
state: State::Start,
dom_error: None,
insert_rule_context: None,
namespaces,
allow_import_rules,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
let mut iter = StyleSheetParser::new(&mut input, &mut rule_parser);
loop {
let result = match iter.next() {
@ -521,7 +511,7 @@ impl Stylesheet {
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
(rule_parser.context.namespaces.into_owned(), rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin

Просмотреть файл

@ -11,7 +11,7 @@ use crate::selector_parser::{SelectorImpl, SelectorParser};
use crate::shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked};
use crate::shared_lock::{SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use crate::str::CssStringWriter;
use crate::stylesheets::{CssRuleType, CssRules, Namespaces};
use crate::stylesheets::{CssRuleType, CssRules};
use cssparser::parse_important;
use cssparser::{Delimiter, Parser, SourceLocation, Token};
use cssparser::{ParseError as CssParseError, ParserInput};
@ -226,15 +226,15 @@ impl SupportsCondition {
}
/// Evaluate a supports condition
pub fn eval(&self, cx: &ParserContext, namespaces: &Namespaces) -> bool {
pub fn eval(&self, cx: &ParserContext) -> bool {
match *self {
SupportsCondition::Not(ref cond) => !cond.eval(cx, namespaces),
SupportsCondition::Parenthesized(ref cond) => cond.eval(cx, namespaces),
SupportsCondition::And(ref vec) => vec.iter().all(|c| c.eval(cx, namespaces)),
SupportsCondition::Or(ref vec) => vec.iter().any(|c| c.eval(cx, namespaces)),
SupportsCondition::Not(ref cond) => !cond.eval(cx),
SupportsCondition::Parenthesized(ref cond) => cond.eval(cx),
SupportsCondition::And(ref vec) => vec.iter().all(|c| c.eval(cx)),
SupportsCondition::Or(ref vec) => vec.iter().any(|c| c.eval(cx)),
SupportsCondition::Declaration(ref decl) => decl.eval(cx),
SupportsCondition::MozBoolPref(ref name) => eval_moz_bool_pref(name, cx),
SupportsCondition::Selector(ref selector) => selector.eval(cx, namespaces),
SupportsCondition::Selector(ref selector) => selector.eval(cx),
SupportsCondition::FontFormat(ref format) => eval_font_format(format),
SupportsCondition::FontTech(ref tech) => eval_font_tech(tech),
SupportsCondition::FutureSyntax(_) => false,
@ -360,13 +360,13 @@ impl ToCss for RawSelector {
impl RawSelector {
/// Tries to evaluate a `selector()` function.
pub fn eval(&self, context: &ParserContext, namespaces: &Namespaces) -> bool {
pub fn eval(&self, context: &ParserContext) -> bool {
let mut input = ParserInput::new(&self.0);
let mut input = Parser::new(&mut input);
input
.parse_entirely(|input| -> Result<(), CssParseError<()>> {
let parser = SelectorParser {
namespaces,
namespaces: &context.namespaces,
stylesheet_origin: context.stylesheet_origin,
url_data: context.url_data,
for_supports_rule: true,

Просмотреть файл

@ -25,8 +25,8 @@ use crate::values::specified::{self, NoCalcLength};
use crate::values::specified::{NonNegativeLengthPercentageOrAuto, ViewportPercentageLength};
use app_units::Au;
use cssparser::{
parse_important, AtRuleParser, CowRcStr, DeclarationListParser, DeclarationParser, Parser,
QualifiedRuleParser,
parse_important, AtRuleParser, CowRcStr, DeclarationParser, Parser, QualifiedRuleParser,
RuleBodyItemParser, RuleBodyParser,
};
use euclid::Size2D;
use selectors::parser::SelectorParseErrorKind;
@ -241,15 +241,17 @@ fn parse_shorthand<'i, 't>(
}
}
type ViewportDeclarations = Vec<ViewportDescriptorDeclaration>;
impl<'a, 'b, 'i> AtRuleParser<'i> for ViewportRuleParser<'a, 'b> {
type Prelude = ();
type AtRule = Vec<ViewportDescriptorDeclaration>;
type AtRule = ViewportDeclarations;
type Error = StyleParseErrorKind<'i>;
}
impl<'a, 'b, 'i> QualifiedRuleParser<'i> for ViewportRuleParser<'a, 'b> {
type Prelude = ();
type QualifiedRule = Vec<ViewportDescriptorDeclaration>;
type QualifiedRule = ViewportDeclarations;
type Error = StyleParseErrorKind<'i>;
}
@ -316,6 +318,11 @@ impl<'a, 'b, 'i> DeclarationParser<'i> for ViewportRuleParser<'a, 'b> {
}
}
/// Configures `RuleBodyParser` for `@viewport` bodies: they consist solely of
/// declarations; qualified rules are not parsed inside them.
impl<'a, 'b, 'i> RuleBodyItemParser<'i, ViewportDeclarations, StyleParseErrorKind<'i>> for ViewportRuleParser<'a, 'b> {
    // @viewport bodies are declaration lists.
    fn parse_declarations(&self) -> bool { true }
    // No qualified rules inside @viewport.
    fn parse_qualified(&self) -> bool { false }
}
/// A `@viewport` rule.
#[derive(Clone, Debug, PartialEq, ToShmem)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
@ -345,10 +352,10 @@ impl ViewportRule {
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let parser = ViewportRuleParser { context };
let mut parser = ViewportRuleParser { context };
let mut cascade = Cascade::new();
let mut parser = DeclarationListParser::new(input, parser);
let mut parser = RuleBodyParser::new(input, &mut parser);
while let Some(result) = parser.next() {
match result {
Ok(declarations) => {
@ -463,9 +470,7 @@ impl ViewportRule {
let declarations: Vec<_> = declarations.into_iter().filter_map(|entry| entry).collect();
if !declarations.is_empty() {
Some(ViewportRule {
declarations: declarations,
})
Some(ViewportRule { declarations })
} else {
None
}
@ -792,8 +797,8 @@ impl MaybeNew for ViewportConstraints {
min_zoom: min_zoom.map(PinchZoomFactor::new),
max_zoom: max_zoom.map(PinchZoomFactor::new),
user_zoom: user_zoom,
orientation: orientation,
user_zoom,
orientation,
})
}
}

Просмотреть файл

@ -879,12 +879,7 @@ impl Parse for Attr {
/// Get the Namespace for a given prefix from the namespace map.
fn get_namespace_for_prefix(prefix: &Prefix, context: &ParserContext) -> Option<Namespace> {
context
.namespaces
.as_ref()?
.prefixes
.get(prefix)
.map(|x| x.clone())
context.namespaces.prefixes.get(prefix).cloned()
}
impl Attr {

Просмотреть файл

@ -2814,6 +2814,7 @@ pub extern "C" fn Servo_PageRule_SetSelectorText(
None,
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -3354,6 +3355,7 @@ pub unsafe extern "C" fn Servo_FontFaceRule_SetDescriptor(
Some(CssRuleType::FontFace),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -3748,6 +3750,7 @@ macro_rules! counter_style_descriptors {
Some(CssRuleType::CounterStyle),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -4420,6 +4423,7 @@ pub extern "C" fn Servo_ParseEasing(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -4977,6 +4981,7 @@ pub unsafe extern "C" fn Servo_MediaList_SetText(
Some(CssRuleType::Media),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
// TODO(emilio): Looks like error reporting could be useful here?
None,
None,
@ -5018,6 +5023,7 @@ pub extern "C" fn Servo_MediaList_AppendMedium(list: &LockedMediaList, new_mediu
Some(CssRuleType::Media),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -5039,6 +5045,7 @@ pub extern "C" fn Servo_MediaList_DeleteMedium(
Some(CssRuleType::Media),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -5553,6 +5560,7 @@ pub unsafe extern "C" fn Servo_DeclarationBlock_SetFontFamily(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -5587,6 +5595,7 @@ pub unsafe extern "C" fn Servo_DeclarationBlock_SetBackgroundImage(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -5686,12 +5695,12 @@ pub extern "C" fn Servo_CSSSupports(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
quirks_mode,
/* namespaces = */ Default::default(),
None,
None,
);
let namespaces = Default::default();
cond.eval(&context, &namespaces)
cond.eval(&context)
}
#[no_mangle]
@ -5702,19 +5711,19 @@ pub extern "C" fn Servo_CSSSupportsForImport(after_rule: &nsACString) -> bool {
// NOTE(emilio): The supports API is not associated to any stylesheet,
// so the fact that there is no namespace map here is fine.
let context = ParserContext::new(
let mut context = ParserContext::new(
Origin::Author,
unsafe { dummy_url_data() },
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
let namespaces = Default::default();
let (_layer, supports) =
ImportRule::parse_layer_and_supports(&mut input, &context, &namespaces);
ImportRule::parse_layer_and_supports(&mut input, &mut context);
supports.map_or(true, |s| s.enabled)
}
@ -7008,6 +7017,7 @@ pub unsafe extern "C" fn Servo_IsValidCSSColor(value: &nsACString) -> bool {
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7036,6 +7046,7 @@ pub unsafe extern "C" fn Servo_ComputeColor(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
reporter.as_ref().map(|e| e as &dyn ParseErrorReporter),
None,
);
@ -7098,6 +7109,7 @@ pub unsafe extern "C" fn Servo_IntersectionObserverRootMargin_Parse(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7138,6 +7150,7 @@ pub extern "C" fn Servo_ParseTransformIntoMatrix(
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7181,6 +7194,7 @@ pub unsafe extern "C" fn Servo_ParseFontShorthandForMatching(
Some(CssRuleType::FontFace),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7269,6 +7283,7 @@ pub unsafe extern "C" fn Servo_SourceSizeList_Parse(value: &nsACString) -> *mut
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7556,6 +7571,7 @@ pub extern "C" fn Servo_GenericFontFamily_Parse(input: &nsACString) -> GenericFo
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7575,6 +7591,7 @@ pub extern "C" fn Servo_ColorScheme_Parse(input: &nsACString, out: &mut u8) -> b
Some(CssRuleType::Style),
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);
@ -7679,6 +7696,7 @@ where
None,
ParsingMode::DEFAULT,
QuirksMode::NoQuirks,
/* namespaces = */ Default::default(),
None,
None,
);

Просмотреть файл

@ -782,6 +782,12 @@ was developed by other mozilla folks. Unsafe code there is reasonable (utf-8
casts for serialization and parsing).
"""
[[audits.cssparser]]
who = "Emilio Cobos Álvarez <emilio@crisal.io>"
criteria = "safe-to-deploy"
version = "0.31.0@git:6ce91afdf292c4290118843e7421e146f0a4c48b"
notes = "Local changes authored by me that aren't published yet."
[[audits.cssparser]]
who = "Bobby Holley <bobbyholley@gmail.com>"
criteria = "safe-to-deploy"
@ -803,7 +809,7 @@ to a match expression.
[[audits.cssparser-macros]]
who = "Emilio Cobos Álvarez <emilio@crisal.io>"
criteria = "safe-to-deploy"
delta = "0.6.0 -> 0.6.0@git:3e1bd05139cb7174ace395d498ca7128feb8f69d"
version = "0.6.0@git:6ce91afdf292c4290118843e7421e146f0a4c48b"
notes = "We are pulling this package from a non crates.io source until the changes are published. No changes were made to the code."
[[audits.cstr]]

Просмотреть файл

@ -35,6 +35,14 @@ notes = "This is a crate Henri wrote which is also published. We should probably
audit-as-crates-io = true
notes = "This is a pinned version of the upstream code, presumably to get a fix that hadn't been released yet. We should consider switching to the latest official release."
[policy.cssparser]
audit-as-crates-io = true
notes = "Upstream code plus nesting changes authored by us that haven't been published yet"
[policy.cssparser-macros]
audit-as-crates-io = true
notes = "Needed because this crate lives alongside cssparser"
[policy.d3d12]
audit-as-crates-io = true
notes = "Unpublished wgpu revisions point to unpublished d3d12 revisions."

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"0da70f27e2b97898b01ce464320726e76a2a1d2079355b4517ace23cca95eec6","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","lib.rs":"ece2f6d1a33f80180bd69075578a5b95036f45d5effe25ad828d53c62e640524"},"package":"dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e"}
{"files":{"Cargo.toml":"43dbb4a9920bd25625edb3f1dc7a9bdc856d13113bbe8fe7e04e728c4362603b","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","lib.rs":"0e8f69571d62b9516d4f3158b7c354b1f29de78d82ac2b19977652c0810dbdfa"},"package":null}

19
third_party/rust/cssparser-macros/Cargo.toml поставляемый
Просмотреть файл

@ -3,12 +3,11 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
@ -23,9 +22,13 @@ repository = "https://github.com/servo/rust-cssparser"
[lib]
path = "lib.rs"
proc-macro = true
[dependencies.quote]
version = "1"
[dependencies]
quote = "1"
[dependencies.syn]
version = "1"
features = ["full", "extra-traits"]
features = [
"full",
"extra-traits",
]

147
third_party/rust/cssparser-macros/lib.rs поставляемый
Просмотреть файл

@ -37,3 +37,150 @@ pub fn _cssparser_internal_max_len(input: TokenStream) -> TokenStream {
)
.into()
}
/// Returns the value of a byte-literal expression (e.g. `b'x'`).
///
/// Panics with a descriptive message when the literal isn't a byte; a
/// non-literal expression is unreachable because callers only pass
/// expressions extracted from literal or range patterns.
fn get_byte_from_expr_lit(expr: &syn::Expr) -> u8 {
    if let syn::Expr::Lit(syn::ExprLit { ref lit, .. }) = *expr {
        if let syn::Lit::Byte(ref byte) = *lit {
            byte.value()
        } else {
            panic!("Found a pattern that wasn't a byte")
        }
    } else {
        unreachable!()
    }
}
/// Parse a pattern and fill the 256-entry byte-dispatch table accordingly.
///
/// `table[b]` is set to `case_id` for every byte `b` the pattern covers, but
/// only where the entry is still 0 — i.e. the first matching arm wins, like a
/// real `match`. A binding pattern (e.g. `foo => ...`) acts as a wildcard and
/// records its identifier in `wildcard`.
fn parse_pat_to_table<'a>(
    pat: &'a syn::Pat,
    case_id: u8,
    wildcard: &mut Option<&'a syn::Ident>,
    table: &mut [u8; 256],
) {
    match pat {
        // Single byte literal, e.g. `b'a'`.
        &syn::Pat::Lit(syn::PatLit { ref expr, .. }) => {
            let value = get_byte_from_expr_lit(expr);
            if table[value as usize] == 0 {
                table[value as usize] = case_id;
            }
        }
        // Byte range, e.g. `b'a'..b'z'`. The upper bound is filled in
        // explicitly below, so ranges behave inclusively regardless of
        // whether `..` or `..=` syntax was written (the `limits` field of
        // the pattern is ignored).
        &syn::Pat::Range(syn::PatRange { ref lo, ref hi, .. }) => {
            let lo = get_byte_from_expr_lit(lo);
            let hi = get_byte_from_expr_lit(hi);
            for value in lo..hi {
                if table[value as usize] == 0 {
                    table[value as usize] = case_id;
                }
            }
            if table[hi as usize] == 0 {
                table[hi as usize] = case_id;
            }
        }
        // `_`: claims every byte not already claimed by an earlier arm.
        &syn::Pat::Wild(_) => {
            for byte in table.iter_mut() {
                if *byte == 0 {
                    *byte = case_id;
                }
            }
        }
        // An identifier binding: behaves like a wildcard, but also records
        // the name so the expansion can bind the scrutinee byte to it.
        // Only one binding arm is allowed (asserted here).
        &syn::Pat::Ident(syn::PatIdent { ref ident, .. }) => {
            assert_eq!(*wildcard, None);
            *wildcard = Some(ident);
            for byte in table.iter_mut() {
                if *byte == 0 {
                    *byte = case_id;
                }
            }
        }
        // Or-patterns (`a | b`): every alternative maps to the same case id.
        &syn::Pat::Or(syn::PatOr { ref cases, .. }) => {
            for case in cases {
                parse_pat_to_table(case, case_id, wildcard, table);
            }
        }
        _ => {
            panic!("Unexpected pattern: {:?}. Buggy code ?", pat);
        }
    }
}
/// Expand a TokenStream corresponding to the `match_byte` macro.
///
/// Generates a 256-entry static lookup table mapping each possible byte to a
/// `Case` enum variant, then dispatches through a `match` over that variant —
/// a constant-time alternative to chained byte comparisons.
///
/// ## Example
///
/// ```rust
/// match_byte! { tokenizer.next_byte_unchecked(),
///     b'a'..b'z' => { ... }
///     b'0'..b'9' => { ... }
///     b'\n' | b'\\' => { ... }
///     foo => { ... }
/// }
/// ```
///
#[proc_macro]
pub fn match_byte(input: TokenStream) -> TokenStream {
    use syn::spanned::Spanned;

    // The macro input: a scrutinee expression, a comma, then match arms.
    struct MatchByte {
        expr: syn::Expr,
        arms: Vec<syn::Arm>,
    }

    impl syn::parse::Parse for MatchByte {
        fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
            Ok(MatchByte {
                expr: {
                    let expr = input.parse()?;
                    input.parse::<syn::Token![,]>()?;
                    expr
                },
                arms: {
                    let mut arms = Vec::new();
                    while !input.is_empty() {
                        let arm = input.call(syn::Arm::parse)?;
                        // Guards and attributes can't be represented in the
                        // byte-dispatch table, so reject them up front.
                        assert!(arm.guard.is_none(), "match_byte doesn't support guards");
                        assert!(
                            arm.attrs.is_empty(),
                            "match_byte doesn't support attributes"
                        );
                        arms.push(arm);
                    }
                    arms
                },
            })
        }
    }

    let MatchByte { expr, arms } = syn::parse_macro_input!(input);

    // Build the table (each byte -> 1-based index of the first arm matching
    // it), plus one generated `Case` variant and one match arm per input arm.
    let mut cases = Vec::new();
    let mut table = [0u8; 256];
    let mut match_body = Vec::new();
    let mut wildcard = None;
    for (i, ref arm) in arms.iter().enumerate() {
        let case_id = i + 1;
        let index = case_id as isize;
        let name = syn::Ident::new(&format!("Case{}", case_id), arm.span());
        let pat = &arm.pat;
        parse_pat_to_table(pat, case_id as u8, &mut wildcard, &mut table);
        cases.push(quote::quote!(#name = #index));
        let body = &arm.body;
        match_body.push(quote::quote!(Case::#name => { #body }))
    }
    let en = quote::quote!(enum Case {
        #(#cases),*
    });

    // Materialize the dispatch table as a static array of `Case` variants.
    // NOTE(review): a byte covered by no arm leaves a 0 entry, which expands
    // to the nonexistent `Case::Case0` and fails to compile at the call site;
    // arms are therefore expected to cover all 256 byte values.
    let mut table_content = Vec::new();
    for entry in table.iter() {
        let name: syn::Path = syn::parse_str(&format!("Case::Case{}", entry)).unwrap();
        table_content.push(name);
    }
    let table = quote::quote!(static __CASES: [Case; 256] = [#(#table_content),*];);

    // If an arm bound an identifier, bind the scrutinee to that name before
    // dispatching so the arm body can refer to it.
    if let Some(binding) = wildcard {
        quote::quote!({ #en #table let #binding = #expr; match __CASES[#binding as usize] { #(#match_body),* }})
    } else {
        quote::quote!({ #en #table match __CASES[#expr as usize] { #(#match_body),* }})
    }.into()
}

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"5f6e32fdce3042448124b22bdf3ae54e74c45d621bfdac799e11714c34b105c3","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","README.md":"53a6805edd80f642473514cb93f1f4197e17a911d66a2dfcefc3dc5e82bac206","build.rs":"b30f35bfbd713943822a19ce6ebe5c99017f603cb001ed37354020549aec71fc","build/match_byte.rs":"f57faf0597cb7b3e32999c5fb1215a43a5603121588c67d5031f720362171e1c","docs/404.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","docs/index.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","src/color.rs":"df7d97636896df02b7ba56abf6f74f121d8320615f9e6ef6d43e6f272d3600cb","src/cow_rc_str.rs":"22d6829ab54c51486af4bacf5f184a6c95c15febdbbd5630a98b995ed0ee3e55","src/from_bytes.rs":"b1cf15c4e975523fef46b575598737a39f3c63e5ce0b2bfd6ec627c69c6ea54a","src/lib.rs":"a1ba5ab4bab5f4b635682ee5b4933339973cea0f1f9abe8155f5a78387122b41","src/macros.rs":"481a43293dc49460479ecb5fb2d9e44b2ec80bfff533344384cb9cb422f3964b","src/nth.rs":"2fc26915f0a36cb22ac45dd9a7ecbdc64c327b2ec135370258ec3db9f9985460","src/parser.rs":"c47a34d302dc458cb84a7ec9377cc94eb613d2365efa414b783f130d77dd6fe6","src/rules_and_declarations.rs":"011d0411106762b4241928907bab225d4c1a1191c220ed852ec228c1a38b8578","src/serializer.rs":"3a0155521676deea9a6327c2ed00af6d5dabb29a97e2341d0f565f8c2b66d0a3","src/size_of_tests.rs":"da0cbcaa304f7800e9122e2bce0a11d42a70b9012e646a723cb23ee74a6b858c","src/tests.rs":"58dba29c73a59fb45a0fe1810cfee1d018aa427dcd6bae8cc5d8f17d5230fba8","src/tokenizer.rs":"6fd5b9f3a89a87bf9334cc4765ad76e080ab6aaec73d996f052558480a3f007a","src/unicode_range.rs":"20d96f06fbb73921e308cc340c9fe065e27f19843005689fb259007a6a372bcc"},"package":"c18bf42b9ab91d1d7ce0192f5b432c9ab6a5728206f25ab6cb0eb61fea25bccc"}
{"files":{".github/workflows/main.yml":"73c57dbb2c5471de2fcba828e356d09931ae89176f3bb695029b169dbb3f696f","Cargo.toml":"cffe5d5b5b66f65b9e4ca2aee56cf046604075ccfff0a89ca37ef4bb59164d02","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","README.md":"53a6805edd80f642473514cb93f1f4197e17a911d66a2dfcefc3dc5e82bac206","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","docs/index.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","src/color.rs":"df7d97636896df02b7ba56abf6f74f121d8320615f9e6ef6d43e6f272d3600cb","src/cow_rc_str.rs":"22d6829ab54c51486af4bacf5f184a6c95c15febdbbd5630a98b995ed0ee3e55","src/from_bytes.rs":"b1cf15c4e975523fef46b575598737a39f3c63e5ce0b2bfd6ec627c69c6ea54a","src/lib.rs":"d4e37941cfa90e42deb7d0537483ac0b2bac033adf4bb0007b000cd807c588f5","src/macros.rs":"883df01d4a8dfc661b8ffa17fab694ff6fb271e99e9c6b023e73d49bbaa6e5d6","src/nth.rs":"2fc26915f0a36cb22ac45dd9a7ecbdc64c327b2ec135370258ec3db9f9985460","src/parser.rs":"50fa448b316902d5f4aa18725220633c28bb7d82cbaff125177d79d4cf8dae58","src/rules_and_declarations.rs":"cb08533dd4b239a6cbd59d8fcca1447c8089940dd519edea9c9a9b51fa999d49","src/serializer.rs":"3a0155521676deea9a6327c2ed00af6d5dabb29a97e2341d0f565f8c2b66d0a3","src/size_of_tests.rs":"da0cbcaa304f7800e9122e2bce0a11d42a70b9012e646a723cb23ee74a6b858c","src/tests.rs":"6c950841e8c454c50ae439543fa8718f935058a6bb1b1bf77033439daeff08a7","src/tokenizer.rs":"9900460d9bad82b7a41829d01537094203fafc7092c954aac969546ea53d5bba","src/unicode_range.rs":"20d96f06fbb73921e308cc340c9fe065e27f19843005689fb259007a6a372bcc"},"package":null}

63
third_party/rust/cssparser/.github/workflows/main.yml поставляемый Normal file
Просмотреть файл

@ -0,0 +1,63 @@
name: CI
on:
push:
branches: [auto]
pull_request:
workflow_dispatch:
jobs:
linux-ci:
name: Linux
runs-on: ubuntu-latest
strategy:
matrix:
toolchain:
- nightly
- beta
- stable
- 1.63.0
features:
-
- --features dummy_match_byte
include:
- toolchain: nightly
features: --features bench
- toolchain: nightly
features: --features bench,dummy_match_byte
steps:
- uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
override: true
- name: Cargo build
run: cargo build ${{ matrix.features }}
- name: Cargo doc
run: cargo doc ${{ matrix.features }}
- name: Cargo test
run: cargo test ${{ matrix.features }}
- name: macros build
run: cargo build
working-directory: macros
build_result:
name: homu build finished
runs-on: ubuntu-latest
needs:
- "linux-ci"
steps:
- name: Mark the job as successful
run: exit 0
if: success()
- name: Mark the job as unsuccessful
run: exit 1
if: "!success()"

43
third_party/rust/cssparser/Cargo.toml поставляемый
Просмотреть файл

@ -15,7 +15,6 @@ rust-version = "1.63"
name = "cssparser"
version = "0.31.0"
authors = ["Simon Sapin <simon.sapin@exyr.org>"]
build = "build.rs"
exclude = [
"src/css-parsing-tests/**",
"src/big-data-url.css",
@ -31,14 +30,14 @@ keywords = [
license = "MPL-2.0"
repository = "https://github.com/servo/rust-cssparser"
[dependencies]
dtoa-short = "0.3"
itoa = "1.0"
smallvec = "1.0"
[dependencies.cssparser-macros]
version = "0.6"
[dependencies.dtoa-short]
version = "0.3"
[dependencies.itoa]
version = "1.0"
path = "./macros"
[dependencies.phf]
version = ">=0.8,<=0.11"
@ -48,23 +47,14 @@ features = ["macros"]
version = "1.0"
optional = true
[dependencies.smallvec]
version = "1.0"
[dev-dependencies]
difference = "2.0"
encoding_rs = "0.8"
serde_json = "1.0"
[dev-dependencies.difference]
version = "2.0"
[dev-dependencies.encoding_rs]
version = "0.8"
[dev-dependencies.serde_json]
version = "1.0"
[build-dependencies.proc-macro2]
version = "1"
[build-dependencies.quote]
version = "1"
[build-dependencies]
proc-macro2 = "1"
quote = "1"
[build-dependencies.syn]
version = "1"
@ -77,3 +67,10 @@ features = [
[features]
bench = []
dummy_match_byte = []
[workspace]
members = [
".",
"./macros",
"./procedural-masquerade",
]

41
third_party/rust/cssparser/build.rs поставляемый
Просмотреть файл

@ -1,41 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#[cfg(feature = "dummy_match_byte")]
mod codegen {
pub fn main() {}
}
#[cfg(not(feature = "dummy_match_byte"))]
#[path = "build/match_byte.rs"]
mod match_byte;
#[cfg(not(feature = "dummy_match_byte"))]
mod codegen {
use std::env;
use std::path::Path;
use std::thread::Builder;
pub fn main() {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let input = Path::new(&manifest_dir).join("src/tokenizer.rs");
let output = Path::new(&env::var("OUT_DIR").unwrap()).join("tokenizer.rs");
println!("cargo:rerun-if-changed={}", input.display());
// We have stack overflows on Servo's CI.
let handle = Builder::new()
.stack_size(128 * 1024 * 1024)
.spawn(move || {
crate::match_byte::expand(&input, &output);
})
.unwrap();
handle.join().unwrap();
}
}
fn main() {
codegen::main();
}

209
third_party/rust/cssparser/build/match_byte.rs поставляемый
Просмотреть файл

@ -1,209 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use quote::{quote, ToTokens};
use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use syn;
use syn::fold::Fold;
use syn::parse::{Parse, ParseStream, Result};
use syn::{parse_quote, Token};
use proc_macro2::{Span, TokenStream};
struct MatchByteParser {}
pub fn expand(from: &Path, to: &Path) {
let mut source = String::new();
File::open(from)
.unwrap()
.read_to_string(&mut source)
.unwrap();
let ast = syn::parse_file(&source).expect("Parsing rules.rs module");
let mut m = MatchByteParser {};
let ast = m.fold_file(ast);
let code = ast
.into_token_stream()
.to_string()
.replace("{ ", "{\n")
.replace(" }", "\n}");
File::create(to)
.unwrap()
.write_all(code.as_bytes())
.unwrap();
}
struct MatchByte {
expr: syn::Expr,
arms: Vec<syn::Arm>,
}
impl Parse for MatchByte {
fn parse(input: ParseStream) -> Result<Self> {
Ok(MatchByte {
expr: {
let expr = input.parse()?;
input.parse::<Token![,]>()?;
expr
},
arms: {
let mut arms = Vec::new();
while !input.is_empty() {
arms.push(input.call(syn::Arm::parse)?);
}
arms
},
})
}
}
fn get_byte_from_expr_lit(expr: &Box<syn::Expr>) -> u8 {
match **expr {
syn::Expr::Lit(syn::ExprLit { ref lit, .. }) => {
if let syn::Lit::Byte(ref byte) = *lit {
byte.value()
} else {
panic!("Found a pattern that wasn't a byte")
}
}
_ => unreachable!(),
}
}
/// Parse a pattern and fill the table accordingly
fn parse_pat_to_table<'a>(
pat: &'a syn::Pat,
case_id: u8,
wildcard: &mut Option<&'a syn::Ident>,
table: &mut [u8; 256],
) {
match pat {
&syn::Pat::Lit(syn::PatLit { ref expr, .. }) => {
let value = get_byte_from_expr_lit(expr);
if table[value as usize] == 0 {
table[value as usize] = case_id;
}
}
&syn::Pat::Range(syn::PatRange { ref lo, ref hi, .. }) => {
let lo = get_byte_from_expr_lit(lo);
let hi = get_byte_from_expr_lit(hi);
for value in lo..hi {
if table[value as usize] == 0 {
table[value as usize] = case_id;
}
}
if table[hi as usize] == 0 {
table[hi as usize] = case_id;
}
}
&syn::Pat::Wild(_) => {
for byte in table.iter_mut() {
if *byte == 0 {
*byte = case_id;
}
}
}
&syn::Pat::Ident(syn::PatIdent { ref ident, .. }) => {
assert_eq!(*wildcard, None);
*wildcard = Some(ident);
for byte in table.iter_mut() {
if *byte == 0 {
*byte = case_id;
}
}
}
&syn::Pat::Or(syn::PatOr { ref cases, .. }) => {
for case in cases {
parse_pat_to_table(case, case_id, wildcard, table);
}
}
_ => {
panic!("Unexpected pattern: {:?}. Buggy code ?", pat);
}
}
}
/// Expand a TokenStream corresponding to the `match_byte` macro.
///
/// ## Example
///
/// ```rust
/// match_byte! { tokenizer.next_byte_unchecked(),
/// b'a'..b'z' => { ... }
/// b'0'..b'9' => { ... }
/// b'\n' | b'\\' => { ... }
/// foo => { ... }
/// }
/// ```
///
fn expand_match_byte(body: &TokenStream) -> syn::Expr {
let match_byte: MatchByte = syn::parse2(body.clone()).unwrap();
let expr = match_byte.expr;
let mut cases = Vec::new();
let mut table = [0u8; 256];
let mut match_body = Vec::new();
let mut wildcard = None;
for (i, ref arm) in match_byte.arms.iter().enumerate() {
let case_id = i + 1;
let index = case_id as isize;
let name = syn::Ident::new(&format!("Case{}", case_id), Span::call_site());
parse_pat_to_table(&arm.pat, case_id as u8, &mut wildcard, &mut table);
cases.push(quote!(#name = #index));
let body = &arm.body;
match_body.push(quote!(Case::#name => { #body }))
}
let en = quote!(enum Case {
#(#cases),*
});
let mut table_content = Vec::new();
for entry in table.iter() {
let name: syn::Path = syn::parse_str(&format!("Case::Case{}", entry)).unwrap();
table_content.push(name);
}
let table = quote!(static __CASES: [Case; 256] = [#(#table_content),*];);
let expr = if let Some(binding) = wildcard {
quote!({ #en #table let #binding = #expr; match __CASES[#binding as usize] { #(#match_body),* }})
} else {
quote!({ #en #table match __CASES[#expr as usize] { #(#match_body),* }})
};
syn::parse2(expr.into()).unwrap()
}
impl Fold for MatchByteParser {
fn fold_stmt(&mut self, stmt: syn::Stmt) -> syn::Stmt {
match stmt {
syn::Stmt::Item(syn::Item::Macro(syn::ItemMacro { ref mac, .. })) => {
if mac.path == parse_quote!(match_byte) {
return syn::fold::fold_stmt(
self,
syn::Stmt::Expr(expand_match_byte(&mac.tokens)),
);
}
}
_ => {}
}
syn::fold::fold_stmt(self, stmt)
}
fn fold_expr(&mut self, expr: syn::Expr) -> syn::Expr {
match expr {
syn::Expr::Macro(syn::ExprMacro { ref mac, .. }) => {
if mac.path == parse_quote!(match_byte) {
return syn::fold::fold_expr(self, expand_match_byte(&mac.tokens));
}
}
_ => {}
}
syn::fold::fold_expr(self, expr)
}
}

0
third_party/rust/cssparser/docs/.nojekyll поставляемый Normal file
Просмотреть файл

10
third_party/rust/cssparser/src/lib.rs поставляемый
Просмотреть файл

@ -82,9 +82,9 @@ pub use crate::nth::parse_nth;
pub use crate::parser::{BasicParseError, BasicParseErrorKind, ParseError, ParseErrorKind};
pub use crate::parser::{Delimiter, Delimiters, Parser, ParserInput, ParserState};
pub use crate::rules_and_declarations::{parse_important, parse_one_declaration};
pub use crate::rules_and_declarations::{parse_one_rule, RuleListParser};
pub use crate::rules_and_declarations::{parse_one_rule, StyleSheetParser};
pub use crate::rules_and_declarations::{AtRuleParser, QualifiedRuleParser};
pub use crate::rules_and_declarations::{DeclarationListParser, DeclarationParser};
pub use crate::rules_and_declarations::{RuleBodyParser, RuleBodyItemParser, DeclarationParser};
pub use crate::serializer::{serialize_identifier, serialize_name, serialize_string};
pub use crate::serializer::{CssStringWriter, ToCss, TokenSerializationType};
pub use crate::tokenizer::{SourceLocation, SourcePosition, Token};
@ -97,14 +97,8 @@ pub use phf as _cssparser_internal_phf;
mod macros;
mod rules_and_declarations;
#[cfg(feature = "dummy_match_byte")]
mod tokenizer;
#[cfg(not(feature = "dummy_match_byte"))]
mod tokenizer {
include!(concat!(env!("OUT_DIR"), "/tokenizer.rs"));
}
mod color;
mod cow_rc_str;
mod from_bytes;

11
third_party/rust/cssparser/src/macros.rs поставляемый
Просмотреть файл

@ -183,14 +183,3 @@ pub fn _cssparser_internal_to_lowercase<'a>(
},
)
}
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}

5
third_party/rust/cssparser/src/parser.rs поставляемый
Просмотреть файл

@ -700,10 +700,7 @@ impl<'i: 't, 't> Parser<'i, 't> {
/// Caller must deal with the fact that the resulting list might be empty,
/// if there's no valid component on the list.
#[inline]
pub fn parse_comma_separated_ignoring_errors<F, T, E: 'i>(
&mut self,
parse_one: F,
) -> Vec<T>
pub fn parse_comma_separated_ignoring_errors<F, T, E: 'i>(&mut self, parse_one: F) -> Vec<T>
where
F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>,
{

Просмотреть файл

@ -4,8 +4,7 @@
// https://drafts.csswg.org/css-syntax/#parsing
use super::{BasicParseError, BasicParseErrorKind, Delimiter};
use super::{ParseError, Parser, Token};
use super::{BasicParseError, BasicParseErrorKind, Delimiter, Delimiters, ParseError, Parser, Token};
use crate::cow_rc_str::CowRcStr;
use crate::parser::{parse_nested_block, parse_until_after, parse_until_before, ParserState};
@ -50,14 +49,9 @@ pub trait DeclarationParser<'i> {
&mut self,
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
) -> Result<Self::Declaration, ParseError<'i, Self::Error>>;
/// Whether to try to parse qualified rules along with declarations. See
/// <https://github.com/w3c/csswg-drafts/issues/7961> for the current state of the discussion.
/// This is a low effort opt-in to be able to experiment with it, but it's likely to be needed
/// when nesting is less experimental as well (e.g., you probably don't want to allow nesting
/// in a style attribute anyways).
fn enable_nesting(&self) -> bool { false }
) -> Result<Self::Declaration, ParseError<'i, Self::Error>> {
Err(input.new_error(BasicParseErrorKind::UnexpectedToken(Token::Ident(name))))
}
}
/// A trait to provide various parsing of at-rules.
@ -99,8 +93,6 @@ pub trait AtRuleParser<'i> {
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
) -> Result<Self::Prelude, ParseError<'i, Self::Error>> {
let _ = name;
let _ = input;
Err(input.new_error(BasicParseErrorKind::AtRuleInvalid(name)))
}
@ -140,7 +132,6 @@ pub trait AtRuleParser<'i> {
) -> Result<Self::AtRule, ParseError<'i, Self::Error>> {
let _ = prelude;
let _ = start;
let _ = input;
Err(input.new_error(BasicParseErrorKind::AtRuleBodyInvalid))
}
}
@ -178,7 +169,6 @@ pub trait QualifiedRuleParser<'i> {
&mut self,
input: &mut Parser<'i, 't>,
) -> Result<Self::Prelude, ParseError<'i, Self::Error>> {
let _ = input;
Err(input.new_error(BasicParseErrorKind::QualifiedRuleInvalid))
}
@ -197,24 +187,35 @@ pub trait QualifiedRuleParser<'i> {
) -> Result<Self::QualifiedRule, ParseError<'i, Self::Error>> {
let _ = prelude;
let _ = start;
let _ = input;
Err(input.new_error(BasicParseErrorKind::QualifiedRuleInvalid))
}
}
/// Provides an iterator for declaration list parsing.
pub struct DeclarationListParser<'i, 't, 'a, P> {
/// The input given to `DeclarationListParser::new`
/// Provides an iterator for rule bodies and declaration lists.
pub struct RuleBodyParser<'i, 't, 'a, P, I, E> {
/// The input given to the parser.
pub input: &'a mut Parser<'i, 't>,
/// The parser given to `DeclarationListParser::new`
pub parser: P,
pub parser: &'a mut P,
_phantom: std::marker::PhantomData<(I, E)>,
}
impl<'i, 't, 'a, I, P, E: 'i> DeclarationListParser<'i, 't, 'a, P>
where
P: DeclarationParser<'i, Declaration = I, Error = E> + AtRuleParser<'i, AtRule = I, Error = E>,
/// A parser for a rule body item.
pub trait RuleBodyItemParser<'i, DeclOrRule, Error: 'i>:
DeclarationParser<'i, Declaration = DeclOrRule, Error = Error>
+ QualifiedRuleParser<'i, QualifiedRule = DeclOrRule, Error = Error>
+ AtRuleParser<'i, AtRule = DeclOrRule, Error = Error>
{
/// Whether we should attempt to parse declarations. If you know you won't, returning false
/// here is slightly faster.
fn parse_declarations(&self) -> bool;
/// Whether we should attempt to parse qualified rules. If you know you won't, returning false
/// would be slightly faster.
fn parse_qualified(&self) -> bool;
}
impl<'i, 't, 'a, P, I, E> RuleBodyParser<'i, 't, 'a, P, I, E> {
/// Create a new `DeclarationListParser` for the given `input` and `parser`.
///
/// Note that all CSS declaration lists can on principle contain at-rules.
@ -229,55 +230,69 @@ where
/// The return type for finished declarations and at-rules also needs to be the same,
/// since `<DeclarationListParser as Iterator>::next` can return either.
/// It could be a custom enum.
pub fn new(input: &'a mut Parser<'i, 't>, parser: P) -> Self {
DeclarationListParser { input, parser }
pub fn new(input: &'a mut Parser<'i, 't>, parser: &'a mut P) -> Self {
Self {
input,
parser,
_phantom: std::marker::PhantomData,
}
}
}
/// `DeclarationListParser` is an iterator that yields `Ok(_)` for a valid declaration or at-rule
/// or `Err(())` for an invalid one.
impl<'i, 't, 'a, I, P, E: 'i> Iterator for DeclarationListParser<'i, 't, 'a, P>
impl<'i, 't, 'a, I, P, E: 'i> Iterator for RuleBodyParser<'i, 't, 'a, P, I, E>
where
P: DeclarationParser<'i, Declaration = I, Error = E>
+ AtRuleParser<'i, AtRule = I, Error = E>
+ QualifiedRuleParser<'i, QualifiedRule = I, Error = E>,
P: RuleBodyItemParser<'i, I, E>,
{
type Item = Result<I, (ParseError<'i, E>, &'i str)>;
fn next(&mut self) -> Option<Self::Item> {
loop {
self.input.skip_whitespace();
let start = self.input.state();
match self.input.next_including_whitespace_and_comments() {
Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) | Ok(&Token::Semicolon) => {
continue
}
Ok(&Token::Ident(ref name)) => {
match self.input.next_including_whitespace_and_comments().ok()? {
Token::Comment(..) => continue,
Token::Semicolon if self.parser.parse_declarations() => continue,
Token::Ident(ref name) if self.parser.parse_declarations() => {
let name = name.clone();
let parse_qualified = self.parser.parse_qualified();
let delimiters = if parse_qualified {
Delimiter::Semicolon | Delimiter::CurlyBracketBlock
} else {
Delimiter::Semicolon
};
let mut result = {
let parser = &mut self.parser;
parse_until_after(self.input, Delimiter::Semicolon, |input| {
parse_until_after(self.input, delimiters, |input| {
input.expect_colon()?;
parser.parse_value(name, input)
})
};
if result.is_err() && self.parser.enable_nesting() {
if result.is_err() && parse_qualified {
self.input.reset(&start);
result = parse_qualified_rule(&start, self.input, &mut self.parser);
result =
parse_qualified_rule(&start, self.input, &mut *self.parser, delimiters);
}
return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
}
Ok(&Token::AtKeyword(ref name)) => {
Token::AtKeyword(ref name) => {
let name = name.clone();
return Some(parse_at_rule(&start, name, self.input, &mut self.parser));
return Some(parse_at_rule(&start, name, self.input, &mut *self.parser));
}
Ok(token) => {
let result = if self.parser.enable_nesting() {
token => {
let result = if self.parser.parse_qualified() {
self.input.reset(&start);
// XXX do we need to, if we fail, consume only until the next semicolon,
// rather than until the next `{`?
parse_qualified_rule(&start, self.input, &mut self.parser)
// TODO(emilio, nesting): do we need to, if we fail, consume only until the
// next semicolon, rather than until the next `{`?
parse_qualified_rule(
&start,
self.input,
&mut *self.parser,
Delimiter::CurlyBracketBlock,
)
} else {
let token = token.clone();
self.input.parse_until_after(Delimiter::Semicolon, |_| {
@ -286,66 +301,44 @@ where
};
return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
}
Err(..) => return None,
}
}
}
}
/// Provides an iterator for rule list parsing.
pub struct RuleListParser<'i, 't, 'a, P> {
/// The input given to `RuleListParser::new`
/// Provides an iterator for rule list parsing at the top-level of a stylesheet.
pub struct StyleSheetParser<'i, 't, 'a, P> {
/// The input given.
pub input: &'a mut Parser<'i, 't>,
/// The parser given to `RuleListParser::new`
pub parser: P,
/// The parser given.
pub parser: &'a mut P,
is_stylesheet: bool,
any_rule_so_far: bool,
}
impl<'i, 't, 'a, R, P, E: 'i> RuleListParser<'i, 't, 'a, P>
impl<'i, 't, 'a, R, P, E: 'i> StyleSheetParser<'i, 't, 'a, P>
where
P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E>
+ AtRuleParser<'i, AtRule = R, Error = E>,
{
/// Create a new `RuleListParser` for the given `input` at the top-level of a stylesheet
/// and the given `parser`.
///
/// The given `parser` needs to implement both `QualifiedRuleParser` and `AtRuleParser` traits.
/// However, either of them can be an empty `impl`
/// since the traits provide default implementations of their methods.
/// However, either of them can be an empty `impl` since the traits provide default
/// implementations of their methods.
///
/// The return type for finished qualified rules and at-rules also needs to be the same,
/// since `<RuleListParser as Iterator>::next` can return either.
/// It could be a custom enum.
pub fn new_for_stylesheet(input: &'a mut Parser<'i, 't>, parser: P) -> Self {
RuleListParser {
/// since `<RuleListParser as Iterator>::next` can return either. It could be a custom enum.
pub fn new(input: &'a mut Parser<'i, 't>, parser: &'a mut P) -> Self {
Self {
input,
parser,
is_stylesheet: true,
any_rule_so_far: false,
}
}
/// Same is `new_for_stylesheet`, but should be used for rule lists inside a block
/// such as the body of an `@media` rule.
///
/// This differs in that `<!--` and `-->` tokens
/// should only be ignored at the stylesheet top-level.
/// (This is to deal with legacy workarounds for `<style>` HTML element parsing.)
pub fn new_for_nested_rule(input: &'a mut Parser<'i, 't>, parser: P) -> Self {
RuleListParser {
input,
parser,
is_stylesheet: false,
any_rule_so_far: false,
}
}
}
/// `RuleListParser` is an iterator that yields `Ok(_)` for a rule or `Err(())` for an invalid one.
impl<'i, 't, 'a, R, P, E: 'i> Iterator for RuleListParser<'i, 't, 'a, P>
impl<'i, 't, 'a, R, P, E: 'i> Iterator for StyleSheetParser<'i, 't, 'a, P>
where
P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E>
+ AtRuleParser<'i, AtRule = R, Error = E>,
@ -354,13 +347,8 @@ where
fn next(&mut self) -> Option<Self::Item> {
loop {
if self.is_stylesheet {
self.input.skip_cdc_and_cdo()
} else {
self.input.skip_whitespace()
}
self.input.skip_cdc_and_cdo();
let start = self.input.state();
let at_keyword = match self.input.next_byte()? {
b'@' => match self.input.next_including_whitespace_and_comments() {
Ok(&Token::AtKeyword(ref name)) => Some(name.clone()),
@ -373,7 +361,7 @@ where
};
if let Some(name) = at_keyword {
let first_stylesheet_rule = self.is_stylesheet && !self.any_rule_so_far;
let first_stylesheet_rule = !self.any_rule_so_far;
self.any_rule_so_far = true;
if first_stylesheet_rule && name.eq_ignore_ascii_case("charset") {
let delimiters = Delimiter::Semicolon | Delimiter::CurlyBracketBlock;
@ -384,12 +372,17 @@ where
&start,
name.clone(),
self.input,
&mut self.parser,
&mut *self.parser,
));
}
} else {
self.any_rule_so_far = true;
let result = parse_qualified_rule(&start, self.input, &mut self.parser);
let result = parse_qualified_rule(
&start,
self.input,
&mut *self.parser,
Delimiter::CurlyBracketBlock,
);
return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
}
}
@ -441,7 +434,7 @@ where
if let Some(name) = at_keyword {
parse_at_rule(&start, name, input, parser).map_err(|e| e.0)
} else {
parse_qualified_rule(&start, input, parser)
parse_qualified_rule(&start, input, parser, Delimiter::CurlyBracketBlock)
}
})
}
@ -485,19 +478,14 @@ fn parse_qualified_rule<'i, 't, P, E>(
start: &ParserState,
input: &mut Parser<'i, 't>,
parser: &mut P,
delimiters: Delimiters,
) -> Result<<P as QualifiedRuleParser<'i>>::QualifiedRule, ParseError<'i, E>>
where
P: QualifiedRuleParser<'i, Error = E>,
{
let prelude = parse_until_before(input, Delimiter::CurlyBracketBlock, |input| {
parser.parse_prelude(input)
});
match *input.next()? {
Token::CurlyBracketBlock => {
// Do this here so that we consume the `{` even if the prelude is `Err`.
let prelude = prelude?;
parse_nested_block(input, |input| parser.parse_block(prelude, &start, input))
}
_ => unreachable!(),
}
let prelude = parse_until_before(input, delimiters, |input| parser.parse_prelude(input));
input.expect_curly_bracket_block()?;
// Do this here so that we consume the `{` even if the prelude is `Err`.
let prelude = prelude?;
parse_nested_block(input, |input| parser.parse_block(prelude, &start, input))
}

27
third_party/rust/cssparser/src/tests.rs поставляемый
Просмотреть файл

@ -16,10 +16,10 @@ use self::test::Bencher;
use super::{
parse_important, parse_nth, parse_one_declaration, parse_one_rule, stylesheet_encoding,
AtRuleParser, BasicParseError, BasicParseErrorKind, Color, CowRcStr, DeclarationListParser,
DeclarationParser, Delimiter, EncodingSupport, ParseError, ParseErrorKind, Parser, ParserInput,
ParserState, QualifiedRuleParser, RuleListParser, SourceLocation, ToCss, Token,
TokenSerializationType, UnicodeRange, RGBA,
AtRuleParser, BasicParseError, BasicParseErrorKind, Color, CowRcStr, DeclarationParser,
Delimiter, EncodingSupport, ParseError, ParseErrorKind, Parser, ParserInput, ParserState,
QualifiedRuleParser, RuleBodyItemParser, RuleBodyParser, SourceLocation, StyleSheetParser,
ToCss, Token, TokenSerializationType, UnicodeRange, RGBA,
};
macro_rules! JArray {
@ -83,7 +83,7 @@ fn assert_json_eq(results: Value, mut expected: Value, message: &str) {
fn run_raw_json_tests<F: Fn(Value, Value) -> ()>(json_data: &str, run: F) {
let items = match serde_json::from_str(json_data) {
Ok(Value::Array(items)) => items,
_ => panic!("Invalid JSON"),
other => panic!("Invalid JSON: {:?}", other),
};
assert!(items.len() % 2 == 0);
let mut input = None;
@ -136,7 +136,7 @@ fn declaration_list() {
include_str!("css-parsing-tests/declaration_list.json"),
|input| {
Value::Array(
DeclarationListParser::new(input, JsonParser)
RuleBodyParser::new(input, &mut JsonParser)
.map(|result| result.unwrap_or(JArray!["error", "invalid"]))
.collect(),
)
@ -158,7 +158,7 @@ fn one_declaration() {
fn rule_list() {
run_json_tests(include_str!("css-parsing-tests/rule_list.json"), |input| {
Value::Array(
RuleListParser::new_for_nested_rule(input, JsonParser)
RuleBodyParser::new(input, &mut JsonParser)
.map(|result| result.unwrap_or(JArray!["error", "invalid"]))
.collect(),
)
@ -169,7 +169,7 @@ fn rule_list() {
fn stylesheet() {
run_json_tests(include_str!("css-parsing-tests/stylesheet.json"), |input| {
Value::Array(
RuleListParser::new_for_stylesheet(input, JsonParser)
StyleSheetParser::new(input, &mut JsonParser)
.map(|result| result.unwrap_or(JArray!["error", "invalid"]))
.collect(),
)
@ -234,7 +234,7 @@ fn stylesheet_from_bytes() {
let (css_unicode, used_encoding, _) = encoding.decode(&css);
let mut input = ParserInput::new(&css_unicode);
let input = &mut Parser::new(&mut input);
let rules = RuleListParser::new_for_stylesheet(input, JsonParser)
let rules = StyleSheetParser::new(input, &mut JsonParser)
.map(|result| result.unwrap_or(JArray!["error", "invalid"]))
.collect::<Vec<_>>();
JArray![rules, used_encoding.name().to_lowercase()]
@ -1072,6 +1072,15 @@ impl<'i> QualifiedRuleParser<'i> for JsonParser {
}
}
impl<'i> RuleBodyItemParser<'i, Value, ()> for JsonParser {
fn parse_qualified(&self) -> bool {
true
}
fn parse_declarations(&self) -> bool {
true
}
}
fn component_values_to_json(input: &mut Parser) -> Vec<Value> {
let mut values = vec![];
while let Ok(token) = input.next_including_whitespace().map(|t| t.clone()) {

72
third_party/rust/cssparser/src/tokenizer.rs поставляемый
Просмотреть файл

@ -10,6 +10,20 @@ use crate::parser::ParserState;
use std::char;
use std::ops::Range;
#[cfg(not(feature = "dummy_match_byte"))]
use cssparser_macros::match_byte;
#[cfg(feature = "dummy_match_byte")]
macro_rules! match_byte {
($value:expr, $($rest:tt)* ) => {
match $value {
$(
$rest
)+
}
};
}
/// One of the pieces the CSS input is broken into.
///
/// Some components use `Cow` in order to borrow from the original input string
@ -222,7 +236,7 @@ impl<'a> Tokenizer<'a> {
#[inline]
pub fn with_first_line_number(input: &str, first_line_number: u32) -> Tokenizer {
Tokenizer {
input: input,
input,
position: 0,
current_line_start_position: 0,
current_line_number: first_line_number,
@ -469,9 +483,7 @@ impl<'a> Tokenizer<'a> {
return
}
}
_ => {
return
}
_ => return,
}
}
}
@ -546,10 +558,8 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
b' ' | b'\t' => {
consume_whitespace(tokenizer, false)
},
b'\n' | b'\x0C' | b'\r' => {
consume_whitespace(tokenizer, true)
},
b'"' => { consume_string(tokenizer, false) },
b'\n' | b'\x0C' | b'\r' => consume_whitespace(tokenizer, true),
b'"' => consume_string(tokenizer, false),
b'#' => {
tokenizer.advance(1);
if is_ident_start(tokenizer) { IDHash(consume_name(tokenizer)) }
@ -564,7 +574,7 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
if tokenizer.starts_with(b"$=") { tokenizer.advance(2); SuffixMatch }
else { tokenizer.advance(1); Delim('$') }
},
b'\'' => { consume_string(tokenizer, true) },
b'\'' => consume_string(tokenizer, true),
b'(' => { tokenizer.advance(1); ParenthesisBlock },
b')' => { tokenizer.advance(1); CloseParenthesis },
b'*' => {
@ -625,7 +635,7 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
Delim('/')
}
}
b'0'..=b'9' => { consume_numeric(tokenizer) },
b'0'..=b'9' => consume_numeric(tokenizer),
b':' => { tokenizer.advance(1); Colon },
b';' => { tokenizer.advance(1); Semicolon },
b'<' => {
@ -642,7 +652,7 @@ fn next_token<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>, ()> {
if is_ident_start(tokenizer) { AtKeyword(consume_name(tokenizer)) }
else { Delim('@') }
},
b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\0' => { consume_ident_like(tokenizer) },
b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\0' => consume_ident_like(tokenizer),
b'[' => { tokenizer.advance(1); SquareBracketBlock },
b'\\' => {
if !tokenizer.has_newline_at(1) { consume_ident_like(tokenizer) }
@ -880,18 +890,18 @@ fn consume_quoted_string<'a>(
fn is_ident_start(tokenizer: &mut Tokenizer) -> bool {
!tokenizer.is_eof()
&& match_byte! { tokenizer.next_byte_unchecked(),
b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\0' => { true },
b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\0' => true,
b'-' => {
tokenizer.has_at_least(1) && match_byte! { tokenizer.byte_at(1),
b'a'..=b'z' | b'A'..=b'Z' | b'-' | b'_' | b'\0' => {
true
}
b'\\' => { !tokenizer.has_newline_at(1) }
b => { !b.is_ascii() },
b'\\' => !tokenizer.has_newline_at(1),
b => !b.is_ascii(),
}
},
b'\\' => { !tokenizer.has_newline_at(1) },
b => { !b.is_ascii() },
b'\\' => !tokenizer.has_newline_at(1),
b => !b.is_ascii(),
}
}
@ -919,7 +929,7 @@ fn consume_name<'a>(tokenizer: &mut Tokenizer<'a>) -> CowRcStr<'a> {
return tokenizer.slice_from(start_pos).into();
}
match_byte! { tokenizer.next_byte_unchecked(),
b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'-' => { tokenizer.advance(1) },
b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_' | b'-' => tokenizer.advance(1),
b'\\' | b'\0' => {
// * The tokenizers input is UTF-8 since its `&str`.
// * start_pos is at a code point boundary
@ -983,9 +993,9 @@ fn consume_name<'a>(tokenizer: &mut Tokenizer<'a>) -> CowRcStr<'a> {
fn byte_to_hex_digit(b: u8) -> Option<u32> {
Some(match_byte! { b,
b'0' ..= b'9' => { b - b'0' },
b'a' ..= b'f' => { b - b'a' + 10 },
b'A' ..= b'F' => { b - b'A' + 10 },
b'0' ..= b'9' => b - b'0',
b'a' ..= b'f' => b - b'a' + 10,
b'A' ..= b'F' => b - b'A' + 10,
_ => {
return None
}
@ -1091,24 +1101,24 @@ fn consume_numeric<'a>(tokenizer: &mut Tokenizer<'a>) -> Token<'a> {
tokenizer.advance(1);
return Percentage {
unit_value: (value / 100.) as f32,
int_value: int_value,
has_sign: has_sign,
int_value,
has_sign,
};
}
let value = value as f32;
if is_ident_start(tokenizer) {
let unit = consume_name(tokenizer);
Dimension {
value: value,
int_value: int_value,
has_sign: has_sign,
unit: unit,
value,
int_value,
has_sign,
unit,
}
} else {
Number {
value: value,
int_value: int_value,
has_sign: has_sign,
value,
int_value,
has_sign,
}
}
}
@ -1150,7 +1160,7 @@ fn consume_unquoted_url<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>,
last_newline = offset;
}
}
b'"' | b'\'' => { return Err(()) }, // Do not advance
b'"' | b'\'' => return Err(()), // Do not advance
b')' => {
// Don't use advance, because we may be skipping
// newlines here, and we want to avoid the assert.
@ -1390,6 +1400,6 @@ fn consume_escape(tokenizer: &mut Tokenizer) -> char {
tokenizer.advance(1);
'\u{FFFD}'
}
_ => { tokenizer.consume_char() }
_ => tokenizer.consume_char(),
}
}