Mirror of https://github.com/mozilla/gecko-dev.git
servo: Merge #4535 - Stop using some obsolete features (from servo:pre-rustup_20141221); r=jdm
This prepares for the rust upgrade currently being conducted.

Source-Repo: https://github.com/servo/servo
Source-Revision: e8fac3681b690adb0796b2a807ac95bd9c13597a
This commit is contained in:
Parent: 19cd65303c
Commit: 62056757ed
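The change itself is mechanical: the typed formatting directives of pre-1.0 Rust ({:s} for strings, {:u} for unsigned integers, {:d} for signed integers) were being removed from the format syntax, and the plain {} placeholder, which dispatches through the value's formatting trait (Show at the time, Display today), replaces them. Below is a minimal sketch of the pattern on hypothetical values rather than Servo types; the old directives appear only in comments, since they no longer compile:

    fn main() {
        // Illustrative values, not taken from the diff below.
        let url = "https://example.org/";
        let fragment_count: usize = 42;
        let id: u32 = 7;

        // Pre-upgrade these lines would have used typed directives:
        //   println!("requesting {:s}", url);
        //   println!("building display list for {:u} inline fragments", fragment_count);
        //   println!("{:s}", format!("/tmp/servo-reftest-{:06u}-{:u}.png", id, 0u32));
        // Post-upgrade the untyped placeholder (plus width flags such as {:06}) does the same job:
        println!("requesting {}", url);
        println!("building display list for {} inline fragments", fragment_count);
        println!("{}", format!("/tmp/servo-reftest-{:06}-{}.png", id, 0u32));
    }

The commit also drops #[deriving(...)] attributes from two type aliases further down; a type alias only renames an existing type, so the attribute never generated anything there and was on its way out as well.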
@@ -780,7 +780,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
     }
 
     fn on_load_url_window_event(&mut self, url_string: String) {
-        debug!("osmain: loading URL `{:s}`", url_string);
+        debug!("osmain: loading URL `{}`", url_string);
         self.got_load_complete_message = false;
         let root_pipeline_id = match self.scene.root {
             Some(ref layer) => layer.extra_data.borrow().pipeline.id.clone(),
@@ -756,7 +756,7 @@ impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {
 
     fn handle_load_url_msg(&mut self, source_id: PipelineId, load_data: LoadData) {
         let url = load_data.url.to_string();
-        debug!("Constellation: received message to load {:s}", url);
+        debug!("Constellation: received message to load {}", url);
         // Make sure no pending page would be overridden.
         let source_frame = self.current_frame().as_ref().unwrap().find(source_id).expect(
             "Constellation: received a LoadUrlMsg from a pipeline_id associated
@@ -85,7 +85,7 @@ impl ActorRegistry {
     }
 
     pub fn register_script_actor(&self, script_id: String, actor: String) {
-        println!("registering {:s} ({:s})", actor.as_slice(), script_id.as_slice());
+        println!("registering {} ({})", actor.as_slice(), script_id.as_slice());
         let mut script_actors = self.script_actors.borrow_mut();
         script_actors.insert(script_id, actor);
     }
@@ -103,19 +103,19 @@ impl ActorRegistry {
 
     pub fn actor_to_script(&self, actor: String) -> String {
         for (key, value) in self.script_actors.borrow().iter() {
-            println!("checking {:s}", value.as_slice());
+            println!("checking {}", value.as_slice());
             if value.as_slice() == actor.as_slice() {
                 return key.to_string();
             }
         }
-        panic!("couldn't find actor named {:s}", actor)
+        panic!("couldn't find actor named {}", actor)
     }
 
     /// Create a unique name based on a monotonically increasing suffix
     pub fn new_name(&self, prefix: &str) -> String {
         let suffix = self.next.get();
         self.next.set(suffix + 1);
-        format!("{:s}{:u}", prefix, suffix)
+        format!("{}{}", prefix, suffix)
     }
 
     /// Add an actor to the registry of known actors that can receive messages.
@@ -154,11 +154,11 @@ impl ActorRegistry {
                          -> Result<(), ()> {
         let to = msg.get(&"to".to_string()).unwrap().as_string().unwrap();
         match self.actors.get(&to.to_string()) {
-            None => println!("message received for unknown actor \"{:s}\"", to),
+            None => println!("message received for unknown actor \"{}\"", to),
             Some(actor) => {
                 let msg_type = msg.get(&"type".to_string()).unwrap().as_string().unwrap();
                 if !try!(actor.handle_message(self, &msg_type.to_string(), msg, stream)) {
-                    println!("unexpected message type \"{:s}\" found for actor \"{:s}\"",
+                    println!("unexpected message type \"{}\" found for actor \"{}\"",
                              msg_type, to);
                 }
             }
@@ -163,7 +163,7 @@ impl Actor for ConsoleActor {
                 messages.push(json::from_str(json::encode(&message).as_slice()).unwrap().as_object().unwrap().clone());*/
             }
 
-            s => println!("unrecognized message type requested: \"{:s}\"", s),
+            s => println!("unrecognized message type requested: \"{}\"", s),
         }
     }
 
@@ -16,7 +16,7 @@ pub trait JsonPacketStream {
 impl JsonPacketStream for TcpStream {
     fn write_json_packet<'a, T: Encodable<json::Encoder<'a>,IoError>>(&mut self, obj: &T) {
         let s = json::encode(obj).replace("__type__", "type");
-        println!("<- {:s}", s);
+        println!("<- {}", s);
         self.write_str(s.len().to_string().as_slice()).unwrap();
         self.write_u8(':' as u8).unwrap();
         self.write_str(s.as_slice()).unwrap();
@@ -35,7 +35,7 @@ impl JsonPacketStream for TcpStream {
                     let packet_len = num::from_str_radix(packet_len_str.as_slice(), 10).unwrap();
                     let packet_buf = self.read_exact(packet_len).unwrap();
                     let packet = String::from_utf8(packet_buf).unwrap();
-                    println!("{:s}", packet);
+                    println!("{}", packet);
                     return Ok(json::from_str(packet.as_slice()).unwrap())
                 },
                 Err(ref e) if e.kind == EndOfFile =>
@@ -185,7 +185,7 @@ impl Font {
         let result = self.handle.get_table_for_tag(tag);
         let status = if result.is_some() { "Found" } else { "Didn't find" };
 
-        debug!("{:s} font table[{:s}] with family={}, face={}",
+        debug!("{} font table[{}] with family={}, face={}",
                status, tag.tag_to_str(),
                self.handle.family_name(), self.handle.face_name());
 
@@ -181,7 +181,7 @@ impl FontCache {
         // TODO(Issue #188): look up localized font family names if canonical name not found
         // look up canonical name
         if self.local_families.contains_key(family_name) {
-            debug!("FontList: Found font family with name={:s}", family_name.as_slice());
+            debug!("FontList: Found font family with name={}", family_name.as_slice());
             let s = &mut self.local_families[*family_name];
 
             if s.templates.len() == 0 {
@@ -199,7 +199,7 @@ impl FontCache {
 
                 None
         } else {
-            debug!("FontList: Couldn't find font family with name={:s}", family_name.as_slice());
+            debug!("FontList: Couldn't find font family with name={}", family_name.as_slice());
            None
        }
    }
@@ -19,7 +19,7 @@ pub fn get_available_families(callback: |String|) {
 }
 
 pub fn get_variations_for_family(family_name: &str, callback: |String|) {
-    debug!("Looking for faces of family: {:s}", family_name);
+    debug!("Looking for faces of family: {}", family_name);
 
     let family_collection =
         core_text::font_collection::create_for_family(family_name.as_slice());
@@ -304,7 +304,7 @@ impl Shaper {
             debug!("{} -> {}", i, loc);
         }
 
-        debug!("text: {:s}", text);
+        debug!("text: {}", text);
         debug!("(char idx): char->(glyph index):");
         for (i, ch) in text.char_indices() {
             debug!("{}: {} --> {:d}", i, ch, *byte_to_glyph.get(i).unwrap() as int);
@@ -1237,7 +1237,7 @@ impl Flow for InlineFlow {
     fn build_display_list(&mut self, layout_context: &LayoutContext) {
         // TODO(#228): Once we form lines and have their cached bounds, we can be smarter and
         // not recurse on a line if nothing in it can intersect the dirty region.
-        debug!("Flow: building display list for {:u} inline fragments", self.fragments.len());
+        debug!("Flow: building display list for {} inline fragments", self.fragments.len());
 
         let mut display_list = box DisplayList::new();
         for fragment in self.fragments.fragments.iter_mut() {
@@ -717,7 +717,7 @@ impl LayoutTask {
             mem::transmute(&mut node)
         };
 
-        debug!("layout: received layout request for: {:s}", data.url.serialize());
+        debug!("layout: received layout request for: {}", data.url.serialize());
         debug!("layout: parsed Node tree");
         if log_enabled!(log::DEBUG) {
             node.dump();
@@ -41,7 +41,7 @@ impl TextRunScanner {
 
     pub fn scan_for_runs(&mut self, font_context: &mut FontContext, mut fragments: DList<Fragment>)
                          -> InlineFragments {
-        debug!("TextRunScanner: scanning {:u} fragments for text runs...", fragments.len());
+        debug!("TextRunScanner: scanning {} fragments for text runs...", fragments.len());
 
         // FIXME(pcwalton): We want to be sure not to allocate multiple times, since this is a
         // performance-critical spot, but this may overestimate and allocate too much memory.
@@ -219,7 +219,7 @@ impl<'ln> LayoutNode<'ln> {
         }
 
         s.push_str(self.debug_str().as_slice());
-        println!("{:s}", s);
+        println!("{}", s);
 
         for kid in self.children() {
             kid.dump_indent(indent + 1);
@@ -62,13 +62,13 @@ fn load(load_data: LoadData, start_chan: Sender<TargetedLoadResponse>) {
         match url.scheme.as_slice() {
             "http" | "https" => {}
             _ => {
-                let s = format!("{:s} request, but we don't support that scheme", url.scheme);
+                let s = format!("{} request, but we don't support that scheme", url.scheme);
                 send_error(url, s, senders);
                 return;
             }
         }
 
-        info!("requesting {:s}", url.serialize());
+        info!("requesting {}", url.serialize());
 
         let mut req = match Request::new(load_data.method.clone(), url.clone()) {
             Ok(req) => req,
@@ -87,8 +87,8 @@ impl<NodeAddress: Send> ImageHolder<NodeAddress> {
         };
         match port.recv() {
             ImageResponseMsg::ImageReady(image) => self.image = Some(image),
-            ImageResponseMsg::ImageNotReady => debug!("image not ready for {:s}", self.url.serialize()),
-            ImageResponseMsg::ImageFailed => debug!("image decoding failed for {:s}", self.url.serialize()),
+            ImageResponseMsg::ImageNotReady => debug!("image not ready for {}", self.url.serialize()),
+            ImageResponseMsg::ImageFailed => debug!("image decoding failed for {}", self.url.serialize()),
         }
     }
 
@@ -251,11 +251,11 @@ impl ImageCache {
 
         spawn_named("ImageCacheTask (prefetch)", proc() {
             let url = url_clone;
-            debug!("image_cache_task: started fetch for {:s}", url.serialize());
+            debug!("image_cache_task: started fetch for {}", url.serialize());
 
             let image = load_image_data(url.clone(), resource_task.clone());
             to_cache.send(Msg::StorePrefetchedImageData(url.clone(), image));
-            debug!("image_cache_task: ended fetch for {:s}", url.serialize());
+            debug!("image_cache_task: ended fetch for {}", url.serialize());
         });
 
         self.set_state(url, ImageState::Prefetching(AfterPrefetch::DoNotDecode));
@@ -315,11 +315,11 @@ impl ImageCache {
 
         self.task_pool.execute(proc() {
             let url = url_clone;
-            debug!("image_cache_task: started image decode for {:s}", url.serialize());
+            debug!("image_cache_task: started image decode for {}", url.serialize());
             let image = load_from_memory(data.as_slice());
             let image = image.map(|image| Arc::new(box image));
             to_cache.send(Msg::StoreImage(url.clone(), image));
-            debug!("image_cache_task: ended image decode for {:s}", url.serialize());
+            debug!("image_cache_task: ended image decode for {}", url.serialize());
         });
 
         self.set_state(url, ImageState::Decoding);
@@ -234,13 +234,13 @@ impl ResourceManager {
             "data" => data_loader::factory,
             "about" => about_loader::factory,
             _ => {
-                debug!("resource_task: no loader for scheme {:s}", load_data.url.scheme);
+                debug!("resource_task: no loader for scheme {}", load_data.url.scheme);
                 start_sending(senders, Metadata::default(load_data.url))
                     .send(ProgressMsg::Done(Err("no loader for scheme".to_string())));
                 return
             }
         };
-        debug!("resource_task: loading url: {:s}", load_data.url.serialize());
+        debug!("resource_task: loading url: {}", load_data.url.serialize());
 
         loader(load_data, self.sniffer_task.clone());
     }
@@ -77,7 +77,7 @@ fn find_node_by_unique_id(page: &Rc<Page>, pipeline: PipelineId, node_id: String
         }
     }
 
-    panic!("couldn't find node with unique id {:s}", node_id)
+    panic!("couldn't find node with unique id {}", node_id)
 }
 
 pub fn handle_get_children(page: &Rc<Page>, pipeline: PipelineId, node_id: String, reply: Sender<Vec<NodeInfo>>) {
@@ -87,7 +87,7 @@ pub fn trace_jsval(tracer: *mut JSTracer, description: &str, val: JSVal) {
         (*tracer).debugPrinter = None;
         (*tracer).debugPrintIndex = -1;
         (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;
-        debug!("tracing value {:s}", description);
+        debug!("tracing value {}", description);
         JS_CallTracer(tracer, val.to_gcthing(), val.trace_kind());
     }
 }
@@ -105,7 +105,7 @@ pub fn trace_object(tracer: *mut JSTracer, description: &str, obj: *mut JSObject
         (*tracer).debugPrinter = None;
         (*tracer).debugPrintIndex = -1;
         (*tracer).debugPrintArg = name.as_ptr() as *const libc::c_void;
-        debug!("tracing {:s}", description);
+        debug!("tracing {}", description);
         JS_CallTracer(tracer, obj as *mut libc::c_void, JSTRACE_OBJECT);
     }
 }
@@ -28,23 +28,23 @@ impl Console {
 
 impl<'a> ConsoleMethods for JSRef<'a, Console> {
     fn Log(self, message: DOMString) {
-        println!("{:s}", message);
+        println!("{}", message);
     }
 
     fn Debug(self, message: DOMString) {
-        println!("{:s}", message);
+        println!("{}", message);
     }
 
     fn Info(self, message: DOMString) {
-        println!("{:s}", message);
+        println!("{}", message);
     }
 
     fn Warn(self, message: DOMString) {
-        println!("{:s}", message);
+        println!("{}", message);
     }
 
     fn Error(self, message: DOMString) {
-        println!("{:s}", message);
+        println!("{}", message);
     }
 
     fn Assert(self, condition: bool, message: Option<DOMString>) {
@@ -53,7 +53,7 @@ impl<'a> ConsoleMethods for JSRef<'a, Console> {
                 Some(ref message) => message.as_slice(),
                 None => "no message",
             };
-            println!("Assertion failed: {:s}", message);
+            println!("Assertion failed: {}", message);
         }
     }
 }
@@ -685,7 +685,7 @@ impl<'a> AttributeHandlers for JSRef<'a, Element> {
         let name = match prefix {
             None => qname.local.clone(),
             Some(ref prefix) => {
-                let name = format!("{:s}:{:s}", *prefix, qname.local.as_slice());
+                let name = format!("{}:{}", *prefix, qname.local.as_slice());
                 Atom::from_slice(name.as_slice())
             },
         };
@@ -905,7 +905,7 @@ impl<'a> ElementMethods for JSRef<'a, Element> {
     fn TagName(self) -> DOMString {
         let qualified_name = match self.prefix {
             Some(ref prefix) => {
-                (format!("{:s}:{:s}",
+                (format!("{}:{}",
                          prefix.as_slice(),
                          self.local_name.as_slice())).into_maybe_owned()
             },
@@ -63,7 +63,7 @@ impl<'a> PrivateHTMLAnchorElementHelpers for JSRef<'a, HTMLAnchorElement> {
         match attr {
             Some(ref href) => {
                 let value = href.r().Value();
-                debug!("clicked on link to {:s}", value);
+                debug!("clicked on link to {}", value);
                 let node: JSRef<Node> = NodeCast::from_ref(self);
                 let doc = node.owner_doc().root();
                 doc.r().load_anchor_href(value);
@@ -133,7 +133,7 @@ impl<'a> PrivateHTMLLinkElementHelpers for JSRef<'a, HTMLLinkElement> {
                 let LayoutChan(ref layout_chan) = window.page().layout_chan;
                 layout_chan.send(Msg::LoadStylesheet(url));
             }
-            Err(e) => debug!("Parsing url {:s} failed: {}", href, e)
+            Err(e) => debug!("Parsing url {} failed: {}", href, e)
         }
     }
 }
@@ -228,7 +228,7 @@ impl<'a> HTMLScriptElementHelpers for JSRef<'a, HTMLScriptElement> {
                 true
             },
             Some(ref s) => {
-                debug!("script type={:s}", *s);
+                debug!("script type={}", *s);
                 SCRIPT_JS_MIMES.contains(&s.to_ascii_lower().as_slice().trim_chars(HTML_SPACE_CHARACTERS))
             },
             None => {
@@ -241,7 +241,7 @@ impl<'a> HTMLScriptElementHelpers for JSRef<'a, HTMLScriptElement> {
                 true
             },
             Some(ref s) => {
-                debug!("script language={:s}", *s);
+                debug!("script language={}", *s);
                 SCRIPT_JS_MIMES.contains(&format!("text/{}", s).to_ascii_lower().as_slice())
             },
             None => {
@@ -495,7 +495,7 @@ impl<'a> NodeHelpers<'a> for JSRef<'a, Node> {
         }
 
         s.push_str(self.debug_str().as_slice());
-        debug!("{:s}", s);
+        debug!("{}", s);
 
         // FIXME: this should have a pure version?
         for kid in self.children() {
@@ -185,7 +185,7 @@ pub fn base64_atob(atob: DOMString) -> Fallible<DOMString> {
 impl<'a> WindowMethods for JSRef<'a, Window> {
     fn Alert(self, s: DOMString) {
         // Right now, just print to the console
-        println!("ALERT: {:s}", s);
+        println!("ALERT: {}", s);
     }
 
     fn Close(self) {
@@ -273,7 +273,7 @@ impl<'a> WindowMethods for JSRef<'a, Window> {
     }
 
     fn Debug(self, message: DOMString) {
-        debug!("{:s}", message);
+        debug!("{}", message);
     }
 
     fn Gc(self) {
@@ -595,7 +595,7 @@ impl<'a> XMLHttpRequestMethods for JSRef<'a, XMLHttpRequest> {
                 referer_url.serialize_host().map(|ref h| buf.push_str(h.as_slice()));
                 referer_url.port().as_ref().map(|&p| {
                     buf.push_str(":".as_slice());
-                    buf.push_str(format!("{:u}", p).as_slice());
+                    buf.push_str(format!("{}", p).as_slice());
                 });
                 referer_url.serialize_path().map(|ref h| buf.push_str(h.as_slice()));
                 self.request_headers.borrow_mut().set_raw("Referer".into_string(), vec![buf.into_bytes()]);
@@ -111,7 +111,7 @@ impl<'a> TreeSink<TrustedNodeAddress> for servohtmlparser::Sink {
     }
 
     fn parse_error(&mut self, msg: MaybeOwned<'static>) {
-        debug!("Parse error: {:s}", msg);
+        debug!("Parse error: {}", msg);
     }
 
     fn set_quirks_mode(&mut self, mode: QuirksMode) {
@@ -180,7 +180,7 @@ pub fn parse_html(document: JSRef<Document>,
         HTMLInput::InputUrl(load_response) => {
             match load_response.metadata.content_type {
                 Some((ref t, _)) if t.as_slice().eq_ignore_ascii_case("image") => {
-                    let page = format!("<html><body><img src='{:s}' /></body></html>", url.serialize());
+                    let page = format!("<html><body><img src='{}' /></body></html>", url.serialize());
                     parser.parse_chunk(page);
                 },
                 _ => {
@@ -192,7 +192,7 @@ pub fn parse_html(document: JSRef<Document>,
                             parser.parse_chunk(data);
                         }
                         Done(Err(err)) => {
-                            panic!("Failed to load page URL {:s}, error: {:s}", url.serialize(), err);
+                            panic!("Failed to load page URL {}, error: {}", url.serialize(), err);
                         }
                         Done(Ok(())) => break,
                     }
@@ -1143,7 +1143,7 @@ impl ScriptTask {
                 match maybe_node {
                     Some(el) => {
                         let node = NodeCast::from_ref(el);
-                        debug!("clicked on {:s}", node.debug_str());
+                        debug!("clicked on {}", node.debug_str());
                         // Prevent click event if form control element is disabled.
                         if node.click_event_filter_by_disabled_state() { return; }
                         match *page.frame() {
@@ -28,5 +28,5 @@ impl<T, I: Iterator<Result<T, SyntaxError>>> Iterator<T> for ErrorLoggerIterator
 /// to log CSS parse errors to stderr.
 pub fn log_css_error(location: SourceLocation, message: &str) {
     // TODO eventually this will got into a "web console" or something.
-    info!("{:u}:{:u} {:s}", location.line, location.column, message)
+    info!("{}:{} {}", location.line, location.column, message)
 }
@@ -68,7 +68,7 @@ pub fn parse_font_face_rule(rule: AtRule, parent_rules: &mut Vec<CSSRule>, base_
     for item in ErrorLoggerIterator(parse_declaration_list(block.into_iter())) {
         match item {
             DeclarationListItem::AtRule(rule) => log_css_error(
-                rule.location, format!("Unsupported at-rule in declaration list: @{:s}", rule.name).as_slice()),
+                rule.location, format!("Unsupported at-rule in declaration list: @{}", rule.name).as_slice()),
             DeclarationListItem::Declaration(Declaration{ location, name, value, important }) => {
                 if important {
                     log_css_error(location, "!important is not allowed on @font-face descriptors");
@@ -94,7 +94,7 @@ pub fn parse_font_face_rule(rule: AtRule, parent_rules: &mut Vec<CSSRule>, base_
                 };
             },
             _ => {
-                log_css_error(location, format!("Unsupported declaration {:s}", name).as_slice());
+                log_css_error(location, format!("Unsupported declaration {}", name).as_slice());
             }
         }
     }
@@ -787,11 +787,9 @@ pub mod longhands {
 
     <%self:single_component_value name="list-style-image">
         pub use super::computed_as_specified as to_computed_value;
-        #[deriving(Clone)]
         pub type SpecifiedValue = Option<Url>;
         pub mod computed_value {
             use url::Url;
-            #[deriving(Clone, PartialEq)]
             pub type T = Option<Url>;
         }
         pub fn from_component_value(input: &ComponentValue, base_url: &Url)
@@ -823,7 +821,6 @@ pub mod longhands {
             use super::super::super::common_types::computed;
             pub type T = Option<computed::Image>;
         }
-        #[deriving(Clone)]
         pub type SpecifiedValue = common_specified::CSSImage;
         #[inline]
         pub fn get_initial_value() -> computed_value::T {
@@ -2420,7 +2417,7 @@ pub fn parse_property_declaration_list<I: Iterator<Node>>(input: I, base_url: &U
     for item in items.into_iter().rev() {
         match item {
             DeclarationListItem::AtRule(rule) => log_css_error(
-                rule.location, format!("Unsupported at-rule in declaration list: @{:s}", rule.name).as_slice()),
+                rule.location, format!("Unsupported at-rule in declaration list: @{}", rule.name).as_slice()),
             DeclarationListItem::Declaration(Declaration{ location: l, name: n, value: v, important: i}) => {
                 // TODO: only keep the last valid declaration for a given name.
                 let (list, seen) = if i {
@@ -153,7 +153,7 @@ pub fn parse_nested_at_rule(context: &ParserContext,
         }
         "font-face" => parse_font_face_rule(rule, parent_rules, base_url),
         _ => log_css_error(rule.location,
-                           format!("Unsupported at-rule: @{:s}", lower_name).as_slice())
+                           format!("Unsupported at-rule: @{}", lower_name).as_slice())
     }
 }
 
@@ -179,13 +179,13 @@ fn parse_lists(file: &Path, servo_args: &[String], render_mode: RenderMode, id_o
                 file_left: parts[2],
                 file_right: parts[3],
             },
-            _ => panic!("reftest line: '{:s}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'", line),
+            _ => panic!("reftest line: '{}' doesn't match '[CONDITIONS] KIND LEFT RIGHT'", line),
         };
 
         let kind = match test_line.kind {
             "==" => ReftestKind::Same,
             "!=" => ReftestKind::Different,
-            part => panic!("reftest line: '{:s}' has invalid kind '{:s}'", line, part)
+            part => panic!("reftest line: '{}' has invalid kind '{}'", line, part)
         };
 
         // If we're running this directly, file.dir_path() might be relative.
@@ -248,7 +248,7 @@ fn make_test(reftest: Reftest) -> TestDescAndFn {
 }
 
 fn capture(reftest: &Reftest, side: uint) -> (u32, u32, Vec<u8>) {
-    let png_filename = format!("/tmp/servo-reftest-{:06u}-{:u}.png", reftest.id, side);
+    let png_filename = format!("/tmp/servo-reftest-{:06}-{}.png", reftest.id, side);
     let mut command = Command::new(os::self_exe_path().unwrap().join("servo"));
     command
         .args(reftest.servo_args.as_slice())
@@ -314,7 +314,7 @@ fn check_reftest(reftest: Reftest) {
     }).collect::<Vec<u8>>();
 
     if pixels.iter().any(|&a| a < 255) {
-        let output_str = format!("/tmp/servo-reftest-{:06u}-diff.png", reftest.id);
+        let output_str = format!("/tmp/servo-reftest-{:06}-diff.png", reftest.id);
         let output = from_str::<Path>(output_str.as_slice()).unwrap();
 
         let mut img = png::Image {