Mirror of https://github.com/mozilla/gecko-dev.git
servo: Merge #7203 - Add automated style nit checks to test-tidy (from jdm:style); r=Ms2ger
Expands on the work by @wilmoz and cleans up the existing errors. Closes #7180. Closes #7111.

Source-Repo: https://github.com/servo/servo
Source-Revision: e74825f9fde8e222f4ba9bb24b2c2a3864c73e5f
Parent: 42e2e6ee91
Commit: 6a81d499fc
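Most of the diff below is mechanical whitespace cleanup (a space after commas in type parameter lists, spaces around `->` and `/`, a space before an opening `{`), which is exactly the kind of nit the new test-tidy checks are meant to catch automatically. test-tidy is driven by Servo's Python-based mach tooling; the sketch below only illustrates the general shape of such a line-based whitespace check. The rule set, names, and command-line handling here are assumptions for illustration, not Servo's actual implementation.

    # Minimal sketch of a whitespace "nit" check in the spirit of test-tidy.
    # Rules and names are illustrative assumptions, not Servo's actual code.
    import re
    import sys

    RULES = [
        (re.compile(r",[^\s)\]}]"), "missing space after comma"),
        (re.compile(r"[\w)\]]->"), "missing space before ->"),
        (re.compile(r"->[\w(]"), "missing space after ->"),
        (re.compile(r"[^\s{([]\{$"), "missing space before {"),
    ]

    def check_line(path, line_no, line):
        """Yield (path, line_no, message) for every nit found on one line."""
        for pattern, message in RULES:
            if pattern.search(line):
                yield path, line_no, message

    def check_file(path):
        with open(path, encoding="utf-8") as f:
            for line_no, line in enumerate(f, start=1):
                yield from check_line(path, line_no, line.rstrip("\n"))

    if __name__ == "__main__":
        errors = [err for path in sys.argv[1:] for err in check_file(path)]
        for path, line_no, message in errors:
            print("%s:%d: %s" % (path, line_no, message))
        sys.exit(1 if errors else 0)

Run against one or more source files, a checker of this shape prints path:line: message for each nit and exits nonzero when anything is flagged, which is the usual contract for a lint step wired into CI. Note that naive regexes like these can also fire inside string literals; a real checker has to be more careful than this sketch.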
@@ -66,13 +66,13 @@ pub enum FromPaintMsg {
 }

 impl Serialize for FromPaintMsg {
-fn serialize<S>(&self, _: &mut S) -> Result<(),S::Error> where S: Serializer {
+fn serialize<S>(&self, _: &mut S) -> Result<(), S::Error> where S: Serializer {
 panic!("can't serialize a `FromPaintMsg`!")
 }
 }

 impl Deserialize for FromPaintMsg {
-fn deserialize<D>(_: &mut D) -> Result<FromPaintMsg,D::Error> where D: Deserializer {
+fn deserialize<D>(_: &mut D) -> Result<FromPaintMsg, D::Error> where D: Deserializer {
 panic!("can't deserialize a `FromPaintMsg`!")
 }
 }

@@ -52,7 +52,7 @@ use url::Url;
 use util::geometry::{Au, PagePx, ScreenPx, ViewportPx};
 use util::opts;

-const BUFFER_MAP_SIZE : usize = 10000000;
+const BUFFER_MAP_SIZE: usize = 10000000;

 /// Holds the state when running reftests that determines when it is
 /// safe to save the output image.

@@ -163,8 +163,8 @@ pub struct IOCompositor<Window: WindowMethods> {
 }

 pub struct ScrollEvent {
-delta: TypedPoint2D<DevicePixel,f32>,
-cursor: TypedPoint2D<DevicePixel,i32>,
+delta: TypedPoint2D<DevicePixel, f32>,
+cursor: TypedPoint2D<DevicePixel, i32>,
 }

 #[derive(PartialEq)]
@@ -249,7 +249,7 @@ pub fn reporter_name() -> String {

 impl<Window: WindowMethods> IOCompositor<Window> {
 fn new(window: Rc<Window>,
-sender: Box<CompositorProxy+Send>,
+sender: Box<CompositorProxy + Send>,
 receiver: Box<CompositorReceiver>,
 constellation_chan: ConstellationChan,
 time_profiler_chan: time::ProfilerChan,

@@ -312,7 +312,7 @@ impl<Window: WindowMethods> IOCompositor<Window> {
 }

 pub fn create(window: Rc<Window>,
-sender: Box<CompositorProxy+Send>,
+sender: Box<CompositorProxy + Send>,
 receiver: Box<CompositorReceiver>,
 constellation_chan: ConstellationChan,
 time_profiler_chan: time::ProfilerChan,

@@ -365,7 +365,7 @@ impl CompositorLayer for Layer<CompositorData> {
 let content_size = calculate_content_size_for_layer(self);
 let min_x = (layer_size.width - content_size.width).get().min(0.0);
 let min_y = (layer_size.height - content_size.height).get().min(0.0);
-let new_offset : TypedPoint2D<LayerPixel, f32> =
+let new_offset: TypedPoint2D<LayerPixel, f32> =
 Point2D::new(Length::new(new_offset.x.get().clamp(&min_x, &0.0)),
 Length::new(new_offset.y.get().clamp(&min_y, &0.0)));
@@ -37,7 +37,7 @@ pub trait CompositorProxy : 'static + Send {
 /// Sends a message to the compositor.
 fn send(&self, msg: Msg);
 /// Clones the compositor proxy.
-fn clone_compositor_proxy(&self) -> Box<CompositorProxy+'static+Send>;
+fn clone_compositor_proxy(&self) -> Box<CompositorProxy + 'static + Send>;
 }

 /// The port that the compositor receives messages on. As above, this is a trait supplied by the

@@ -88,7 +88,7 @@ pub fn run_script_listener_thread(compositor_proxy: Box<CompositorProxy + 'stati
 }

 /// Implementation of the abstract `PaintListener` interface.
-impl PaintListener for Box<CompositorProxy+'static+Send> {
+impl PaintListener for Box<CompositorProxy + 'static + Send> {
 fn native_display(&mut self) -> Option<NativeDisplay> {
 let (chan, port) = channel();
 self.send(Msg::GetNativeDisplay(chan));

@@ -204,7 +204,7 @@ pub enum Msg {
 }

 impl Debug for Msg {
-fn fmt(&self, f: &mut Formatter) -> Result<(),Error> {
+fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
 match *self {
 Msg::Exit(..) => write!(f, "Exit"),
 Msg::ShutdownComplete(..) => write!(f, "ShutdownComplete"),
@@ -240,7 +240,7 @@ pub struct CompositorTask;

 impl CompositorTask {
 pub fn create<Window>(window: Option<Rc<Window>>,
-sender: Box<CompositorProxy+Send>,
+sender: Box<CompositorProxy + Send>,
 receiver: Box<CompositorReceiver>,
 constellation_chan: ConstellationChan,
 time_profiler_chan: time::ProfilerChan,

@@ -219,7 +219,7 @@ enum ExitPipelineMode {
 }

 impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {
-pub fn start(compositor_proxy: Box<CompositorProxy+Send>,
+pub fn start(compositor_proxy: Box<CompositorProxy + Send>,
 resource_task: ResourceTask,
 image_cache_task: ImageCacheTask,
 font_cache_task: FontCacheTask,

@@ -1019,7 +1019,7 @@ impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {

 fn load_url_for_webdriver(&mut self,
 pipeline_id: PipelineId,
-load_data:LoadData,
+load_data: LoadData,
 reply: IpcSender<webdriver_msg::LoadStatus>) {
 let new_pipeline_id = self.load_url(pipeline_id, load_data);
 if let Some(id) = new_pipeline_id {
@@ -67,23 +67,23 @@ impl Pipeline {
 /// Starts a paint task, layout task, and possibly a script task.
 /// Returns the channels wrapped in a struct.
 /// If script_pipeline is not None, then subpage_id must also be not None.
-pub fn create<LTF,STF>(id: PipelineId,
-parent_info: Option<(PipelineId, SubpageId)>,
-constellation_chan: ConstellationChan,
-compositor_proxy: Box<CompositorProxy+'static+Send>,
-devtools_chan: Option<Sender<DevtoolsControlMsg>>,
-image_cache_task: ImageCacheTask,
-font_cache_task: FontCacheTask,
-resource_task: ResourceTask,
-storage_task: StorageTask,
-time_profiler_chan: time::ProfilerChan,
-mem_profiler_chan: profile_mem::ProfilerChan,
-window_rect: Option<TypedRect<PagePx, f32>>,
-script_chan: Option<Sender<ConstellationControlMsg>>,
-load_data: LoadData,
-device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>)
--> (Pipeline, PipelineContent)
-where LTF: LayoutTaskFactory, STF:ScriptTaskFactory {
+pub fn create<LTF, STF>(id: PipelineId,
+parent_info: Option<(PipelineId, SubpageId)>,
+constellation_chan: ConstellationChan,
+compositor_proxy: Box<CompositorProxy + 'static + Send>,
+devtools_chan: Option<Sender<DevtoolsControlMsg>>,
+image_cache_task: ImageCacheTask,
+font_cache_task: FontCacheTask,
+resource_task: ResourceTask,
+storage_task: StorageTask,
+time_profiler_chan: time::ProfilerChan,
+mem_profiler_chan: profile_mem::ProfilerChan,
+window_rect: Option<TypedRect<PagePx, f32>>,
+script_chan: Option<Sender<ConstellationControlMsg>>,
+load_data: LoadData,
+device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>)
+-> (Pipeline, PipelineContent)
+where LTF: LayoutTaskFactory, STF: ScriptTaskFactory {
 let (layout_to_paint_chan, layout_to_paint_port) = util::ipc::optional_ipc_channel();
 let (chrome_to_paint_chan, chrome_to_paint_port) = channel();
 let (paint_shutdown_chan, paint_shutdown_port) = channel();
@@ -305,7 +305,7 @@ pub struct PipelineContent {
 }

 impl PipelineContent {
-pub fn start_all<LTF,STF>(mut self) where LTF: LayoutTaskFactory, STF: ScriptTaskFactory {
+pub fn start_all<LTF, STF>(mut self) where LTF: LayoutTaskFactory, STF: ScriptTaskFactory {
 let layout_pair = ScriptTaskFactory::create_layout_channel(None::<&mut STF>);
 let (script_to_compositor_chan, script_to_compositor_port) = ipc::channel().unwrap();

@@ -30,7 +30,7 @@ enum ToScrollingTimerMsg {
 }

 impl ScrollingTimerProxy {
-pub fn new(compositor_proxy: Box<CompositorProxy+Send>) -> ScrollingTimerProxy {
+pub fn new(compositor_proxy: Box<CompositorProxy + Send>) -> ScrollingTimerProxy {
 let (to_scrolling_timer_sender, to_scrolling_timer_receiver) = channel();
 Builder::new().spawn(move || {
 let mut scrolling_timer = ScrollingTimer {

@@ -75,7 +75,7 @@ pub enum WindowEvent {
 }

 impl Debug for WindowEvent {
-fn fmt(&self, f: &mut Formatter) -> Result<(),Error> {
+fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
 match *self {
 WindowEvent::Idle => write!(f, "Idle"),
 WindowEvent::Refresh => write!(f, "Refresh"),
@@ -130,7 +130,7 @@ pub trait WindowMethods {
 /// This is part of the windowing system because its implementation often involves OS-specific
 /// magic to wake the up window's event loop.
 fn create_compositor_channel(_: &Option<Rc<Self>>)
--> (Box<CompositorProxy+Send>, Box<CompositorReceiver>);
+-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>);

 /// Requests that the window system prepare a composite. Typically this will involve making
 /// some type of platform-specific graphics context current. Returns true if the composite may

@@ -86,8 +86,8 @@ impl Actor + Send {

 /// A list of known, owned actors.
 pub struct ActorRegistry {
-actors: HashMap<String, Box<Actor+Send>>,
-new_actors: RefCell<Vec<Box<Actor+Send>>>,
+actors: HashMap<String, Box<Actor + Send>>,
+new_actors: RefCell<Vec<Box<Actor + Send>>>,
 old_actors: RefCell<Vec<String>>,
 script_actors: RefCell<HashMap<String, String>>,
 shareable: Option<Arc<Mutex<ActorRegistry>>>,
@@ -167,11 +167,11 @@ impl ActorRegistry {
 }

 /// Add an actor to the registry of known actors that can receive messages.
-pub fn register(&mut self, actor: Box<Actor+Send>) {
+pub fn register(&mut self, actor: Box<Actor + Send>) {
 self.actors.insert(actor.name().to_string(), actor);
 }

-pub fn register_later(&self, actor: Box<Actor+Send>) {
+pub fn register_later(&self, actor: Box<Actor + Send>) {
 let mut actors = self.new_actors.borrow_mut();
 actors.push(actor);
 }

@@ -93,7 +93,7 @@ impl Actor for HighlighterActor {
 }

 #[derive(RustcEncodable)]
-struct ModifyAttributeReply{
+struct ModifyAttributeReply {
 from: String,
 }

@@ -119,7 +119,7 @@ impl Actor for NodeActor {
 registry.actor_to_script(target.to_string()),
 modifications))
 .unwrap();
-let reply = ModifyAttributeReply{
+let reply = ModifyAttributeReply {
 from: self.name(),
 };
 stream.write_json_packet(&reply);
@@ -471,11 +471,11 @@ impl StackingContext {
 mut point: Point2D<Au>,
 result: &mut Vec<DisplayItemMetadata>,
 topmost_only: bool) {
-fn hit_test_in_list<'a,I>(point: Point2D<Au>,
-result: &mut Vec<DisplayItemMetadata>,
-topmost_only: bool,
-iterator: I)
-where I: Iterator<Item=&'a DisplayItem> {
+fn hit_test_in_list<'a, I>(point: Point2D<Au>,
+result: &mut Vec<DisplayItemMetadata>,
+topmost_only: bool,
+iterator: I)
+where I: Iterator<Item=&'a DisplayItem> {
 for item in iterator {
 // TODO(pcwalton): Use a precise algorithm here. This will allow us to properly hit
 // test elements with `border-radius`, for example.

@@ -1016,7 +1016,7 @@ pub enum BoxShadowClipMode {

 pub enum DisplayItemIterator<'a> {
 Empty,
-Parent(linked_list::Iter<'a,DisplayItem>),
+Parent(linked_list::Iter<'a, DisplayItem>),
 }

 impl<'a> Iterator for DisplayItemIterator<'a> {
@@ -45,10 +45,10 @@ impl DisplayListOptimizer {
 }

 /// Adds display items that intersect the visible rect to `result_list`.
-fn add_in_bounds_display_items<'a,I>(&self,
-result_list: &mut LinkedList<DisplayItem>,
-display_items: I)
-where I: Iterator<Item=&'a DisplayItem> {
+fn add_in_bounds_display_items<'a, I>(&self,
+result_list: &mut LinkedList<DisplayItem>,
+display_items: I)
+where I: Iterator<Item=&'a DisplayItem> {
 for display_item in display_items {
 if self.visible_rect.intersects(&display_item.base().bounds) &&
 display_item.base().clip.might_intersect_rect(&self.visible_rect) {

@@ -58,10 +58,10 @@ impl DisplayListOptimizer {
 }

 /// Adds child stacking contexts whose boundaries intersect the visible rect to `result_list`.
-fn add_in_bounds_stacking_contexts<'a,I>(&self,
-result_list: &mut LinkedList<Arc<StackingContext>>,
-stacking_contexts: I)
-where I: Iterator<Item=&'a Arc<StackingContext>> {
+fn add_in_bounds_stacking_contexts<'a, I>(&self,
+result_list: &mut LinkedList<Arc<StackingContext>>,
+stacking_contexts: I)
+where I: Iterator<Item=&'a Arc<StackingContext>> {
 for stacking_context in stacking_contexts {
 if stacking_context.layer.is_none() {
 // Transform this stacking context to get it into the same space as
@@ -95,8 +95,8 @@ pub struct Font {
 pub requested_pt_size: Au,
 pub actual_pt_size: Au,
 pub shaper: Option<Shaper>,
-pub shape_cache: HashCache<ShapeCacheEntry,Arc<GlyphStore>>,
-pub glyph_advance_cache: HashCache<u32,FractionalPixel>,
+pub shape_cache: HashCache<ShapeCacheEntry, Arc<GlyphStore>>,
+pub glyph_advance_cache: HashCache<u32, FractionalPixel>,
 }

 bitflags! {

@@ -82,7 +82,7 @@ pub struct FontContext {
 paint_font_cache: Vec<PaintFontCacheEntry>,

 layout_font_group_cache:
-HashMap<LayoutFontGroupCacheKey,Rc<FontGroup>,DefaultState<FnvHasher>>,
+HashMap<LayoutFontGroupCacheKey, Rc<FontGroup>, DefaultState<FnvHasher>>,
 }

 impl FontContext {

@@ -273,7 +273,7 @@ impl FontContext {
 }

 let paint_font = Rc::new(RefCell::new(create_scaled_font(template, pt_size)));
-self.paint_font_cache.push(PaintFontCacheEntry{
+self.paint_font_cache.push(PaintFontCacheEntry {
 font: paint_font.clone(),
 pt_size: pt_size,
 identifier: template.identifier.clone(),
@@ -751,11 +751,11 @@ impl<'a> PaintContext<'a> {
 border: &SideOffsets2D<f32>,
 radius: &BorderRadii<AzFloat>,
 color: Color) {
-let scaled_border = SideOffsets2D::new((1.0/3.0) * border.top,
-(1.0/3.0) * border.right,
-(1.0/3.0) * border.bottom,
-(1.0/3.0) * border.left);
-let inner_scaled_bounds = self.compute_scaled_bounds(bounds, border, 2.0/3.0);
+let scaled_border = SideOffsets2D::new((1.0 / 3.0) * border.top,
+(1.0 / 3.0) * border.right,
+(1.0 / 3.0) * border.bottom,
+(1.0 / 3.0) * border.left);
+let inner_scaled_bounds = self.compute_scaled_bounds(bounds, border, 2.0 / 3.0);
 // draw the outer portion of the double border.
 self.draw_solid_border_segment(direction, bounds, &scaled_border, radius, color);
 // draw the inner portion of the double border.

@@ -786,7 +786,7 @@ impl<'a> PaintContext<'a> {
 let lighter_color;
 let mut darker_color = color::black();
 if color != darker_color {
-darker_color = self.scale_color(color, if is_groove { 1.0/3.0 } else { 2.0/3.0 });
+darker_color = self.scale_color(color, if is_groove { 1.0 / 3.0 } else { 2.0 / 3.0 });
 lighter_color = color;
 } else {
 // You can't scale black color (i.e. 'scaled = 0 * scale', equals black).

@@ -832,10 +832,10 @@ impl<'a> PaintContext<'a> {
 if color != scaled_color {
 scaled_color = match direction {
 Direction::Top | Direction::Left => {
-self.scale_color(color, if is_inset { 2.0/3.0 } else { 1.0 })
+self.scale_color(color, if is_inset { 2.0 / 3.0 } else { 1.0 })
 }
 Direction::Right | Direction::Bottom => {
-self.scale_color(color, if is_inset { 1.0 } else { 2.0/3.0 })
+self.scale_color(color, if is_inset { 1.0 } else { 2.0 / 3.0 })
 }
 };
 } else {
@@ -659,12 +659,12 @@ enum MsgFromWorkerThread {
 }

 pub static THREAD_TINT_COLORS: [Color; 8] = [
-Color { r: 6.0/255.0, g: 153.0/255.0, b: 198.0/255.0, a: 0.7 },
-Color { r: 255.0/255.0, g: 212.0/255.0, b: 83.0/255.0, a: 0.7 },
-Color { r: 116.0/255.0, g: 29.0/255.0, b: 109.0/255.0, a: 0.7 },
-Color { r: 204.0/255.0, g: 158.0/255.0, b: 199.0/255.0, a: 0.7 },
-Color { r: 242.0/255.0, g: 46.0/255.0, b: 121.0/255.0, a: 0.7 },
-Color { r: 116.0/255.0, g: 203.0/255.0, b: 196.0/255.0, a: 0.7 },
-Color { r: 255.0/255.0, g: 249.0/255.0, b: 201.0/255.0, a: 0.7 },
-Color { r: 137.0/255.0, g: 196.0/255.0, b: 78.0/255.0, a: 0.7 },
+Color { r: 6.0 / 255.0, g: 153.0 / 255.0, b: 198.0 / 255.0, a: 0.7 },
+Color { r: 255.0 / 255.0, g: 212.0 / 255.0, b: 83.0 / 255.0, a: 0.7 },
+Color { r: 116.0 / 255.0, g: 29.0 / 255.0, b: 109.0 / 255.0, a: 0.7 },
+Color { r: 204.0 / 255.0, g: 158.0 / 255.0, b: 199.0 / 255.0, a: 0.7 },
+Color { r: 242.0 / 255.0, g: 46.0 / 255.0, b: 121.0 / 255.0, a: 0.7 },
+Color { r: 116.0 / 255.0, g: 203.0 / 255.0, b: 196.0 / 255.0, a: 0.7 },
+Color { r: 255.0 / 255.0, g: 249.0 / 255.0, b: 201.0 / 255.0, a: 0.7 },
+Color { r: 137.0 / 255.0, g: 196.0 / 255.0, b: 78.0 / 255.0, a: 0.7 },
 ];
@@ -8,17 +8,11 @@ extern crate freetype;
 extern crate fontconfig;

 use fontconfig::fontconfig::{FcChar8, FcResultMatch, FcSetSystem};
-use fontconfig::fontconfig::{
-FcConfigGetCurrent, FcConfigGetFonts,
-FcConfigSubstitute, FcDefaultSubstitute,
-FcFontMatch,
-FcNameParse, FcPatternGetString,
-FcPatternDestroy, FcFontSetDestroy,
-FcMatchPattern,
-FcPatternCreate, FcPatternAddString,
-FcFontSetList, FcObjectSetCreate, FcObjectSetDestroy,
-FcObjectSetAdd, FcPatternGetInteger
-};
+use fontconfig::fontconfig::{FcConfigGetCurrent, FcConfigGetFonts, FcConfigSubstitute};
+use fontconfig::fontconfig::{FcDefaultSubstitute, FcFontMatch, FcNameParse, FcPatternGetString};
+use fontconfig::fontconfig::{FcPatternDestroy, FcFontSetDestroy, FcMatchPattern, FcPatternCreate};
+use fontconfig::fontconfig::{FcPatternAddString, FcFontSetList, FcObjectSetCreate, FcObjectSetDestroy};
+use fontconfig::fontconfig::{FcObjectSetAdd, FcPatternGetInteger};

 use util::str::c_str_to_string;

@@ -89,7 +89,7 @@ impl Deserialize for CachedCTFont {
 type Value = CachedCTFont;

 #[inline]
-fn visit_none<E>(&mut self) -> Result<CachedCTFont,E> where E: Error {
+fn visit_none<E>(&mut self) -> Result<CachedCTFont, E> where E: Error {
 Ok(CachedCTFont(Mutex::new(None)))
 }
 }

@@ -394,7 +394,7 @@ impl<'a> DetailedGlyphStore {
 // Thar be dragons here. You have been warned. (Tips accepted.)
 let mut unsorted_records: Vec<DetailedGlyphRecord> = vec!();
 mem::swap(&mut self.detail_lookup, &mut unsorted_records);
-let mut mut_records : Vec<DetailedGlyphRecord> = unsorted_records;
+let mut mut_records: Vec<DetailedGlyphRecord> = unsorted_records;
 mut_records.sort_by(|a, b| {
 if a < b {
 Ordering::Less

@@ -46,7 +46,7 @@ pub struct NaturalWordSliceIterator<'a> {

 struct CharIndexComparator;

-impl Comparator<CharIndex,GlyphRun> for CharIndexComparator {
+impl Comparator<CharIndex, GlyphRun> for CharIndexComparator {
 fn compare(&self, key: &CharIndex, value: &GlyphRun) -> Ordering {
 if *key < value.range.begin() {
 Ordering::Less
@@ -93,7 +93,7 @@ pub fn process_new_animations(rw_data: &mut LayoutTaskData, pipeline_id: Pipelin

 /// Recalculates style for a set of animations. This does *not* run with the DOM lock held.
 pub fn recalc_style_for_animations(flow: &mut Flow,
-animations: &HashMap<OpaqueNode,Vec<Animation>>) {
+animations: &HashMap<OpaqueNode, Vec<Animation>>) {
 let mut damage = RestyleDamage::empty();
 flow.mutate_fragments(&mut |fragment| {
 if let Some(ref animations) = animations.get(&OpaqueNode(fragment.node.id())) {

@@ -124,7 +124,7 @@ pub struct SharedLayoutContext {
 pub visible_rects: Arc<HashMap<LayerId, Rect<Au>, DefaultState<FnvHasher>>>,

 /// The animations that are currently running.
-pub running_animations: Arc<HashMap<OpaqueNode,Vec<Animation>>>,
+pub running_animations: Arc<HashMap<OpaqueNode, Vec<Animation>>>,

 /// Why is this reflow occurring
 pub goal: ReflowGoal,

@@ -139,7 +139,7 @@ impl<'a> Hash for ApplicableDeclarationsCacheQuery<'a> {
 static APPLICABLE_DECLARATIONS_CACHE_SIZE: usize = 32;

 pub struct ApplicableDeclarationsCache {
-cache: SimpleHashCache<ApplicableDeclarationsCacheEntry,Arc<ComputedValues>>,
+cache: SimpleHashCache<ApplicableDeclarationsCacheEntry, Arc<ComputedValues>>,
 }

 impl ApplicableDeclarationsCache {
@@ -45,10 +45,10 @@ use std::sync::Arc;
 use std::sync::mpsc::channel;
 use std::f32;
 use style::computed_values::filter::Filter;
-use style::computed_values::{background_attachment, background_clip, background_origin,
-background_repeat, background_size};
-use style::computed_values::{border_style, image_rendering, overflow_x, position,
-visibility, transform, transform_style};
+use style::computed_values::{background_attachment, background_clip, background_origin};
+use style::computed_values::{background_repeat, background_size};
+use style::computed_values::{border_style, image_rendering, overflow_x, position};
+use style::computed_values::{visibility, transform, transform_style};
 use style::properties::ComputedValues;
 use style::properties::style_structs::Border;
 use style::values::RGBA;

@@ -1108,7 +1108,7 @@ impl FragmentDisplayListBuilding for Fragment {
 },
 None => IpcSharedMemory::from_byte(0xFFu8, width * height * 4),
 };
-display_list.content.push_back(DisplayItem::ImageClass(box ImageDisplayItem{
+display_list.content.push_back(DisplayItem::ImageClass(box ImageDisplayItem {
 base: BaseDisplayItem::new(stacking_relative_content_box,
 DisplayItemMetadata::new(self.node,
 &*self.style,
@@ -468,10 +468,10 @@ pub trait MutableFlowUtils {
 // Traversals

 /// Traverses the tree in preorder.
-fn traverse_preorder<T:PreorderFlowTraversal>(self, traversal: &T);
+fn traverse_preorder<T: PreorderFlowTraversal>(self, traversal: &T);

 /// Traverses the tree in postorder.
-fn traverse_postorder<T:PostorderFlowTraversal>(self, traversal: &T);
+fn traverse_postorder<T: PostorderFlowTraversal>(self, traversal: &T);

 /// Traverse the Absolute flow tree in preorder.
 ///

@@ -1301,7 +1301,7 @@ impl<'a> ImmutableFlowUtils for &'a (Flow + 'a) {

 impl<'a> MutableFlowUtils for &'a mut (Flow + 'a) {
 /// Traverses the tree in preorder.
-fn traverse_preorder<T:PreorderFlowTraversal>(self, traversal: &T) {
+fn traverse_preorder<T: PreorderFlowTraversal>(self, traversal: &T) {
 if traversal.should_process(self) {
 traversal.process(self);
 }

@@ -1312,7 +1312,7 @@ impl<'a> MutableFlowUtils for &'a mut (Flow + 'a) {
 }

 /// Traverses the tree in postorder.
-fn traverse_postorder<T:PostorderFlowTraversal>(self, traversal: &T) {
+fn traverse_postorder<T: PostorderFlowTraversal>(self, traversal: &T) {
 for kid in child_iter(self) {
 kid.traverse_postorder(traversal);
 }
@@ -1446,7 +1446,7 @@ impl OpaqueFlow {
 #[allow(unsafe_code)]
 pub fn from_flow(flow: &Flow) -> OpaqueFlow {
 unsafe {
-let object = mem::transmute::<&Flow,raw::TraitObject>(flow);
+let object = mem::transmute::<&Flow, raw::TraitObject>(flow);
 OpaqueFlow(object.data as usize)
 }
 }

@@ -1446,7 +1446,7 @@ impl Fragment {

 /// A helper method that uses the breaking strategy described by `slice_iterator` (at present,
 /// either natural word breaking or character breaking) to split this fragment.
-fn calculate_split_position_using_breaking_strategy<'a,I>(
+fn calculate_split_position_using_breaking_strategy<'a, I>(
 &self,
 slice_iterator: I,
 max_inline_size: Au,

@@ -101,7 +101,7 @@ pub struct ResolveGeneratedContent<'a> {
 /// The counter representing an ordered list item.
 list_item: Counter,
 /// Named CSS counters.
-counters: HashMap<String,Counter>,
+counters: HashMap<String, Counter>,
 /// The level of quote nesting.
 quote: u32,
 }
@@ -309,11 +309,11 @@ impl LineBreaker {
 }

 /// Reflows the given fragments, which have been plucked out of the inline flow.
-fn reflow_fragments<'a,I>(&mut self,
-mut old_fragment_iter: I,
-flow: &'a InlineFlow,
-layout_context: &LayoutContext)
-where I: Iterator<Item=Fragment> {
+fn reflow_fragments<'a, I>(&mut self,
+mut old_fragment_iter: I,
+flow: &'a InlineFlow,
+layout_context: &LayoutContext)
+where I: Iterator<Item=Fragment> {
 loop {
 // Acquire the next fragment to lay out from the work list or fragment list, as
 // appropriate.

@@ -139,7 +139,7 @@ pub struct LayoutTaskData {
 pub offset_parent_response: OffsetParentResponse,

 /// The list of currently-running animations.
-pub running_animations: Arc<HashMap<OpaqueNode,Vec<Animation>>>,
+pub running_animations: Arc<HashMap<OpaqueNode, Vec<Animation>>>,

 /// Receives newly-discovered animations.
 pub new_animations_receiver: Receiver<Animation>,
@@ -708,7 +708,7 @@ impl LayoutTask {
 url: Url,
 mq: MediaQueryList,
 pending: PendingAsyncLoad,
-responder: Box<StylesheetLoadResponder+Send>,
+responder: Box<StylesheetLoadResponder + Send>,
 possibly_locked_rw_data:
 &mut Option<MutexGuard<'a, LayoutTaskData>>) {
 // TODO: Get the actual value. http://dev.w3.org/csswg/css-syntax/#environment-encoding

@@ -89,14 +89,14 @@ pub type UnsafeFlowList = (Box<Vec<UnsafeLayoutNode>>, usize);

 pub type ChunkedDomTraversalFunction =
 extern "Rust" fn(UnsafeLayoutNodeList,
-&mut WorkerProxy<SharedLayoutContext,UnsafeLayoutNodeList>);
+&mut WorkerProxy<SharedLayoutContext, UnsafeLayoutNodeList>);

 pub type DomTraversalFunction =
 extern "Rust" fn(UnsafeLayoutNode,
-&mut WorkerProxy<SharedLayoutContext,UnsafeLayoutNodeList>);
+&mut WorkerProxy<SharedLayoutContext, UnsafeLayoutNodeList>);

 pub type ChunkedFlowTraversalFunction =
-extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext,UnsafeFlowList>);
+extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);

 pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
@@ -104,13 +104,13 @@ pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutConte
 pub trait ParallelPreorderDomTraversal : PreorderDomTraversal {
 fn run_parallel(&self,
 nodes: UnsafeLayoutNodeList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeLayoutNodeList>);
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeLayoutNodeList>);

 #[inline(always)]
 fn run_parallel_helper(
 &self,
 unsafe_nodes: UnsafeLayoutNodeList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeLayoutNodeList>,
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeLayoutNodeList>,
 top_down_func: ChunkedDomTraversalFunction,
 bottom_up_func: DomTraversalFunction) {
 let mut discovered_child_nodes = Vec::new();

@@ -168,7 +168,7 @@ trait ParallelPostorderDomTraversal : PostorderDomTraversal {
 /// fetch-and-subtract the parent's children count.
 fn run_parallel(&self,
 mut unsafe_node: UnsafeLayoutNode,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeLayoutNodeList>) {
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeLayoutNodeList>) {
 loop {
 // Get a real layout node.
 let node: LayoutNode = unsafe {
@@ -284,14 +284,14 @@ trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
 trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
 fn run_parallel(&self,
 unsafe_flows: UnsafeFlowList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeFlowList>);
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);

 fn should_record_thread_ids(&self) -> bool;

 #[inline(always)]
 fn run_parallel_helper(&self,
 unsafe_flows: UnsafeFlowList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeFlowList>,
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
 top_down_func: ChunkedFlowTraversalFunction,
 bottom_up_func: FlowTraversalFunction) {
 let mut discovered_child_flows = Vec::new();

@@ -335,7 +335,7 @@ trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
 impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
 fn run_parallel(&self,
 unsafe_flows: UnsafeFlowList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeFlowList>) {
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
 self.run_parallel_helper(unsafe_flows,
 proxy,
 assign_inline_sizes,
@@ -397,7 +397,7 @@ fn construct_flows(unsafe_node: UnsafeLayoutNode,
 }

 fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
-proxy: &mut WorkerProxy<SharedLayoutContext,UnsafeFlowList>) {
+proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
 let shared_layout_context = proxy.user_data();
 let layout_context = LayoutContext::new(shared_layout_context);
 let assign_inline_sizes_traversal = AssignISizes {

@@ -438,12 +438,12 @@ fn build_display_list(unsafe_flow: UnsafeFlow,
 build_display_list_traversal.run_parallel(unsafe_flow);
 }

-fn run_queue_with_custom_work_data_type<To,F>(
+fn run_queue_with_custom_work_data_type<To, F>(
 queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>,
 callback: F,
 shared_layout_context: &SharedLayoutContext)
-where To: 'static + Send, F: FnOnce(&mut WorkQueue<SharedLayoutContext,To>) {
-let queue: &mut WorkQueue<SharedLayoutContext,To> = unsafe {
+where To: 'static + Send, F: FnOnce(&mut WorkQueue<SharedLayoutContext, To>) {
+let queue: &mut WorkQueue<SharedLayoutContext, To> = unsafe {
 mem::transmute(queue)
 };
 callback(queue);
@@ -137,7 +137,7 @@ pub fn iterate_through_flow_tree_fragment_border_boxes(root: &mut FlowRef,
 };

 // FIXME(#2795): Get the real container size.
-doit(kid, level+1, iterator, &stacking_context_position);
+doit(kid, level + 1, iterator, &stacking_context_position);
 }
 }

@@ -166,7 +166,7 @@ impl TableRowFlow {
 }
 }

-pub fn populate_collapsed_border_spacing<'a,I>(
+pub fn populate_collapsed_border_spacing<'a, I>(
 &mut self,
 collapsed_inline_direction_border_widths_for_table: &[Au],
 collapsed_block_direction_border_widths_for_table: &mut Peekable<I>)

@@ -83,7 +83,7 @@ impl TableRowGroupFlow {
 &self.block_flow.fragment
 }

-pub fn populate_collapsed_border_spacing<'a,I>(
+pub fn populate_collapsed_border_spacing<'a, I>(
 &mut self,
 collapsed_inline_direction_border_widths_for_table: &[Au],
 collapsed_block_direction_border_widths_for_table: &mut Peekable<I>)
@@ -280,7 +280,7 @@ impl<'ln> LayoutNode<'ln> {

 /// Borrows the layout data immutably. Fails on a conflicting borrow.
 #[inline(always)]
-pub fn borrow_layout_data<'a>(&'a self) -> Ref<'a,Option<LayoutDataWrapper>> {
+pub fn borrow_layout_data<'a>(&'a self) -> Ref<'a, Option<LayoutDataWrapper>> {
 unsafe {
 mem::transmute(self.get_jsmanaged().layout_data())
 }

@@ -288,7 +288,7 @@ impl<'ln> LayoutNode<'ln> {

 /// Borrows the layout data mutably. Fails on a conflicting borrow.
 #[inline(always)]
-pub fn mutate_layout_data<'a>(&'a self) -> RefMut<'a,Option<LayoutDataWrapper>> {
+pub fn mutate_layout_data<'a>(&'a self) -> RefMut<'a, Option<LayoutDataWrapper>> {
 unsafe {
 mem::transmute(self.get_jsmanaged().layout_data_mut())
 }

@@ -743,7 +743,7 @@ impl<'ln> ThreadSafeLayoutNode<'ln> {
 ///
 /// TODO(pcwalton): Make this private. It will let us avoid borrow flag checks in some cases.
 #[inline(always)]
-pub fn borrow_layout_data<'a>(&'a self) -> Ref<'a,Option<LayoutDataWrapper>> {
+pub fn borrow_layout_data<'a>(&'a self) -> Ref<'a, Option<LayoutDataWrapper>> {
 self.node.borrow_layout_data()
 }

@@ -751,7 +751,7 @@ impl<'ln> ThreadSafeLayoutNode<'ln> {
 ///
 /// TODO(pcwalton): Make this private. It will let us avoid borrow flag checks in some cases.
 #[inline(always)]
-pub fn mutate_layout_data<'a>(&'a self) -> RefMut<'a,Option<LayoutDataWrapper>> {
+pub fn mutate_layout_data<'a>(&'a self) -> RefMut<'a, Option<LayoutDataWrapper>> {
 self.node.mutate_layout_data()
 }
@@ -53,7 +53,7 @@ pub struct CORSCacheEntry {
 }

 impl CORSCacheEntry {
-fn new(origin:Url, url: Url, max_age: u32, credentials: bool,
+fn new(origin: Url, url: Url, max_age: u32, credentials: bool,
 header_or_method: HeaderOrMethod) -> CORSCacheEntry {
 CORSCacheEntry {
 origin: origin,

@@ -112,7 +112,7 @@ pub struct Request {
 pub redirect_mode: RedirectMode,
 pub redirect_count: usize,
 pub response_tainting: ResponseTainting,
-pub cache: Option<Box<CORSCache+'static>>
+pub cache: Option<Box<CORSCache + 'static>>
 }

 impl Request {
@@ -23,14 +23,14 @@ impl MIMEClassifier {
 supplied_type: &Option<(String, String)>,
 data: &[u8]) -> Option<(String, String)> {

-match *supplied_type{
+match *supplied_type {
 None => {
 return self.sniff_unknown_type(!no_sniff, data);
 }
 Some((ref media_type, ref media_subtype)) => {
 match (&**media_type, &**media_subtype) {
 ("unknown", "unknown") | ("application", "unknown") | ("*", "*") => {
-return self.sniff_unknown_type(!no_sniff,data);
+return self.sniff_unknown_type(!no_sniff, data);
 }
 _ => {
 if no_sniff {

@@ -71,8 +71,8 @@ impl MIMEClassifier {
 return supplied_type.clone();
 }

-pub fn new()->MIMEClassifier {
-MIMEClassifier{
+pub fn new() -> MIMEClassifier {
+MIMEClassifier {
 image_classifier: GroupedClassifier::image_classifer(),
 audio_video_classifer: GroupedClassifier::audio_video_classifer(),
 scriptable_classifier: GroupedClassifier::scriptable_classifier(),

@@ -84,7 +84,7 @@ impl MIMEClassifier {
 }
 //some sort of iterator over the classifiers might be better?
 fn sniff_unknown_type(&self, sniff_scriptable: bool, data: &[u8]) ->
-Option<(String,String)> {
+Option<(String, String)> {
 if sniff_scriptable {
 self.scriptable_classifier.classify(data)
 } else {
@@ -107,21 +107,21 @@ impl MIMEClassifier {
 }
 }
 fn is_html(tp: &str, sub_tp: &str) -> bool {
-tp=="text" && sub_tp=="html"
+tp == "text" && sub_tp == "html"
 }
 }

-pub fn as_string_option(tup: Option<(&'static str, &'static str)>) -> Option<(String,String)> {
+pub fn as_string_option(tup: Option<(&'static str, &'static str)>) -> Option<(String, String)> {
 tup.map(|(a, b)| (a.to_owned(), b.to_owned()))
 }

 //Interface used for composite types
 trait MIMEChecker {
-fn classify(&self, data: &[u8])->Option<(String, String)>;
+fn classify(&self, data: &[u8]) -> Option<(String, String)>;
 }

 trait Matches {
-fn matches(&mut self, matches: &[u8])->bool;
+fn matches(&mut self, matches: &[u8]) -> bool;
 }

 impl <'a, T: Iterator<Item=&'a u8> + Clone> Matches for T {
@@ -215,7 +215,7 @@ impl MIMEChecker for TagTerminatedByteMatcher {
 pub struct Mp4Matcher;

 impl Mp4Matcher {
-pub fn matches(&self,data: &[u8]) -> bool {
+pub fn matches(&self, data: &[u8]) -> bool {
 if data.len() < 12 {
 return false;
 }

@@ -235,7 +235,7 @@ impl Mp4Matcher {
 }
 let mut all_match = true;
 for i in 8..11 {
-if data[i]!=mp4[i - 8] {
+if data[i] != mp4[i - 8] {
 all_match = false;
 break;
 }

@@ -278,7 +278,7 @@ struct BinaryOrPlaintextClassifier;

 impl BinaryOrPlaintextClassifier {
 fn classify_impl(&self, data: &[u8]) -> Option<(&'static str, &'static str)> {
-if (data.len() >=2 &&
+if (data.len() >= 2 &&
 ((data[0] == 0xFFu8 && data[1] == 0xFEu8) ||
 (data[0] == 0xFEu8 && data[1] == 0xFFu8))) ||
 (data.len() >= 3 && data[0] == 0xEFu8 && data[1] == 0xBBu8 && data[2] == 0xBFu8)
@@ -320,7 +320,7 @@ impl GroupedClassifier {
 }
 }
 fn audio_video_classifer() -> GroupedClassifier {
-GroupedClassifier{
+GroupedClassifier {
 byte_matchers: vec![
 box ByteMatcher::video_webm(),
 box ByteMatcher::audio_basic(),

@@ -335,7 +335,7 @@ impl GroupedClassifier {
 }
 }
 fn scriptable_classifier() -> GroupedClassifier {
-GroupedClassifier{
+GroupedClassifier {
 byte_matchers: vec![
 box ByteMatcher::text_html_doctype(),
 box ByteMatcher::text_html_page(),

@@ -361,7 +361,7 @@ impl GroupedClassifier {

 }
 fn plaintext_classifier() -> GroupedClassifier {
-GroupedClassifier{
+GroupedClassifier {
 byte_matchers: vec![
 box ByteMatcher::text_plain_utf_8_bom(),
 box ByteMatcher::text_plain_utf_16le_bom(),

@@ -395,7 +395,7 @@ impl GroupedClassifier {
 }
 }
 impl MIMEChecker for GroupedClassifier {
-fn classify(&self,data: &[u8]) -> Option<(String, String)> {
+fn classify(&self, data: &[u8]) -> Option<(String, String)> {
 self.byte_matchers
 .iter()
 .filter_map(|matcher| matcher.classify(data))
@@ -405,7 +405,7 @@ impl MIMEChecker for GroupedClassifier {

 struct FeedsClassifier;
 impl FeedsClassifier {
-fn classify_impl(&self,data: &[u8]) -> Option<(&'static str,&'static str)> {
+fn classify_impl(&self, data: &[u8]) -> Option<(&'static str, &'static str)> {
 let length = data.len();
 let mut data_iterator = data.iter();

@@ -469,7 +469,7 @@ impl FeedsClassifier {
 }

 impl MIMEChecker for FeedsClassifier {
-fn classify(&self,data: &[u8]) -> Option<(String, String)> {
+fn classify(&self, data: &[u8]) -> Option<(String, String)> {
 as_string_option(self.classify_impl(data))
 }
 }

@@ -478,8 +478,8 @@ impl MIMEChecker for FeedsClassifier {
 //TODO: These should be configured and not hard coded
 impl ByteMatcher {
 //A Windows Icon signature
-fn image_x_icon()->ByteMatcher {
-ByteMatcher{
+fn image_x_icon() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x00\x00\x01\x00",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("image", "x-icon"),
@@ -487,8 +487,8 @@ impl ByteMatcher {
 }
 }
 //A Windows Cursor signature.
-fn image_x_icon_cursor()->ByteMatcher {
-ByteMatcher{
+fn image_x_icon_cursor() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x00\x00\x02\x00",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("image", "x-icon"),

@@ -496,8 +496,8 @@ impl ByteMatcher {
 }
 }
 //The string "BM", a BMP signature.
-fn image_bmp()->ByteMatcher {
-ByteMatcher{
+fn image_bmp() -> ByteMatcher {
+ByteMatcher {
 pattern: b"BM",
 mask: b"\xFF\xFF",
 content_type: ("image", "bmp"),

@@ -505,8 +505,8 @@ impl ByteMatcher {
 }
 }
 //The string "GIF89a", a GIF signature.
-fn image_gif89a()->ByteMatcher {
-ByteMatcher{
+fn image_gif89a() -> ByteMatcher {
+ByteMatcher {
 pattern: b"GIF89a",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("image", "gif"),

@@ -514,8 +514,8 @@ impl ByteMatcher {
 }
 }
 //The string "GIF87a", a GIF signature.
-fn image_gif87a()->ByteMatcher {
-ByteMatcher{
+fn image_gif87a() -> ByteMatcher {
+ByteMatcher {
 pattern: b"GIF87a",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("image", "gif"),
@@ -523,8 +523,8 @@ impl ByteMatcher {
 }
 }
 //The string "RIFF" followed by four bytes followed by the string "WEBPVP".
-fn image_webp()->ByteMatcher {
-ByteMatcher{
+fn image_webp() -> ByteMatcher {
+ByteMatcher {
 pattern: b"RIFF\x00\x00\x00\x00WEBPVP",
 mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00,\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("image", "webp"),

@@ -533,8 +533,8 @@ impl ByteMatcher {
 }
 //An error-checking byte followed by the string "PNG" followed by CR LF SUB LF, the PNG
 //signature.
-fn image_png()->ByteMatcher {
-ByteMatcher{
+fn image_png() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x89PNG\r\n\x1A\n",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("image", "png"),

@@ -542,8 +542,8 @@ impl ByteMatcher {
 }
 }
 // The JPEG Start of Image marker followed by the indicator byte of another marker.
-fn image_jpeg()->ByteMatcher {
-ByteMatcher{
+fn image_jpeg() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\xFF\xD8\xFF",
 mask: b"\xFF\xFF\xFF",
 content_type: ("image", "jpeg"),

@@ -551,8 +551,8 @@ impl ByteMatcher {
 }
 }
 //The WebM signature. [TODO: Use more bytes?]
-fn video_webm()->ByteMatcher {
-ByteMatcher{
+fn video_webm() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x1A\x45\xDF\xA3",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("video", "webm"),
@@ -560,8 +560,8 @@ impl ByteMatcher {
 }
 }
 //The string ".snd", the basic audio signature.
-fn audio_basic()->ByteMatcher {
-ByteMatcher{
+fn audio_basic() -> ByteMatcher {
+ByteMatcher {
 pattern: b".snd",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("audio", "basic"),

@@ -569,8 +569,8 @@ impl ByteMatcher {
 }
 }
 //The string "FORM" followed by four bytes followed by the string "AIFF", the AIFF signature.
-fn audio_aiff()->ByteMatcher {
-ByteMatcher{
+fn audio_aiff() -> ByteMatcher {
+ByteMatcher {
 pattern: b"FORM\x00\x00\x00\x00AIFF",
 mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
 content_type: ("audio", "aiff"),

@@ -578,8 +578,8 @@ impl ByteMatcher {
 }
 }
 //The string "ID3", the ID3v2-tagged MP3 signature.
-fn audio_mpeg()->ByteMatcher {
-ByteMatcher{
+fn audio_mpeg() -> ByteMatcher {
+ByteMatcher {
 pattern: b"ID3",
 mask: b"\xFF\xFF\xFF",
 content_type: ("audio", "mpeg"),

@@ -587,8 +587,8 @@ impl ByteMatcher {
 }
 }
 //The string "OggS" followed by NUL, the Ogg container signature.
-fn application_ogg()->ByteMatcher {
-ByteMatcher{
+fn application_ogg() -> ByteMatcher {
+ByteMatcher {
 pattern: b"OggS",
 mask: b"\xFF\xFF\xFF\xFF\xFF",
 content_type: ("application", "ogg"),
@@ -597,8 +597,8 @@ impl ByteMatcher {
 }
 //The string "MThd" followed by four bytes representing the number 6 in 32 bits (big-endian),
 //the MIDI signature.
-fn audio_midi()->ByteMatcher {
-ByteMatcher{
+fn audio_midi() -> ByteMatcher {
+ByteMatcher {
 pattern: b"MThd\x00\x00\x00\x06",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("audio", "midi"),

@@ -606,8 +606,8 @@ impl ByteMatcher {
 }
 }
 //The string "RIFF" followed by four bytes followed by the string "AVI ", the AVI signature.
-fn video_avi()->ByteMatcher {
-ByteMatcher{
+fn video_avi() -> ByteMatcher {
+ByteMatcher {
 pattern: b"RIFF\x00\x00\x00\x00AVI ",
 mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
 content_type: ("video", "avi"),

@@ -615,8 +615,8 @@ impl ByteMatcher {
 }
 }
 // The string "RIFF" followed by four bytes followed by the string "WAVE", the WAVE signature.
-fn audio_wave()->ByteMatcher {
-ByteMatcher{
+fn audio_wave() -> ByteMatcher {
+ByteMatcher {
 pattern: b"RIFF\x00\x00\x00\x00WAVE",
 mask: b"\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF",
 content_type: ("audio", "wave"),

@@ -624,9 +624,9 @@ impl ByteMatcher {
 }
 }
 // doctype terminated with Tag terminating (TT) Byte
-fn text_html_doctype()->TagTerminatedByteMatcher {
+fn text_html_doctype() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<!DOCTYPE HTML",
 mask: b"\xFF\xFF\xDF\xDF\xDF\xDF\xDF\xDF\xDF\xFF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),
@@ -636,9 +636,9 @@ impl ByteMatcher {
 }

 // HTML terminated with Tag terminating (TT) Byte: 0x20 (SP)
-fn text_html_page()->TagTerminatedByteMatcher {
+fn text_html_page() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<HTML",
 mask: b"\xFF\xDF\xDF\xDF\xDF\xFF",
 content_type: ("text", "html"),

@@ -648,9 +648,9 @@ impl ByteMatcher {
 }

 // head terminated with Tag Terminating (TT) Byte
-fn text_html_head()->TagTerminatedByteMatcher {
+fn text_html_head() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<HEAD",
 mask: b"\xFF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -660,7 +660,7 @@ impl ByteMatcher {
 }

 // script terminated with Tag Terminating (TT) Byte
-fn text_html_script()->TagTerminatedByteMatcher {
+fn text_html_script() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
 matcher: ByteMatcher {
 pattern: b"<SCRIPT",

@@ -672,9 +672,9 @@ impl ByteMatcher {
 }

 // iframe terminated with Tag Terminating (TT) Byte
-fn text_html_iframe()->TagTerminatedByteMatcher {
+fn text_html_iframe() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<IFRAME",
 mask: b"\xFF\xDF\xDF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),
@@ -684,9 +684,9 @@ impl ByteMatcher {
 }

 // h1 terminated with Tag Terminating (TT) Byte
-fn text_html_h1()->TagTerminatedByteMatcher {
+fn text_html_h1() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<H1",
 mask: b"\xFF\xDF\xFF",
 content_type: ("text", "html"),

@@ -696,9 +696,9 @@ impl ByteMatcher {
 }

 // div terminated with Tag Terminating (TT) Byte
-fn text_html_div()->TagTerminatedByteMatcher {
+fn text_html_div() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<DIV",
 mask: b"\xFF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -708,9 +708,9 @@ impl ByteMatcher {
 }

 // font terminated with Tag Terminating (TT) Byte
-fn text_html_font()->TagTerminatedByteMatcher {
+fn text_html_font() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<FONT",
 mask: b"\xFF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -720,9 +720,9 @@ impl ByteMatcher {
 }

 // table terminated with Tag Terminating (TT) Byte
-fn text_html_table()->TagTerminatedByteMatcher {
+fn text_html_table() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<TABLE",
 mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),
@@ -732,9 +732,9 @@ impl ByteMatcher {
 }

 // a terminated with Tag Terminating (TT) Byte
-fn text_html_a()->TagTerminatedByteMatcher {
+fn text_html_a() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<A",
 mask: b"\xFF\xDF",
 content_type: ("text", "html"),

@@ -744,9 +744,9 @@ impl ByteMatcher {
 }

 // style terminated with Tag Terminating (TT) Byte
-fn text_html_style()->TagTerminatedByteMatcher {
+fn text_html_style() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<STYLE",
 mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -756,9 +756,9 @@ impl ByteMatcher {
 }

 // title terminated with Tag Terminating (TT) Byte
-fn text_html_title()->TagTerminatedByteMatcher {
+fn text_html_title() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<TITLE",
 mask: b"\xFF\xDF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -768,9 +768,9 @@ impl ByteMatcher {
 }

 // b terminated with Tag Terminating (TT) Byte
-fn text_html_b()->TagTerminatedByteMatcher {
+fn text_html_b() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<B",
 mask: b"\xFF\xDF",
 content_type: ("text", "html"),
@@ -780,9 +780,9 @@ impl ByteMatcher {
 }

 // body terminated with Tag Terminating (TT) Byte
-fn text_html_body()->TagTerminatedByteMatcher {
+fn text_html_body() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<BODY",
 mask: b"\xFF\xDF\xDF\xDF\xDF",
 content_type: ("text", "html"),

@@ -792,9 +792,9 @@ impl ByteMatcher {
 }

 // br terminated with Tag Terminating (TT) Byte
-fn text_html_br()->TagTerminatedByteMatcher {
+fn text_html_br() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<BR",
 mask: b"\xFF\xDF\xDF",
 content_type: ("text", "html"),

@@ -804,9 +804,9 @@ impl ByteMatcher {
 }

 // p terminated with Tag Terminating (TT) Byte
-fn text_html_p()->TagTerminatedByteMatcher {
+fn text_html_p() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<P",
 mask: b"\xFF\xDF",
 content_type: ("text", "html"),

@@ -816,9 +816,9 @@ impl ByteMatcher {
 }

 // comment terminated with Tag Terminating (TT) Byte
-fn text_html_comment()->TagTerminatedByteMatcher {
+fn text_html_comment() -> TagTerminatedByteMatcher {
 TagTerminatedByteMatcher {
-matcher: ByteMatcher{
+matcher: ByteMatcher {
 pattern: b"<!--",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("text", "html"),
@@ -828,7 +828,7 @@ impl ByteMatcher {
 }

 //The string "<?xml".
-fn text_xml()->ByteMatcher {
+fn text_xml() -> ByteMatcher {
 ByteMatcher {
 pattern: b"<?xml",
 mask: b"\xFF\xFF\xFF\xFF\xFF",

@@ -837,8 +837,8 @@ impl ByteMatcher {
 }
 }
 //The string "%PDF-", the PDF signature.
-fn application_pdf()->ByteMatcher {
-ByteMatcher{
+fn application_pdf() -> ByteMatcher {
+ByteMatcher {
 pattern: b"%PDF",
 mask: b"\xFF\xFF\xFF\xFF\xFF",
 content_type: ("application", "pdf"),

@@ -848,8 +848,8 @@ impl ByteMatcher {
 //34 bytes followed by the string "LP", the Embedded OpenType signature.
 // TODO: Use this in font context classifier
 #[allow(dead_code)]
-fn application_vnd_ms_font_object()->ByteMatcher {
-ByteMatcher{
+fn application_vnd_ms_font_object() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
 \x00\x00LP",
@@ -863,8 +863,8 @@ impl ByteMatcher {
 //4 bytes representing the version number 1.0, a TrueType signature.
 // TODO: Use this in font context classifier
 #[allow(dead_code)]
-fn true_type()->ByteMatcher {
-ByteMatcher{
+fn true_type() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x00\x01\x00\x00",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("(TrueType)", ""),

@@ -874,8 +874,8 @@ impl ByteMatcher {
 //The string "OTTO", the OpenType signature.
 // TODO: Use this in font context classifier
 #[allow(dead_code)]
-fn open_type()->ByteMatcher {
-ByteMatcher{
+fn open_type() -> ByteMatcher {
+ByteMatcher {
 pattern: b"OTTO",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("(OpenType)", ""),

@@ -885,8 +885,8 @@ impl ByteMatcher {
 // The string "ttcf", the TrueType Collection signature.
 // TODO: Use this in font context classifier
 #[allow(dead_code)]
-fn true_type_collection()->ByteMatcher {
-ByteMatcher{
+fn true_type_collection() -> ByteMatcher {
+ByteMatcher {
 pattern: b"ttcf",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("(TrueType Collection)", ""),

@@ -896,8 +896,8 @@ impl ByteMatcher {
 // The string "wOFF", the Web Open Font Format signature.
 // TODO: Use this in font context classifier
 #[allow(dead_code)]
-fn application_font_woff()->ByteMatcher {
-ByteMatcher{
+fn application_font_woff() -> ByteMatcher {
+ByteMatcher {
 pattern: b"wOFF",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("application", "font-woff"),
@@ -905,8 +905,8 @@ impl ByteMatcher {
 }
 }
 //The GZIP archive signature.
-fn application_x_gzip()->ByteMatcher {
-ByteMatcher{
+fn application_x_gzip() -> ByteMatcher {
+ByteMatcher {
 pattern: b"\x1F\x8B\x08",
 mask: b"\xFF\xFF\xFF",
 content_type: ("application", "x-gzip"),

@@ -914,8 +914,8 @@ impl ByteMatcher {
 }
 }
 //The string "PK" followed by ETX EOT, the ZIP archive signature.
-fn application_zip()->ByteMatcher {
-ByteMatcher{
+fn application_zip() -> ByteMatcher {
+ByteMatcher {
 pattern: b"PK\x03\x04",
 mask: b"\xFF\xFF\xFF\xFF",
 content_type: ("application", "zip"),

@@ -923,8 +923,8 @@ impl ByteMatcher {
 }
 }
 //The string "Rar " followed by SUB BEL NUL, the RAR archive signature.
-fn application_x_rar_compressed()->ByteMatcher {
-ByteMatcher{
+fn application_x_rar_compressed() -> ByteMatcher {
+ByteMatcher {
 pattern: b"Rar \x1A\x07\x00",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("application", "x-rar-compressed"),

@@ -932,8 +932,8 @@ impl ByteMatcher {
 }
 }
 // The string "%!PS-Adobe-", the PostScript signature.
-fn application_postscript()->ByteMatcher {
-ByteMatcher{
+fn application_postscript() -> ByteMatcher {
+ByteMatcher {
 pattern: b"%!PS-Adobe-",
 mask: b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF",
 content_type: ("application", "postscript"),
@ -941,8 +941,8 @@ impl ByteMatcher {
|
|||
}
|
||||
}
|
||||
// UTF-16BE BOM
|
||||
fn text_plain_utf_16be_bom()->ByteMatcher {
|
||||
ByteMatcher{
|
||||
fn text_plain_utf_16be_bom() -> ByteMatcher {
|
||||
ByteMatcher {
|
||||
pattern: b"\xFE\xFF\x00\x00",
|
||||
mask: b"\xFF\xFF\x00\x00",
|
||||
content_type: ("text", "plain"),
|
||||
|
@ -950,8 +950,8 @@ impl ByteMatcher {
|
|||
}
|
||||
}
|
||||
//UTF-16LE BOM
|
||||
fn text_plain_utf_16le_bom()->ByteMatcher {
|
||||
ByteMatcher{
|
||||
fn text_plain_utf_16le_bom() -> ByteMatcher {
|
||||
ByteMatcher {
|
||||
pattern: b"\xFF\xFE\x00\x00",
|
||||
mask: b"\xFF\xFF\x00\x00",
|
||||
content_type: ("text", "plain"),
|
||||
|
@ -959,8 +959,8 @@ impl ByteMatcher {
|
|||
}
|
||||
}
|
||||
//UTF-8 BOM
|
||||
fn text_plain_utf_8_bom()->ByteMatcher {
|
||||
ByteMatcher{
|
||||
fn text_plain_utf_8_bom() -> ByteMatcher {
|
||||
ByteMatcher {
|
||||
pattern: b"\xEF\xBB\xBF\x00",
|
||||
mask: b"\xFF\xFF\xFF\x00",
|
||||
content_type: ("text", "plain"),
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
pub static PUB_DOMAINS : [&'static str; 5953] = [
|
||||
pub static PUB_DOMAINS: [&'static str; 5953] = [
|
||||
"com.ac",
|
||||
"edu.ac",
|
||||
"gov.ac",
|
||||
|
|
|
@ -116,7 +116,7 @@ pub fn load_from_memory(buffer: &[u8]) -> Option<Image> {
|
|||
|
||||
fn is_gif(buffer: &[u8]) -> bool {
|
||||
match buffer {
|
||||
[b'G',b'I',b'F',b'8', n, b'a', ..] if n == b'7' || n == b'9' => true,
|
||||
[b'G', b'I', b'F', b'8', n, b'a', ..] if n == b'7' || n == b'9' => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ fn expand_cased<'cx, T>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree],
|
|||
}
|
||||
(_, rest) => {
|
||||
if rest > 0 {
|
||||
cx.span_err(sp, &format!("expected 1 argument, found {}", rest+1));
|
||||
cx.span_err(sp, &format!("expected 1 argument, found {}", rest + 1));
|
||||
}
|
||||
base::DummyResult::expr(sp)
|
||||
}
|
||||
|
|
|
@ -8,8 +8,8 @@ use syntax::ptr::P;
|
|||
use syntax::ast::{MetaItem, Expr};
|
||||
use syntax::ast;
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::ext::deriving::generic::{combine_substructure, EnumMatching, FieldInfo, MethodDef, Struct,
|
||||
Substructure, TraitDef, ty};
|
||||
use syntax::ext::deriving::generic::{combine_substructure, EnumMatching, FieldInfo, MethodDef};
|
||||
use syntax::ext::deriving::generic::{Struct, Substructure, TraitDef, ty};
|
||||
|
||||
pub fn expand_dom_struct(cx: &mut ExtCtxt, sp: Span, _: &MetaItem, anno: Annotatable) -> Annotatable {
|
||||
if let Annotatable::Item(item) = anno {
|
||||
|
|
|
@ -22,7 +22,7 @@ pub fn match_ty_unwrap<'a>(ty: &'a Ty, segments: &[&str]) -> Option<&'a [P<Ty>]>
|
|||
// I could muck around with the maps and find the full path
|
||||
// however the more efficient way is to simply reverse the iterators and zip them
|
||||
// which will compare them in reverse until one of them runs out of segments
|
||||
if seg.iter().rev().zip(segments.iter().rev()).all(|(a,b)| a.identifier.name.as_str() == *b) {
|
||||
if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) {
|
||||
match seg.last() {
|
||||
Some(&PathSegment {parameters: AngleBracketedParameters(ref a), ..}) => {
|
||||
Some(&a.types)
|
||||
|
|
|
@ -106,10 +106,10 @@ impl CORSRequest {
|
|||
}
|
||||
|
||||
pub fn http_fetch_async(&self,
|
||||
listener: Box<AsyncCORSResponseListener+Send>,
|
||||
script_chan: Box<ScriptChan+Send>) {
|
||||
listener: Box<AsyncCORSResponseListener + Send>,
|
||||
script_chan: Box<ScriptChan + Send>) {
|
||||
struct CORSContext {
|
||||
listener: Box<AsyncCORSResponseListener+Send>,
|
||||
listener: Box<AsyncCORSResponseListener + Send>,
|
||||
response: RefCell<Option<CORSResponse>>,
|
||||
}
|
||||
|
||||
|
@ -341,7 +341,7 @@ pub struct CORSCacheEntry {
|
|||
}
|
||||
|
||||
impl CORSCacheEntry {
|
||||
fn new(origin:Url,
|
||||
fn new(origin: Url,
|
||||
url: Url,
|
||||
max_age: u32,
|
||||
credentials: bool,
|
||||
|
|
|
@ -149,7 +149,7 @@ impl<'a> GlobalRef<'a> {
|
|||
|
||||
/// `ScriptChan` used to send messages to the event loop of this global's
|
||||
/// thread.
|
||||
pub fn script_chan(&self) -> Box<ScriptChan+Send> {
|
||||
pub fn script_chan(&self) -> Box<ScriptChan + Send> {
|
||||
match *self {
|
||||
GlobalRef::Window(ref window) => window.script_chan(),
|
||||
GlobalRef::Worker(ref worker) => worker.script_chan(),
|
||||
|
@ -159,7 +159,7 @@ impl<'a> GlobalRef<'a> {
|
|||
/// Create a new sender/receiver pair that can be used to implement an on-demand
|
||||
/// event loop. Used for implementing web APIs that require blocking semantics
|
||||
/// without resorting to nested event loops.
|
||||
pub fn new_script_pair(&self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>) {
|
||||
pub fn new_script_pair(&self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
|
||||
match *self {
|
||||
GlobalRef::Window(ref window) => window.new_script_pair(),
|
||||
GlobalRef::Worker(ref worker) => worker.new_script_pair(),
|
||||
|
|
|
@ -207,11 +207,11 @@ impl MutHeapJSVal {
|
|||
/// `JS<T>`.
|
||||
#[must_root]
|
||||
#[derive(JSTraceable)]
|
||||
pub struct MutHeap<T: HeapGCValue+Copy> {
|
||||
pub struct MutHeap<T: HeapGCValue + Copy> {
|
||||
val: Cell<T>,
|
||||
}
|
||||
|
||||
impl<T: HeapGCValue+Copy> MutHeap<T> {
|
||||
impl<T: HeapGCValue + Copy> MutHeap<T> {
|
||||
/// Create a new `MutHeap`.
|
||||
pub fn new(initial: T) -> MutHeap<T> {
|
||||
MutHeap {
|
||||
|
@ -236,11 +236,11 @@ impl<T: HeapGCValue+Copy> MutHeap<T> {
|
|||
/// barriers are enforced.
|
||||
#[must_root]
|
||||
#[derive(JSTraceable, HeapSizeOf)]
|
||||
pub struct MutNullableHeap<T: HeapGCValue+Copy> {
|
||||
pub struct MutNullableHeap<T: HeapGCValue + Copy> {
|
||||
ptr: Cell<Option<T>>
|
||||
}
|
||||
|
||||
impl<T: HeapGCValue+Copy> MutNullableHeap<T> {
|
||||
impl<T: HeapGCValue + Copy> MutNullableHeap<T> {
|
||||
/// Create a new `MutNullableHeap`.
|
||||
pub fn new(initial: Option<T>) -> MutNullableHeap<T> {
|
||||
MutNullableHeap {
|
||||
|
@ -288,7 +288,7 @@ impl<T: Reflectable> MutNullableHeap<JS<T>> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: HeapGCValue+Copy> Default for MutNullableHeap<T> {
|
||||
impl<T: HeapGCValue + Copy> Default for MutNullableHeap<T> {
|
||||
fn default() -> MutNullableHeap<T> {
|
||||
MutNullableHeap {
|
||||
ptr: Cell::new(None)
|
||||
|
|
|
@ -178,7 +178,7 @@ impl<T: JSTraceable> JSTraceable for *mut T {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: JSTraceable+Copy> JSTraceable for Cell<T> {
|
||||
impl<T: JSTraceable + Copy> JSTraceable for Cell<T> {
|
||||
fn trace(&self, trc: *mut JSTracer) {
|
||||
self.get().trace(trc)
|
||||
}
|
||||
|
@ -246,7 +246,7 @@ impl<T: JSTraceable, U: JSTraceable> JSTraceable for Result<T, U> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<K,V,S> JSTraceable for HashMap<K, V, S>
|
||||
impl<K, V, S> JSTraceable for HashMap<K, V, S>
|
||||
where K: Hash + Eq + JSTraceable,
|
||||
V: JSTraceable,
|
||||
S: HashState,
|
||||
|
@ -306,7 +306,7 @@ no_jsmanaged_fields!(WebGLError);
|
|||
no_jsmanaged_fields!(ProfilerChan);
|
||||
no_jsmanaged_fields!(PseudoElement);
|
||||
|
||||
impl JSTraceable for Box<ScriptChan+Send> {
|
||||
impl JSTraceable for Box<ScriptChan + Send> {
|
||||
#[inline]
|
||||
fn trace(&self, _trc: *mut JSTracer) {
|
||||
// Do nothing
|
||||
|
@ -327,7 +327,7 @@ impl<'a> JSTraceable for &'a str {
|
|||
}
|
||||
}
|
||||
|
||||
impl<A,B> JSTraceable for fn(A) -> B {
|
||||
impl<A, B> JSTraceable for fn(A) -> B {
|
||||
#[inline]
|
||||
fn trace(&self, _: *mut JSTracer) {
|
||||
// Do nothing
|
||||
|
@ -341,7 +341,7 @@ impl<T> JSTraceable for IpcSender<T> where T: Deserialize + Serialize {
|
|||
}
|
||||
}
|
||||
|
||||
impl JSTraceable for Box<LayoutRPC+'static> {
|
||||
impl JSTraceable for Box<LayoutRPC + 'static> {
|
||||
#[inline]
|
||||
fn trace(&self, _: *mut JSTracer) {
|
||||
// Do nothing
|
||||
|
|
|
@ -35,7 +35,7 @@ pub struct Blob {
|
|||
isClosed_: Cell<bool>
|
||||
}
|
||||
|
||||
fn is_ascii_printable(string: &DOMString) -> bool{
|
||||
fn is_ascii_printable(string: &DOMString) -> bool {
|
||||
// Step 5.1 in Sec 5.1 of File API spec
|
||||
// http://dev.w3.org/2006/webapi/FileAPI/#constructorBlob
|
||||
return string.chars().all(|c| { c >= '\x20' && c <= '\x7E' })
|
||||
|
@ -92,7 +92,7 @@ impl<'a> BlobHelpers for &'a Blob {
|
|||
|
||||
impl<'a> BlobMethods for &'a Blob {
|
||||
// https://dev.w3.org/2006/webapi/FileAPI/#dfn-size
|
||||
fn Size(self) -> u64{
|
||||
fn Size(self) -> u64 {
|
||||
match self.bytes {
|
||||
None => 0,
|
||||
Some(ref bytes) => bytes.len() as u64
|
||||
|
|
|
@ -173,7 +173,7 @@ impl CanvasRenderingContext2D {
|
|||
|
||||
// The source rectangle is the rectangle whose corners are the four points (sx, sy),
|
||||
// (sx+sw, sy), (sx+sw, sy+sh), (sx, sy+sh).
|
||||
let source_rect = Rect::new(Point2D::new(sx.min(sx+sw), sy.min(sy+sh)),
|
||||
let source_rect = Rect::new(Point2D::new(sx.min(sx + sw), sy.min(sy + sh)),
|
||||
Size2D::new(sw.abs(), sh.abs()));
|
||||
|
||||
// When the source rectangle is outside the source image,
|
||||
|
@ -191,8 +191,10 @@ impl CanvasRenderingContext2D {
|
|||
|
||||
// The destination rectangle is the rectangle whose corners are the four points (dx, dy),
|
||||
// (dx+dw, dy), (dx+dw, dy+dh), (dx, dy+dh).
|
||||
let dest_rect = Rect::new(Point2D::new(dx.min(dx+dest_rect_width_scaled), dy.min(dy+dest_rect_height_scaled)),
|
||||
Size2D::new(dest_rect_width_scaled.abs(), dest_rect_height_scaled.abs()));
|
||||
let dest_rect = Rect::new(Point2D::new(dx.min(dx + dest_rect_width_scaled),
|
||||
dy.min(dy + dest_rect_height_scaled)),
|
||||
Size2D::new(dest_rect_width_scaled.abs(),
|
||||
dest_rect_height_scaled.abs()));
|
||||
|
||||
let source_rect = Rect::new(Point2D::new(source_rect_clipped.origin.x,
|
||||
source_rect_clipped.origin.y),
|
||||
|
|
|
@ -88,7 +88,7 @@ impl<'a> ConsoleMethods for &'a Console {
|
|||
|
||||
fn prepare_message(logLevel: LogLevel, message: String) -> ConsoleMessage {
|
||||
//TODO: Sending fake values for filename, lineNumber and columnNumber in LogMessage; adjust later
|
||||
ConsoleMessage{
|
||||
ConsoleMessage {
|
||||
message: message,
|
||||
logLevel: logLevel,
|
||||
filename: "test".to_owned(),
|
||||
|
|
|
@ -150,7 +150,7 @@ pub struct DedicatedWorkerGlobalScope {
|
|||
worker: DOMRefCell<Option<TrustedWorkerAddress>>,
|
||||
#[ignore_heap_size_of = "Can't measure trait objects"]
|
||||
/// Sender to the parent thread.
|
||||
parent_sender: Box<ScriptChan+Send>,
|
||||
parent_sender: Box<ScriptChan + Send>,
|
||||
}
|
||||
|
||||
impl DedicatedWorkerGlobalScope {
|
||||
|
@ -159,7 +159,7 @@ impl DedicatedWorkerGlobalScope {
|
|||
id: PipelineId,
|
||||
devtools_port: Receiver<DevtoolScriptControlMsg>,
|
||||
runtime: Rc<Runtime>,
|
||||
parent_sender: Box<ScriptChan+Send>,
|
||||
parent_sender: Box<ScriptChan + Send>,
|
||||
own_sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
|
||||
receiver: Receiver<(TrustedWorkerAddress, WorkerScriptMsg)>)
|
||||
-> DedicatedWorkerGlobalScope {
|
||||
|
@ -180,7 +180,7 @@ impl DedicatedWorkerGlobalScope {
|
|||
id: PipelineId,
|
||||
devtools_port: Receiver<DevtoolScriptControlMsg>,
|
||||
runtime: Rc<Runtime>,
|
||||
parent_sender: Box<ScriptChan+Send>,
|
||||
parent_sender: Box<ScriptChan + Send>,
|
||||
own_sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
|
||||
receiver: Receiver<(TrustedWorkerAddress, WorkerScriptMsg)>)
|
||||
-> Root<DedicatedWorkerGlobalScope> {
|
||||
|
@ -197,7 +197,7 @@ impl DedicatedWorkerGlobalScope {
|
|||
id: PipelineId,
|
||||
devtools_ipc_port: IpcReceiver<DevtoolScriptControlMsg>,
|
||||
worker: TrustedWorkerAddress,
|
||||
parent_sender: Box<ScriptChan+Send>,
|
||||
parent_sender: Box<ScriptChan + Send>,
|
||||
own_sender: Sender<(TrustedWorkerAddress, WorkerScriptMsg)>,
|
||||
receiver: Receiver<(TrustedWorkerAddress, WorkerScriptMsg)>) {
|
||||
let serialized_worker_url = worker_url.serialize();
|
||||
|
@ -247,14 +247,14 @@ impl DedicatedWorkerGlobalScope {
|
|||
}
|
||||
|
||||
pub trait DedicatedWorkerGlobalScopeHelpers {
|
||||
fn script_chan(self) -> Box<ScriptChan+Send>;
|
||||
fn script_chan(self) -> Box<ScriptChan + Send>;
|
||||
fn pipeline(self) -> PipelineId;
|
||||
fn new_script_pair(self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>);
|
||||
fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>);
|
||||
fn process_event(self, msg: CommonScriptMsg);
|
||||
}
|
||||
|
||||
impl<'a> DedicatedWorkerGlobalScopeHelpers for &'a DedicatedWorkerGlobalScope {
|
||||
fn script_chan(self) -> Box<ScriptChan+Send> {
|
||||
fn script_chan(self) -> Box<ScriptChan + Send> {
|
||||
box WorkerThreadWorkerChan {
|
||||
sender: self.own_sender.clone(),
|
||||
worker: self.worker.borrow().as_ref().unwrap().clone(),
|
||||
|
@ -265,7 +265,7 @@ impl<'a> DedicatedWorkerGlobalScopeHelpers for &'a DedicatedWorkerGlobalScope {
|
|||
self.id
|
||||
}
|
||||
|
||||
fn new_script_pair(self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>) {
|
||||
fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
|
||||
let (tx, rx) = channel();
|
||||
let chan = box SendableWorkerScriptChan {
|
||||
sender: tx,
|
||||
|
|
|
@ -267,7 +267,7 @@ impl<'a> FileReaderMethods for &'a FileReader {
|
|||
}
|
||||
|
||||
// https://w3c.github.io/FileAPI/#dfn-readAsText
|
||||
fn ReadAsText(self, blob: &Blob, label:Option<DOMString>) -> ErrorResult {
|
||||
fn ReadAsText(self, blob: &Blob, label: Option<DOMString>) -> ErrorResult {
|
||||
self.read(FileReaderFunction::ReadAsText, blob, label)
|
||||
}
|
||||
|
||||
|
|
|
@ -121,7 +121,7 @@ impl<'a> FormDataMethods for &'a FormData {
|
|||
}
|
||||
}
|
||||
|
||||
trait PrivateFormDataHelpers{
|
||||
trait PrivateFormDataHelpers {
|
||||
fn get_file_from_blob(self, value: &Blob, filename: Option<DOMString>) -> Root<File>;
|
||||
}
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ pub trait CollectionFilter : JSTraceable {
|
|||
#[must_root]
|
||||
pub enum CollectionTypeId {
|
||||
Static(Vec<JS<Element>>),
|
||||
Live(JS<Node>, Box<CollectionFilter+'static>)
|
||||
Live(JS<Node>, Box<CollectionFilter + 'static>)
|
||||
}
|
||||
|
||||
#[dom_struct]
|
||||
|
@ -53,7 +53,7 @@ impl HTMLCollection {
|
|||
|
||||
impl HTMLCollection {
|
||||
pub fn create(window: &Window, root: &Node,
|
||||
filter: Box<CollectionFilter+'static>) -> Root<HTMLCollection> {
|
||||
filter: Box<CollectionFilter + 'static>) -> Root<HTMLCollection> {
|
||||
HTMLCollection::new(window, CollectionTypeId::Live(JS::from_ref(root), filter))
|
||||
}
|
||||
|
||||
|
|
|
@ -7,8 +7,8 @@ use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding;
|
|||
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods;
|
||||
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
|
||||
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, EventCast};
|
||||
use dom::bindings::codegen::InheritTypes::{EventTargetCast, HTMLElementCast,
|
||||
HTMLIFrameElementDerived};
|
||||
use dom::bindings::codegen::InheritTypes::{EventTargetCast, HTMLElementCast};
|
||||
use dom::bindings::codegen::InheritTypes::HTMLIFrameElementDerived;
|
||||
use dom::bindings::conversions::ToJSValConvertible;
|
||||
use dom::bindings::error::{ErrorResult, Fallible};
|
||||
use dom::bindings::error::Error::NotSupported;
|
||||
|
|
|
@ -8,8 +8,8 @@ use dom::bindings::cell::DOMRefCell;
|
|||
use dom::bindings::codegen::Bindings::HTMLImageElementBinding;
|
||||
use dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;
|
||||
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
|
||||
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, EventTargetCast, HTMLElementCast,
|
||||
HTMLImageElementDerived};
|
||||
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, EventTargetCast};
|
||||
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLImageElementDerived};
|
||||
use dom::bindings::error::Fallible;
|
||||
use dom::bindings::global::GlobalRef;
|
||||
use dom::bindings::js::{LayoutJS, Root};
|
||||
|
|
|
@ -264,7 +264,7 @@ impl LayoutDataRef {
|
|||
|
||||
/// Borrows the layout data immutably. This function is *not* thread-safe.
|
||||
#[inline]
|
||||
pub fn borrow<'a>(&'a self) -> Ref<'a,Option<LayoutData>> {
|
||||
pub fn borrow<'a>(&'a self) -> Ref<'a, Option<LayoutData>> {
|
||||
debug_assert!(task_state::get().is_layout());
|
||||
self.data_cell.borrow()
|
||||
}
|
||||
|
@ -275,7 +275,7 @@ impl LayoutDataRef {
|
|||
/// prevent CSS selector matching from mutably accessing nodes it's not supposed to and racing
|
||||
/// on it. This has already resulted in one bug!
|
||||
#[inline]
|
||||
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a,Option<LayoutData>> {
|
||||
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, Option<LayoutData>> {
|
||||
debug_assert!(task_state::get().is_layout());
|
||||
self.data_cell.borrow_mut()
|
||||
}
|
||||
|
@ -1464,7 +1464,7 @@ pub enum CloneChildrenFlag {
|
|||
fn as_uintptr<T>(t: &T) -> uintptr_t { t as *const T as uintptr_t }
|
||||
|
||||
impl Node {
|
||||
pub fn reflect_node<N: Reflectable+NodeBase>
|
||||
pub fn reflect_node<N: Reflectable + NodeBase>
|
||||
(node: Box<N>,
|
||||
document: &Document,
|
||||
wrap_fn: extern "Rust" fn(*mut JSContext, GlobalRef, Box<N>) -> Root<N>)
|
||||
|
@ -2554,12 +2554,12 @@ pub struct TrustedNodeAddress(pub *const c_void);
|
|||
#[allow(unsafe_code)]
|
||||
unsafe impl Send for TrustedNodeAddress {}
|
||||
|
||||
pub fn document_from_node<T: NodeBase+Reflectable>(derived: &T) -> Root<Document> {
|
||||
pub fn document_from_node<T: NodeBase + Reflectable>(derived: &T) -> Root<Document> {
|
||||
let node: &Node = NodeCast::from_ref(derived);
|
||||
node.owner_doc()
|
||||
}
|
||||
|
||||
pub fn window_from_node<T: NodeBase+Reflectable>(derived: &T) -> Root<Window> {
|
||||
pub fn window_from_node<T: NodeBase + Reflectable>(derived: &T) -> Root<Window> {
|
||||
let document = document_from_node(derived);
|
||||
document.r().window()
|
||||
}
|
||||
|
|
|
@ -69,7 +69,7 @@ impl<'a> ProgressEventMethods for &'a ProgressEvent {
|
|||
}
|
||||
|
||||
// https://xhr.spec.whatwg.org/#dom-progressevent-loaded
|
||||
fn Loaded(self) -> u64{
|
||||
fn Loaded(self) -> u64 {
|
||||
self.loaded
|
||||
}
|
||||
|
||||
|
|
|
@ -114,7 +114,7 @@ impl Range {
|
|||
};
|
||||
|
||||
// Step 11.
|
||||
let contained_children : Vec<Root<Node>> =
|
||||
let contained_children: Vec<Root<Node>> =
|
||||
common_ancestor.children().filter(|n| Range::contains(self, n)).collect();
|
||||
|
||||
// Step 12.
|
||||
|
|
|
@ -63,13 +63,13 @@ pub struct ParserContext {
|
|||
/// The subpage associated with this document.
|
||||
subpage: Option<SubpageId>,
|
||||
/// The target event loop for the response notifications.
|
||||
script_chan: Box<ScriptChan+Send>,
|
||||
script_chan: Box<ScriptChan + Send>,
|
||||
/// The URL for this document.
|
||||
url: Url,
|
||||
}
|
||||
|
||||
impl ParserContext {
|
||||
pub fn new(id: PipelineId, subpage: Option<SubpageId>, script_chan: Box<ScriptChan+Send>,
|
||||
pub fn new(id: PipelineId, subpage: Option<SubpageId>, script_chan: Box<ScriptChan + Send>,
|
||||
url: Url) -> ParserContext {
|
||||
ParserContext {
|
||||
parser: RefCell::new(None),
|
||||
|
|
|
@ -144,7 +144,7 @@ trait PrivateStorageHelpers {
|
|||
impl<'a> PrivateStorageHelpers for &'a Storage {
|
||||
/// https://html.spec.whatwg.org/multipage/#send-a-storage-notification
|
||||
fn broadcast_change_notification(self, key: Option<DOMString>, old_value: Option<DOMString>,
|
||||
new_value: Option<DOMString>){
|
||||
new_value: Option<DOMString>) {
|
||||
let global_root = self.global.root();
|
||||
let global_ref = global_root.r();
|
||||
let main_script_chan = global_ref.as_window().main_thread_script_chan();
|
||||
|
|
|
@ -7,7 +7,7 @@ use dom::bindings::codegen::Bindings::WebGLActiveInfoBinding;
|
|||
use dom::bindings::codegen::Bindings::WebGLActiveInfoBinding::WebGLActiveInfoMethods;
|
||||
use dom::bindings::global::GlobalRef;
|
||||
use dom::bindings::js::Root;
|
||||
use dom::bindings::utils::{Reflector,reflect_dom_object};
|
||||
use dom::bindings::utils::{Reflector, reflect_dom_object};
|
||||
use util::str::DOMString;
|
||||
|
||||
#[dom_struct]
|
||||
|
|
|
@ -7,7 +7,7 @@ use dom::bindings::codegen::Bindings::WebGLShaderPrecisionFormatBinding;
|
|||
use dom::bindings::codegen::Bindings::WebGLShaderPrecisionFormatBinding::WebGLShaderPrecisionFormatMethods;
|
||||
use dom::bindings::global::GlobalRef;
|
||||
use dom::bindings::js::Root;
|
||||
use dom::bindings::utils::{Reflector,reflect_dom_object};
|
||||
use dom::bindings::utils::{Reflector, reflect_dom_object};
|
||||
|
||||
#[dom_struct]
|
||||
#[derive(HeapSizeOf)]
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
use dom::bindings::codegen::Bindings::WebGLUniformLocationBinding;
|
||||
use dom::bindings::global::GlobalRef;
|
||||
use dom::bindings::js::Root;
|
||||
use dom::bindings::utils::{Reflector,reflect_dom_object};
|
||||
use dom::bindings::utils::{Reflector, reflect_dom_object};
|
||||
|
||||
#[dom_struct]
|
||||
#[derive(HeapSizeOf)]
|
||||
|
|
|
@ -153,7 +153,7 @@ impl WebSocket {
|
|||
return Err(Syntax);
|
||||
}
|
||||
|
||||
if protocols[i+1..].iter().any(|p| p == protocol) {
|
||||
if protocols[i + 1..].iter().any(|p| p == protocol) {
|
||||
return Err(Syntax);
|
||||
|
||||
}
|
||||
|
|
|
@ -52,7 +52,7 @@ use profile_traits::mem;
|
|||
use string_cache::Atom;
|
||||
use util::geometry::{self, Au, MAX_RECT};
|
||||
use util::{breakpoint, opts};
|
||||
use util::str::{DOMString,HTML_SPACE_CHARACTERS};
|
||||
use util::str::{DOMString, HTML_SPACE_CHARACTERS};
|
||||
|
||||
use euclid::{Point2D, Rect, Size2D};
|
||||
use ipc_channel::ipc::IpcSender;
|
||||
|
@ -179,7 +179,7 @@ pub struct Window {
|
|||
|
||||
/// A handle to perform RPC calls into the layout, quickly.
|
||||
#[ignore_heap_size_of = "trait objects are hard"]
|
||||
layout_rpc: Box<LayoutRPC+'static>,
|
||||
layout_rpc: Box<LayoutRPC + 'static>,
|
||||
|
||||
/// The port that we will use to join layout. If this is `None`, then layout is not running.
|
||||
#[ignore_heap_size_of = "channels are hard"]
|
||||
|
@ -232,7 +232,7 @@ impl Window {
|
|||
self.js_runtime.borrow().as_ref().unwrap().cx()
|
||||
}
|
||||
|
||||
pub fn script_chan(&self) -> Box<ScriptChan+Send> {
|
||||
pub fn script_chan(&self) -> Box<ScriptChan + Send> {
|
||||
self.script_chan.clone()
|
||||
}
|
||||
|
||||
|
@ -264,7 +264,7 @@ impl Window {
|
|||
self.parent_info
|
||||
}
|
||||
|
||||
pub fn new_script_pair(&self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>) {
|
||||
pub fn new_script_pair(&self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
|
||||
let (tx, rx) = channel();
|
||||
(box SendableMainThreadScriptChan(tx), box rx)
|
||||
}
|
||||
|
@ -1143,7 +1143,7 @@ impl Window {
|
|||
}
|
||||
}
|
||||
|
||||
fn should_move_clip_rect(clip_rect: Rect<Au>, new_viewport: Rect<f32>) -> bool{
|
||||
fn should_move_clip_rect(clip_rect: Rect<Au>, new_viewport: Rect<f32>) -> bool {
|
||||
let clip_rect = Rect::new(Point2D::new(clip_rect.origin.x.to_f32_px(),
|
||||
clip_rect.origin.y.to_f32_px()),
|
||||
Size2D::new(clip_rect.size.width.to_f32_px(),
|
||||
|
|
|
@ -288,9 +288,9 @@ impl<'a> WorkerGlobalScopeMethods for &'a WorkerGlobalScope {
|
|||
pub trait WorkerGlobalScopeHelpers {
|
||||
fn execute_script(self, source: DOMString);
|
||||
fn handle_fire_timer(self, timer_id: TimerId);
|
||||
fn script_chan(self) -> Box<ScriptChan+Send>;
|
||||
fn script_chan(self) -> Box<ScriptChan + Send>;
|
||||
fn pipeline(self) -> PipelineId;
|
||||
fn new_script_pair(self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>);
|
||||
fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>);
|
||||
fn process_event(self, msg: CommonScriptMsg);
|
||||
fn get_cx(self) -> *mut JSContext;
|
||||
fn set_devtools_wants_updates(self, value: bool);
|
||||
|
@ -311,7 +311,7 @@ impl<'a> WorkerGlobalScopeHelpers for &'a WorkerGlobalScope {
|
|||
}
|
||||
}
|
||||
|
||||
fn script_chan(self) -> Box<ScriptChan+Send> {
|
||||
fn script_chan(self) -> Box<ScriptChan + Send> {
|
||||
let dedicated =
|
||||
DedicatedWorkerGlobalScopeCast::to_ref(self);
|
||||
match dedicated {
|
||||
|
@ -329,7 +329,7 @@ impl<'a> WorkerGlobalScopeHelpers for &'a WorkerGlobalScope {
|
|||
}
|
||||
}
|
||||
|
||||
fn new_script_pair(self) -> (Box<ScriptChan+Send>, Box<ScriptPort+Send>) {
|
||||
fn new_script_pair(self) -> (Box<ScriptChan + Send>, Box<ScriptPort + Send>) {
|
||||
let dedicated =
|
||||
DedicatedWorkerGlobalScopeCast::to_ref(self);
|
||||
match dedicated {
|
||||
|
|
|
@ -148,7 +148,7 @@ pub struct XMLHttpRequest {
|
|||
timeout_cancel: DOMRefCell<Option<Sender<()>>>,
|
||||
fetch_time: Cell<i64>,
|
||||
#[ignore_heap_size_of = "Cannot calculate Heap size"]
|
||||
timeout_target: DOMRefCell<Option<Box<ScriptChan+Send>>>,
|
||||
timeout_target: DOMRefCell<Option<Box<ScriptChan + Send>>>,
|
||||
generation_id: Cell<GenerationId>,
|
||||
response_status: Cell<Result<(), ()>>,
|
||||
}
|
||||
|
@ -201,13 +201,13 @@ impl XMLHttpRequest {
|
|||
fn check_cors(context: Arc<Mutex<XHRContext>>,
|
||||
load_data: LoadData,
|
||||
req: CORSRequest,
|
||||
script_chan: Box<ScriptChan+Send>,
|
||||
script_chan: Box<ScriptChan + Send>,
|
||||
resource_task: ResourceTask) {
|
||||
struct CORSContext {
|
||||
xhr: Arc<Mutex<XHRContext>>,
|
||||
load_data: RefCell<Option<LoadData>>,
|
||||
req: CORSRequest,
|
||||
script_chan: Box<ScriptChan+Send>,
|
||||
script_chan: Box<ScriptChan + Send>,
|
||||
resource_task: ResourceTask,
|
||||
}
|
||||
|
||||
|
@ -244,7 +244,7 @@ impl XMLHttpRequest {
|
|||
}
|
||||
|
||||
fn initiate_async_xhr(context: Arc<Mutex<XHRContext>>,
|
||||
script_chan: Box<ScriptChan+Send>,
|
||||
script_chan: Box<ScriptChan + Send>,
|
||||
resource_task: ResourceTask,
|
||||
load_data: LoadData) {
|
||||
impl AsyncResponseListener for XHRContext {
|
||||
|
@ -761,7 +761,7 @@ trait PrivateXMLHttpRequestHelpers {
|
|||
fn dispatch_upload_progress_event(self, type_: DOMString, partial_load: Option<u64>);
|
||||
fn dispatch_response_progress_event(self, type_: DOMString);
|
||||
fn text_response(self) -> DOMString;
|
||||
fn set_timeout(self, timeout:u32);
|
||||
fn set_timeout(self, timeout: u32);
|
||||
fn cancel_timeout(self);
|
||||
fn filter_response_headers(self) -> Headers;
|
||||
fn discard_subsequent_responses(self);
|
||||
|
|
|
@ -43,7 +43,7 @@ impl XMLHttpRequestEventTargetDerived for EventTarget {
|
|||
}
|
||||
|
||||
impl<'a> XMLHttpRequestEventTargetMethods for &'a XMLHttpRequestEventTarget {
|
||||
event_handler!(loadstart,GetOnloadstart, SetOnloadstart);
|
||||
event_handler!(loadstart, GetOnloadstart, SetOnloadstart);
|
||||
event_handler!(progress, GetOnprogress, SetOnprogress);
|
||||
event_handler!(abort, GetOnabort, SetOnabort);
|
||||
event_handler!(error, GetOnerror, SetOnerror);
|
||||
|
|
|
@ -39,7 +39,7 @@ pub enum Msg {
|
|||
AddStylesheet(Stylesheet, MediaQueryList),
|
||||
|
||||
/// Adds the given stylesheet to the document.
|
||||
LoadStylesheet(Url, MediaQueryList, PendingAsyncLoad, Box<StylesheetLoadResponder+Send>),
|
||||
LoadStylesheet(Url, MediaQueryList, PendingAsyncLoad, Box<StylesheetLoadResponder + Send>),
|
||||
|
||||
/// Puts a document into quirks mode, causing the quirks mode stylesheet to be loaded.
|
||||
SetQuirksMode,
|
||||
|
@ -199,7 +199,7 @@ pub trait ScriptLayoutChan {
|
|||
|
||||
impl ScriptLayoutChan for OpaqueScriptLayoutChannel {
|
||||
fn new(sender: Sender<Msg>, receiver: Receiver<Msg>) -> OpaqueScriptLayoutChannel {
|
||||
let inner = (box sender as Box<Any+Send>, box receiver as Box<Any+Send>);
|
||||
let inner = (box sender as Box<Any + Send>, box receiver as Box<Any + Send>);
|
||||
OpaqueScriptLayoutChannel(inner)
|
||||
}
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use std::sync::{Arc, Mutex};
|
|||
/// a target thread, where they are invoked on the provided context object.
|
||||
pub struct NetworkListener<T: AsyncResponseListener + PreInvoke + Send + 'static> {
|
||||
pub context: Arc<Mutex<T>>,
|
||||
pub script_chan: Box<ScriptChan+Send>,
|
||||
pub script_chan: Box<ScriptChan + Send>,
|
||||
}
|
||||
|
||||
impl<T: AsyncResponseListener + PreInvoke + Send + 'static> NetworkListener<T> {
|
||||
|
|
|
@ -32,8 +32,8 @@ use dom::bindings::js::{RootCollectionPtr, Root, RootedReference};
|
|||
use dom::bindings::refcounted::{LiveDOMReferences, Trusted, TrustedReference, trace_refcounted_objects};
|
||||
use dom::bindings::trace::{JSTraceable, trace_traceables, RootedVec};
|
||||
use dom::bindings::utils::{WRAP_CALLBACKS, DOM_CALLBACKS};
|
||||
use dom::document::{Document, IsHTMLDocument, DocumentHelpers, DocumentProgressHandler,
|
||||
DocumentProgressTask, DocumentSource, MouseEventType};
|
||||
use dom::document::{Document, IsHTMLDocument, DocumentHelpers, DocumentProgressHandler};
|
||||
use dom::document::{DocumentProgressTask, DocumentSource, MouseEventType};
|
||||
use dom::element::{Element, AttributeHandlers};
|
||||
use dom::event::{EventHelpers, EventBubbles, EventCancelable};
|
||||
use dom::htmliframeelement::HTMLIFrameElementHelpers;
|
||||
|
@ -190,7 +190,7 @@ pub enum CommonScriptMsg {
|
|||
/// A DOM object's last pinned reference was removed (dispatched to all tasks).
|
||||
RefcountCleanup(TrustedReference),
|
||||
/// Generic message that encapsulates event handling.
|
||||
RunnableMsg(Box<Runnable+Send>),
|
||||
RunnableMsg(Box<Runnable + Send>),
|
||||
}
|
||||
|
||||
/// Messages used to control the script event loop
|
||||
|
@ -203,7 +203,7 @@ pub enum MainThreadScriptMsg {
|
|||
/// should be closed (only dispatched to ScriptTask).
|
||||
ExitWindow(PipelineId),
|
||||
/// Generic message for running tasks in the ScriptTask
|
||||
MainThreadRunnableMsg(Box<MainThreadRunnable+Send>),
|
||||
MainThreadRunnableMsg(Box<MainThreadRunnable + Send>),
|
||||
/// Begins a content-initiated load on the specified pipeline (only
|
||||
/// dispatched to ScriptTask).
|
||||
Navigate(PipelineId, LoadData),
|
||||
|
@ -214,10 +214,10 @@ pub trait ScriptChan {
|
|||
/// Send a message to the associated event loop.
|
||||
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()>;
|
||||
/// Clone this handle.
|
||||
fn clone(&self) -> Box<ScriptChan+Send>;
|
||||
fn clone(&self) -> Box<ScriptChan + Send>;
|
||||
}
|
||||
|
||||
impl OpaqueSender<CommonScriptMsg> for Box<ScriptChan+Send> {
|
||||
impl OpaqueSender<CommonScriptMsg> for Box<ScriptChan + Send> {
|
||||
fn send(&self, msg: CommonScriptMsg) {
|
||||
ScriptChan::send(&**self, msg).unwrap();
|
||||
}
|
||||
|
@ -270,7 +270,7 @@ impl ScriptChan for SendableMainThreadScriptChan {
|
|||
return chan.send(msg).map_err(|_| ());
|
||||
}
|
||||
|
||||
fn clone(&self) -> Box<ScriptChan+Send> {
|
||||
fn clone(&self) -> Box<ScriptChan + Send> {
|
||||
let SendableMainThreadScriptChan(ref chan) = *self;
|
||||
box SendableMainThreadScriptChan((*chan).clone())
|
||||
}
|
||||
|
@ -294,7 +294,7 @@ impl ScriptChan for MainThreadScriptChan {
|
|||
return chan.send(MainThreadScriptMsg::Common(msg)).map_err(|_| ());
|
||||
}
|
||||
|
||||
fn clone(&self) -> Box<ScriptChan+Send> {
|
||||
fn clone(&self) -> Box<ScriptChan + Send> {
|
||||
let MainThreadScriptChan(ref chan) = *self;
|
||||
box MainThreadScriptChan((*chan).clone())
|
||||
}
|
||||
|
@ -436,8 +436,8 @@ impl ScriptTaskFactory for ScriptTask {
|
|||
ScriptLayoutChan::new(chan, port)
|
||||
}
|
||||
|
||||
fn clone_layout_channel(_phantom: Option<&mut ScriptTask>, pair: &OpaqueScriptLayoutChannel) -> Box<Any+Send> {
|
||||
box pair.sender() as Box<Any+Send>
|
||||
fn clone_layout_channel(_phantom: Option<&mut ScriptTask>, pair: &OpaqueScriptLayoutChannel) -> Box<Any + Send> {
|
||||
box pair.sender() as Box<Any + Send>
|
||||
}
|
||||
|
||||
fn create(_phantom: Option<&mut ScriptTask>,
|
||||
|
|
|
@ -142,7 +142,7 @@ impl TimerManager {
|
|||
timeout: i32,
|
||||
is_interval: IsInterval,
|
||||
source: TimerSource,
|
||||
script_chan: Box<ScriptChan+Send>)
|
||||
script_chan: Box<ScriptChan + Send>)
|
||||
-> i32 {
|
||||
let duration_ms = cmp::max(0, timeout) as u32;
|
||||
let handle = self.next_timer_handle.get();
|
||||
|
|
|
@ -135,7 +135,7 @@ pub enum ConstellationControlMsg {
|
|||
/// Notifies script task that all animations are done
|
||||
TickAllAnimations(PipelineId),
|
||||
/// Notifies script that a stylesheet has finished loading.
|
||||
StylesheetLoadComplete(PipelineId, Url, Box<StylesheetLoadResponder+Send>),
|
||||
StylesheetLoadComplete(PipelineId, Url, Box<StylesheetLoadResponder + Send>),
|
||||
/// Get the current state of the script task for a given pipeline.
|
||||
GetCurrentState(Sender<ScriptState>, PipelineId),
|
||||
}
|
||||
|
@ -169,7 +169,7 @@ pub enum CompositorEvent {
|
|||
|
||||
/// An opaque wrapper around script<->layout channels to avoid leaking message types into
|
||||
/// crates that don't need to know about them.
|
||||
pub struct OpaqueScriptLayoutChannel(pub (Box<Any+Send>, Box<Any+Send>));
|
||||
pub struct OpaqueScriptLayoutChannel(pub (Box<Any + Send>, Box<Any + Send>));
|
||||
|
||||
/// This trait allows creating a `ScriptTask` without depending on the `script`
|
||||
/// crate.
|
||||
|
@ -195,5 +195,5 @@ pub trait ScriptTaskFactory {
|
|||
fn create_layout_channel(_phantom: Option<&mut Self>) -> OpaqueScriptLayoutChannel;
|
||||
/// Clone the `Sender` in `pair`.
|
||||
fn clone_layout_channel(_phantom: Option<&mut Self>, pair: &OpaqueScriptLayoutChannel)
|
||||
-> Box<Any+Send>;
|
||||
-> Box<Any + Send>;
|
||||
}
|
||||
|
|
|
@ -155,7 +155,7 @@ impl Browser {
|
|||
}
|
||||
|
||||
fn create_constellation(opts: opts::Opts,
|
||||
compositor_proxy: Box<CompositorProxy+Send>,
|
||||
compositor_proxy: Box<CompositorProxy + Send>,
|
||||
time_profiler_chan: time::ProfilerChan,
|
||||
mem_profiler_chan: mem::ProfilerChan,
|
||||
devtools_chan: Option<Sender<devtools_traits::DevtoolsControlMsg>>,
|
||||
|
|
|
@ -378,7 +378,7 @@ impl Interpolate for Au {
|
|||
}
|
||||
}
|
||||
|
||||
impl <T> Interpolate for Option<T> where T:Interpolate {
|
||||
impl <T> Interpolate for Option<T> where T: Interpolate {
|
||||
#[inline]
|
||||
fn interpolate(&self, other: &Option<T>, time: f64) -> Option<Option<T>> {
|
||||
match (self, other) {
|
||||
|
|
|
@ -32,14 +32,14 @@ pub trait PresentationalHintSynthesis {
|
|||
/// `common_style_affecting_attributes` or `rare_style_affecting_attributes` as appropriate. If
|
||||
/// you don't, you risk strange random nondeterministic failures due to false positives in
|
||||
/// style sharing.
|
||||
fn synthesize_presentational_hints_for_legacy_attributes<E,V>(
|
||||
fn synthesize_presentational_hints_for_legacy_attributes<E, V>(
|
||||
&self, element: &E, matching_rules_list: &mut V, shareable: &mut bool)
|
||||
where E: Element + TElementAttributes,
|
||||
V: VecLike<DeclarationBlock<Vec<PropertyDeclaration>>>;
|
||||
}
|
||||
|
||||
impl PresentationalHintSynthesis for Stylist {
|
||||
fn synthesize_presentational_hints_for_legacy_attributes<E,V>(
|
||||
fn synthesize_presentational_hints_for_legacy_attributes<E, V>(
|
||||
&self, element: &E, matching_rules_list: &mut V, shareable: &mut bool)
|
||||
where E: Element + TElementAttributes,
|
||||
V: VecLike<DeclarationBlock<Vec<PropertyDeclaration>>> {
|
||||
|
|
|
@ -200,7 +200,7 @@ impl Stylist {
|
|||
/// The returned boolean indicates whether the style is *shareable*; that is, whether the
|
||||
/// matched selectors are simple enough to allow the matching logic to be reduced to the logic
|
||||
/// in `css::matching::PrivateMatchMethods::candidate_element_allows_for_style_sharing`.
|
||||
pub fn push_applicable_declarations<E,V>(
|
||||
pub fn push_applicable_declarations<E, V>(
|
||||
&self,
|
||||
element: &E,
|
||||
parent_bf: Option<&BloomFilter>,
|
||||
|
|
|
@ -10,8 +10,8 @@ use url::Url;
|
|||
|
||||
use encoding::EncodingRef;
|
||||
|
||||
use cssparser::{Parser, decode_stylesheet_bytes,
|
||||
QualifiedRuleParser, AtRuleParser, RuleListParser, AtRuleType};
|
||||
use cssparser::{Parser, decode_stylesheet_bytes, QualifiedRuleParser, AtRuleParser};
|
||||
use cssparser::{RuleListParser, AtRuleType};
|
||||
use string_cache::{Atom, Namespace};
|
||||
use selectors::parser::{Selector, parse_selector_list};
|
||||
use parser::{ParserContext, log_css_error};
|
||||
|
|
|
@ -16,11 +16,11 @@ pub struct HashCache<K, V> {
|
|||
entries: HashMap<K, V, DefaultState<SipHasher>>,
|
||||
}
|
||||
|
||||
impl<K, V> HashCache<K,V>
|
||||
impl<K, V> HashCache<K, V>
|
||||
where K: Clone + PartialEq + Eq + Hash,
|
||||
V: Clone,
|
||||
{
|
||||
pub fn new() -> HashCache<K,V> {
|
||||
pub fn new() -> HashCache<K, V> {
|
||||
HashCache {
|
||||
entries: HashMap::with_hash_state(<DefaultState<SipHasher> as Default>::default()),
|
||||
}
|
||||
|
@ -58,7 +58,7 @@ pub struct LRUCache<K, V> {
|
|||
cache_size: usize,
|
||||
}
|
||||
|
||||
impl<K: Clone + PartialEq, V: Clone> LRUCache<K,V> {
|
||||
impl<K: Clone + PartialEq, V: Clone> LRUCache<K, V> {
|
||||
pub fn new(size: usize) -> LRUCache<K, V> {
|
||||
LRUCache {
|
||||
entries: vec!(),
|
||||
|
@ -76,7 +76,7 @@ impl<K: Clone + PartialEq, V: Clone> LRUCache<K,V> {
|
|||
self.entries[last_index].1.clone()
|
||||
}
|
||||
|
||||
pub fn iter<'a>(&'a self) -> Iter<'a,(K,V)> {
|
||||
pub fn iter<'a>(&'a self) -> Iter<'a,(K, V)> {
|
||||
self.entries.iter()
|
||||
}
|
||||
|
||||
|
@ -110,14 +110,14 @@ impl<K: Clone + PartialEq, V: Clone> LRUCache<K,V> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct SimpleHashCache<K,V> {
|
||||
entries: Vec<Option<(K,V)>>,
|
||||
pub struct SimpleHashCache<K, V> {
|
||||
entries: Vec<Option<(K, V)>>,
|
||||
k0: u64,
|
||||
k1: u64,
|
||||
}
|
||||
|
||||
impl<K:Clone+Eq+Hash,V:Clone> SimpleHashCache<K,V> {
|
||||
pub fn new(cache_size: usize) -> SimpleHashCache<K,V> {
|
||||
impl<K: Clone + Eq + Hash, V: Clone> SimpleHashCache<K, V> {
|
||||
pub fn new(cache_size: usize) -> SimpleHashCache<K, V> {
|
||||
let mut r = rand::thread_rng();
|
||||
SimpleHashCache {
|
||||
entries: vec![None; cache_size],
|
||||
|
@ -132,7 +132,7 @@ impl<K:Clone+Eq+Hash,V:Clone> SimpleHashCache<K,V> {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
fn bucket_for_key<Q:Hash>(&self, key: &Q) -> usize {
|
||||
fn bucket_for_key<Q: Hash>(&self, key: &Q) -> usize {
|
||||
let mut hasher = SipHasher::new_with_keys(self.k0, self.k1);
|
||||
key.hash(&mut hasher);
|
||||
self.to_bucket(hasher.finish() as usize)
|
||||
|
|
|
@ -279,7 +279,7 @@ pub fn px_to_pt(px: f64) -> f64 {
|
|||
/// Returns true if the rect contains the given point. Points on the top or left sides of the rect
|
||||
/// are considered inside the rectangle, while points on the right or bottom sides of the rect are
|
||||
/// not considered inside the rectangle.
|
||||
pub fn rect_contains_point<T:PartialOrd + Add<T, Output=T>>(rect: Rect<T>, point: Point2D<T>) -> bool {
|
||||
pub fn rect_contains_point<T: PartialOrd + Add<T, Output=T>>(rect: Rect<T>, point: Point2D<T>) -> bool {
|
||||
point.x >= rect.origin.x && point.x < rect.origin.x + rect.size.width &&
|
||||
point.y >= rect.origin.y && point.y < rect.origin.y + rect.size.height
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
|
|||
use std::sync::mpsc::{self, Receiver, Sender};
|
||||
|
||||
lazy_static! {
|
||||
static ref IN_PROCESS_SENDERS: Mutex<HashMap<usize,Box<Any + Send>>> =
|
||||
static ref IN_PROCESS_SENDERS: Mutex<HashMap<usize, Box<Any + Send>>> =
|
||||
Mutex::new(HashMap::new());
|
||||
}
|
||||
|
||||
|
@ -49,7 +49,7 @@ impl<T> Clone for OptionalIpcSender<T> where T: Deserialize + Serialize + Send +
|
|||
|
||||
impl<T> Deserialize for OptionalIpcSender<T> where T: Deserialize + Serialize + Send + Any {
|
||||
fn deserialize<D>(deserializer: &mut D)
|
||||
-> Result<OptionalIpcSender<T>,D::Error> where D: Deserializer {
|
||||
-> Result<OptionalIpcSender<T>, D::Error> where D: Deserializer {
|
||||
if opts::get().multiprocess {
|
||||
return Ok(OptionalIpcSender::OutOfProcess(try!(Deserialize::deserialize(
|
||||
deserializer))))
|
||||
|
@ -66,7 +66,7 @@ impl<T> Deserialize for OptionalIpcSender<T> where T: Deserialize + Serialize +
|
|||
}
|
||||
|
||||
impl<T> Serialize for OptionalIpcSender<T> where T: Deserialize + Serialize + Send + Any {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(),S::Error> where S: Serializer {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error> where S: Serializer {
|
||||
match *self {
|
||||
OptionalIpcSender::OutOfProcess(ref ipc_sender) => ipc_sender.serialize(serializer),
|
||||
OptionalIpcSender::InProcess(ref sender) => {
|
||||
|
|
|
@ -456,7 +456,7 @@ impl<T: Copy + Add<T, Output=T>> LogicalPoint<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Copy + Add<T,Output=T>> Add<LogicalSize<T>> for LogicalPoint<T> {
|
||||
impl<T: Copy + Add<T, Output=T>> Add<LogicalSize<T>> for LogicalPoint<T> {
|
||||
type Output = LogicalPoint<T>;
|
||||
|
||||
#[inline]
|
||||
|
@ -470,7 +470,7 @@ impl<T: Copy + Add<T,Output=T>> Add<LogicalSize<T>> for LogicalPoint<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Copy + Sub<T,Output=T>> Sub<LogicalSize<T>> for LogicalPoint<T> {
|
||||
impl<T: Copy + Sub<T, Output=T>> Sub<LogicalSize<T>> for LogicalPoint<T> {
|
||||
type Output = LogicalPoint<T>;
|
||||
|
||||
#[inline]
|
||||
|
|
|
@ -50,7 +50,7 @@ impl<T> PersistentList<T> where T: Send + Sync {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
pub fn iter<'a>(&'a self) -> PersistentListIterator<'a,T> {
|
||||
pub fn iter<'a>(&'a self) -> PersistentListIterator<'a, T> {
|
||||
// This could clone (and would not need the lifetime if it did), but then it would incur
|
||||
// atomic operations on every call to `.next()`. Bad.
|
||||
PersistentListIterator {
|
||||
|
@ -74,7 +74,7 @@ pub struct PersistentListIterator<'a,T> where T: 'a + Send + Sync {
|
|||
entry: Option<&'a PersistentListEntry<T>>,
|
||||
}
|
||||
|
||||
impl<'a,T> Iterator for PersistentListIterator<'a,T> where T: Send + Sync + 'static {
|
||||
impl<'a, T> Iterator for PersistentListIterator<'a, T> where T: Send + Sync + 'static {
|
||||
type Item = &'a T;
|
||||
|
||||
#[inline]
|
||||
|
|
|
@ -34,7 +34,7 @@ impl TaskPool {
|
|||
for i in 0..tasks {
|
||||
let state = state.clone();
|
||||
spawn_named(
|
||||
format!("TaskPoolWorker {}/{}", i+1, tasks),
|
||||
format!("TaskPoolWorker {}/{}", i + 1, tasks),
|
||||
move || worker(&*state));
|
||||
}
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use std::ops;
|
|||
|
||||
/// FIXME(pcwalton): Workaround for lack of unboxed closures. This is called in
|
||||
/// performance-critical code, so a closure is insufficient.
|
||||
pub trait Comparator<K,T> {
|
||||
pub trait Comparator<K, T> {
|
||||
fn compare(&self, key: &K, value: &T) -> Ordering;
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ pub trait BinarySearchMethods<T: Ord + PartialOrd + PartialEq> {
|
|||
}
|
||||
|
||||
pub trait FullBinarySearchMethods<T> {
|
||||
fn binary_search_index_by<K,C:Comparator<K,T>>(&self, key: &K, cmp: C) -> Option<usize>;
|
||||
fn binary_search_index_by<K, C: Comparator<K, T>>(&self, key: &K, cmp: C) -> Option<usize>;
|
||||
}
|
||||
|
||||
impl<T: Ord + PartialOrd + PartialEq> BinarySearchMethods<T> for [T] {
|
||||
|
@ -34,13 +34,13 @@ impl<T: Ord + PartialOrd + PartialEq> BinarySearchMethods<T> for [T] {
|
|||
}
|
||||
|
||||
impl<T> FullBinarySearchMethods<T> for [T] {
|
||||
fn binary_search_index_by<K,C:Comparator<K,T>>(&self, key: &K, cmp: C) -> Option<usize> {
|
||||
fn binary_search_index_by<K, C: Comparator<K, T>>(&self, key: &K, cmp: C) -> Option<usize> {
|
||||
if self.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut low : isize = 0;
|
||||
let mut high : isize = (self.len() as isize) - 1;
|
||||
let mut low: isize = 0;
|
||||
let mut high: isize = (self.len() as isize) - 1;
|
||||
|
||||
while low <= high {
|
||||
// http://googleresearch.blogspot.com/2006/06/extra-extra-read-all-about-it-nearly.html
|
||||
|
@ -59,7 +59,7 @@ impl<T> FullBinarySearchMethods<T> for [T] {
|
|||
|
||||
struct DefaultComparator;
|
||||
|
||||
impl<T:PartialEq + PartialOrd + Ord> Comparator<T,T> for DefaultComparator {
|
||||
impl<T: PartialEq + PartialOrd + Ord> Comparator<T, T> for DefaultComparator {
|
||||
fn compare(&self, key: &T, value: &T) -> Ordering {
|
||||
(*key).cmp(value)
|
||||
}
|
||||
|
|
|
@ -273,7 +273,7 @@ impl<QueueData: Sync, WorkData: Send> WorkQueue<QueueData, WorkData> {
|
|||
for (i, thread) in threads.into_iter().enumerate() {
|
||||
|
||||
spawn_named(
|
||||
format!("{} worker {}/{}", task_name, i+1, thread_count),
|
||||
format!("{} worker {}/{}", task_name, i + 1, thread_count),
|
||||
move || {
|
||||
task_state::initialize(state | task_state::IN_WORKER);
|
||||
let mut thread = thread;
|
||||
|
|
|
@ -19,18 +19,17 @@ extern crate rustc_serialize;
extern crate uuid;
extern crate ipc_channel;

use msg::constellation_msg::{ConstellationChan, LoadData, FrameId, PipelineId, NavigationDirection,
                             WebDriverCommandMsg};
use msg::constellation_msg::{ConstellationChan, LoadData, FrameId, PipelineId};
use msg::constellation_msg::{NavigationDirection, WebDriverCommandMsg};
use msg::constellation_msg::Msg as ConstellationMsg;
use msg::webdriver_msg::{WebDriverFrameId, WebDriverScriptCommand, WebDriverJSError, WebDriverJSResult, LoadStatus};

use url::Url;
use webdriver::command::{WebDriverMessage, WebDriverCommand};
use webdriver::command::{GetParameters, JavascriptCommandParameters, LocatorParameters,
                         SwitchToFrameParameters, TimeoutsParameters};
use webdriver::command::{GetParameters, JavascriptCommandParameters, LocatorParameters};
use webdriver::command::{SwitchToFrameParameters, TimeoutsParameters};
use webdriver::common::{LocatorStrategy, WebElement};
use webdriver::response::{
    WebDriverResponse, NewSessionResponse, ValueResponse};
use webdriver::response::{WebDriverResponse, NewSessionResponse, ValueResponse};
use webdriver::server::{self, WebDriverHandler, Session};
use webdriver::error::{WebDriverResult, WebDriverError, ErrorStatus};
use util::task::spawn_named;
|
@ -498,7 +497,7 @@ impl Handler {
|
|||
"Taking screenshot failed"))
|
||||
};
|
||||
let config = Config {
|
||||
char_set:CharacterSet::Standard,
|
||||
char_set: CharacterSet::Standard,
|
||||
newline: Newline::LF,
|
||||
pad: true,
|
||||
line_length: None
|
||||
|
|
|
@ -58,19 +58,19 @@ bitflags! {
|
|||
|
||||
// Some shortcuts use Cmd on Mac and Control on other systems.
|
||||
#[cfg(all(feature = "window", target_os="macos"))]
|
||||
const CMD_OR_CONTROL : constellation_msg::KeyModifiers = SUPER;
|
||||
const CMD_OR_CONTROL: constellation_msg::KeyModifiers = SUPER;
|
||||
#[cfg(all(feature = "window", not(target_os="macos")))]
|
||||
const CMD_OR_CONTROL : constellation_msg::KeyModifiers = CONTROL;
|
||||
const CMD_OR_CONTROL: constellation_msg::KeyModifiers = CONTROL;
|
||||
|
||||
// Some shortcuts use Cmd on Mac and Alt on other systems.
|
||||
#[cfg(all(feature = "window", target_os="macos"))]
|
||||
const CMD_OR_ALT : constellation_msg::KeyModifiers = SUPER;
|
||||
const CMD_OR_ALT: constellation_msg::KeyModifiers = SUPER;
|
||||
#[cfg(all(feature = "window", not(target_os="macos")))]
|
||||
const CMD_OR_ALT : constellation_msg::KeyModifiers = ALT;
|
||||
const CMD_OR_ALT: constellation_msg::KeyModifiers = ALT;
|
||||
|
||||
// This should vary by zoom level and maybe actual text size (focused or under cursor)
|
||||
#[cfg(feature = "window")]
|
||||
const LINE_HEIGHT : f32 = 38.0;
|
||||
const LINE_HEIGHT: f32 = 38.0;
|
||||
|
||||
/// The type of a window.
|
||||
#[cfg(feature = "window")]
|
||||
|
@ -212,7 +212,7 @@ impl Window {
|
|||
MouseScrollDelta::PixelDelta(_, dy) => dy
|
||||
};
|
||||
if dy < 0.0 {
|
||||
self.event_queue.borrow_mut().push(WindowEvent::PinchZoom(1.0/1.1));
|
||||
self.event_queue.borrow_mut().push(WindowEvent::PinchZoom(1.0 / 1.1));
|
||||
} else if dy > 0.0 {
|
||||
self.event_queue.borrow_mut().push(WindowEvent::PinchZoom(1.1));
|
||||
}
|
||||
|
@ -523,7 +523,7 @@ impl WindowMethods for Window {
|
|||
}
|
||||
|
||||
fn create_compositor_channel(window: &Option<Rc<Window>>)
|
||||
-> (Box<CompositorProxy+Send>, Box<CompositorReceiver>) {
|
||||
-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>) {
|
||||
let (sender, receiver) = channel();
|
||||
|
||||
let window_proxy = match window {
|
||||
|
@ -534,7 +534,7 @@ impl WindowMethods for Window {
|
|||
(box GlutinCompositorProxy {
|
||||
sender: sender,
|
||||
window_proxy: window_proxy,
|
||||
} as Box<CompositorProxy+Send>,
|
||||
} as Box<CompositorProxy + Send>,
|
||||
box receiver as Box<CompositorReceiver>)
|
||||
}
|
||||
|
||||
|
@ -641,7 +641,7 @@ impl WindowMethods for Window {
|
|||
self.event_queue.borrow_mut().push(WindowEvent::Zoom(1.1));
|
||||
}
|
||||
(CMD_OR_CONTROL, Key::Minus) => {
|
||||
self.event_queue.borrow_mut().push(WindowEvent::Zoom(1.0/1.1));
|
||||
self.event_queue.borrow_mut().push(WindowEvent::Zoom(1.0 / 1.1));
|
||||
}
|
||||
(CMD_OR_CONTROL, Key::Num0) |
|
||||
(CMD_OR_CONTROL, Key::Kp0) => {
|
||||
|
@ -746,13 +746,13 @@ impl WindowMethods for Window {
|
|||
}
|
||||
|
||||
fn create_compositor_channel(_: &Option<Rc<Window>>)
|
||||
-> (Box<CompositorProxy+Send>, Box<CompositorReceiver>) {
|
||||
-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>) {
|
||||
let (sender, receiver) = channel();
|
||||
|
||||
(box GlutinCompositorProxy {
|
||||
sender: sender,
|
||||
window_proxy: None,
|
||||
} as Box<CompositorProxy+Send>,
|
||||
} as Box<CompositorProxy + Send>,
|
||||
box receiver as Box<CompositorReceiver>)
|
||||
}
|
||||
|
||||
|
@ -818,11 +818,11 @@ impl CompositorProxy for GlutinCompositorProxy {
|
|||
window_proxy.wakeup_event_loop()
|
||||
}
|
||||
}
|
||||
fn clone_compositor_proxy(&self) -> Box<CompositorProxy+Send> {
|
||||
fn clone_compositor_proxy(&self) -> Box<CompositorProxy + Send> {
|
||||
box GlutinCompositorProxy {
|
||||
sender: self.sender.clone(),
|
||||
window_proxy: self.window_proxy.clone(),
|
||||
} as Box<CompositorProxy+Send>
|
||||
} as Box<CompositorProxy + Send>
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -202,9 +202,10 @@ fn read_input_device(device_path: &Path,
|
|||
let cur_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
|
||||
println!("Zooming {} {} {} {}",
|
||||
cur_dist, last_dist, screen_dist,
|
||||
((screen_dist + (cur_dist - last_dist))/screen_dist));
|
||||
((screen_dist + (cur_dist - last_dist)) / screen_dist));
|
||||
sender.send(
|
||||
WindowEvent::Zoom((screen_dist + (cur_dist - last_dist))/screen_dist)).ok().unwrap();
|
||||
WindowEvent::Zoom((screen_dist + (cur_dist - last_dist)) /
|
||||
screen_dist)).ok().unwrap();
|
||||
last_dist = cur_dist;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -830,12 +830,12 @@ impl WindowMethods for Window {
|
|||
}
|
||||
|
||||
fn create_compositor_channel(window: &Option<Rc<Window>>)
|
||||
-> (Box<CompositorProxy+Send>, Box<CompositorReceiver>) {
|
||||
-> (Box<CompositorProxy + Send>, Box<CompositorReceiver>) {
|
||||
let (sender, receiver) = channel();
|
||||
(box GonkCompositorProxy {
|
||||
sender: sender,
|
||||
event_sender: window.as_ref().unwrap().event_send.clone(),
|
||||
} as Box<CompositorProxy+Send>,
|
||||
} as Box<CompositorProxy + Send>,
|
||||
box receiver as Box<CompositorReceiver>)
|
||||
}
|
||||
|
||||
|
@ -865,11 +865,11 @@ impl CompositorProxy for GonkCompositorProxy {
|
|||
self.sender.send(msg).ok().unwrap();
|
||||
self.event_sender.send(WindowEvent::Idle).ok().unwrap();
|
||||
}
|
||||
fn clone_compositor_proxy(&self) -> Box<CompositorProxy+Send> {
|
||||
fn clone_compositor_proxy(&self) -> Box<CompositorProxy + Send> {
|
||||
box GonkCompositorProxy {
|
||||
sender: self.sender.clone(),
|
||||
event_sender: self.event_sender.clone(),
|
||||
} as Box<CompositorProxy+Send>
|
||||
} as Box<CompositorProxy + Send>
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -165,6 +165,93 @@ def check_toml(file_name, contents):
            yield (idx + 1, "found asterisk instead of minimum version number")


def check_rust(file_name, contents):
    if not file_name.endswith(".rs") or \
            file_name.endswith("properties.mako.rs") or \
            file_name.endswith("style/build.rs") or \
            file_name.endswith("unit/style/stylesheets.rs"):
        raise StopIteration
    contents = contents.splitlines(True)
    comment_depth = 0
    merged_lines = ''
    for idx, line in enumerate(contents):
        # simplify the analysis
        line = line.strip()

        # Simple heuristic to avoid common case of no comments.
        if '/' in line:
            comment_depth += line.count('/*')
            comment_depth -= line.count('*/')

        if line.endswith('\\'):
            merged_lines += line[:-1]
            continue
        if comment_depth:
            merged_lines += line
            continue
        if merged_lines:
            line = merged_lines + line
            merged_lines = ''

        # get rid of strings and chars because cases like regex expression
        line = re.sub('".*?"|\'.*?\'', '', line)

        # get rid of comments and attributes
        line = re.sub('//.*?$|/\*.*?$|^\*.*?$|^#.*?$', '', line)

        match = re.search(r",[A-Za-z0-9]", line)
        if match:
            yield (idx + 1, "missing space after ,")

        # Avoid flagging <Item=Foo> constructs
        def is_associated_type(match, line, index):
            open_angle = line[0:match.end()].rfind('<')
            close_angle = line[open_angle:].find('>') if open_angle != -1 else -1
            is_equals = match.group(0)[index] == '='
            generic_open = open_angle != -1 and open_angle < match.start()
            generic_close = close_angle != -1 and close_angle + open_angle >= match.end()
            return is_equals and generic_open and generic_close

        # - not included because of scientific notation (1e-6)
        match = re.search(r"[A-Za-z0-9][\+/\*%=]", line)
        if match and not is_associated_type(match, line, 1):
            yield (idx + 1, "missing space before %s" % match.group(0)[1])

        # * not included because of dereferencing and casting
        # - not included because of unary negation
        match = re.search(r"[\+/\%=][A-Za-z0-9]", line)
        if match and not is_associated_type(match, line, 0):
            yield (idx + 1, "missing space after %s" % match.group(0)[0])

        match = re.search(r"\)->", line)
        if match:
            yield (idx + 1, "missing space before ->")

        match = re.search(r"->[A-Za-z]", line)
        if match:
            yield (idx + 1, "missing space after ->")

        # Avoid flagging ::crate::mod and `trait Foo : Bar`
        match = line.find(" :")
        if match != -1:
            if line[0:match].find('trait ') == -1 and line[match + 2] != ':':
                yield (idx + 1, "extra space before :")

        # Avoid flagging crate::mod
        match = re.search(r"[^:]:[A-Za-z]", line)
        if match:
            # Avoid flagging macros like $t1:expr
            if line[0:match.end()].rfind('$') == -1:
                yield (idx + 1, "missing space after :")

        match = re.search(r"[A-Za-z0-9\)]{", line)
        if match:
            yield (idx + 1, "missing space before {")

        if line.startswith("use ") and "{" in line and "}" not in line:
            yield (idx + 1, "use statement spans multiple lines")


def check_webidl_spec(file_name, contents):
    # Sorted by this function (in pseudo-Rust). The idea is to group the same
    # organization together.
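
For reference, a minimal usage sketch of the new check_rust generator (not part of the patch). It assumes the checker is importable from the tidy module this diff extends; the Rust snippet and the file name below are made up for illustration.

# Hypothetical driver for check_rust; the import path is an assumption.
from tidy import check_rust

sample = (
    'fn add(a: i32,b: i32)->i32 {\n'
    '    a+b\n'
    '}\n'
)

# check_rust yields (line_number, message) pairs for every style nit it spots.
for line_number, message in check_rust("example.rs", sample):
    print("example.rs:%d: %s" % (line_number, message))

# Expected nits: line 1 is flagged for the missing space after ",", before "->",
# and after "->"; line 2 for the missing spaces around "+".

Constructs such as Iterator<Item=u32> are deliberately left alone: the nested is_associated_type helper suppresses the missing-space warnings when the '=' sits inside angle brackets.
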
@ -278,7 +365,8 @@ def scan():
    all_files = collect_file_names()
    files_to_check = filter(should_check, all_files)

    checking_functions = [check_license, check_by_line, check_flake8, check_toml, check_webidl_spec, check_spec]
    checking_functions = [check_license, check_by_line, check_flake8, check_toml,
                          check_rust, check_webidl_spec, check_spec]
    errors = collect_errors_for_files(files_to_check, checking_functions)

    reftest_files = collect_file_names(reftest_directories)

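The wiring above treats every entry of checking_functions, including the new check_rust, as a generator of (line_number, message) pairs. The sketch below is an assumption about how collect_errors_for_files fans those checkers out over the files, shown only to make the data flow explicit; it is not the function's actual body.

# Hedged sketch: apply every checking function to every file and surface each
# nit as a (file_name, line_number, message) triple.
def collect_errors_for_files(files_to_check, checking_functions):
    for file_name in files_to_check:
        with open(file_name, "r") as fp:
            contents = fp.read()
        for check in checking_functions:
            for line_number, message in check(file_name, contents):
                yield (file_name, line_number, message)
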
@ -28,18 +28,18 @@ fn test_sniff_mp4_matcher() {

    match read_result {
        Ok(data) => {
            println!("Data Length {:?}",data.len());
            println!("Data Length {:?}", data.len());
            if !matcher.matches(&data) {
                panic!("Didn't read mime type")
            }
        },
        Err(e) => panic!("Couldn't read from file with error {}",e)
        Err(e) => panic!("Couldn't read from file with error {}", e)
    }
}

#[cfg(test)]
fn test_sniff_full(filename_orig: &path::Path,type_string: &str,subtype_string: &str,
                   supplied_type: Option<(&'static str,&'static str)>){
fn test_sniff_full(filename_orig: &path::Path, type_string: &str, subtype_string: &str,
                   supplied_type: Option<(&'static str, &'static str)>) {
    let current_working_directory = env::current_dir().unwrap();
    println!("The current directory is {}", current_working_directory.display());

@ -71,19 +71,19 @@ fn test_sniff_full(filename_orig: &path::Path,type_string: &str,subtype_string:
}

#[cfg(test)]
fn test_sniff_classification(file: &str,type_string: &str,subtype_string: &str,
                             supplied_type: Option<(&'static str,&'static str)>){
fn test_sniff_classification(file: &str, type_string: &str, subtype_string: &str,
                             supplied_type: Option<(&'static str, &'static str)>) {
    let mut x = PathBuf::from("./");
    x.push(type_string);
    x.push(subtype_string);
    x.push(file);
    test_sniff_full(&x,type_string,subtype_string,supplied_type);
    test_sniff_full(&x, type_string, subtype_string, supplied_type);
}
#[cfg(test)]
fn test_sniff_classification_sup(file: &str,type_string: &'static str,subtype_string: &str) {
    test_sniff_classification(file,type_string,subtype_string, None);
fn test_sniff_classification_sup(file: &str, type_string: &'static str, subtype_string: &str) {
    test_sniff_classification(file, type_string, subtype_string, None);
    let class_type = Some((type_string, ""));
    test_sniff_classification(file,type_string,subtype_string,class_type);
    test_sniff_classification(file, type_string, subtype_string, class_type);
}

#[test]

@ -26,7 +26,7 @@ fn test_lru_cache() {
    let four = Cell::new("four");

    // Test normal insertion.
    let mut cache: LRUCache<usize,Cell<&str>> = LRUCache::new(2); // (_, _) (cache is empty)
    let mut cache: LRUCache<usize, Cell<&str>> = LRUCache::new(2); // (_, _) (cache is empty)
    cache.insert(1, one); // (1, _)
    cache.insert(2, two); // (1, 2)
    cache.insert(3, three); // (2, 3)

@ -3,8 +3,8 @@
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use euclid::{Size2D, Point2D, SideOffsets2D, Rect};
use util::logical_geometry::{WritingMode, LogicalSize, LogicalPoint, LogicalMargin, LogicalRect,
                             FLAG_RTL, FLAG_VERTICAL, FLAG_VERTICAL_LR, FLAG_SIDEWAYS_LEFT};
use util::logical_geometry::{WritingMode, LogicalSize, LogicalPoint, LogicalMargin, LogicalRect};
use util::logical_geometry::{FLAG_RTL, FLAG_VERTICAL, FLAG_VERTICAL_LR, FLAG_SIDEWAYS_LEFT};

#[cfg(test)]
fn modes() -> [WritingMode; 10] {