diff --git a/gfx/wr/webrender/res/brush_blend.glsl b/gfx/wr/webrender/res/brush_blend.glsl
index 016fd26231de..64356d95f475 100644
--- a/gfx/wr/webrender/res/brush_blend.glsl
+++ b/gfx/wr/webrender/res/brush_blend.glsl
@@ -37,10 +37,7 @@ void brush_vs(
     // PictureTask src_task = fetch_picture_task(user_data.x);
     vec2 texture_size = vec2(textureSize(sColor0, 0).xy);
     vec2 f = (vi.local_pos - local_rect.p0) / local_rect.size;
-    ImageResourceExtra extra_data = fetch_image_resource_extra(user_data.x);
-    vec2 x = mix(extra_data.st_tl, extra_data.st_tr, f.x);
-    vec2 y = mix(extra_data.st_bl, extra_data.st_br, f.x);
-    f = mix(x, y, f.y);
+    f = get_image_quad_uv(user_data.x, f);
     vec2 uv = mix(uv0, uv1, f);
     float perspective_interpolate = (brush_flags & BRUSH_FLAG_PERSPECTIVE_INTERPOLATION) != 0 ? 1.0 : 0.0;
 
diff --git a/gfx/wr/webrender/res/brush_image.glsl b/gfx/wr/webrender/res/brush_image.glsl
index ee4bdffb1207..8c78e79ed14e 100644
--- a/gfx/wr/webrender/res/brush_image.glsl
+++ b/gfx/wr/webrender/res/brush_image.glsl
@@ -131,10 +131,7 @@ void brush_vs(
             // Since the screen space UVs specify an arbitrary quad, do
             // a bilinear interpolation to get the correct UV for this
             // local position.
-            ImageResourceExtra extra_data = fetch_image_resource_extra(user_data.w);
-            vec2 x = mix(extra_data.st_tl, extra_data.st_tr, f.x);
-            vec2 y = mix(extra_data.st_bl, extra_data.st_br, f.x);
-            f = mix(x, y, f.y);
+            f = get_image_quad_uv(user_data.w, f);
             break;
         }
         default:
diff --git a/gfx/wr/webrender/res/gpu_cache.glsl b/gfx/wr/webrender/res/gpu_cache.glsl
index a4c391721dc4..e12483d20bfd 100644
--- a/gfx/wr/webrender/res/gpu_cache.glsl
+++ b/gfx/wr/webrender/res/gpu_cache.glsl
@@ -117,20 +117,21 @@ ImageResource fetch_image_resource_direct(ivec2 address) {
 
 // Fetch optional extra data for a texture cache resource. This can contain
 // a polygon defining a UV rect within the texture cache resource.
+// Note: the polygon coordinates are in homogeneous space.
 struct ImageResourceExtra {
-    vec2 st_tl;
-    vec2 st_tr;
-    vec2 st_bl;
-    vec2 st_br;
+    vec4 st_tl;
+    vec4 st_tr;
+    vec4 st_bl;
+    vec4 st_br;
 };
 
 ImageResourceExtra fetch_image_resource_extra(int address) {
-    vec4 data[2] = fetch_from_gpu_cache_2(address + VECS_PER_IMAGE_RESOURCE);
+    vec4 data[4] = fetch_from_gpu_cache_4(address + VECS_PER_IMAGE_RESOURCE);
     return ImageResourceExtra(
-        data[0].xy,
-        data[0].zw,
-        data[1].xy,
-        data[1].zw
+        data[0],
+        data[1],
+        data[2],
+        data[3]
     );
 }
 
diff --git a/gfx/wr/webrender/res/prim_shared.glsl b/gfx/wr/webrender/res/prim_shared.glsl
index 22ab93111578..1358f57c75fe 100644
--- a/gfx/wr/webrender/res/prim_shared.glsl
+++ b/gfx/wr/webrender/res/prim_shared.glsl
@@ -222,6 +222,16 @@ void write_clip(vec4 world_pos, vec2 snap_offset, ClipArea area) {
     );
     vClipMaskUv = vec4(uv, area.common_data.texture_layer_index, world_pos.w);
 }
+
+// Read the extra image data containing the homogeneous screen space coordinates
+// of the corners, interpolate between them, and return the real screen space UV.
+vec2 get_image_quad_uv(int address, vec2 f) {
+    ImageResourceExtra extra_data = fetch_image_resource_extra(address);
+    vec4 x = mix(extra_data.st_tl, extra_data.st_tr, f.x);
+    vec4 y = mix(extra_data.st_bl, extra_data.st_br, f.x);
+    vec4 z = mix(x, y, f.y);
+    return z.xy / z.w;
+}
 #endif //WR_VERTEX_SHADER
 
 #ifdef WR_FRAGMENT_SHADER
diff --git a/gfx/wr/webrender/res/ps_split_composite.glsl b/gfx/wr/webrender/res/ps_split_composite.glsl
index 6388c023d115..f032951b7ee8 100644
--- a/gfx/wr/webrender/res/ps_split_composite.glsl
+++ b/gfx/wr/webrender/res/ps_split_composite.glsl
@@ -61,7 +61,6 @@ void main(void) {
     PictureTask dest_task = fetch_picture_task(ph.render_task_index);
     Transform transform = fetch_transform(ph.transform_id);
     ImageResource res = fetch_image_resource(ph.user_data.x);
-    ImageResourceExtra extra_data = fetch_image_resource_extra(ph.user_data.x);
     ClipArea clip_area = fetch_clip_area(ph.clip_task_index);
 
     vec2 dest_origin = dest_task.common_data.task_rect.p0 -
@@ -99,12 +98,7 @@ void main(void) {
     ) / texture_size.xyxy;
 
     vec2 f = (local_pos - ph.local_rect.p0) / ph.local_rect.size;
-
-    f = bilerp(
-        extra_data.st_tl, extra_data.st_tr,
-        extra_data.st_bl, extra_data.st_br,
-        f.y, f.x
-    );
+    f = get_image_quad_uv(ph.user_data.x, f);
     vec2 uv = mix(uv0, uv1, f);
     float perspective_interpolate = float(ph.user_data.y);
 
diff --git a/gfx/wr/webrender/src/device/gl.rs b/gfx/wr/webrender/src/device/gl.rs
index 0d770c0d4172..92a0220cfa2f 100644
--- a/gfx/wr/webrender/src/device/gl.rs
+++ b/gfx/wr/webrender/src/device/gl.rs
@@ -1260,7 +1260,7 @@ impl Device {
     #[cfg(debug_assertions)]
    fn print_shader_errors(source: &str, log: &str) {
         // hacky way to extract the offending lines
-        if !log.starts_with("0:") {
+        if !log.starts_with("0:") && !log.starts_with("0(") {
            return;
         }
         let end_pos = match log[2..].chars().position(|c| !c.is_digit(10)) {
diff --git a/gfx/wr/webrender/src/gpu_cache.rs b/gfx/wr/webrender/src/gpu_cache.rs
index 1ce2544186d3..31d829f4be8c 100644
--- a/gfx/wr/webrender/src/gpu_cache.rs
+++ b/gfx/wr/webrender/src/gpu_cache.rs
@@ -25,7 +25,7 @@
 //! for this frame.
 
 use api::{DebugFlags, DocumentId, PremultipliedColorF, IdNamespace, TexelRect};
-use euclid::TypedRect;
+use euclid::{HomogeneousVector, TypedRect};
 use internal_types::{FastHashMap};
 use profiler::GpuCacheProfileCounters;
 use render_backend::{FrameStamp, FrameId};
@@ -112,6 +112,19 @@ impl<P> From<TypedRect<f32, P>> for GpuBlockData {
     }
 }
 
+impl<P> From<HomogeneousVector<f32, P>> for GpuBlockData {
+    fn from(v: HomogeneousVector<f32, P>) -> Self {
+        GpuBlockData {
+            data: [
+                v.x,
+                v.y,
+                v.z,
+                v.w,
+            ],
+        }
+    }
+}
+
 impl From<TexelRect> for GpuBlockData {
     fn from(tr: TexelRect) -> Self {
         GpuBlockData {
diff --git a/gfx/wr/webrender/src/gpu_types.rs b/gfx/wr/webrender/src/gpu_types.rs
index 9892db585079..3a5cd4c4ebea 100644
--- a/gfx/wr/webrender/src/gpu_types.rs
+++ b/gfx/wr/webrender/src/gpu_types.rs
@@ -3,7 +3,8 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{
-    DevicePoint, DeviceSize, DeviceRect, LayoutRect, LayoutToWorldTransform, LayoutTransform,
+    DeviceHomogeneousVector, DevicePoint, DeviceSize, DeviceRect,
+    LayoutRect, LayoutToWorldTransform, LayoutTransform,
     PremultipliedColorF, LayoutToPictureTransform, PictureToLayoutTransform,
     PicturePixel, WorldPixel, WorldToLayoutTransform, LayoutPoint,
 };
@@ -565,10 +566,10 @@ pub enum UvRectKind {
     // use a bilerp() to correctly interpolate a
     // UV coord in the vertex shader.
     Quad {
-        top_left: DevicePoint,
-        top_right: DevicePoint,
-        bottom_left: DevicePoint,
-        bottom_right: DevicePoint,
+        top_left: DeviceHomogeneousVector,
+        top_right: DeviceHomogeneousVector,
+        bottom_left: DeviceHomogeneousVector,
+        bottom_right: DeviceHomogeneousVector,
     },
 }
 
@@ -585,6 +586,8 @@ pub struct ImageSource {
 
 impl ImageSource {
     pub fn write_gpu_blocks(&self, request: &mut GpuDataRequest) {
+        // see fetch_image_resource in GLSL
+        // has to be VECS_PER_IMAGE_RESOURCE vectors
         request.push([
             self.p0.x,
             self.p0.y,
@@ -600,19 +603,12 @@ impl ImageSource {
 
         // If this is a polygon uv kind, then upload the four vertices.
         if let UvRectKind::Quad { top_left, top_right, bottom_left, bottom_right } = self.uv_rect_kind {
-            request.push([
-                top_left.x,
-                top_left.y,
-                top_right.x,
-                top_right.y,
-            ]);
-
-            request.push([
-                bottom_left.x,
-                bottom_left.y,
-                bottom_right.x,
-                bottom_right.y,
-            ]);
+            // see fetch_image_resource_extra in GLSL
+            //Note: we really need only 3 components per point here: X, Y, and W
+            request.push(top_left);
+            request.push(top_right);
+            request.push(bottom_left);
+            request.push(bottom_right);
         }
     }
 }
diff --git a/gfx/wr/webrender/src/picture.rs b/gfx/wr/webrender/src/picture.rs
index 45c15c983c34..aa0f83d37e36 100644
--- a/gfx/wr/webrender/src/picture.rs
+++ b/gfx/wr/webrender/src/picture.rs
@@ -7,13 +7,13 @@ use api::{DeviceIntRect, DeviceIntSize, DevicePoint, DeviceRect};
 use api::{LayoutRect, PictureToRasterTransform, LayoutPixel, PropertyBinding, PropertyBindingId};
 use api::{DevicePixelScale, RasterRect, RasterSpace, ColorF, ImageKey, DirtyRect, WorldSize, ClipMode, LayoutSize};
 use api::{PicturePixel, RasterPixel, WorldPixel, WorldRect, ImageFormat, ImageDescriptor, WorldVector2D, LayoutPoint};
-use api::{DebugFlags, DeviceVector2D};
+use api::{DebugFlags, DeviceHomogeneousVector, DeviceVector2D};
 use box_shadow::{BLUR_SAMPLE_SCALE};
 use clip::{ClipChainId, ClipChainNode, ClipItem};
 use clip_scroll_tree::{ROOT_SPATIAL_NODE_INDEX, ClipScrollTree, SpatialNodeIndex, CoordinateSystemId};
 use debug_colors;
 use device::TextureFilter;
-use euclid::{size2, TypedScale, vec3, TypedRect, TypedPoint2D, TypedSize2D};
+use euclid::{size2, vec3, TypedRect, TypedPoint2D, TypedSize2D};
 use euclid::approxeq::ApproxEq;
 use frame_builder::{FrameVisibilityContext, FrameVisibilityState};
 use intern::ItemUid;
@@ -2942,38 +2942,38 @@ impl PicturePrimitive {
     }
 }
 
-// Calculate a single screen-space UV for a picture.
+// Calculate a single homogeneous screen-space UV for a picture.
 fn calculate_screen_uv(
     local_pos: &PicturePoint,
     transform: &PictureToRasterTransform,
     rendered_rect: &DeviceRect,
     device_pixel_scale: DevicePixelScale,
     supports_snapping: bool,
-) -> DevicePoint {
-    let raster_pos = match transform.transform_point2d(local_pos) {
-        Some(pos) => pos,
-        None => {
-            //Warning: this is incorrect and needs to be fixed properly.
-            // The transformation has put a local vertex behind the near clipping plane...
-            // Proper solution would be to keep the near-clipping-plane results around
-            // (currently produced by calculate_screen_bounding_rect) and use them here.
-            return DevicePoint::new(0.5, 0.5);
-        }
-    };
+) -> DeviceHomogeneousVector {
+    let raster_pos = transform.transform_point2d_homogeneous(local_pos);
 
-    let raster_to_device_space = TypedScale::new(1.0) * device_pixel_scale;
-
-    let mut device_pos = raster_pos * raster_to_device_space;
+    let mut device_vec = DeviceHomogeneousVector::new(
+        raster_pos.x * device_pixel_scale.0,
+        raster_pos.y * device_pixel_scale.0,
+        0.0,
+        raster_pos.w,
+    );
 
     // Apply snapping for axis-aligned scroll nodes, as per prim_shared.glsl.
     if transform.transform_kind() == TransformedRectKind::AxisAligned && supports_snapping {
-        device_pos.x = (device_pos.x + 0.5).floor();
-        device_pos.y = (device_pos.y + 0.5).floor();
+        device_vec = DeviceHomogeneousVector::new(
+            (device_vec.x / device_vec.w + 0.5).floor(),
+            (device_vec.y / device_vec.w + 0.5).floor(),
+            0.0,
+            1.0,
+        );
     }
 
-    DevicePoint::new(
-        (device_pos.x - rendered_rect.origin.x) / rendered_rect.size.width,
-        (device_pos.y - rendered_rect.origin.y) / rendered_rect.size.height,
+    DeviceHomogeneousVector::new(
+        (device_vec.x - rendered_rect.origin.x * device_vec.w) / rendered_rect.size.width,
+        (device_vec.y - rendered_rect.origin.y * device_vec.w) / rendered_rect.size.height,
+        0.0,
+        device_vec.w,
     )
 }
 
diff --git a/gfx/wr/webrender_api/src/units.rs b/gfx/wr/webrender_api/src/units.rs
index 3fcf4655def4..fd9c6f1bc919 100644
--- a/gfx/wr/webrender_api/src/units.rs
+++ b/gfx/wr/webrender_api/src/units.rs
@@ -15,6 +15,7 @@
 use app_units::Au;
 use euclid::{Length, TypedRect, TypedScale, TypedSize2D, TypedTransform3D, TypedTranslation2D};
 use euclid::{TypedPoint2D, TypedPoint3D, TypedVector2D, TypedVector3D, TypedSideOffsets2D};
+use euclid::HomogeneousVector;
 use DirtyRect;
 
 /// Geometry in the coordinate system of the render target (screen or intermediate
@@ -32,6 +33,7 @@ pub type DeviceRect = TypedRect<f32, DevicePixel>;
 pub type DevicePoint = TypedPoint2D<f32, DevicePixel>;
 pub type DeviceVector2D = TypedVector2D<f32, DevicePixel>;
 pub type DeviceSize = TypedSize2D<f32, DevicePixel>;
+pub type DeviceHomogeneousVector = HomogeneousVector<f32, DevicePixel>;
 
 /// Geometry in the coordinate system of a Picture (intermediate
 /// surface) in physical pixels.
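
Why the quad corners are now kept in homogeneous form: UV interpolation is only perspective-correct if the divide by w happens after the interpolation. The old path divided each corner down to a 2D DevicePoint up front and then did a plain bilerp in the shader, which skews the result whenever the corners project with different w, and it forced the near-plane fallback that calculate_screen_uv just dropped. The sketch below is plain Rust with no WebRender or euclid types (the Homo struct and the sample corner values are made up for illustration) and mirrors the structure of get_image_quad_uv in prim_shared.glsl: interpolate x, y and w linearly, then divide once at the end.

// Simplified stand-in for DeviceHomogeneousVector; z is omitted because,
// as the gpu_types.rs comment notes, only X, Y and W matter here.
#[derive(Clone, Copy)]
struct Homo { x: f32, y: f32, w: f32 }

// Component-wise linear interpolation, like GLSL mix().
fn mix(a: Homo, b: Homo, t: f32) -> Homo {
    Homo {
        x: a.x + (b.x - a.x) * t,
        y: a.y + (b.y - a.y) * t,
        w: a.w + (b.w - a.w) * t,
    }
}

// Same shape as get_image_quad_uv: bilerp the homogeneous corners,
// and perform the perspective divide only once, at the very end.
fn image_quad_uv(tl: Homo, tr: Homo, bl: Homo, br: Homo, fx: f32, fy: f32) -> (f32, f32) {
    let top = mix(tl, tr, fx);
    let bottom = mix(bl, br, fx);
    let p = mix(top, bottom, fy);
    (p.x / p.w, p.y / p.w)
}

fn main() {
    // Hypothetical quad seen in perspective: the top edge is twice as far away.
    let tl = Homo { x: 0.0, y: 0.0, w: 2.0 };
    let tr = Homo { x: 2.0, y: 0.0, w: 2.0 };
    let bl = Homo { x: 0.0, y: 1.0, w: 1.0 };
    let br = Homo { x: 1.0, y: 1.0, w: 1.0 };

    // Sample the middle of the quad in local space.
    let (u, v) = image_quad_uv(tl, tr, bl, br, 0.5, 0.5);

    // Bilerping the already-divided 2D corners would give v = 0.5 here;
    // the homogeneous path gives v = 1/3, the perspective-correct answer
    // (the far half of the quad covers less of the screen).
    println!("u = {}, v = {}", u, v);
}

When all four corners share the same w, the homogeneous path reduces to the old 2D bilerp, so ordinary non-perspective content should behave as before; the difference only shows up for transformed pictures whose corners project with different w, and keeping w around also removes the need for the old DevicePoint::new(0.5, 0.5) fallback for vertices behind the near plane.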