cogl: Read pixels as per the stored format

By the looks of it, commit 95e9fa10ef was papering over an Intel DRI bug
that made it return post-swizzling pixel data on glReadPixels(). There
have been reports over time of that commit resulting in wrong colors on
other drivers, and lately Mesa >17.3 started showing the same symptoms
on Intel.

But texture swizzling works by changing parameters before the fragment
shaders run, and reading pixels from an already drawn FBO/texture doesn't
involve those. This should thus use pixel_format_to_gl_with_target(),
which results in correctly requesting the same pixel format as the
underlying texture, while still considering it BGRA for the upper layers
in the swizzling case.
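
For illustration, this is roughly what the swizzling case amounts to at
the GL level (a minimal sketch in plain desktop GL, assuming GL 3.3 /
ARB_texture_swizzle; this is not cogl code). The swizzle only remaps
channels when the texture is sampled in the fragment stage, so
glReadPixels() on an FBO backed by such a texture never goes through it:

    #include <GL/gl.h>

    /* Sketch: present a texture stored as GL_RGBA to the sampling stage
     * with its red and blue channels exchanged.  The swizzle applies
     * only when fragment shaders sample the texture; glReadPixels()
     * hands back the stored bytes and is unaffected by it. */
    static void
    set_bgra_swizzle (GLuint tex)
    {
      static const GLint swizzle[4] = { GL_BLUE, GL_GREEN, GL_RED, GL_ALPHA };

      glBindTexture (GL_TEXTURE_2D, tex);
      glTexParameteriv (GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_RGBA, swizzle);
    }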

https://gitlab.gnome.org/GNOME/mutter/issues/72

Closes: #72
commit 2f260edf19 (parent a5fd9a6e2f)
Author:    Carlos Garnacho, 2018-03-09 16:46:59 +01:00
Committer: Ray Strode

@@ -1412,22 +1412,12 @@ _cogl_framebuffer_gl_read_pixels_into_bitmap (CoglFramebuffer *framebuffer,
   if (!cogl_is_offscreen (framebuffer))
     y = framebuffer_height - y - height;
 
-  required_format = ctx->driver_vtable->pixel_format_to_gl (ctx,
-                                                            format,
-                                                            &gl_intformat,
-                                                            &gl_format,
-                                                            &gl_type);
-#if HAVE_COGL_GL
-  /* As we are reading pixels, we want to consider the bitmap according to
-   * its real pixel format, not the swizzled channels we pretend face to the
-   * pipeline.
-   */
-  if ((ctx->driver == COGL_DRIVER_GL || ctx->driver == COGL_DRIVER_GL3) &&
-      (format == COGL_PIXEL_FORMAT_BGRA_8888 ||
-       format == COGL_PIXEL_FORMAT_BGRA_8888_PRE) &&
-      _cogl_has_private_feature (ctx, COGL_PRIVATE_FEATURE_TEXTURE_SWIZZLE))
-    gl_format = GL_BGRA;
-#endif
+  required_format = ctx->driver_vtable->pixel_format_to_gl_with_target (ctx,
+                                                                        framebuffer->internal_format,
+                                                                        format,
+                                                                        &gl_intformat,
+                                                                        &gl_format,
+                                                                        &gl_type);
 
   /* NB: All offscreen rendering is done upside down so there is no need
    * to flip in this case... */
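
To spell out the effect of the hunk above (a hypothetical illustration,
not code from this patch): with the swizzle in use, a cogl "BGRA" texture
actually stores RGBA bytes, so the read-back has to ask GL for the stored
format rather than the pretend one.

    #include <GL/gl.h>

    /* Hypothetical illustration of the behaviour change; the formats
     * mirror the hunk above, but this is not cogl code. */
    static void
    read_back_swizzled_bgra (int x, int y, int width, int height,
                             unsigned char *data)
    {
      /* Pre-patch: gl_format was forced to GL_BGRA, so drivers that
       * honor the stored layout returned swapped channels:
       *
       *   glReadPixels (x, y, width, height, GL_BGRA, GL_UNSIGNED_BYTE, data);
       */

      /* Post-patch: request what the texture actually stores; the upper
       * layers still treat the resulting bitmap as BGRA. */
      glReadPixels (x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
    }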