author    Neil Roberts <neil@linux.intel.com>  2013-06-21 17:46:19 +0100
committer Neil Roberts <neil@linux.intel.com>  2013-07-01 13:40:39 +0100
commit    e3bd994ea1cd9468d93a04174cecbac8dde4125e
tree      b79e07bef0fdccf5c10654b805dd98945ce33cc7
parent    bf1596f41b86559c60a5f8b68a252afa41a2e97b
Fix the alpha value in the default texture data
When a layer is added to a pipeline without setting a texture, it ends up
sampling from a default 1x1 texture which is meant to be solid white.
However, for some reason we were creating the texture with 0 opacity,
which is effectively an invalid premultiplied colour. This would make the
blending behave oddly if it was used.

https://bugzilla.gnome.org/show_bug.cgi?id=702570

Reviewed-by: Robert Bragg <robert@linux.intel.com>
(cherry picked from commit 2ffc77565fb6395b986d3274f8bdb6eee6addbf9)
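Background on why { 0xff, 0xff, 0xff, 0x00 } is an invalid premultiplied
colour (an illustrative sketch, not part of the commit): in a
premultiplied format each colour channel is stored already multiplied by
the alpha, so no channel can legitimately exceed the alpha value. The
helper names below are hypothetical, not Cogl API:

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Convert a straight-alpha RGBA texel to premultiplied form by
     * scaling each colour channel by alpha / 255, rounded. */
    static void
    premultiply_rgba (uint8_t texel[4])
    {
      for (int i = 0; i < 3; i++)
        texel[i] = (uint8_t) ((texel[i] * texel[3] + 127) / 255);
    }

    /* A premultiplied texel is only valid if no colour channel
     * exceeds its alpha. */
    static bool
    premultiplied_texel_is_valid (const uint8_t texel[4])
    {
      return texel[0] <= texel[3] &&
             texel[1] <= texel[3] &&
             texel[2] <= texel[3];
    }

    int
    main (void)
    {
      uint8_t fixed_texel[4] = { 0xff, 0xff, 0xff, 0xff }; /* new default */
      uint8_t old_texel[4]   = { 0xff, 0xff, 0xff, 0x00 }; /* old default */

      /* Premultiplying opaque white is a no-op, which is why the
       * fixed initializer can simply be all 0xff. */
      premultiply_rgba (fixed_texel);

      assert (premultiplied_texel_is_valid (fixed_texel));
      assert (!premultiplied_texel_is_valid (old_texel));

      return 0;
    }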
 cogl/cogl-context.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/cogl/cogl-context.c b/cogl/cogl-context.c
index 175e69dd..359e1129 100644
--- a/cogl/cogl-context.c
+++ b/cogl/cogl-context.c
@@ -133,7 +133,7 @@ cogl_context_new (CoglDisplay *display,
                    CoglError **error)
 {
   CoglContext *context;
-  GLubyte default_texture_data[] = { 0xff, 0xff, 0xff, 0x0 };
+  uint8_t default_texture_data[] = { 0xff, 0xff, 0xff, 0xff };
   CoglBitmap *default_texture_bitmap;
   const CoglWinsysVtable *winsys;
   int i;
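The one-line change above also swaps the GL-specific GLubyte for the
portable uint8_t while fixing the alpha byte. To make "blending behave
oddly" concrete, here is an illustrative sketch (not Cogl code; mul8 and
blend_premul are hypothetical helpers) assuming the default MODULATE
texture combine and the standard premultiplied-alpha blend,
result = src + dst * (1 - src_alpha):

    #include <stdint.h>
    #include <stdio.h>

    /* Multiply two 8-bit channels with rounding, as MODULATE does. */
    static uint8_t
    mul8 (uint8_t a, uint8_t b)
    {
      return (uint8_t) ((a * b + 127) / 255);
    }

    /* Premultiplied-alpha "over": result = src + dst * (1 - src_alpha),
     * clamped to 8 bits. */
    static uint8_t
    blend_premul (uint8_t src, uint8_t dst, uint8_t src_alpha)
    {
      unsigned int out = src + (dst * (255 - src_alpha) + 127) / 255;
      return out > 255 ? 255 : (uint8_t) out;
    }

    int
    main (void)
    {
      uint8_t colour[4]    = { 0xff, 0x00, 0x00, 0xff }; /* opaque premul red */
      uint8_t old_texel[4] = { 0xff, 0xff, 0xff, 0x00 }; /* pre-fix default */
      uint8_t new_texel[4] = { 0xff, 0xff, 0xff, 0xff }; /* fixed default */
      uint8_t dst[4]       = { 0x40, 0x40, 0x40, 0xff }; /* grey destination */

      /* Channel 3 is alpha. With the old texel the modulated fragment
       * is { 0xff, 0, 0, 0 }: red with zero alpha, so the blend turns
       * additive and yields { 0xff, 0x40, 0x40, 0xff }. With the fixed
       * texel the fragment is { 0xff, 0, 0, 0xff } and opaque red
       * replaces the destination as expected. */
      for (int i = 0; i < 4; i++)
        printf ("channel %d: old 0x%02x  new 0x%02x\n", i,
                blend_premul (mul8 (colour[i], old_texel[i]), dst[i],
                              mul8 (colour[3], old_texel[3])),
                blend_premul (mul8 (colour[i], new_texel[i]), dst[i],
                              mul8 (colour[3], new_texel[3])));

      return 0;
    }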