cogl-vertex-buffer: Add support for unsigned int indices

This adds a COGL_INDICES_TYPE_UNSIGNED_INT enum value so that unsigned
ints can be used with cogl_vertex_buffer_indices_new. Unsigned ints
are not supported in core on GLES, so a feature flag has also been
added to advertise support for them. The GLES backend only sets the
feature if the GL_OES_element_index_uint extension is available. It is
an error to call indices_new() with unsigned ints unless the feature
is advertised.

http://bugzilla.openedhand.com/show_bug.cgi?id=1998
Neil Roberts 2010-02-23 14:45:44 +00:00
parent b583083a3f
commit 82fd07c54a
5 changed files with 37 additions and 3 deletions
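
For context, a minimal sketch of how a caller could pick up the new index
type, guarding on the feature flag added here and falling back to shorts
when 32-bit indices are not advertised. Only the COGL_* names,
cogl_features_available() and cogl_vertex_buffer_indices_new() come from
Cogl; the index arrays and the (type, array, count) argument order are
illustrative assumptions:

/* Illustrative caller: prefer 32-bit indices when the feature is there. */
static const GLuint   indices_int[]   = { 0, 1, 2, 2, 1, 3 };
static const GLushort indices_short[] = { 0, 1, 2, 2, 1, 3 };
CoglHandle indices;

if (cogl_features_available (COGL_FEATURE_UNSIGNED_INT_INDICES))
  indices = cogl_vertex_buffer_indices_new (COGL_INDICES_TYPE_UNSIGNED_INT,
                                            indices_int,
                                            G_N_ELEMENTS (indices_int));
else
  indices = cogl_vertex_buffer_indices_new (COGL_INDICES_TYPE_UNSIGNED_SHORT,
                                            indices_short,
                                            G_N_ELEMENTS (indices_short));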


@@ -199,6 +199,9 @@ typedef enum { /*< prefix=COGL_PIXEL_FORMAT >*/
  * @COGL_FEATURE_STENCIL_BUFFER: Stencil buffer support
  * @COGL_FEATURE_VBOS: VBO support
  * @COGL_FEATURE_PBOS: PBO support
+ * @COGL_FEATURE_UNSIGNED_INT_INDICES: Set if
+ * %COGL_INDICES_TYPE_UNSIGNED_INT is supported in
+ * cogl_vertex_buffer_indices_new().
  *
  * Flags for the supported features.
  *
@@ -217,7 +220,8 @@ typedef enum
   COGL_FEATURE_FOUR_CLIP_PLANES     = (1 << 9),
   COGL_FEATURE_STENCIL_BUFFER       = (1 << 10),
   COGL_FEATURE_VBOS                 = (1 << 11),
-  COGL_FEATURE_PBOS                 = (1 << 12)
+  COGL_FEATURE_PBOS                 = (1 << 12),
+  COGL_FEATURE_UNSIGNED_INT_INDICES = (1 << 13)
 } CoglFeatureFlags;
 
 /**


@@ -171,6 +171,11 @@
 #endif
 
+/* This isn't defined in the GLES headers */
+#ifndef GL_UNSIGNED_INT
+#define GL_UNSIGNED_INT 0x1405
+#endif
+
 /*
  * GL/GLES compatability defines for shader things:
  */
@@ -1782,8 +1787,10 @@ get_indices_type_size (GLuint indices_type)
 {
   if (indices_type == GL_UNSIGNED_BYTE)
     return sizeof (GLubyte);
-  if (indices_type == GL_UNSIGNED_SHORT)
+  else if (indices_type == GL_UNSIGNED_SHORT)
     return sizeof (GLushort);
+  else if (indices_type == GL_UNSIGNED_INT)
+    return sizeof (GLuint);
   else
     {
       g_critical ("Unknown indices type %d\n", indices_type);
@@ -1809,6 +1816,14 @@ cogl_vertex_buffer_indices_new (CoglIndicesType indices_type,
     indices->type = GL_UNSIGNED_BYTE;
   else if (indices_type == COGL_INDICES_TYPE_UNSIGNED_SHORT)
     indices->type = GL_UNSIGNED_SHORT;
+  else if (indices_type == COGL_INDICES_TYPE_UNSIGNED_INT)
+    {
+      g_return_val_if_fail (cogl_features_available
+                            (COGL_FEATURE_UNSIGNED_INT_INDICES),
+                            COGL_INVALID_HANDLE);
+      indices->type = GL_UNSIGNED_INT;
+    }
   else
     {
       g_critical ("unknown indices type %d", indices_type);


@@ -301,14 +301,22 @@ cogl_vertex_buffer_draw (CoglHandle handle,
  * CoglIndicesType:
  * @COGL_INDICES_TYPE_UNSIGNED_BYTE: Your indices are unsigned bytes
  * @COGL_INDICES_TYPE_UNSIGNED_SHORT: Your indices are unsigned shorts
+ * @COGL_INDICES_TYPE_UNSIGNED_INT: Your indices are unsigned ints
  *
  * You should aim to use the smallest data type that gives you enough
  * range, since it reduces the size of your index array and can help
  * reduce the demand on memory bandwidth.
+ *
+ * Note that %COGL_INDICES_TYPE_UNSIGNED_INT is only supported if the
+ * %COGL_FEATURE_UNSIGNED_INT_INDICES feature is available. This
+ * should always be available on OpenGL but on OpenGL ES it will only
+ * be available if the GL_OES_element_index_uint extension is
+ * advertized.
  */
 typedef enum {
   COGL_INDICES_TYPE_UNSIGNED_BYTE,
   COGL_INDICES_TYPE_UNSIGNED_SHORT,
+  COGL_INDICES_TYPE_UNSIGNED_INT,
 } CoglIndicesType;
 
 /**
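
As a worked example of the "smallest type that gives you enough range"
advice above, a hypothetical helper (not part of this commit) could pick
the index type from the largest index value, using the new feature flag
as the gate for 32-bit indices:

/* Hypothetical helper: narrowest CoglIndicesType that can hold max_index.
 * Falls back to shorts when unsigned int indices are not advertised, in
 * which case the caller would have to split its geometry. */
static CoglIndicesType
pick_indices_type (guint32 max_index)
{
  if (max_index <= G_MAXUINT8)
    return COGL_INDICES_TYPE_UNSIGNED_BYTE;
  else if (max_index <= G_MAXUINT16)
    return COGL_INDICES_TYPE_UNSIGNED_SHORT;
  else if (cogl_features_available (COGL_FEATURE_UNSIGNED_INT_INDICES))
    return COGL_INDICES_TYPE_UNSIGNED_INT;
  else
    return COGL_INDICES_TYPE_UNSIGNED_SHORT;
}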


@@ -185,7 +185,8 @@ _cogl_features_init (void)
   _cogl_get_gl_version (&gl_major, &gl_minor);
 
-  flags = COGL_FEATURE_TEXTURE_READ_PIXELS;
+  flags = (COGL_FEATURE_TEXTURE_READ_PIXELS
+           | COGL_FEATURE_UNSIGNED_INT_INDICES);
 
   gl_extensions = (const char*) glGetString (GL_EXTENSIONS);


@@ -70,3 +70,9 @@ COGL_FEATURE_FUNCTION (void, glDeleteFramebuffers,
 COGL_FEATURE_FUNCTION (void, glGenerateMipmap,
                        (GLenum target))
 COGL_FEATURE_END ()
+
+COGL_FEATURE_BEGIN (element_index_uint, 255, 255,
+                    "OES\0",
+                    "element_index_uint\0",
+                    COGL_FEATURE_UNSIGNED_INT_INDICES)
+COGL_FEATURE_END ()
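
Conceptually, the COGL_FEATURE_BEGIN entry above asks the GLES backend to
turn COGL_FEATURE_UNSIGNED_INT_INDICES on when GL_OES_element_index_uint
shows up in the extension string. A rough, self-contained sketch of that
kind of check (not the actual cogl-feature-functions machinery; assumes
<string.h> and the GLES headers are included):

/* Rough sketch only: the real lookup is table-driven via COGL_FEATURE_BEGIN. */
static gboolean
gles_has_element_index_uint (void)
{
  const char *exts = (const char *) glGetString (GL_EXTENSIONS);

  return exts != NULL &&
         strstr (exts, "GL_OES_element_index_uint") != NULL;
}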