Question

I wrote a simple program using OpenGL/GLSL which should display a simple textured box. To do this I load an OBJ file called 'Box.mesh' and then initialize VBOs for the vertex, normal, texture and index buffers.

Here's the Box.mesh file content :

o Cube
v 1.000000 -1.000000 -1.000000
v 1.000000 -1.000000 1.000000
v -1.000000 -1.000000 1.000000
v -1.000000 -1.000000 -1.000000
v 1.000000 1.000000 -0.999999
v 0.999999 1.000000 1.000001
v -1.000000 1.000000 1.000000
v -1.000000 1.000000 -1.000000
vt 0.626059 0.265705
vt 0.626059 0.487398
vt 0.404365 0.487398
vt 0.626060 0.930786
vt 0.404365 0.930786
vt 0.404365 0.709091
vt 0.847752 0.487397
vt 0.847753 0.709091
vt 0.626059 0.709091
vt 0.182672 0.487397
vt 0.626059 0.044011
vt 0.404366 0.265704
vt 0.182671 0.709091
vt 0.404366 0.044011
vn 0.000000 -1.000000 0.000000
vn -0.000000 1.000000 0.000000
vn 1.000000 -0.000000 0.000001
vn -0.000000 -0.000000 1.000000
vn -1.000000 -0.000000 -0.000000
vn 0.000000 0.000000 -1.000000
vn 1.000000 0.000000 -0.000000
usemtl BoxMtl
s off
f 1/1/1 2/2/1 3/3/1
f 5/4/2 8/5/2 7/6/2
f 1/7/3 5/8/3 6/9/3
f 2/2/4 6/9/4 3/3/4
f 3/3/5 7/6/5 4/10/5
f 5/11/6 1/1/6 4/12/6
f 4/12/1 1/1/1 3/3/1
f 6/9/2 5/4/2 7/6/2
f 2/2/7 1/7/7 6/9/7
f 6/9/4 7/6/4 3/3/4
f 7/6/5 8/13/5 4/10/5
f 8/14/6 5/11/6 4/12/6

And a piece of code of my program :

#define OFFSET_BUFFER(offset) ((char*)NULL + (offset))

[...]

//VBO Initialization

Basic::OBJReader objReader;
Basic::OBJImage objImg = objReader.Parse("Box.mesh");

GLuint handle[4];
glGenBuffers(1, handle);

std::vector<float> position = objImg.GetVertexPosition();

glBindBuffer(GL_ARRAY_BUFFER, handle[0]);
glBufferData(GL_ARRAY_BUFFER, position.size() * sizeof(GLfloat), &position[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);

std::vector<float> normal = objImg.GetVertexNormal();

glBindBuffer(GL_ARRAY_BUFFER, handle[1]);
glBufferData(GL_ARRAY_BUFFER, normal.size() * sizeof(GLfloat), &normal[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);

std::vector<float> texture = objImg.GetVertexTexture();

glBindBuffer(GL_ARRAY_BUFFER, handle[2]);
glBufferData(GL_ARRAY_BUFFER, texture.size() * sizeof(GLfloat), &texture[0], GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);

std::vector<unsigned int> faces = objImg.GetOBJFaceImageList().at(0).GetFaceData();

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, handle[3]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, faces.size() * sizeof(GLuint), &faces[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

[...]

/*Main loop*/

while (isAlive == true)
{
    [...]

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, handle[0]);
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, OFFSET_BUFFER(0));
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, handle[1]);
        glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, OFFSET_BUFFER(0));
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glEnableVertexAttribArray(2);
    glBindBuffer(GL_ARRAY_BUFFER, handle[2]);
        glBindTexture(GL_TEXTURE_2D, textureID);
        glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 0, OFFSET_BUFFER(0));
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glBindBuffer(GL_ARRAY_BUFFER, handle[3]);
        glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_INT, OFFSET_BUFFER(0));
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glDisableVertexAttribArray(2);
    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(0);

    [...]
}

I checked the content and the size of all the buffers and they are correct. So I don't understand why I have a crash at the first call of glDrawElements. I'm really lost. Can anyone help me, please? Thanks a lot in advance for your help.

Was it helpful?

Solution

Assuming handle[3] is your element array buffer, this is wrong prior to calling glDrawElements (...):

glBindBuffer(GL_ARRAY_BUFFER, handle[3]);

It needs to be:

glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, handle[3]);

Presumably you have no element array buffer bound, and this instructs OpenGL that the final parameter in glDrawElements (...) is a pointer to client memory (instead of an offset into a buffer object's memory). GL will attempt to dereference a NULL pointer when it comes time to pull the vertex indices, which produces exactly the crash (access violation) you are seeing. Note also that glGenBuffers(1, handle) generates only one buffer name even though handle is an array of four — it should be glGenBuffers(4, handle), otherwise handle[1] through handle[3] are uninitialized (binding such unreserved names is an error in a core profile context).

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top