Question

Here is my shader program:

#version 330 core

// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec3 vertexNormal_modelspace;


// Values that stay constant for the whole mesh.
uniform mat4 MVP;
uniform mat4 V;
uniform mat4 M;
uniform mat3 blNormalMatrix;
uniform vec3 lightPos;
out vec4 forFragColor;
const vec3 diffuseColor = vec3(0.55, 0.09, 0.09);

void main(){

    // Output position of the vertex, in clip space : MVP * position
  gl_Position =  MVP * vec4(vertexPosition_modelspace,1);
    vec3 MaterialAmbientColor = vec3(0.1,0.1,0.1) * diffuseColor;
  // all following geometric computations are performed in the
  // camera coordinate system (aka eye coordinates)
  vec3 vertexNormal_cameraspace = (V*M*vec4(vertexNormal_modelspace,0)).xyz;
  vec4 vertexPosition_cameraspace4 = V*M* vec4(vertexPosition_modelspace,1);
  vec3 vertexPosition_cameraspace = vec3(vertexPosition_cameraspace4).xyz;
  vec3 lightDir = normalize(lightPos - vertexPosition_cameraspace);

  float lambertian = clamp(dot(lightDir,vertexNormal_cameraspace), 0.0,1.0);
  forFragColor = vec4(lambertian*diffuseColor , 1.0);

}

My problem is that this "worked" in the older OpenGL profile (the shader didn't even have a version directive; I think it was around OpenGL 2.1 or so). The key change is that I originally had normal = gl_NormalMatrix * gl_Normal and things worked.

However, that was based on my professor's code, which I've updated to the 3.3+ core profile; after replacing the deprecated functions I am now left with this:

Here is a link to a video of my program's behavior: https://drive.google.com/file/d/0B6oLZ_d7S-U7cVpkUXpVXzdaZEk/edit?usp=sharing

The light source should be a point light at roughly (0,0,3) that shouldn't move, but it isn't following any logical behavioral pattern and I can't make sense of it.

I tried passing the inverse transpose of the model matrix and using it as a replacement normal matrix, but that wrecked my normals. So I don't know.

This was my normalMatrix:

glm::mat3 MyNormalMatrix = glm::mat3(glm::transpose(glm::inverse(ModelMatrix)));
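
For comparison, here is a minimal sketch (identifiers assumed from the code in this question) of deriving the normal matrix from the ModelView matrix instead, since the shader does its lighting in camera space; note that a mat3 uniform is uploaded with glUniformMatrix3fv:

glm::mat4 ModelView    = ViewMatrix * ModelMatrix;   // camera-space transform
glm::mat3 NormalMatrix = glm::mat3(glm::transpose(glm::inverse(ModelView)));
// a mat3 uniform goes through glUniformMatrix3fv, not glUniformMatrix4fv
glUniformMatrix3fv(BlNormalMatrix, 1, GL_FALSE, &NormalMatrix[0][0]);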

Edit: Here is my Display code:

   glClearColor(0.0f, 0.0f, 0.4f, 0.0f);
   glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);   
   glEnable(GL_DEPTH_TEST);

      // Use our shader
   glUseProgram(programID);

   // Get our transformations if we move the camera around.
   glm::mat4 MyModelMatrix = ModelMatrix * thisTran * ThisRot;

   MVP = ProjectionMatrix * ViewMatrix * MyModelMatrix;
   glm::mat4 ModelView = ViewMatrix * MyModelMatrix;
   glm::mat3 MyNormalMatrix = glm::mat3(glm::transpose(glm::inverse(ModelView)));
   glm::vec3 newLightPos = lightPos;
      // Send our transformation to the currently bound shader, 
      // in the "MVP" uniform
   glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP[0][0]);
   glUniformMatrix4fv(ModelMatrixID, 1, GL_FALSE, &MyModelMatrix[0][0]);
   glUniformMatrix4fv(ViewMatrixID, 1, GL_FALSE, &ViewMatrix[0][0]);
   glUniformMatrix4fv(BlNormalMatrix,1,GL_FALSE, &MyNormalMatrix[0][0]);
   glUniformMatrix4fv(BlRotations, 1, GL_FALSE, &ThisRot[0][0]);
   glUniform3f(BlCamera, cameraLoc.x, cameraLoc.y, cameraLoc.z);
   glUniform3f(lPosition, newLightPos.x,newLightPos.y,newLightPos.z);

   // 1st attribute buffer : vertices
   glEnableVertexAttribArray(0);
   glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
   glVertexAttribPointer(
      0,                  // attribute
      3,                  // size
      GL_FLOAT,           // type
      GL_FALSE,           // normalized?
      0,                  // stride
      (void*)0            // array buffer offset
   );

   // 2nd attribute buffer : normals
   glEnableVertexAttribArray(1);
   glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
   glVertexAttribPointer(
      1,                                // attribute
      3,                                // size
      GL_FLOAT,                         // type
      GL_FALSE,                         // normalized?
      0,                                // stride
      (void*)0                          // array buffer offset
   );

   // draw the object (OpenGL 3.3 core)
   glDrawArrays(GL_TRIANGLES, 0, vertices.size() );
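
Since the shader computes lightDir against vertexPosition_cameraspace, the light position it receives presumably has to be in camera space too; here is a minimal sketch of that conversion (an assumption, not part of the original code), reusing ViewMatrix, lightPos, and the lPosition uniform location above:

   // assumption: move the world-space light position into camera space so it
   // matches vertexPosition_cameraspace in the vertex shader
   glm::vec3 lightPos_cameraspace = glm::vec3(ViewMatrix * glm::vec4(lightPos, 1.0f));
   glUniform3f(lPosition, lightPos_cameraspace.x, lightPos_cameraspace.y, lightPos_cameraspace.z);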

Solution

The problem ultimately turned out to be an issue with the model loader provided by my professor: it was somehow incompatible with modern OpenGL and would only "mostly" work, in that it was clearly missing the left/right normals or giving them invalid values. It was solved by switching to an Assimp-based loader.

The code, with Assimp linked, looks like this:

void blInitResWAssimp() {
    cout << "blInitResWAssimp" << endl;
    blCreateModelViewProjectionMatrix();

    //loads object
    bool res = loadAssImp("Resources/RCSS-subdiv.obj", indices, indexed_vertices, indexed_uvs, indexed_normals);
    //bool res = loadAssImp("Resources/cheb.obj", indices, indexed_vertices, indexed_uvs, indexed_normals);

    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // Load it into a VBO
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, indexed_vertices.size() * sizeof(glm::vec3), &indexed_vertices[0], GL_STATIC_DRAW);

    // Normal buffer
    glGenBuffers(1, &normalbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    glBufferData(GL_ARRAY_BUFFER, indexed_normals.size() * sizeof(glm::vec3), &indexed_normals[0], GL_STATIC_DRAW);

    // Generate a buffer for the indices as well
    glGenBuffers(1, &elementbuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned short), &indices[0], GL_STATIC_DRAW);

    //ModelMatrix = ModelMatrix * glm::translate(glm::mat4(1.0f), glm::vec3(-0.5, -0.5, 0));

    }
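
With the element buffer created above, the draw call changes from glDrawArrays to an indexed draw. A minimal sketch, assuming the elementbuffer and indices vector from this function:

    // sketch: indexed draw matching the unsigned short index buffer above
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
    glDrawElements(GL_TRIANGLES, (GLsizei)indices.size(), GL_UNSIGNED_SHORT, (void*)0);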

Assimp stuff

bool loadAssImp(
    const char * path,
    std::vector<unsigned short> & indices,
    std::vector<glm::vec3> & vertices,
    std::vector<glm::vec2> & uvs,
    std::vector<glm::vec3> & normals
    ){

    Assimp::Importer importer;

    const aiScene* scene = importer.ReadFile(path, 0/*aiProcess_JoinIdenticalVertices | aiProcess_SortByPType*/);
    if (!scene) {
        fprintf(stderr, "%s\n", importer.GetErrorString());
        getchar();
        return false;
    }
    const aiMesh* mesh = scene->mMeshes[0]; // In this simple example code we always use the first mesh (in OBJ files there is often only one anyway)
    const aiMaterial* material = scene->mMaterials[0];

    // Fill vertices positions
    vertices.reserve(mesh->mNumVertices);
    for (unsigned int i = 0; i<mesh->mNumVertices; i++){
        aiVector3D pos = mesh->mVertices[i];
        vertices.push_back(glm::vec3(pos.x, pos.y, pos.z));
    }

    // Fill vertices texture coordinates
    /*
    uvs.reserve(mesh->mNumVertices);
    for (unsigned int i = 0; i<mesh->mNumVertices; i++){
        aiVector3D UVW = mesh->mTextureCoords[0][i]; // Assume only 1 set of UV coords; AssImp supports 8 UV sets.
        uvs.push_back(glm::vec2(UVW.x, UVW.y));
    }*/

    // Fill vertices normals
    normals.reserve(mesh->mNumVertices);
    for (unsigned int i = 0; i<mesh->mNumVertices; i++){
        aiVector3D n = mesh->mNormals[i];
        //aiVector3D n = mesh->mVertices[i];
        normals.push_back(glm::vec3(n.x, n.y, n.z));
    }


    // Fill face indices
    indices.reserve(3 * mesh->mNumFaces);
    for (unsigned int i = 0; i<mesh->mNumFaces; i++){
        // Assume the model has only triangles.
        indices.push_back(mesh->mFaces[i].mIndices[0]);
        indices.push_back(mesh->mFaces[i].mIndices[1]);
        indices.push_back(mesh->mFaces[i].mIndices[2]);
    }

    // The "scene" pointer will be deleted automatically by "importer"

    return true;
}
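
One note on the import call: it passes 0 for the post-processing flags, while the index loop assumes triangulated faces and the normal loop assumes per-vertex normals exist. A minimal sketch of a ReadFile call with common flags (an assumption; the original leaves them commented out):

    // assumption: common post-process flags so the triangle/normal loops above hold
    const aiScene* scene = importer.ReadFile(path,
        aiProcess_Triangulate |            // guarantees 3 indices per face
        aiProcess_GenSmoothNormals |       // fills mesh->mNormals if the file has none
        aiProcess_JoinIdenticalVertices);  // shares vertices across faces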