Question

I have vertex shader

#version 330 core

layout(location = 0) in vec3 VertexPosition;
layout(location = 1) in vec2 VertexUV;
layout(location = 2) in vec3 VertexNormal;

out VS_GS_VERTEX
{
    vec2 UV;
    vec3 vs_worldpos;
    vec3 vs_normal;
} vertex_out;

uniform mat4 proj_matrix;
uniform mat4 model_matrix;

void main(void)
{
    gl_Normal = VertexNormal;
    gl_Position = proj_matrix * vec4(VertexPosition, 1.0);

    vertex_out.UV = VertexUV; //VertexPosition.xy;
    vertex_out.vs_worldpos = gl_Position.xyz;
    vertex_out.vs_normal = mat3(model_matrix) * gl_Normal;
}

and fragment shader

#version 330 core

in GS_FS_VERTEX
{
    vec2 UV;
    vec3 vs_worldpos;
    vec3 vs_normal;
} vertex_in;

// Values that stay constant for the whole mesh.
uniform sampler2D sampler0;
uniform sampler2D sampler1;
uniform sampler2D sampler2;
uniform sampler2D sampler3;
//uniform sampler2D alphamap0;
uniform sampler2D alphamap1;
uniform sampler2D alphamap2;
uniform sampler2D alphamap3;
uniform int tex_count;

uniform vec4 color_ambient = vec4(0.75, 0.75, 0.75, 1.0);
uniform vec4 color_diffuse = vec4(0.25, 0.25, 0.25, 1.0);
//uniform vec4 color_specular = vec4(1.0, 1.0, 1.0, 1.0);
uniform vec4 color_specular = vec4(0.1, 0.1, 0.1, 0.25);
uniform float shininess = 5.0f;
uniform vec3 light_position = vec3(12.0f, 32.0f, 560.0f);

void main(){
    vec3 light_direction = normalize(light_position - vertex_in.vs_worldpos);
    vec3 normal = normalize(vertex_in.vs_normal);
    vec3 half_vector = normalize(light_direction + normalize(vertex_in.vs_worldpos));
    float diffuse = max(0.0, dot(normal, light_direction));
    float specular = pow(max(0.0, dot(vertex_in.vs_normal, half_vector)), shininess);
    gl_FragColor = texture( sampler0, vertex_in.UV ) * color_ambient + diffuse * color_diffuse + specular * color_specular;

    // http://www.opengl.org/wiki/Texture_Combiners
    // GL_MODULATE = *
    // GL_INTERPOLATE Blend tex0 and tex1 based on a blending factor = mix(texel0, texel1, BlendFactor)
    // GL_INTERPOLATE Blend tex0 and tex1 based on alpha of tex0 = mix(texel0, texel1, texel0.a)
    // GL_ADD = clamp(texel0 + texel1, 0.0, 1.0)
    if (tex_count > 0){
        vec4 temp = texture( sampler1, vertex_in.UV );
        vec4 amap = texture( alphamap1, vertex_in.UV);
        gl_FragColor = mix(gl_FragColor, temp, amap.a);
    }
    if (tex_count > 1){
        vec4 temp = texture( sampler2, vertex_in.UV );
        vec4 amap = texture( alphamap2, vertex_in.UV);
        gl_FragColor = mix(gl_FragColor, temp, amap.a);
    }
    if (tex_count > 2){
        vec4 temp = texture( sampler3, vertex_in.UV );
        vec4 amap = texture( alphamap3, vertex_in.UV);
        gl_FragColor = mix(gl_FragColor, temp, amap.a);
    }
}

It takes an indexed GL_TRIANGLE_STRIP as input:

glBindBuffer(GL_ARRAY_BUFFER, tMt.vertex_buf_id[cx, cy]);
glVertexAttribPointer(VERTEX_LAYOUT_POSITION, 3, GL_FLOAT, false, 0, pointer(0));
glEnableVertexAttribArray(0);

{ chunk tex position }
glBindBuffer(GL_ARRAY_BUFFER, chunkTexPositionBO);
glVertexAttribPointer(VERTEX_LAYOUT_TEX_UV, 2, GL_FLOAT, false, 0, pointer(0));
glEnableVertexAttribArray(1);

glBindBuffer(GL_ARRAY_BUFFER, tMt.normal_buf_id[cx, cy]);
glVertexAttribPointer(VERTEX_LAYOUT_NORMAL, 3, GL_FLOAT, true, 0, pointer(0));
glEnableVertexAttribArray(2);

{ index buffer }
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, chunkIndexBO);

for i := 0 to tMt.texCount - 1 do begin
  bt := tMt.texture_buf_id[cx, cy][i];
  if bt = nil then
     break;
  glUniform1i(proj_tex_count_loc, i);
  glActiveTexture(GL_TEXTURE0 + i);
  glBindTexture(GL_TEXTURE_2D, bt.id);

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

  if i > 0 then begin
    // this time, use blending:
    glActiveTexture(GL_TEXTURE4 + 1);
    glBindTexture(GL_TEXTURE_2D, tMt.alphamaps[cx, cy][i - 1]);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  end;
end;

glDrawElements(GL_TRIANGLE_STRIP, length(chunkIndexArr), GL_UNSIGNED_SHORT, nil);

The code works as intended, except that I'm not sure whether my normals are arranged properly: they were stored as bytes (converted to GLfloat as b / FF), the xyz coordinates were reordered, and some probably need negation.

Can someone show me a geometry shader that displays the normals as lines, as shown at http://blogs.agi.com/insight3d/index.php/2008/10/23/geometry-shader-for-debugging-normals/? (That shader does not work at all, and it seems the out/in data gets lost between the vertex and fragment shaders.)

P.S. I'm not sure I did everything properly (I'm just starting with OpenGL and GLSL), so any suggestions are also appreciated.

Edit: I made a simple geometry shader following some examples:

// This is a very simple pass-through geometry shader
#version 330 core

layout (triangles) in;
layout (triangle_strip, max_vertices = 145) out;

in VS_GS_VERTEX
{
    vec2 UV;
    vec3 vs_worldpos;
    vec3 vs_normal;
} vertex_in[];

out GS_FS_VERTEX
{
    vec2 UV;
    vec3 vs_worldpos;
    vec3 vs_normal;
} vertex_out;

uniform float uNormalsLength = 0.5;

void main()
{
    int i;
    // Loop over the input vertices
    for (i = 0; i < gl_in.length(); i++)
    {
        vertex_out.UV = vertex_in[i].UV;
        vertex_out.vs_worldpos = vertex_in[i].vs_worldpos;
        vertex_out.vs_normal = vertex_in[i].vs_normal;

        // Copy the input position to the output
        gl_Position = gl_PositionIn[i];
        EmitVertex();

        gl_Position = gl_ModelViewProjectionMatrix * (gl_PositionIn[i] + (vec4(vertex_in[i].vs_normal, 0) * uNormalsLength));
        gl_FrontColor = vec4(0.0, 0.0, 0.0, 1.0); //gl_FrontColorIn[i];
        EmitVertex();      
    }
    // End the primitive. This is not strictly necessary
    // and is only here for illustrative purposes.
    EndPrimitive();
}

but I don't know where it gets gl_ModelViewProjectionMatrix from (it seems to be deprecated), and the result looks awful; it seems everything, including the normals, gets turned into strips. The picture was taken with glPolygonMode(GL_FRONT, GL_LINE), and the textures are also being mapped onto those lines.


Solution

As it seems, you're doing it all in a single pass and you actually emit 6 vertices per incoming triangle. This is not what you want.

Either do it in two passes, i.e. one pass for the mesh, the other for the normals, or try to emit the original triangle and a degenerate triangle for the normal. For simplicity I'd go for the two-pass version:

Inside your render loop:

  • render terrain
  • if and only if debug geometry is to be rendered
    • enable your debug normals shader
    • render the terrain mesh a second time, passing POINTS to the vertex shader

To make this work, you'll need a second program object, set up like the one in the blog post you linked to, consisting of a simple pass-through vertex shader, the following geometry shader, and a fragment shader for coloring the lines that represent the normals.

The vertex and fragment shaders should be no problem. Assuming you have a smoothed mesh, i.e. you have actual, averaged vertex normals, you can simply pass in points and emit lines.
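
For reference, a minimal pass-through vertex shader for this debug program could look roughly like the sketch below. It is untested and assumes the debug program reuses the attribute locations from your terrain vertex shader (0 = position, 2 = normal); the vs_normal output must match the geometry shader input that follows.

#version 330 core

// Assumption: same attribute locations as the terrain program in the question.
layout(location = 0) in vec3 VertexPosition;
layout(location = 2) in vec3 VertexNormal;

// Must match the "in vec3 vs_normal[]" input of the geometry shader below.
out vec3 vs_normal;

void main(void)
{
    // Pass the untransformed position along; the geometry shader applies
    // the projection when it emits the line vertices.
    gl_Position = vec4(VertexPosition, 1.0);
    vs_normal   = VertexNormal;
}

The geometry shader itself: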

#version 330 core

// assuming you have vertex normals, you need to render a vertex
// only a single time. with any other prim type, you may render
// the same normal multiple times
layout (points) in; 

// Geometry shaders can only output points, line strips or triangle
// strips by definition. you output a single line per vertex. therefore, 
// the maximum number of vertices per line_strip is 2. This is effectively
// the same as rendering distinct line segments.
layout (line_strip, max_vertices = 2) out;

in      vec3  vs_normal[];
uniform float normal_scale = 0.5; // don't forget: this is the default value! 

/* if you're never going to change the normal_scale, consider simply putting a 
  constant there instead:
  const float normal_scale = 0.5;
*/

void main()
{
    // we simply transform and emit the incoming vertex - this is v0 of our
    // line segment
    vec4 v0     = gl_in[0].gl_Position;
    gl_Position = gl_ModelViewProjectionMatrix * v0;
    EmitVertex();

    // we calculate v1 of our line segment
    vec4 v1     = v0 + vec4(vs_normal[0] * normal_scale, 0);
    gl_Position = gl_ModelViewProjectionMatrix * v1;
    EmitVertex();

    EndPrimitive();
}

Warning: Untested code!

This is probably as simple as it gets. Add a uniform to your fragment shader so you can color your normals as you like or simply export a constant color.
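
For instance, a minimal sketch of such a fragment shader (untested; the uniform name line_color and its default color are only an example):

#version 330 core

// Debug color for the normal lines; name and default are arbitrary.
uniform vec4 line_color = vec4(1.0, 1.0, 0.0, 1.0);

out vec4 frag_color;

void main()
{
    frag_color = line_color;
}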

Note: This code still uses gl_ModelViewProjectionMatrix. If you're writing GL core code, please consider replacing legacy GL constructs, like the matrix stack, with your own stuff!
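
For example, the same geometry shader with the legacy matrix replaced by an ordinary uniform could look like this (again an untested sketch; mvp_matrix is just an example name for a matrix you compute and upload yourself, like the proj_matrix in your question):

#version 330 core

layout (points) in;
layout (line_strip, max_vertices = 2) out;

in      vec3  vs_normal[];

uniform float normal_scale = 0.5;
// Your own model-view-projection matrix, uploaded from the application
// just like the question's proj_matrix / model_matrix uniforms.
uniform mat4  mvp_matrix;

void main()
{
    // v0: the incoming vertex, transformed to clip space
    vec4 v0     = gl_in[0].gl_Position;
    gl_Position = mvp_matrix * v0;
    EmitVertex();

    // v1: the vertex offset along its normal
    vec4 v1     = v0 + vec4(vs_normal[0] * normal_scale, 0.0);
    gl_Position = mvp_matrix * v1;
    EmitVertex();

    EndPrimitive();
}

You would compute that matrix on the CPU and upload it with glUniformMatrix4fv, just like your other uniforms.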

Note 2: Your geometry shader is not what is usually referred to as a pass-through shader. First, you do more processing on the incoming data than just assigning incoming values to outgoing values. Second, how could it be a pass-through shader if you generate geometry? Pass-through means you don't do anything other than pass incoming values on to the next shader stage.

Licensed under: CC-BY-SA with attribution
Not affiliated with Stack Overflow