OpenGL binding textures to vertex buffer objects (with CG Shaders)

karmalis Source

I'll get straight to the point:

I have created a structure for my vertices:

struct Vertex3D
{
    Vector3D position;
    Vector2D textureCoordinate;
    Vector3D normal;
}

I then import a particular *.dae file and bind it to a OpenGL Vertex Buffer, which is constructed out of a list of Vertex3D structured vertices. All goes well, the mesh imports and is displayed, I can manipulate it with shaders, but I have a problem.

What I also do is load up and assign a texture to it. Afterwards I'm trying to display it in this manner:

glEnableVertexAttribArray(0);
glEnableVertexAttribArray(8);
glEnableVertexAttribArray(2);

glBindBuffer(GL_ARRAY_BUFFER, this->_entries[i].VB);
glVertexAttribPointer(0,3, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D), 0); // Vertices
glVertexAttribPointer(8,2, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D), (const GLvoid*)sizeof(RomCommon::Vector3D)); // Supposed texture position
glVertexAttribPointer(2,3, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D),  (const GLvoid*)(sizeof(RomCommon::Vector2D) + sizeof(RomCommon::Vector3D))); // Supposed normal position
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->_entries[i].IB);
glBindTexture(GL_TEXTURE_2D, 3);
glDrawElements(GL_TRIANGLES, this->_entries[i]._indexCount, GL_UNSIGNED_INT, 0);

glDisableVertexAttribArray(0);
glDisableVertexAttribArray(8);
glDisableVertexAttribArray(2);

My problem is that the texture is not displayed. Only the color from the texture's first pixel is assigned (in my case it's blue). I've used gDEBugger to debug the whole process, and I can tell that the VertexBuffer seems to be set up correctly, the image of the texture is loaded up correctly, yet it won't be displayed.

I have tried searching and trying out different aspects of debugging, this includes:

  • making sure glEnable(GL_TEXTURE_2D) is set up;
  • adding glEnable(GL_LIGHTING) and glDisable(GL_LIGHTING);
  • using glDisable(GL_DEPTH_TEST); glDisable(GL_BLEND); and glDisable(GL_SCISSOR_TEST); before binding the texture
  • fetching for errors
  • searching for strange messages
  • double checking the vertex buffer in the memory (literally going through the elements and cross comparing what's in the *.dae file)

EDIT

I am using CG Shaders with this. Here is the Vertex Program:

struct vsOutput {


 float4 position : POSITION;
  float2 texCoord : TEXCOORD0;
  float3 color    : COLOR;
};

vsOutput VS_Main(      float4 position : POSITION,
                           float2 texCoord : TEXCOORD0,
                           float3 color    : COLOR,
                           uniform float4x4 ModelViewProj
                           )
{
  vsOutput OUT;

  OUT.position = mul(ModelViewProj, position);
  OUT.texCoord = texCoord;
  OUT.color = color;

  return OUT;   
}

And the Fragment Program:

struct fsOutput {
  float4 color : COLOR;
};

fsOutput FS_Main(
    float2 texCoord : TEXCOORD0,
    uniform sampler2D decal : TEX0
)
{ 
    fsOutput OUT;
    OUT.color = tex2D(decal,texCoord);
    return OUT;
}

I could add more details about my project if necessary, though from what I've traced, the problem seems to be somewhere in the rendering of the vertex buffers.

EDIT

I've also found that because I'm using CG, the vertexAttribPointers should be different (8 is for TEXCOORD0) so I changed the general description based on that.

SOLUTION EDIT

Many thanks to the few chaps in the comments who opened my mind to look at the problem from a different perspective and actually do some more reading. Here is the solution:

    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(8);
    glEnableVertexAttribArray(2);

    glBindBuffer(GL_ARRAY_BUFFER, this->_entries[i].VB);
    glVertexAttribPointer(0,3, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D), 0); // Vertices
    glVertexAttribPointer(8,2, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D), (const GLvoid*)sizeof(RomCommon::Vector3D)); // Supposed texture position
    glVertexAttribPointer(2,3, GL_FLOAT, GL_FALSE, sizeof(RomCommon::Vertex3D),  (const GLvoid*)(sizeof(RomCommon::Vector2D) + sizeof(RomCommon::Vector3D))); // Supposed normal position
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->_entries[i].IB);


/* Solution start */
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glTexCoordPointer(2, GL_FLOAT, sizeof(RomCommon::Vertex3D), (const GLvoid*)12);
        glBindTexture(GL_TEXTURE_2D, 3);
        glDrawElements(GL_TRIANGLES, this->_entries[i]._indexCount, GL_UNSIGNED_INT, 0);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
/* Solution end */


    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(8);
    glDisableVertexAttribArray(2);

My problem was that I was only passing texture coordinates to the shader, but did not push them through the OpenGL state machine's fixed-function texture-coordinate path (if that's a good expression). I think this has something to do with the fact that I am using CG shaders and not GLSL shaders, though I could be wrong. The code is loosely based on a few examples and explanations that I read, but they were all based on GLSL shaders and they were working, hence the deduction. Anyway, it is now resolved.

Tags: c++ · opengl · rendering · texture-mapping · vertex-buffer

Answers

answered 5 years ago genpfault #1

glBindTexture(GL_TEXTURE_2D, 3);

I'd be wary of forcing texture object IDs on OpenGL. Technically it ought to work but using glGenTextures() to acquire texture IDs is a better idea.

comments powered by Disqus