how can I specify interleaved vertex attributes and vertex indices

  • how can I specify interleaved vertex attributes and vertex indices user3643

    I'm writing a generic ShaderProgram class that compiles a set of Shader objects, passes args to the shader (like vertex position, vertex normal, tex coords etc), then links the shader components into a shader program, for use with glDrawArrays.

    My vertex data already exists in a VertexBufferObject that uses the following data structure to create a vertex buffer:

    class CustomVertex
    float   m_Position[3];      // x, y, z      // offset 0, size = 3*sizeof(float)
    float   m_TexCoords[2];     // u, v         // offset 3*sizeof(float), size = 2*sizeof(float)
    float   m_Normal[3];        // nx, ny, nz;
    float   colour[4];          // r, g, b, a
    float   padding[20];        // padded for performance

    I've already written a working VertexBufferObject class that creates a vertex buffer object from an array of CustomVertex objects. This array is said to be interleaved. It renders successfully with the following code:

    void VertexBufferObject::Draw()
    if( ! m_bInitialized )
    glBindBuffer( GL_ARRAY_BUFFER,          m_nVboId );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER,  m_nVboIdIndex );
    glEnableClientState( GL_VERTEX_ARRAY );
    glEnableClientState( GL_TEXTURE_COORD_ARRAY );
    glEnableClientState( GL_NORMAL_ARRAY );
    glEnableClientState( GL_COLOR_ARRAY );  
    glVertexPointer( 3, GL_FLOAT, sizeof(CustomVertex), ((char*)NULL + 0) );
    glTexCoordPointer(3, GL_FLOAT, sizeof(CustomVertex), ((char*)NULL + 12));
    glNormalPointer(GL_FLOAT, sizeof(CustomVertex), ((char*)NULL + 20));
    glColorPointer(3, GL_FLOAT, sizeof(CustomVertex), ((char*)NULL + 32));  
    glDrawElements( GL_TRIANGLES, m_nNumIndices, GL_UNSIGNED_INT, ((char*)NULL +  0) );
    glDisableClientState( GL_VERTEX_ARRAY );
    glDisableClientState( GL_TEXTURE_COORD_ARRAY );
    glDisableClientState( GL_NORMAL_ARRAY );
    glDisableClientState( GL_COLOR_ARRAY );
    glBindBuffer( GL_ARRAY_BUFFER,          0 );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER,  0 );

    Back to the Vertex Array Object though. My code for creating the Vertex Array object is as follows. This is performed before the ShaderProgram runtime linking stage, and no glErrors are reported after its steps.

    // Specify the shader arg locations (e.g. their order in the shader code)
    for( int n = 0; n < vShaderArgs.size(); n ++)   
        glBindAttribLocation( m_nProgramId, n, vShaderArgs[n].sFieldName.c_str() ); 
    // Create and bind to a vertex array object, which stores the relationship between 
    // the buffer and the input attributes
    glGenVertexArrays( 1, &m_nVaoHandle );
    glBindVertexArray( m_nVaoHandle );
    // Enable the vertex attribute array (we're using interleaved array, since its faster)  
    glBindBuffer( GL_ARRAY_BUFFER,          vShaderArgs[0].nVboId );    
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER,  vShaderArgs[0].nVboIndexId );
    // vertex data
    for( int n = 0; n < vShaderArgs.size(); n ++ )
            (GLubyte *) NULL + vShaderArgs[n].nFieldOffset

    This doesn't render correctly at all. I get a pattern of white specks onscreen, in the shape of the terrain rectangle, but there are no regular lines etc. Here's the code I use for rendering:

    void ShaderProgram::Draw()
    using namespace AntiMatter;
    if( ! m_nShaderProgramId || ! m_nVaoHandle )
        AppLog::Ref().LogMsg("ShaderProgram::Draw() Couldn't draw object, as initialization of ShaderProgram is incomplete");
    glUseProgram( m_nShaderProgramId );
    glBindVertexArray( m_nVaoHandle );  
    glDrawArrays( GL_TRIANGLES, 0, m_nNumTris );

    Can anyone see errors or omissions in either the VAO creation code or rendering code?


  • If you want to use VAOs with index buffers, you need glDrawElements() instead of glDrawArrays(). What you are seeing is probably all your vertices being rendered as triangles, as if there was no index buffer.

c++ opengl vertex-arrays index-buffer
Related questions and answers
  • , and vertices arrays if (Objects[i].textured) glEnableClientState(GL_TEXTURE_COORD_ARRAY); if (lit) glEnableClientState(GL_NORMAL_ARRAY); glEnableClientState(GL_VERTEX_ARRAY); // Point them to the objects arrays if (Objects[i].textured) glTexCoordPointer(2, GL_FLOAT, 0, Objects[i].TexCoords...); // Set The Color Of The Model ( NEW ) // ORIGINAL DRAWING CODE //Draw the model as an interpolation between the two frames glBegin(GL_TRIANGLES); for(int i = 0; i

  • (mats.mvHandle,1,GL_FALSE,glm::value_ptr(mats.modelViewMatrix)); //bind to vertex array object glBindVertexArray(vaoHandle); //render scene glDrawArrays(GL_TRIANGLES, 0, 240*3 ); //do post-processing...); glDrawArrays(GL_TRIANGLES, 0, 6); glBindTexture(GL_TEXTURE_2D, 0); } Tried everything I can think of or find in a FBO tutorial or have read about. I don't get any errors and it returns as complete...; case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: cout<<"GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT\n"<<endl; break; case GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER

  • (GL_TEXTURE_COORD_ARRAY); glEnableClientState(GL_VERTEX_ARRAY); glNormalPointer(GL_FLOAT, 0, indices); glTexCoordPointer(2, GL_FLOAT, sizeof(float)*3, indices); glVertexPointer(3, GL...); glEnableClientState(GL_TEXTURE_COORD_ARRAY); glEnableClientState(GL_VERTEX_ARRAY); glNormalPointer(GL_FLOAT, 0, &normals[0]); glTexCoordPointer(2, GL_FLOAT, 0, &textCoords[0]); glVertexPointer(3, GL_FLOAT, 0, &vertices[0]); glDrawArrays(GL_TRIANGLES, 0, vertices.size()/3); glDisableClientState(GL_VERTEX_ARRAY); // disable vertex arrays glDisableClientState(GL_TEXTURE

  • [] = { // first input element: position {"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, D3D10_APPEND_ALIGNED_ELEMENT, D3D10_INPUT_PER_VERTEX_DATA, 0}, // second input element: color {"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0, D3D10_APPEND_ALIGNED_ELEMENT, D3D10_INPUT_PER_VERTEX_DATA, 0} }; // use the input element descriptions to create the input layout device..._WRITE; bd.MiscFlags = 0; device->CreateBuffer(&bd, NULL, &pBuffer); void* pVoid; // the void pointer pBuffer->Map(D3D10_MAP_WRITE_DISCARD, 0, &pVoid); // map the vertex buffer

  • wrong in my frame buffer set up code, or elsewhere. But I can't see what. The FBO is set up through the following function: unsigned int fbo_id; unsigned int depth_buffer; int m_FBOWidth, m_FBOHeight...;MVP[0]); } glBindVertexArray(vaoID); glDrawElements(GL_TRIANGLES, sizeof(cube.polygon)/sizeof(cube.polygon[0]), GL_UNSIGNED_INT, 0); } void drawStaticBox() { Matrix4 M(1...I'm having trouble implementing render to texture with OpenGL 3. My issue is that after rendering to the frame buffer, it appears the rendered object becomes deformed, which may imply a bad

  • _Commands, m_MD2Header.num_glcommands * sizeof(int)); // Read all the data. for(int i = 0; i < m_MD2Header.num_frames; ++i) { md2_frame_t* Frame = (md2_frame_t*)&Buffer[m_MD2Header.framesize * i]; vec3_t* Vertices = &m_Vertices[m_MD2Header.num_xyz * i]; int* Normals = &m_Normals[m_MD2Header.num_xyz * i]; for(int vertex = 0; vertex <... char index_normal; }; typedef short md2_textcoord_t[2]; struct md2_frame_t { float scale[3]; vec3_t translate; char name[16]; md2_vertex_t vertices[1]; // First vertex of this frame

  • set_location(int X, int Y) { location[0] = X; location[1] = Y;}; bool Alive() {return alive;}; void SetLife(bool f) {alive= f;}; //////////////////////////////// Move...];}; char Contents(int Y, int X) {return contents[Y][X];}; vector <string> Save() {return save;}; int size() {return contents.size...; for(char i= '0'; i!= ' ';swall++) i= buff[buff.size()-1][swall+1]; int sspace= 0; int I= swall+1; for(char i= '0'; i!= 'X';sspace++, I++) i= buff[buff.size()-1][I

  • float4 Colour; // a struct for the vertex shader return value struct VSOut { float4 Col : COLOR; // vertex normal float4 Pos : SV_POSITION; // vertex screen coordinates }; // the vertex... return Output; // send the modified vertex data to the Rasterizer Stage } // the pixel shader float4 PS(float4 Col : COLOR) : SV_TARGET { return Col; // set the pixel color to the color passed... UINT stride = sizeof(VERTEX); UINT offset = 0; device->IASetVertexBuffers(0, 1, mesh.PBuffer(), &stride, &offset); device->IASetIndexBuffer(mesh.IBuffer(), DXGI_FORMAT_R32_UINT, 0

  • : return DefWindowProc(hWnd, message, wParam, lParam); } } bool initDirect3D(void) { pD3D = NULL; pd3dDevice = NULL; // create the DirectX object if(NULL == (pD3D = Direct3DCreate9...; pd3dDevice -> Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 255), 1.0f, 0); // clear the back buffer to a blue color if (SUCCEEDED(pd3dDevice -> BeginScene...(); } // present the back buffer contents to the display pd3dDevice -> Present(NULL, NULL, NULL, NULL); } void cleanUp (void) { // release the device and the Direct3D object if (pd3dDevice

Data information