Tilemap rendering with OpenGL (why the rendering is extremely messed up)

  • danran:

    I'm using SDL and OpenGL to render a tile map, but the rendering comes out extremely messed up and I'm not sure what I'm doing wrong.

    It should just be the first tile being rendered, but instead I get a blurred mess. My rendering code:

    glBindTexture(GL_TEXTURE_2D, texture);
    float texscale = 1.0f / (float)tileWidth;
    sourceX = sourceX / (float)tileSheetWidth;
    
    glBegin(GL_QUADS);
    
    // Top-left vertex (y is down with this glOrtho)
    glTexCoord2f( sourceX, sourceY);
    glVertex2i(x, y);
    
    // Top-right vertex
    glTexCoord2f( sourceX + texscale, sourceY);
    glVertex2i( x + tileWidth, y);
    
    // Bottom-right vertex
    glTexCoord2f( sourceX + texscale, texscale + sourceY);
    glVertex2i( x + tileWidth, y + tileHeight);
    
    // Bottom-left vertex
    glTexCoord2f( sourceX, texscale + sourceY);
    glVertex2i(x, y + tileHeight);
    
    glEnd();
    glLoadIdentity();
    

    My initialization code for OpenGL:

    // Set the OpenGL state after creating the context with SDL_SetVideoMode
    glClearColor(0, 0, 0, 0);
    glDisable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D); //Enable 2D rendering
    glViewport(0, 0, Width, Height); //Set Up openGL viewport (screen)
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, Width, Height, 0, -1, 1); 
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    

    Just in case, here's my image loading code too; I think this may be where the problem lies:

    GLuint LoadImage(string filename, bool loadingTileSheet)
    {
      SDL_Surface *LoadedImage = NULL;
      GLuint texture;
      Uint32 rmask, gmask, bmask, amask;
      LoadedImage = IMG_Load(filename.c_str());
    
      if (loadingTileSheet)
      {
        tileSheetWidth = LoadedImage->w;
        numberOfTiles = (LoadedImage->w / tileWidth) + 1;
      }
    
      SDL_PixelFormat *pixf = SDL_GetVideoSurface()->format;
      SDL_SetAlpha(LoadedImage, 0, 0);
    
      if (SDL_BYTEORDER == SDL_BIG_ENDIAN)    
      {
        rmask = 0xff000000;    
        gmask = 0x00ff0000;   
        bmask = 0x0000ff00;   
        amask = 0x000000ff;
      }
      else 
      {
        rmask = 0x000000ff;    
        gmask = 0x0000ff00;   
        bmask = 0x00ff0000;   
        amask = 0xff000000;
      }
    
      SDL_Surface *image = SDL_CreateRGBSurface(SDL_SWSURFACE, LoadedImage->w, LoadedImage->h, 32, rmask, gmask, bmask, amask);
      SDL_BlitSurface(LoadedImage, NULL, image, NULL);
    
      glGenTextures(1, &texture);
      glBindTexture(GL_TEXTURE_2D, texture);
    
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
      glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
      glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels);
    
      return texture;
    }
    

  • A code dump plus "find my problem" doesn't make a good question. I suggest finding code that works, via a tutorial or another source, and building off of that code or comparing it to yours.

    That being said: test your texture loading code by drawing a quad with the full texture on it (use texture coordinates from 0 to 1), for example with something like the sketch below. If that displays correctly, then check your glTexCoord2f calls.
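
    A minimal sanity-check along those lines, assuming the same immediate-mode/glOrtho setup as in the question (quadW and quadH are just placeholder names for whatever on-screen size you want):

    glBindTexture(GL_TEXTURE_2D, texture);
    glBegin(GL_QUADS);
    // Whole texture: u and v both run from 0 to 1
    glTexCoord2f(0.0f, 0.0f); glVertex2i(0, 0);          // top-left
    glTexCoord2f(1.0f, 0.0f); glVertex2i(quadW, 0);      // top-right
    glTexCoord2f(1.0f, 1.0f); glVertex2i(quadW, quadH);  // bottom-right
    glTexCoord2f(0.0f, 1.0f); glVertex2i(0, quadH);      // bottom-left
    glEnd();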

  • float texscale = 1.0f / (float)tileWidth;
    ...
    glTexCoord2f( sourceX + texscale, texscale + sourceY);
    

    You are computing texscale from your x axis but applying it to the y axis too.

    Compute the height of a tile in texture coordinates separately from the width of a tile in texture coordinates; they are different values whenever your atlas is not packed with the same number of tiles vertically as horizontally.
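
    As a rough sketch, reusing the names from the question where possible (tileSheetHeight, tileCol and tileRow are hypothetical: you would store the sheet height when loading and pick the tile indices yourself):

    float texStepX = (float)tileWidth  / (float)tileSheetWidth;   // width of one tile in texture coordinates (u)
    float texStepY = (float)tileHeight / (float)tileSheetHeight;  // height of one tile in texture coordinates (v)
    
    float u0 = tileCol * texStepX;  // left edge of the tile in the atlas
    float v0 = tileRow * texStepY;  // top edge of the tile in the atlas
    
    glBegin(GL_QUADS);
    glTexCoord2f(u0,            v0);             glVertex2i(x,             y);               // top-left
    glTexCoord2f(u0 + texStepX, v0);             glVertex2i(x + tileWidth, y);               // top-right
    glTexCoord2f(u0 + texStepX, v0 + texStepY);  glVertex2i(x + tileWidth, y + tileHeight);  // bottom-right
    glTexCoord2f(u0,            v0 + texStepY);  glVertex2i(x,             y + tileHeight);  // bottom-left
    glEnd();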

Tags
c++ opengl rendering tilemap