Question

I want to load an HBITMAP from a resource file and use it as an OpenGL texture. The code I use:

//Load a bitmap resource as a DIB section so its pixel bits can be accessed
HBITMAP hBmp = (HBITMAP) LoadImage(hInstance, 
            MAKEINTRESOURCE(id), IMAGE_BITMAP, 0, 0, LR_CREATEDIBSECTION);
BITMAP BM;
//NOTE(review): per the GetObject docs quoted in the answer, BM.bmBits is
//only filled in for DIB-section bitmaps; otherwise it is not a valid
//pointer -- it should be checked before use
GetObject(hBmp, sizeof(BM), &BM);

glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
GLvoid* bits = BM.bmBits;
//the reported access violation happens here: GL reads width*height*4 bytes
//(GL_BGRA_EXT / GL_UNSIGNED_BYTE) through `bits`, which is presumably not
//pointing at readable pixel data
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, BM.bmWidth,
        BM.bmHeight, 0, GL_BGRA_EXT,
        GL_UNSIGNED_BYTE,
        bits);

But I always get an error from Visual Studio saying that I don't have access to the memory pointed to by bits. The error is raised at the last line of the code:

bits);

I can actually pass a NULL pointer instead of bits without getting an error, and I can display the value of bits in a message box. Does anybody have an idea what is wrong with my code?

Was it helpful?

Solution

From the GetObject documentation:

If hgdiobj is a handle to a bitmap created by any other means, GetObject returns only the width, height, and color format information of the bitmap. You can obtain the bitmap's bit values by calling the GetDIBits or GetBitmapBits function.

In context, "other means" is anything other than CreateDIBSection. You're not using CreateDIBSection, you're using LoadImage. Which category the LR_CREATEDIBSECTION flag puts you into is unclear, but the workaround is clear: Use GetDIBits.

OTHER TIPS

Working solution for GLUT on Windows XP (tcc or lcc compiler)

//Load a standard 24-bit BMP file and upload it as OpenGL texture object "tex".
//Supported resolutions: 64x64, 128x128, 256x256, 512x512.
//Returns the texture name on success, 0 on failure (file missing / not 24 bit).
GLuint LoadTexture(GLuint tex, const char * filename)
{
HBITMAP hBitmap;
BITMAP bm;
HINSTANCE hInstance = GetModuleHandle(NULL);

//raw pixel data is addressed byte by byte, so "unsigned char" is used
unsigned char * data;
unsigned char R, G, B;

//LoadImage() with LR_CREATEDIBSECTION creates a DIB section, whose pixel
//bits are reachable through BITMAP.bmBits after GetObject()
hBitmap = LoadImage(NULL, filename, IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE|LR_CREATEDIBSECTION);
if (hBitmap == NULL)
    {
    printf ("LoadTexture: could not load %s\n", filename);
    return 0;
    }

GetObject(hBitmap, sizeof(BITMAP), &bm);

//get the address of the start of the image data in memory
data = bm.bmBits;

//only proceed with valid, uncompressed 24-bit pixel data
if (data == NULL || bm.bmBitsPixel != 24)
    {
    printf ("LoadTexture: %s is not a 24 bit DIB\n", filename);
    DeleteObject(hBitmap);
    return 0;
    }

//swap R and B in place for correct color display (BMP stores BGR, we
//upload as GL_RGB); walk row by row via bmWidthBytes so the DWORD row
//padding at the end of each scanline is skipped, not treated as pixels
int x, y;
for (y = 0; y < bm.bmHeight; y++)
    {
    unsigned char * row = data + y * bm.bmWidthBytes;
    for (x = 0; x < bm.bmWidth; x++)
        {
        B = row[x*3]; G = row[x*3+1]; R = row[x*3+2];
        row[x*3] = R; row[x*3+1] = G; row[x*3+2] = B;
        }
    }

//print image parameters (bmType/bmWidth/... are LONG -> %ld;
//bmPlanes/bmBitsPixel are WORD, promoted to int -> %d)
printf ("bmType %ld\n", bm.bmType);
printf ("bmWidth %ld\n", bm.bmWidth);
printf ("bmHeight %ld\n", bm.bmHeight);
printf ("bmWidthBytes %ld\n", bm.bmWidthBytes);
printf ("bmPlanes %d\n", bm.bmPlanes);
printf ("bmBitsPixel %d\n", bm.bmBitsPixel);
printf ("bmBits %p\n", bm.bmBits);
printf ("hInstance %p\n", hInstance);

//create texture from loaded bmp image; DIB rows are padded to 4 bytes,
//which matches OpenGL's default GL_UNPACK_ALIGNMENT of 4
glGenTextures( 1, &tex);
glBindTexture( GL_TEXTURE_2D, tex);

//use the symbolic GL_RGB internal format (3 components) instead of the
//legacy numeric "4" -- the source data has no alpha channel
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bm.bmWidth, bm.bmHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, bm.bmBits);

printf ("--- texture %u created ---\n", tex);

//glTexImage2D() copies the pixel data, so the GDI bitmap can be
//released now -- otherwise every call leaks a DIB section
DeleteObject(hBitmap);

//texture filtering
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);

//the original fell off the end of a non-void function (UB if the
//caller uses the result); return the generated texture name
return tex;
}

//One-time GL setup: enable texturing and preload all scene textures.
//(Function body is truncated in this listing -- continuation elided by
//the original author.)
void init(void)
{
//enable texturing
glEnable(GL_TEXTURE_2D);
//load each texture from a 24 bit BMP image file
//(BMPs exported from MS Paint on Windows XP)
LoadTexture(1, "image1.bmp");
LoadTexture(2, "image2.bmp");
LoadTexture(3, "image3.bmp");
LoadTexture(4, "image4.bmp");
LoadTexture(5, "image5.bmp");
LoadTexture(6, "image6.bmp");
. . . . . . . . . . . . . . . . . . . .

Compiling.

Use no Cyrillic characters in file paths, and wrap paths containing spaces in double quotes ("").

tcc C:\tcc\src\box\box.c -o C:\tcc\src\box\box.exe -LC:\tcc\lib -luser32 -lgdi32 -lopengl32 -lglu32 -lglut32 -Wl,-subsystem=console

(without console -Wl,-subsystem=windows)

(screenshot omitted)

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top