yup. third thread with the exact same title. stupid opengl.
Anyway, when I draw a normal square polygon in the center of the screen and apply a texture (24-bit BMP) to it, with alpha blending keyed to purple (255,0,255), it looks perfectly proportional and all, but many of the edges are still purple. So I thought the bitmap itself contained those off-purple pixels and went into Photoshop, zooming in around 300% on the areas where the game appeared to show purple — I couldn't find anything but solid purple. Then, once in the game, I took a screenshot of the textured square, opened it in Photoshop, and sampled the colors: the purple there varies in shade, so naturally I can't alpha it out.
How does a perfectly good texture, with no purple other than 255,0,255, suddenly end up with slightly different shades? It's like OpenGL was anti-aliasing it for me, or the texture was stretched out of proportion and OpenGL was trying to correct it.
I can't find anything at all that would change the appearance.
Here's my code for importing the texture from a resource (LoadGLTextures):
Code:
int LoadGLTextures() // Load Bitmaps And Convert To Textures
{
int Status=FALSE; // Status Indicator
HBITMAP bitmap = LoadBitmap(GetModuleHandle(NULL),
MAKEINTRESOURCE(102));
BITMAPINFO info;
BITMAPINFOHEADER header;
header.biSize = sizeof(BITMAPINFOHEADER);
header.biWidth = 256;
header.biHeight = 256;
header.biPlanes = 1;
header.biBitCount = 24;
header.biCompression = BI_RGB;
header.biSizeImage = 0;
header.biClrUsed = 0;
header.biClrImportant = 0;
info.bmiHeader = header;
info.bmiColors->rgbRed = NULL;
info.bmiColors->rgbGreen = NULL;
info.bmiColors->rgbBlue = NULL;
info.bmiColors->rgbReserved = NULL;
const int size = 256*256*3;
unsigned char data[size];
HDC hdc = GetDC(hWnd);
GetDIBits(hdc, bitmap, 0, 256, &data, &info, DIB_RGB_COLORS);
ReleaseDC(hWnd, hdc);
unsigned char buff;
for(int i=0; i<256*256; i++)
{
buff = data[i*3];
if(i>=3)
{
data[i*3] = data[i*3+2];
data[i*3+2] = buff;
}
}
BYTE * data2= new BYTE[4*256*256];
int temp=0,temp2=0;
for (int n=0; n<256*256; n++)
{
BYTE alpha = 255;
if ((data[temp2] ==255) && data[temp2+1]==0 && (data[temp2+2] == 255) )
//|| ((data[temp2] <= data[temp2+2]+20 && data[temp2] >= data[temp2+2]-20) && (data[temp2+1] >= 0 && data[temp2+1]<= 20)))
{
alpha =0;
}
else
alpha = 255;
data2[temp++] = data[temp2++];
data2[temp++] = data[temp2++];
data2[temp++] = data[temp2++];
data2[temp++] = alpha;
}
glGenTextures(1, &m_texture[0]);
glBindTexture(GL_TEXTURE_2D, m_texture[0]);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER, GL_LINEAR_MIPMAP_NEAREST);
gluBuild2DMipmaps(GL_TEXTURE_2D, 4, 256, 256,
GL_RGBA, GL_UNSIGNED_BYTE, data2);
delete[] data2;
//...
return Status;
}
Notice my attempt at alphaing out the different shades of purple (the commented-out condition). Obviously that couldn't work, since the alpha keying happens before OpenGL filters the texture for display — the off-purple shades are produced later, at draw time.
and here's my drawing of the polygon(dont think this matters but just incase):
Code:
// Draw the colour-keyed textured quad with standard "over" alpha blending.
glLoadIdentity();
glBindTexture(GL_TEXTURE_2D, m_texture[0]);
// NOTE(review): 1.4 is a double literal (others are float); presumably wb animates the position — confirm.
glTranslatef(1.4+(wb), -1.2f-(wb), -6.0f+(wb));
// glEnable(GL_ALPHA_TEST);
// glAlphaFunc(GL_LESS,0); // NOTE(review): as written this would discard nothing (alpha is never < 0); GL_GREATER with ~0.5f is the usual cutout setting.
glEnable(GL_BLEND);
// src*alpha + dst*(1-alpha): texels keyed to alpha=0 become invisible,
// but their RGB still participates in GL_LINEAR texture filtering.
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBegin(GL_QUADS);
// Quad spans x: -1..1.5 (width 2.5) by y: -1..1 (height 2) — not square,
// so the square texture is stretched horizontally when mapped 0..1 each way.
glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f); // Bottom Left Of The Texture and Quad
glTexCoord2f(1.0f, 0.0f); glVertex3f( 1.5f, -1.0f, 1.0f); // Bottom Right Of The Texture and Quad
glTexCoord2f(1.0f, 1.0f); glVertex3f( 1.5f, 1.0f, 1.0f); // Top Right Of The Texture and Quad
glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 1.0f);
glEnd();
helpez-moi por favor
thanks!