matsp: Yeah, I always compile with -Wall, and that was the only warning I was getting. My current code does what tabstop suggested:
Code:
#include "screen.h"
#include "vector.h"
SDL_Surface *screen = NULL;
SDL_Event event;
int main()
{
screen = SDL_SetVideoMode(GRAPHICS_W, GRAPHICS_H, 32, SDL_SWSURFACE);
long buffer[SCREEN_BUFFER_SIZE];
for(int i=0; i<SCREEN_BUFFER_SIZE; i++) buffer[i]=0xFFFFFFFF;
int quit = 0;
while(! quit)
{
blit_and_fade(&buffer, &screen, SCREEN_BUFFER_SIZE, 0.9);
SDL_Flip(screen);
while(SDL_PollEvent(&event)) if(event.type==SDL_KEYDOWN) quit = 1;
SDL_Delay(20);
}
return 0;
}
and:
Code:
void blit_and_fade(long (*src)[], SDL_Surface **dst, Uint32 size, float mod)
{
    Uint32 *pix = (Uint32 *)(*dst)->pixels;
    Uint32 *buf = (Uint32 *)(*src);
    Uint32 r_b, g;
    int fade = (int)(mod * 256.0);   /* 0.9 -> 230, an 8.8 fixed-point multiplier */

    for(Uint32 i = 0; i < size; i++, pix++, buf++)
    {
        *pix = *buf;   /* show the current value on screen... */
        /* ...then fade the buffer for the next frame: red and blue are
           scaled in one multiply (their products can't collide), green
           in another; the alpha byte is dropped */
        r_b = (((*buf & 0x00FF00FF) * fade) & 0xFF00FF00) >> 8;
        g   = (((*buf & 0x0000FF00) * fade) & 0x00FF0000) >> 8;
        *buf = r_b | g;
    }
}
This compiles and runs without errors or warnings.
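In case it helps anyone reading along, here's a standalone check of the fade math (plain C, no SDL, and plain unsigned int so it doesn't touch the Uint32 question below): a full-white pixel at mod = 0.9 should come back as 00E5E5E5, i.e. every channel scaled from 255 down to 229.
Code:
#include <stdio.h>

int main(void)
{
    unsigned int buf = 0xFFFFFFFF;      /* opaque white, as in the main loop */
    int fade = (int)(0.9 * 256.0);      /* 230 */

    unsigned int r_b = (((buf & 0x00FF00FF) * fade) & 0xFF00FF00) >> 8;
    unsigned int g   = (((buf & 0x0000FF00) * fade) & 0x00FF0000) >> 8;

    printf("%08X\n", r_b | g);          /* prints 00E5E5E5: 255 * 230 / 256 = 229 */
    return 0;
}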
In vector.h I had these defined:
Code:
#ifndef Uint8
#define Uint8 unsigned char
#endif
#ifndef Uint16
#define Uint16 unsigned short
#endif
#ifndef Uint32
#define Uint32 unsigned long
#endif
but the Uint types are also defined in SDL. Could this be causing a problem?
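To show what I mean, here's a standalone sketch of the clash I'm worried about. The typedef is my stand-in for SDL's own definition; as far as I can tell SDL declares the Uints as typedefs, not macros, so the #ifndef guard never sees them:
Code:
#include <stdio.h>

typedef unsigned int SDL_style_Uint32;  /* stand-in for SDL's typedef */

#ifndef Uint32      /* true even after including SDL.h: no Uint32 macro exists */
#define Uint32 unsigned long
#endif

int main(void)
{
    /* On an LP64 system these can disagree (typically 4 vs 8 bytes), so
       pointer casts mixing the two walk the pixel buffer at the wrong
       stride. With the reverse include order, the macro would rewrite
       SDL's own typedef line into a syntax error instead. */
    printf("%zu vs %zu\n", sizeof(SDL_style_Uint32), sizeof(Uint32));
    return 0;
}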