declaration of variables and start of program:

Code:
#include<stdio.h>

main()
{
	int fahr, celcius;
	int upper, lower, step;

	lower = 0;
	step = 20;
	upper = 300;
loop one:

Code:
printf("\n");

	for(celcius = lower; celcius <= upper; celcius = celcius + step)
	{
		fahr = 9 *(celcius) / 5 + 32;

		printf("%d\t%d\n", fahr, celcius);
	}
loop two:

Code:
	printf("\n");

	for(celcius = lower; celcius <= upper; celcius = celcius + step)
	{
		fahr = ((9/5) * celcius) + 32;

		printf("%d\t%d\n", fahr, celcius);
	}
the first executes fine and gives good results for converting from F to C.

the second executes fine but both fahr and celcius increment by 20.

the math should give the same results unless i am just not seeing the algebra error (very possible as it is kinda late here)