What's wrong with the following code to compute a Maclaurin series?

Code:

#include <stdio.h>
#include <math.h>

#define TRUE 1
#define FALSE 0

int main() {
    int x, n, i;
    double approx = 0;

    /* Write your code here */
    scanf("%d", &x);
    scanf("%d", &n);
    printf("%d", x);
    printf("%d", n);

    while (n <= 1) {
        approx = pow(x, n);
        approx += approx;
        n++;
    }

    while (n > 1) {
        i = n;
        while (i > 0) {
            approx = (pow(x, n) * 1.0) / (i);
            approx += approx;
            i--;
        }
    }

    printf("%.10lf\n", approx);
    return 0;
}
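For comparison, here is a minimal sketch of what I think the exercise is after, assuming the target is the Maclaurin series for e^x truncated after the x^n/n! term, i.e. 1 + x + x^2/2! + ... + x^n/n! (the posted code never names the function, so that is a guess). It accumulates one term per pass instead of overwriting approx each time (approx = ...; approx += approx; only doubles the last value), and it always terminates, whereas the second while loop above never changes n, so it spins forever once n > 1:

Code:

#include <stdio.h>

int main(void) {
    int x, n;
    double approx = 1.0;  /* k = 0 term: x^0 / 0! = 1 */
    double term = 1.0;    /* current term x^k / k! */

    scanf("%d", &x);
    scanf("%d", &n);

    /* Each term is the previous one times x/k, so there is
       no pow() call and no separate factorial to overflow. */
    for (int k = 1; k <= n; k++) {
        term *= (double)x / k;
        approx += term;
    }

    printf("%.10lf\n", approx);
    return 0;
}

The incremental update term *= x / k is the usual way to build x^k / k!: computing pow(x, k) and a factorial separately, as the posted code attempts, repeats work and overflows for even modest n.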