Thread: segmenation fault when executing

  1. #1
    Registered User
    Join Date
    Sep 2004
    Posts
    26

    segmenation fault when executing

    Code:
    #include <stdio.h>
    #include <string.h>
    #include <stdlib.h>
    #include <sys/types.h>
    #include <unistd.h>
    #include <fcntl.h>
    #include <sys/time.h>
    #include <math.h>
    #include <string.h>
    
    // Sockets includes.
    #include <sys/socket.h>
    #include <netinet/in.h>
    #include <netdb.h>
    
    
    
    // Constants
    #define HOSTLEN 256
    #define BUFLEN 40960
    #define CMDLEN 256
    
    // Cache data structures from websurf.c
    char* cache;
    int cache_size = 0;
    int num_compacts = 0;   // Spread number of compactions over total sites visited.
    
    struct node
    {
    	char webpage[80];
    	int space;
    	int buddy_index;
    	int status;
    };
    
    struct partition
    {
    	struct node *heap;
    	int part_length;
    	int blockSize;	
    };
    
    int cache_space=64;
    struct partition *mem;
    int mem_length;
    
    char **siteList;
    char domain[HOSTLEN];
    int numSites=0;
    
    int websurf(int argc, char **argv);
    int browse(char* host, char* page, int id);
    void createCache();
    int cacheWebpage(int index, int size);
    void readWebpages(char* urls);
    int putWebpage();
    int memorySpace(int sizeOfWebpage);
    int main()
    {	
    	char* surf_arg[9];
    	int x; int page_size; int page_index;
    	srand((unsigned)time(NULL));
    	surf_arg[0]="";
            surf_arg[1]="-f";
            surf_arg[2]="my_url.txt";
            surf_arg[3]="-s";
            surf_arg[4]="1";
            surf_arg[5]="-n";
            surf_arg[6]="1";
            surf_arg[7]="-c";
            surf_arg[8]="1";
    	createCache();
            readWebpages("urls.txt");
    	printf("****************************************************************************\n");
    	for(x=0; x<1; x++)
    	{
    		page_index=putWebpage();
    		page_size=websurf(9,surf_arg);
    		printf("page_size: %d\n",page_size);
    	//	cacheWebpage(page_index, page_size);
    		printf("****************************************************************************\n");
    	}
    	
    	for(x=0; x<mem_length; x++)
            {
                    printf("%d\n",mem[x].part_length);
            }
    
    	return 0;
    }
    
    void createCache()
    {
    	int num_elements=1;
    	int x; int y; int temp_size=cache_space;
    	for(x=1; x<cache_space; x=x*2)
    	{
    		num_elements++;
    	}
    	mem_length=num_elements;
    	struct partition *mem=(struct partition*)calloc(mem_length,sizeof(struct partition));
    	for(x=0; x<mem_length-1; x++)
    	{
    		mem[x].heap=(struct node*)calloc(temp_size,sizeof(struct node));
    		mem[x].blockSize=(int)pow((double)2,(double)x);
    		mem[x].part_length=temp_size;
    		for(y=0; y<mem[x].part_length; y++ )
    		{
    			mem[x].heap[y].buddy_index=y+1;
    			mem[x].heap[y].status=0;
    			y++;
    			mem[x].heap[y].buddy_index=y-1;
    			mem[x].heap[y].status=0;
    		}
    		temp_size=temp_size/2;	
    	}
    	mem[mem_length-1].heap=(struct node*)calloc(1,sizeof(struct node));
    	mem[mem_length-1].blockSize=cache_space;
    	mem[mem_length-1].part_length=1;
    	mem[mem_length-1].heap[0].buddy_index=-1;
    }
    
    int cacheWebpage(int index, int size)
    {
    	int x; int ins;
    	int sizeOnCache=memorySpace(size); 
    	if(sizeOnCache>cache_space)
    		return -1;
    	printf("page: %s, size: %d, sizeoncache: %d\n",siteList[index],size,sizeOnCache);
    	printf("mem_length: %d\n",mem_length);
    }
    
    void readWebpages(char* urls)
    {
    	int s;
    	FILE *stream;
    	stream=fopen(urls,"r+");
    	fscanf(stream,"%i %s\n",&numSites,domain);
    	siteList=(char**)calloc(numSites,sizeof(char*));
    	for(s=0; s<numSites; s++)
    	{
    		siteList[s] = (char*)calloc(HOSTLEN, sizeof(char));
    		fscanf(stream,"%s\n",siteList[s]);
    	}		
    }
    
    int putWebpage()
    {
    	FILE* stream = fopen("my_url.txt","w");
    	int ran_site_index=0;
    	ran_site_index=(rand() % 30);
    	fprintf(stream,"%d %s\n",1,domain);
    	fprintf(stream,"%s\n",siteList[ran_site_index]);
    	fclose(stream);
    	return ran_site_index;
    }
    
    int memorySpace(int sizeOfWebpage)
    {
    	int powerOfTwo=1;
    	while(powerOfTwo<sizeOfWebpage)
    		powerOfTwo*=2;
    	return powerOfTwo;
    }
    
    int websurf(int argc, char **argv) 
    {
    	// Program arguments
    	int s, num_websites = 0, page_index; int page_size;
    	char filename[HOSTLEN];
    	FILE* fp;
    	struct timeval tval;
    	struct timezone tzone;
    	// Web host and pages
    	char host[HOSTLEN];
    	char **pages;
    	int num_pages = 0;
    	// Get program arguments.
    	for (s=1; s<argc; s++) {
    		// Filename
    		if ((strcmp (argv[s],"-f")) == 0) {
    			s++;
    			strcpy (filename, argv[s]);
    		}
    		// Number of compactions
    		else if ((strcmp (argv[s],"-c")) == 0) {
    			s++;
    			num_compacts = atoi (argv[s]);
    		}
    		// Number of websites to randomly visit
    		else if ((strcmp (argv[s],"-n")) == 0) {
    			s++;
    			num_websites = atoi (argv[s]);
    		}
    		// Cache size
    		else if ((strcmp (argv[s],"-s")) == 0) {
    			s++;
    			cache_size = atoi (argv[s]);
    		}
    		// Print usage
    		else {
    			printf ("Usage: ./websurf -f <filename> -s <queue size> -n <# websites to visit> -c <# of compactions>\n");
    			exit (0);
    		}
    	}
    	// Print program arguments
    	printf ("Filename: %s\n", filename);
    	printf ("Number of websites to access = %i\n", num_websites);
    	printf ("Cache size = %i\n", cache_size);
    	printf ("Number of times to compact memory = %i\n", num_compacts);
    	// Open file and store host and page names into pages array.
    	fp = fopen (filename, "r+");
    	if (fp == NULL) {
    		printf ("Error - could not open URLs file\n");
    		exit (-1);
    	}
    	fscanf (fp, "%i %s\n", &num_pages, host);
    	pages = (char**) calloc (num_pages, sizeof(char*));
    	for (s=0; s<num_pages; s++) {
    		pages[s] = (char*) calloc (HOSTLEN, sizeof(char));
    		fscanf (fp, "%s\n", pages[s]);
    		//printf ("URL: %s%s\n", host, pages[s]);
    	}
    	// Seed random number generator
    	gettimeofday (&tval, &tzone);
    	srand48 (tval.tv_usec);
    	// Start browsing randomly in list of available pages.
    	for (s=0; s<num_websites; s++) {
    		page_index = (int)(ceil(drand48() * (float)num_pages)) - 1;  // Get random page index.
    		printf ("Page index %i URL: %s%s\n", page_index, host, pages[page_index]);
    		page_size=browse (host, pages[page_index], page_index);
    	}
    	return page_size; 
    } 
    /**********************************************************************************/ 
    // Main function where host and page names are initialized, and threads are created. 
    int browse (char* host, char* page, int id) {
    	//Socket data structures - Need only 1 socket to browse many pages.
    	int sckt;
    	struct hostent *he;
    	struct sockaddr_in srvaddr;
    	char cmd[CMDLEN];  // Command line to send to web server
    	// Webpage data structures
    	int nread;
    	char buf[BUFLEN];
    	char *body;
    	int temp_fd;
    //	int child;
    //	int result;
    	char tempfilename[16];
    	int webpage_size = 0; // Initialize page size to 0
    	
    	// Child process
    //	int pid;
    	
    	// create TCP connection to web server
    	if ((sckt = socket(AF_INET, SOCK_STREAM, 0)) == -1)
    		perror("socket"), exit(1);
    	if ((he = gethostbyname(host)) == NULL)
    		perror("gethostbyname"), exit(1);
    	//bzero(&srvaddr, sizeof(srvaddr));
    	srvaddr.sin_family = AF_INET;
    	srvaddr.sin_addr.s_addr = *(int *)(he->h_addr_list[0]);
    	srvaddr.sin_port = htons (80);
    	if (connect(sckt, (struct sockaddr *)&srvaddr, sizeof(srvaddr)) == -1)
           		perror("connect"), exit(1);
    	// create GET request
    	sprintf(cmd, "GET %s HTTP/1.1\r\nHost: %s\r\n\r\n", page, host);
    	printf ("cmd: %s\n", cmd);
    	// send GET request
    	if (write(sckt, cmd, strlen(cmd)) != strlen(cmd))
    		perror("GET write"), exit(1);
    	// read response
    	if ((nread = read(sckt, buf, BUFLEN)) < 0)
    		perror("read"), exit(1);
    	if (nread >= BUFLEN)
    		fprintf(stderr, "web: need bigger (HTTP) read buffer\n"), exit(1);
    	buf[nread] = 0;
    	printf("web: nread = %d\n", nread);
    	if (nread < 512) printf("web: response: %s\n", buf);
    	else printf("web: initial chars of response: %.300s\n", buf);
    	// check return code
    	//if (strncmp(buf, "HTTP/1.1 200 OK", 15))
    	// fprintf(stderr, "web: GET failed\n"), exit(1);
    	// bail out if chunked encoding
    	if (strstr(buf, "Transfer-Encoding: chunked")) {
    		fprintf(stderr, "web: can't handle chunked encoding\n");
    		//pthread_exit(NULL);
    		exit (0);
    	}
    	// trim HTTP protocol header from response
    	if ((body = strstr(buf, "\r\n\r\n")) == NULL)
    		fprintf(stderr, "web: can't find end of HTTP header in response\n"), exit(1);
    	body = body + 4;
    	// create & open temp file
    	sprintf(tempfilename, "temp%i.html", id);
    	temp_fd = open (tempfilename, O_CREAT | O_RDWR | O_TRUNC, S_IRWXU);
    	// if server sent only header: read further response & dump in a loop
    	webpage_size = 0; // Initialize page size to 0
    	if (body-buf >= nread) {
    		do {
    			if ((nread = read(sckt, buf, BUFLEN)) < 0)
    				perror("read"), exit(1);
    			else if (nread == 0) break;
    			if (nread >= BUFLEN)
    				fprintf(stderr, "web: need bigger (HTTP) read buffer\n"), exit(1);
    			if (write(temp_fd, buf, nread) != nread)
    				perror("file write"), exit(1);
    			webpage_size += nread; // Accumulate page size.
    		} while (nread > 0);
    		close(temp_fd);
    	}
    	// if server sent header & some body: assume it's the whole page & dump it into file
    	else {
    		webpage_size = strlen(body);
    		if (write(temp_fd, body, strlen(body)) != strlen(body))
    			perror("file write"), exit(1);
    		close(temp_fd);
    	}
    	
    	// Print out size of webpage content
    	//printf ("Webpage size %i \n", (webpage_size/1024)+1);
    	return((webpage_size/1024)+1);
    	// Fork off process to display webpage.
    	/* pid = fork ();
    	printf ("Process id %i\n", pid);
    	if (pid == 0) {
    		// Run Lynx
    		if (execl("/opt/gnu/bin/lynx", "/opt/gnu/bin/lynx", tempfilename, NULL) < 0)
    			perror("execl"), exit(1);
    		// Better to run netscape if possible
    		//if (execl("/usr/local/bin/netscape", "/usr/local/bin/netscape", tempfilename, NULL) < 0)
    		// perror("execl"), exit(1);
    	}
    	else {
    		printf ("After forking child process, parent process will now start writing to the file descriptor (pipe or file).\n");
    	} */ 
    }
    i keep getting a segmentation fault when i try to access the array mem outside of the function createCache(). In the function i allocate all the space and initialize the variables that i need. I created mem as a public variable. When the code executes the for loop at the very end of main it gets a segmentation fault.

  2. #2
    Registered User
    Join Date
    Sep 2004
    Posts
    26
    i got it nevermind

  3. #3
    ---
    Join Date
    May 2004
    Posts
    1,379
    good because there is no way anyone is just going to look through all that code for you.
    next time use a debugger and at least point out a section of code that the problem would possibly be in.

Popular pages Recent additions subscribe to a feed

Similar Threads

  1. Segmentation fault when executing bounded buffer program
    By Megalodon01 in forum C Programming
    Replies: 7
    Last Post: 03-31-2008, 12:19 AM
  2. Segmenation Fault
    By sunoflight77 in forum C++ Programming
    Replies: 2
    Last Post: 05-10-2005, 12:07 AM
  3. C Segmenation fault
    By OrbiT^ in forum C Programming
    Replies: 3
    Last Post: 04-20-2005, 10:58 AM
  4. Locating A Segmentation Fault
    By Stack Overflow in forum C Programming
    Replies: 12
    Last Post: 12-14-2004, 01:33 PM
  5. keep getting Segmentation fault when executing
    By v3dant in forum C Programming
    Replies: 2
    Last Post: 11-06-2004, 11:58 AM