Hello, I'm trying to write a simple "file encryption" type program, merely to teach myself a little bit about programming in C, but I keep getting strange errors in the output files from my program. What my program does is take a hexadecimal key on the command line (say, "F0BD5") and then reduce the ASCII values to actual binary values, (so 'F' (0x46) turns into 0x0F), and then "roll" each byte depending on the key value. For example, if the byte is 01101010b, and the key was 0x03, the original byte would roll to the right three places, so the output is 01001101b.The least significant three bits would be appended to the top. However, I'm having trouble with my "rolling" function. Here's my source code:
Code:
#include <stdio.h>
#include <strings.h>
#define BLOCK 4096
#define DEBUG 1
int convertkey(void);
char encode(char, char);
int engine(char *);
char getkey(void);
char *key, *infilename, *outfilename,
inbuffer[BLOCK], outbuffer[BLOCK];
int kp=0, maxkp;
FILE *infile, *outfile;
int main(int argc, char *argv[]) {
if(argc!=5) {
fprintf(stderr, "Need operation, key, input, and output files!\n");
return 1;
}
key=argv[2];
if((infile=fopen(argv[3], "rb"))==NULL) {
fprintf(stderr, "Can't open input file.\n");
return 2;
}
if((outfile=fopen(argv[4], "wb+"))==NULL) {
fprintf(stderr, "Can't open output file.\n");
return 3;
}
if((maxkp=convertkey())==0) {
fprintf(stderr, "Invalid hexadecimal key.\n");
return 4;
}
if(engine(argv[1])!=0) { //send argv1 so the engine knows if encoding or decoding
fprintf(stderr, "Write error in the engine.\n");
return 5;
}
fclose(infile);
fclose(outfile);
return 0;
}
int convertkey(void) {
int i;
for(i=0; i<strlen(key); i++) {
if((key[i]>='0')&&(key[i]<='9')) {
key[i]-='0';
continue;
}
if((key[i]>='A')&&(key[i]<='F')) {
key[i]-='A';
continue;
}
if((key[i]>='a')&&(key[i]<='f')) {
key[i]-='a';
continue;
}
else {
key[i]=0;
break;
}
}
return i; //so the length doesn't need to be rechecked.
}
char getkey(void) {
char retval;
if(kp==maxkp) kp=0;
retval=key[kp];
kp++;
return retval;
}
int engine(char *operation) {
int i, iolen;
while((iolen=fread(inbuffer, 1, BLOCK, infile))!=0) {
for(i=0; i<iolen; i++) {
if(operation[0]=='e') outbuffer[i]=encode(getkey(), inbuffer[i]);
else outbuffer[i]=encode((8-getkey()), inbuffer[i]);
}
if(fwrite(outbuffer, 1, iolen, outfile)!=iolen) return 1;
}
return 0;
}
char encode(char enckey, char encbyte) {
char usekey, retval;
usekey=enckey&0x07;
retval=((encbyte>>usekey)|(encbyte<<(8-usekey)));
if(enckey&0x08) retval=~retval;
return retval;
}
To see what was happening, I made a file with the values from 0x00 to 0xFF in it, and "encrypted" it with a key of 0x01 (should roll to the right one, and place the last bit up top), but when I do this, as soon as the input value is 0x80 or above, the output always has the 0x80 bit set to 1.
Ultimately, after encrypting and then decrypting, half the bytes come back correct, and the others are 0xFF. I just don't get why this is happening.