I'm trying to encrypt an 8-byte (64-bit) string with AES-128 using wincrypt.h. Note that the string is smaller than the AES block size, which is 128 bits.
HCRYPTKEY hKey;
if (!CryptDeriveKey(hProv, CALG_AES_128, hHash, 0, &hKey)) {
    dwStatus = GetLastError();
    printf("CryptDeriveKey failed: %x\n", dwStatus);
    CryptReleaseContext(hProv, 0);
    system("pause");
    return dwStatus;
}
printf("[+] CryptDeriveKey Success\n");

const size_t string_size = 8;
BYTE string[8] = { "Foooooo" }; // 7 characters + '\0' terminator = 8 bytes
DWORD out_len = 8;

// Encrypt in place: out_len is the plaintext length, string_size the buffer size
if (!CryptEncrypt(hKey, NULL, TRUE, 0, string, &out_len, string_size)) {
    printf("[-] CryptEncrypt failed\n");
}
printf("%s\n", string);

// Decrypt in place: out_len is the ciphertext length on input
if (!CryptDecrypt(hKey, NULL, TRUE, 0, string, &out_len)) {
    printf("[-] CryptDecrypt failed\n");
}
printf("%s\n", string);
But it doesn't seem to encrypt/decrypt correctly, because I get this output:
[+] CryptDeriveKey Success
Fooooooo
[-] CryptEncrypt failed
ÉÆ╠1╔P█ídhù;$§┴
[-] CryptDecrypt failed
What am I doing wrong? Should &out_len or string_size be 128, since that is the AES block size?
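In case it helps, this is how I currently read the buffer handling from the CryptEncrypt documentation. The 16-byte buffer (one full AES block, to leave room for padding) and the in/out use of the length variable are my guesses, not something I've verified:

// Sketch only, assuming hKey was derived as above.
// Guess: with Final = TRUE the in-place buffer must have room for the
// padded ciphertext, i.e. at least one full 16-byte AES block here.
BYTE buffer[16] = { "Foooooo" };   // 8 bytes of data, 16 bytes of room
DWORD data_len = 8;                // plaintext length on input
DWORD buf_size = sizeof(buffer);   // total buffer size given to CryptEncrypt

if (!CryptEncrypt(hKey, NULL, TRUE, 0, buffer, &data_len, buf_size)) {
    printf("[-] CryptEncrypt failed: %x\n", GetLastError());
}
// data_len should now hold the ciphertext length (16 with padding?)

if (!CryptDecrypt(hKey, NULL, TRUE, 0, buffer, &data_len)) {
    printf("[-] CryptDecrypt failed: %x\n", GetLastError());
}
// data_len should be back to 8 and buffer should hold "Foooooo" again

Is that the intended way to size the buffers, or am I misunderstanding the parameters?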