I have some example code for the CRC16-CCITT algorithm written in C++, and I need help converting it to Python.
Example C++ code:
#include <iostream>
using namespace std;

unsigned short calculateCRC(unsigned char data[], unsigned int length)
{
    unsigned int i;
    unsigned short crc = 0;
    for (i = 0; i < length; i++) {
        crc = (unsigned char)(crc >> 8) | (crc << 8);
        crc ^= data[i];
        crc ^= (unsigned char)(crc & 0xff) >> 4;
        crc ^= crc << 12;
        crc ^= (crc & 0x00ff) << 5;
    }
    return crc;
}

int main()
{
    unsigned int length;
    length = 15;
    unsigned char data[length] = {0x01, 0x08, 0x00, 0x93, 0x50, 0x2e, 0x42, 0x83,
                                  0x3e, 0xf1, 0x3f, 0x48, 0xb5, 0x04, 0xbb};
    unsigned int crc;
    crc = calculateCRC(data, length);
    cout << std::hex << crc << '\n';
}
This code prints 9288 (already hexadecimal, because of std::hex), which is the correct checksum.
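For reference, I believe this routine is the table-less form of CRC-16/CCITT with initial value 0x0000 (the XMODEM variant: polynomial 0x1021, processed MSB first). That identification is an assumption on my part, but a plain bit-at-a-time implementation of that variant gives the same 0x9288 for this input:

#!/usr/bin/env python3

def crc16_bitwise(data, poly=0x1021, crc=0x0000):
    # straightforward bit-at-a-time CRC-16, MSB first; the & 0xFFFF
    # mask stands in for the implicit unsigned short truncation in C++
    for byte in data:
        crc ^= byte << 8
        for _ in range(8):
            if crc & 0x8000:
                crc = ((crc << 1) ^ poly) & 0xFFFF
            else:
                crc = (crc << 1) & 0xFFFF
    return crc

data = [0x01, 0x08, 0x00, 0x93, 0x50, 0x2e, 0x42, 0x83,
        0x3e, 0xf1, 0x3f, 0x48, 0xb5, 0x04, 0xbb]
print(hex(crc16_bitwise(data)))  # prints 0x9288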
I tried the following in Python:
#!/usr/bin/env python3

def calculateCRC(data):
    crc = 0
    for dat in data:
        crc = (crc >> 8) or (crc << 8)
        crc ^= dat
        crc ^= (crc and 0xff) >> 4
        crc ^= crc << 12
        crc ^= (crc and 0x00ff) << 5
    crc = hex(crc)
    return crc
data = [0x01,0x08,0x00,0x93,0x50,0x2e,0x42,0x83,0x3e,0xf1,0x3f,0x48,0xb5,0x04,0xbb]
print(calculateCRC(data))
This outputs 0xf988334b0799be2081, which is not even a 16-bit value, let alone the expected 0x9288.
Could you please help me understand what I am doing wrong? Thank you.
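In case it helps to narrow things down, my current suspicion is that Python's or and and are logical operators rather than the bitwise | and & that the C++ code uses, and that Python's unbounded integers never truncate the shifted values to 16 bits the way an unsigned short does. If that is right, would the following be a faithful port, with an explicit & 0xFFFF everywhere the C++ code relies on unsigned short truncation?

def calculateCRC(data):
    crc = 0
    for dat in data:
        # bitwise | and & instead of 'or' and 'and'; mask to 16 bits,
        # since Python integers never overflow like unsigned short
        crc = ((crc >> 8) | (crc << 8)) & 0xFFFF
        crc ^= dat
        crc ^= (crc & 0xFF) >> 4
        crc ^= (crc << 12) & 0xFFFF
        crc ^= ((crc & 0x00FF) << 5) & 0xFFFF
    return crc

print(hex(calculateCRC(data)))

Or am I missing something else?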