I am trying to use CryptoJS to get the checksum of large files (bigger than 500MB) so the browser doesn't crash. I am already using a Web Worker with chunking. So, I am trying to progressively feed each chunk, as I iterate through the chunks, into CryptoJS's update to build the checksum. However, it's still returning the wrong checksum at the end when I finalize. It seems like it's only returning the checksum of the last chunk, not the checksum of all the chunks. Can you let me know what I am doing wrong?
Also, I don't have to use CryptoJS — I find it to be slow — but it seems to be the only library that can handle progressive hashing.
// Progressive SHA-256 checksum of a large file inside a Web Worker.
// A single hasher is created ONCE and fed every chunk via update();
// finalize() then yields the digest over the entire file.
var sha256 = CryptoJS.algo.SHA256.create(),
    sha256Update,
    checksum = [],
    chunker = function(workerData) {
        var file = workerData.fileBlob,
            totalFileSize = file.size,
            chunkLength = 3145728, // 3 MiB per slice
            start = 0,
            stop = chunkLength,
            readSlicer,
            fileSlicer,
            chunk,
            chunkUint8;
        // Advance the [start, stop) window; hash the next block or finish.
        var checker = function() {
            start = stop;
            stop += chunkLength;
            if (start >= totalFileSize) {
                console.log("Done reading file!", stop, totalFileSize);
                // FIX: finalize() returns the digest WordArray — use its
                // return value instead of poking the private _hash field.
                var hash = sha256.finalize();
                console.log("CheckSum : ", hash.toString(CryptoJS.enc.Hex));
                return;
            }
            readBlock(start, chunkLength, file);
        };
        var readBlock = function(start, chunkLength, file) {
            readSlicer = new FileReaderSync();
            // FIX: Blob.slice()'s end index is already EXCLUSIVE. The
            // original `stop + 1` made every chunk overlap the next one by
            // a byte, so that byte was hashed twice and the final digest
            // was wrong. Slice exactly [start, stop).
            fileSlicer = file.slice(start, stop);
            chunk = readSlicer.readAsArrayBuffer(fileSlicer);
            chunkUint8 = new Uint8Array(chunk);
            var wordArr = CryptoJS.lib.WordArray.create(chunkUint8);
            // update() folds this chunk into the running hash state and
            // returns the hasher itself (kept for parity with the original;
            // the pushed references are all the same hasher object).
            sha256Update = sha256.update(wordArr);
            checksum.push(sha256Update);
            checker();
        };
        readBlock(start, chunkLength, file);
    };