I am trying to code a Huffman string encoding algorithm.
My solution works this way:
- since every letter in the string have a special binary code associated to it, search the binary tree and when a letter is found, add it to the map with binary code. (where I went wrong is here)
- iterate on the string, and for each letter, associate the value associated with the key of the letter of the map.
I don't have the tree printed somewhere, even tough it could help you help me, but here is what I get for the string abracadabra, and what I should get:
Correct Code : 000010000110110101111101011000111110110100111011101100101101110000110000110111100101111101010010
What I get :
00001000111011010110101111010101100011101011010
Here is my code:
#include <algorithm>
#include <map>
string codes = "";
void getMapCharBinaryCode(Node root, string &prefix, map <char, string> &m){
if(!root) return;
if(root->value){
if(!m.count(root->value)){
m[root->value] = prefix;
prefix = "";
}
}
if(root->leftChild){
getMapCharBinaryCode(root->leftChild, prefix += "0",m);
}
if(root->rightChild){
getMapCharBinaryCode(root->rightChild, prefix += "1",m);
}
}
string encode(string text, Node tree){
// text is "abracadabra"
// create map for each char -> binary code
map<char, string> m;
string prefix = "";
getMapCharBinaryCode(tree, prefix, m);
// iterate on text and assign each letter with binary code from map
for(int i = 0; i < text.size(); i++) {
codes += m[text[i]];
}
return codes;
}