I coded up this little test case to try to understand Base64 encoding, but I ran into a problem.
See below: why are "stringUtf16" and the "stringDefault" produced by Encoding.Default not equal? One has a length of 4, the other a length of 3... yet they display the same on screen.
Thanks!
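In case it's useful, here is the small diagnostic sketch I've been using alongside the test (it assumes a plain console context with "using System;"); "DumpCodeUnits" is just a helper name I made up, not anything from the framework:

// Diagnostic sketch only: print each UTF-16 code unit of a string so that two
// strings that "display the same" can be compared character by character.
static void DumpCodeUnits(string label, string s)
{
    Console.Write($"{label} ({s.Length} chars):");
    foreach (char c in s)
        Console.Write($" U+{(int)c:X4}");
    Console.WriteLine();
}

// Example usage inside the test:
// DumpCodeUnits("stringUtf16", stringUtf16);
// DumpCodeUnits("stringDefault", stringDefault);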
// Requires: using System; using System.Text;
// plus MSTest ([TestMethod]) and FluentAssertions (.Should()).
[TestMethod]
public void EncodingTest()
{
    // Raw bytes under test: 'c' (0x63), 'è' in Latin-1/Windows-1252 (0xE8), '?' (0x3F).
    var raw = new byte[] { 0x63, 0xE8, 0x3F };

    string stringUtf16 = "cè?"; // .NET strings are UTF-16 internally
    Console.WriteLine("Utf16: " + stringUtf16);
    string encodedBase64 = "Y+g/";
    stringUtf16.Length.Should().Be(4); // passes: the literal reports 4 chars

    //------ Verify encodings
    var stringDefault = Encoding.Default.GetString(raw); // ANSI code page on .NET Framework, UTF-8 on .NET Core/5+
    Console.WriteLine("Default: " + stringDefault);
    stringDefault.Length.Should().Be(3);
    // stringDefault.Should().Be(stringUtf16); // FAILS HERE!

    var stringUnicode = Encoding.Unicode.GetString(raw); // UTF-16LE decoding of the 3 raw bytes
    Console.WriteLine("Unicode: " + stringUnicode);
    stringUnicode.Should().NotBe(stringUtf16);

    var stringAscii = Encoding.ASCII.GetString(raw); // 0xE8 is outside ASCII, so it decodes as '?'
    Console.WriteLine("Ascii: " + stringAscii);
    stringAscii.Should().NotBe(stringUtf16);

    var stringUtf8 = Encoding.UTF8.GetString(raw); // 0xE8 0x3F is not a valid UTF-8 sequence, so it decodes with U+FFFD
    Console.WriteLine("UTF8: " + stringUtf8);
    stringUtf8.Should().NotBe(stringUtf16);

    string stringBase64 = Convert.ToBase64String(raw); // Base64 works on bytes, not on text
    Console.WriteLine("Base64: " + stringBase64);
    stringBase64.Should().NotBe(stringUtf16);
    stringBase64.Should().Be(encodedBase64);

    //------ Verify decodings
    byte[] decodedBytes = Convert.FromBase64String(encodedBase64);
    decodedBytes.Length.Should().Be(3);

    string decodedText = Encoding.Default.GetString(decodedBytes);
    Console.WriteLine(decodedText);
    decodedText.Length.Should().Be(3);
    decodedText.Should().Be(stringDefault);
}
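For comparison, here is a minimal round-trip sketch the way I currently understand it, pinning the text encoding to UTF-8 on both sides instead of relying on Encoding.Default (the variable names are my own):

// Round-trip text through Base64 with an explicit encoding (UTF-8 on both sides),
// so the byte representation is unambiguous.
string original = "cè?";
byte[] utf8Bytes = Encoding.UTF8.GetBytes(original);   // 'è' becomes two bytes in UTF-8
string base64 = Convert.ToBase64String(utf8Bytes);     // bytes -> Base64 text
byte[] decoded = Convert.FromBase64String(base64);     // Base64 text -> the same bytes
string restored = Encoding.UTF8.GetString(decoded);    // bytes -> string
restored.Should().Be(original);                        // holds because the same encoding is used both ways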