I am having problems decoding UTF-8 strings in POST data when using the Node.JS web server.
See this complete testcase:
require("http").createServer(function(request, response) {
if (request.method != "POST") {
response.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
response.end('<html>'+
'<head><meta http-equiv="content-type" content="text/html; charset=utf-8"></head>'+
'<body>'+
'<form method="post">'+
'<input name="test" value="Grüße!"><input type="submit">'+
'</form></body></html>');
} else {
console.log("CONTENT TYPE=",request.headers['content-type']);
var body="";
request.on('data', function (data) {
body += data;
});
request.on('end', function () {
console.log("POST BODY=",body);
response.writeHead(200, {'Content-Type': 'text/plain; charset=utf-8'});
response.end("POST DATA:\n"+body+"\n---\nUNESCAPED:\n"+unescape(body)+
"\n---\nHARDCODED: Grüße!");
});
}
}).listen(11180);
This is a standalone web server that listens on port 11180 and serves an HTML page with a simple form containing an input field with special characters. POSTing the form back to the server echoes its contents in a plain-text response.
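For reference, the POST can also be reproduced without a browser with a small client like this sketch (the body is the exact percent-encoded value shown in the output below):

    // Sketch of a standalone client that sends the same percent-encoded
    // form data as the browsers do, against the server above on port 11180
    var http = require('http');
    var data = 'test=Gr%C3%BC%C3%9Fe%21';
    var req = http.request({
        host: 'localhost',
        port: 11180,
        method: 'POST',
        path: '/',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': data.length
        }
    }, function (res) {
        res.setEncoding('utf8');
        res.on('data', function (chunk) { process.stdout.write(chunk); });
    });
    req.write(data);
    req.end();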
My problem is that the special characters are not displayed properly, either on the console or in the browser. This is what I see with both Firefox and IE:
POST DATA:
test=Gr%C3%BC%C3%9Fe%21
---
UNESCAPED:
test=GrüÃe!
---
HARDCODED: Grüße!
The last line is the hardcoded string Grüße!, which should match the value of the input field (to verify that this is not a display problem). Obviously the POST data is not being interpreted as UTF-8. The same problem happens when using require('querystring') to break the data into fields, roughly as sketched below.
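Roughly what I tried (a sketch, not my exact code), inside the 'end' handler of the testcase above:

    var querystring = require('querystring');
    // body is the raw POST data collected in the testcase above
    var fields = querystring.parse(body);
    console.log("TEST FIELD=", fields.test);  // shows the same garbled characters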
Any clue?
Using Node.JS v0.4.11 on Debian Linux 4; the source code is saved in UTF-8.
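To rule out a source-encoding issue independently of the terminal, the code units of the hardcoded literal can be dumped like this (with the source correctly read as UTF-8, it should print 47 72 fc df 65 21):

    // Dump the code units of the hardcoded literal; with the source
    // correctly loaded as UTF-8 this prints: 47 72 fc df 65 21
    var s = "Grüße!";
    var codes = [];
    for (var i = 0; i < s.length; i++) {
        codes.push(s.charCodeAt(i).toString(16));
    }
    console.log(codes.join(" "));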