I have a client/server application written in C# .NET 2.0. The client/server request/response code has been running for 4 years(!). Recently, on one specific machine, the client cannot connect to the server:
on the code line:
HttpWebResponse response = (HttpWebResponse)request.GetResponse();
WebException: The remote server returned an error: (503) Server Unavailable.
Message="The remote server returned an error: (503) Server Unavailable."
Status System.Net.WebExceptionStatus.ProtocolError
I believe this issue is machine-specific, since I have never seen it on any other machine.
Is there something I need to configure on that machine? What did I miss?
// Send msgString to the server as an HTTP POST (form-urlencoded, UTF-8 body),
// read the full response, and dispatch it to the message handlers.
// On any WebException (e.g. the 503 seen here), mark the connection as lost,
// notify listeners, and rethrow.
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(string.Format("http://{0}:{1}/", server, httpPort));
request.ContentType = "application/x-www-form-urlencoded";
byte[] buffer = Encoding.UTF8.GetBytes(msgString);
request.ContentLength = buffer.Length;
request.Method = "POST";
try {
    // using guarantees the request stream is closed even if Write throws
    // (the original leaked it on any exception before the explicit Close).
    using (Stream requestStream = request.GetRequestStream()) {
        requestStream.Write(buffer, 0, buffer.Length);
    }
    string responseText;
    // Dispose the response, its stream, and the reader on every path.
    // A StreamReader also fixes a latent decoding bug: calling
    // Encoding.UTF8.GetString on each raw read chunk can split a multibyte
    // UTF-8 character across two buffer fills and corrupt the text;
    // StreamReader buffers partial sequences correctly.
    using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
    using (Stream responseStream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(responseStream, Encoding.UTF8)) {
        responseText = reader.ReadToEnd();
    }
    if (!string.IsNullOrEmpty(responseText)) {
        message = HandleMessage(responseText);
    } else { //bug 58
        message = HandleEmptyMessage(msgString);
    }
} catch (WebException we) {
    _connected = false;
    EventsHelper.AsyncFire(ConnectionLost, this, EventArgs.Empty);
    // Keep throwing Exception so existing callers' catch clauses still match,
    // but preserve the WebException (status, server response, stack trace)
    // as InnerException instead of discarding it — essential for diagnosing
    // protocol errors like the 503 above via we.Response.
    throw new Exception(we.Message, we);
}