/// <summary>
/// Downloads the raw bytes of the resource at the given URL, logging
/// status via DoDebug while receiving the data.
/// </summary>
/// <param name="url">The URL of the resource to download.</param>
/// <param name="length">Receives the total number of bytes read.</param>
/// <returns>The downloaded bytes; an empty array on failure.</returns>
public static byte[] GetHttpBytes(string url, out int length)
{
    const int requestTimeout = 5000; // ms, used for connect, read/write, and stream reads
    const int bufferSize = 8192;
    const int threadWait = 10;       // brief yield between reads (friendly to constrained targets)

    length = 0;
    byte[] page = new byte[0];

    try
    {
        WebResponse resp = null;
        try
        {
            // Create an HTTP Web request.
            HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;
            // The cast yields null for non-HTTP(S) schemes; skip silently then.
            if (request != null)
            {
                //request.HttpsAuthentCerts = caCerts;
                // Close the connection after the response; a Keep-Alive
                // stream would leave Read() blocking until timeout below.
                request.KeepAlive = false;
                request.Timeout = requestTimeout;
                request.ReadWriteTimeout = requestTimeout;

                // Get a response from the server.
                try
                {
                    resp = request.GetResponse();
                }
                catch (Exception e)
                {
                    DoDebug("Exception in HttpWebRequest.GetResponse(): " + e.Message);
                }

                // Read the page data from the network response stream.
                if (resp != null)
                {
                    Stream respStream = null;
                    MemoryStream pageStream = null;
                    try
                    {
                        respStream = resp.GetResponseStream();
                        // Allow 5 seconds for each read of the stream.
                        respStream.ReadTimeout = requestTimeout;

                        // Accumulate chunks in a MemoryStream; this is
                        // linear-time, unlike reallocating and copying the
                        // whole result array on every chunk.
                        pageStream = new MemoryStream();
                        byte[] byteData = new byte[bufferSize];
                        int bytesRead;

                        if (resp.ContentLength != -1)
                        {
                            // Known content length: read exactly that many bytes.
                            for (long dataRem = resp.ContentLength; dataRem > 0;)
                            {
                                Thread.Sleep(threadWait);
                                bytesRead = respStream.Read(byteData, 0, byteData.Length);
                                if (bytesRead == 0)
                                {
                                    // Premature end of stream.
                                    DoDebug("Error: Received " +
                                        (resp.ContentLength - dataRem) + " Out of " +
                                        resp.ContentLength);
                                    break;
                                }
                                length += bytesRead;
                                dataRem -= bytesRead;
                                pageStream.Write(byteData, 0, bytesRead);
                            }
                        }
                        else
                        {
                            // Unknown length: read until the server closes the
                            // connection (Read returns 0) or a read timeout
                            // fires — expected for Keep-Alive streams because
                            // the connection isn't terminated.
                            while (true)
                            {
                                try
                                {
                                    Thread.Sleep(threadWait);
                                    bytesRead = respStream.Read(byteData, 0, byteData.Length);
                                }
                                catch (Exception)
                                {
                                    // Treat a read timeout as end-of-data.
                                    bytesRead = 0;
                                }
                                if (bytesRead == 0)
                                {
                                    break;
                                }
                                length += bytesRead;
                                pageStream.Write(byteData, 0, bytesRead);
                            }
                        }
                        page = pageStream.ToArray();
                    }
                    finally
                    {
                        // Close the response stream; the original leaked it.
                        if (pageStream != null)
                        {
                            pageStream.Close();
                        }
                        if (respStream != null)
                        {
                            respStream.Close();
                        }
                    }
                }
            }
        }
        finally
        {
            // Close the response. For Keep-Alive streams, the stream would
            // remain open and be pushed into the unused stream list.
            if (resp != null)
            {
                resp.Close();
            }
        }
    }
    catch (Exception ex)
    {
        // Fixed: the original logged the inner catch's message here,
        // which made the two failure points indistinguishable in the log.
        DoDebug("Exception in GetHttpBytes(): " + ex.Message);
    }
    return page;
}
// Note: tested by downloading files over HTTP with this method; it worked fine.