日期:2013-12-10  浏览次数:20410 次

/// <summary>
/// Performs an HTTP GET against <paramref name="TheURL"/> and returns the response
/// body as a string. On any failure the exception message is returned instead,
/// prefixed with "Fail message : " (original contract: this method never throws).
/// </summary>
/// <param name="TheURL">Absolute URL to fetch.</param>
/// <param name="TheProxy">Proxy address (e.g. "http://host:port"). When null or empty
/// the system default proxy is used. NOTE: the original code ignored this parameter
/// entirely; it is now honored.</param>
/// <returns>The response body, or an error description if the request failed.</returns>
private string RequestGet(string TheURL, string TheProxy)
{
    Uri uri = new Uri(TheURL);
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(uri);
    string page;
    try
    {
        request.KeepAlive = false;
        request.ProtocolVersion = HttpVersion.Version10;
        request.Method = "GET";
        request.ContentType = "application/x-www-form-urlencoded";

        // BUG FIX: TheProxy was previously ignored — honor it when supplied.
        // Also, WebProxy.GetDefaultProxy() is obsolete; WebRequest.DefaultWebProxy
        // is the supported replacement for the default-proxy case.
        request.Proxy = string.IsNullOrEmpty(TheProxy)
            ? WebRequest.DefaultWebProxy
            : new WebProxy(TheProxy);

        // Allow automatic redirects from redirect headers, at most 10 hops.
        request.AllowAutoRedirect = true;
        request.MaximumAutomaticRedirections = 10;

        // 60-second request timeout (the original comment claimed 30s but set 60s).
        request.Timeout = (int)new TimeSpan(0, 0, 60).TotalMilliseconds;

        // Identify the crawler with a browser-like user agent.
        request.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)";

        // BUG FIX: the response, stream, and reader were never disposed, leaking
        // the connection. using blocks guarantee disposal even when an exception
        // is thrown mid-read.
        // NOTE(review): Encoding.Default is machine/locale dependent — kept for
        // backward compatibility, but the response's charset header would be a
        // more correct source for the encoding.
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        using (Stream responseStream = response.GetResponseStream())
        using (StreamReader readStream = new StreamReader(responseStream, System.Text.Encoding.Default))
        {
            page = readStream.ReadToEnd();
        }
    }
    catch (Exception ee)
    {
        // Contract preserved: callers receive an error string rather than an exception.
        page = "Fail message : " + ee.Message;
    }
    return page;
}