Using HttpWebRequest in C# to fetch web page content through an authenticating proxy server

Date: 2022-12-21 18:25:38

Source: http://www.cnblogs.com/wenanry/archive/2009/02/13/1390160.html

For intranet users, or users who access the Internet through a proxy server.

using System;
using System.IO;
using System.Net;
using System.Text;
using System.Windows.Forms;

public string get_html()
{
    string urlStr = "http://www.domain.com";                               // URL to fetch
    HttpWebRequest hwr = (HttpWebRequest)WebRequest.Create(urlStr);        // create the HttpWebRequest object
    hwr.Timeout = 60000;                                                   // request timeout in milliseconds

    WebProxy proxy = new WebProxy();                                       // create the proxy object
    proxy.Address = new Uri("http://proxy.domain.com:3128");               // proxy server:port
    proxy.Credentials = new NetworkCredential("f3210316", "6978233");      // proxy user name and password
    hwr.UseDefaultCredentials = true;                                      // send the current user's credentials to the target site (proxy auth itself is handled by proxy.Credentials)
    hwr.Proxy = proxy;                                                     // attach the proxy to the request

    HttpWebResponse hwrs = null;
    try
    {
        hwrs = (HttpWebResponse)hwr.GetResponse();                         // get the response
    }
    catch
    {
        MessageBox.Show("Unable to connect to the proxy!");
        return null;
    }

    // check the HTTP status code
    if (hwrs.StatusCode != HttpStatusCode.OK)
    {
        MessageBox.Show("Request failed!");
        hwrs.Close();
        return null;
    }

    StringBuilder content = new StringBuilder();
    using (Stream s = hwrs.GetResponseStream())                            // get the response stream
    using (StreamReader sr = new StreamReader(s, Encoding.UTF8))           // read the stream as UTF-8
    {
        while (sr.Peek() != -1)                                            // read one line at a time until
        {                                                                  // no more data is available
            content.Append(sr.ReadLine() + "\r\n");
        }
    }

    // dump all response headers (including any cookies set by the server)
    //for (int ii = 0; ii < hwrs.Headers.Count; ii++)
    //{
    //    MessageBox.Show(hwrs.Headers.GetKey(ii) + ":" + hwrs.Headers[ii]);
    //}

    hwrs.Close();
    return content.ToString();                                             // return the fetched page content
}
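A minimal usage sketch, assuming get_html() lives in a Windows Forms form (it calls MessageBox.Show on failure); the button and text box names below are hypothetical:

// Hypothetical button handler on the same form as get_html().
private void btnFetch_Click(object sender, EventArgs e)
{
    string html = get_html();           // fetch the page through the proxy
    if (html != null)
    {
        txtResult.Text = html;          // txtResult is a hypothetical multiline TextBox
    }
}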


As you may know, HttpWebRequest can be used to fetch web pages over HTTP, but for users on an intranet who go online through a proxy, a direct request simply will not work.
Is there a way around this?
Of course there is; see the code above.

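For comparison only (not part of the original article): a minimal sketch of the same proxy-authenticated fetch using the newer HttpClient API, assuming .NET Framework 4.5+ or .NET Core, and reusing the placeholder proxy address and credentials from above.

using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

class ProxyFetchSketch
{
    static async Task Main()
    {
        // Placeholder proxy address and credentials, taken from the example above.
        var proxy = new WebProxy("http://proxy.domain.com:3128")
        {
            Credentials = new NetworkCredential("f3210316", "6978233")
        };

        // Route the request through the authenticated proxy.
        var handler = new HttpClientHandler { Proxy = proxy, UseProxy = true };

        using (var client = new HttpClient(handler))
        {
            client.Timeout = TimeSpan.FromSeconds(60);                          // 60-second timeout, like the example above
            string html = await client.GetStringAsync("http://www.domain.com"); // throws on connection or HTTP errors
            Console.WriteLine(html.Substring(0, Math.Min(200, html.Length)));   // print the first part of the page
        }
    }
}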