Python crawler that scrapes proxies and checks their speed: it keeps failing with all kinds of errors. Any guidance?


Here is the code:


import urllib2, re, threading, time, httplib, socket

rawProxyList = []
checkedProxyList = []

# cnproxy hides each port behind javascript like document.write(":"+v+m+a);
# every letter stands for one digit of the real port number
portdicts = {'v': "3", 'm': "4", 'a': "2", 'l': "9", 'q': "0", 'b': "5", 'i': "7", 'w': "6", 'r': "8", 'c': "1"}

targets = []
for i in xrange(1, 9):
    target = r"http://www.cnproxy.com/proxy%d.html" % i
    targets.append(target)

p = re.compile(r'''<tr><td>(.+?)<SCRIPT type=text/javascript>document.write\(":"\+(.+?)\)</SCRIPT></td><td>(.+?)</td><td>.+?</td><td>(.+?)</td></tr>''')

class ProxyGet(threading.Thread):
    def __init__(self,target):
        threading.Thread.__init__(self)
        self.target = target

    def getProxy(self):
        print "Target page: " + self.target
        req = urllib2.urlopen(self.target)
        result = req.read()
        matches = p.findall(result)
        for row in matches:
            ip = row[0]
            # the port is published as obfuscated javascript letters joined by '+';
            # map each letter back to its digit and glue them together
            port = ''.join(map(lambda x: portdicts[x], row[1].split('+')))
            agent = row[2]
            addr = row[3].decode("cp936").encode("utf-8")
            rawProxyList.append([ip, port, addr])

    def run(self):
        self.getProxy()

class ProxyCheck(threading.Thread):
    def __init__(self,proxyList):
        threading.Thread.__init__(self)
        self.proxyList = proxyList
        self.timeout = 5
        self.testUrl = "http://www.baidu.com/"
        self.testStr = "030173"  # part of Baidu's ICP licence number, shown on every baidu.com page

    def checkProxy(self):
        cookies = urllib2.HTTPCookieProcessor()
        for proxy in self.proxyList:
            proxyHandler = urllib2.ProxyHandler({"http": r'http://%s:%s' % (proxy[0], proxy[1])})
            opener = urllib2.build_opener(cookies, proxyHandler)
            opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:22.0) Gecko/20100101 Firefox/22.0')]
            t1 = time.time()
            try:
                req = opener.open(self.testUrl, timeout=self.timeout)
                result = req.read()
                timeused = time.time() - t1
                if result.find(self.testStr) >= 0:
                    checkedProxyList.append((proxy[0], proxy[1], proxy[2], timeused))
            except (urllib2.URLError, httplib.BadStatusLine, socket.timeout):
                # dead or slow proxies end up here; skip them instead of letting
                # the exception propagate and kill the whole thread, which is
                # exactly what the traceback below shows happening
                continue

    def run(self):
        self.checkProxy()

if __name__ == "__main__":
    getThreads = []
    checkThreads = []

    # one fetcher thread per listing page
    for target in targets:
        getThreads.append(ProxyGet(target))
    for t in getThreads:
        t.start()
    for t in getThreads:
        t.join()

    print "Scraped %s proxies in total" % len(rawProxyList)

    # split the raw list into 20 roughly equal slices, one checker thread each
    chunk = (len(rawProxyList) + 19) / 20  # ceiling division
    for i in range(20):
        checkThreads.append(ProxyCheck(rawProxyList[chunk * i:chunk * (i + 1)]))
    for t in checkThreads:
        t.start()
    for t in checkThreads:
        t.join()

    print "%s proxies passed the check" % len(checkedProxyList)


Exception in thread Thread-55:
Traceback (most recent call last):
  File "D:\anzhuang\Anaconda\lib\threading.py", line 810, in __bootstrap_inner
    self.run()
  File "D:/python/crawleProxyAndTest.py", line 94, in run
    self.checkProxy()
  File "D:/python/crawleProxyAndTest.py", line 77, in checkProxy
    req = opener.open(self.testUrl, timeout=self.timeout)
  File "D:\anzhuang\Anaconda\lib\urllib2.py", line 431, in open
    response = self._open(req, data)
  File "D:\anzhuang\Anaconda\lib\urllib2.py", line 449, in _open
    '_open', req)
  File "D:\anzhuang\Anaconda\lib\urllib2.py", line 409, in _call_chain
    result = func(*args)
  File "D:\anzhuang\Anaconda\lib\urllib2.py", line 1227, in http_open
    return self.do_open(httplib.HTTPConnection, req)
  File "D:\anzhuang\Anaconda\lib\urllib2.py", line 1197, in do_open
    raise URLError(err)
URLError: <urlopen error timed out>

For the past few days it hasn't been able to crawl at all. I searched around online, and the explanation that comes up most often is that the target site treats the crawler as an attack and responds with all kinds of errors.
Could anyone point me in the right direction?

python python-crawler web-crawler

Cybunny 9 years, 8 months ago

Timed out. I tested it myself and I can't reach cnproxy directly from where I am, but it opens fine through a foreign proxy, so cnproxy is probably blocked here.
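
For example, a minimal sketch of fetching a listing page through an intermediate proxy (the address 1.2.3.4:8080 is only a placeholder; substitute a proxy that is actually reachable from your network):

import urllib2

# placeholder address, replace with a proxy that works for you
proxy = urllib2.ProxyHandler({"http": "http://1.2.3.4:8080"})
opener = urllib2.build_opener(proxy)
opener.addheaders = [('User-agent', 'Mozilla/5.0')]

# fetch the first listing page through the proxy instead of directly
html = opener.open("http://www.cnproxy.com/proxy1.html", timeout=10).read()
print len(html)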

蕾蒂丶怀特洛克 answered 9 years, 8 months ago

It's best to pause a few seconds after each thread finishes a fetch before continuing. It looks like you're sending raw requests? Better to simulate a browser or go through a proxy; see the sketch below.
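
A minimal sketch of both ideas, assuming a made-up helper name polite_fetch and an arbitrary 2-5 second delay:

import time
import random
import urllib2

def polite_fetch(url):
    # browser-like headers so the request doesn't look like a bare script
    req = urllib2.Request(url, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:22.0) '
                      'Gecko/20100101 Firefox/22.0',
        'Referer': 'http://www.cnproxy.com/',
    })
    html = urllib2.urlopen(req, timeout=10).read()
    # pause a few seconds so the crawl looks human-paced
    time.sleep(random.uniform(2, 5))
    return html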

清廉正直文文丸 answered 9 years, 8 months ago

A few simple possibilities: you're crawling too fast and the site banned your IP outright, or it put up a CAPTCHA or some similar countermeasure. You didn't even paste an error message, so how is anyone supposed to diagnose it? If it's throttling, slowing down and retrying failed fetches helps; see the sketch below.
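
A rough sketch of retrying with a growing delay when the site throttles you; fetch_with_retry and the wait times are just illustrative:

import time
import urllib2

def fetch_with_retry(url, retries=3, backoff=5):
    # try a few times, doubling the wait after each failure
    for attempt in range(retries):
        try:
            return urllib2.urlopen(url, timeout=10).read()
        except urllib2.URLError as e:
            print "attempt %d failed: %s" % (attempt + 1, e)
            time.sleep(backoff * 2 ** attempt)  # waits 5s, 10s, 20s
    return None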

黑社会大流氓 answered 9 years, 8 months ago
