# encoding=utf-8
# author: walker
# date: 2016-04-14
# summary: Concurrently validate proxy availability with a thread pool.

import os
import sys
import time
import traceback  # was missing: check() calls traceback.format_exc()

import requests
from concurrent import futures

cur_dir_fullpath = os.path.dirname(os.path.abspath(__file__))

# Headers sent with every probe request (old IE UA string, kept verbatim).
headers = {
    'accept': '*/*',
    'user-agent': 'mozilla/4.0 (compatible; msie 8.0; windows nt 6.1; wow64; trident/4.0; slcc2; .net clr 2.0.50727; .net clr 3.5.30729; .net clr 3.0.30729; .net4.0c; .net4.0e)',
}


def check(desturl, proxy, feature):
    """Validate a single HTTP proxy.

    Fetches *desturl* through *proxy* (host:port) and looks for the
    *feature* substring in the response body.

    Returns:
        The proxy string if it works, otherwise an empty string.
    """
    proxies = {'http': 'http://' + proxy}
    r = None       # pre-declare so `finally` can always test it
    exmsg = None
    try:
        r = requests.get(url=desturl, headers=headers,
                         proxies=proxies, timeout=3)
    except Exception:
        # Any failure (timeout, refused, DNS, bad proxy) means the proxy
        # is unusable; narrow to Exception so Ctrl-C still propagates.
        exmsg = '* ' + traceback.format_exc()
        # print(exmsg)
    finally:
        if r is not None:
            r.close()
    if exmsg:
        return ''
    if r.status_code != 200:
        return ''
    if feature not in r.text:
        return ''
    return proxy


def getvalidproxypool(rawproxypool, desturl, feature):
    """Check every proxy in *rawproxypool* (set/list) concurrently.

    Returns:
        list of proxies that passed ``check``.
    """
    validproxylist = []  # valid proxies collected as futures complete
    # `with` guarantees the pool's worker threads are shut down even on error.
    with futures.ThreadPoolExecutor(8) as pool:
        futurelist = [pool.submit(check, desturl, proxy, feature)
                      for proxy in rawproxypool]
        print('\n submit done, waiting for responses\n')
        for future in futures.as_completed(futurelist):
            proxy = future.result()
            print('proxy:' + proxy)
            if proxy:  # non-empty result means the proxy is valid
                validproxylist.append(proxy)
    print('validproxylist size:' + str(len(validproxylist)))
    return validproxylist


def getrawproxypool():
    """Fetch the raw (unvalidated) proxy pool.

    Placeholder: populate the set by whatever means applies
    (file, API, scraping, ...).
    """
    rawproxypool = set()
    # ... acquire raw proxies here ...
    return rawproxypool


if __name__ == "__main__":
    rawproxypool = getrawproxypool()
    desturl = 'http://...'   # target URL to reach through each proxy
    feature = 'xxx'          # feature string expected in the target page
    validproxypool = getvalidproxypool(rawproxypool, desturl, feature)
更多 Python3 并发检验代理池地址的实现方法。
