I've been teaching myself web scraping lately and digging into various anti-ban and anti-anti-crawler measures. I was planning to collect a pile of user-agent strings myself for building random headers, then stumbled on a GitHub library dedicated to serving random UAs, so I'm noting it down here.
Library: fake-useragent
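Before wiring it into Scrapy, it is worth trying the library on its own. A minimal sketch (assumes the package is installed, e.g. via pip install fake-useragent):

from fake_useragent import UserAgent

ua = UserAgent()
print(ua.random)   # a random User-Agent from any browser family
print(ua.chrome)   # a random Chrome User-Agent
print(ua.firefox)  # a random Firefox User-Agent

Each attribute access draws a fresh string, so reading ua.random repeatedly is exactly what gives us the rotation.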
settings.py
DOWNLOADER_MIDDLEWARES = {
    # disable the built-in UserAgentMiddleware so it does not overwrite our header
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'ArticleSpider.middlewares.RandomUserAgentMiddleware': 543,
}
RANDOM_UA_TYPE = "random"
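Note that RANDOM_UA_TYPE is not a built-in Scrapy setting; it is only read by the custom middleware below. "random" draws from any browser family, while a value such as "chrome" or "firefox" (matching the attribute names of UserAgent) pins the rotation to one browser. Disabling the stock UserAgentMiddleware (the None entry) keeps Scrapy's default User-Agent from competing with ours.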
middlewares.py
from fake_useragent import UserAgent

#####################################
# scrapy's auto-generated code goes here #
#####################################

class RandomUserAgentMiddleware(object):
    # set a random User-Agent on every outgoing request
    def __init__(self, crawler):
        super(RandomUserAgentMiddleware, self).__init__()
        self.ua = UserAgent()
        # which UserAgent attribute to read: "random", "chrome", "firefox", ...
        self.ua_type = crawler.settings.get("RANDOM_UA_TYPE", "random")

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this hook with the crawler, giving us access to settings
        return cls(crawler)

    def process_request(self, request, spider):
        def get_ua():
            return getattr(self.ua, self.ua_type)
        request.headers.setdefault('User-Agent', get_ua())
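To check that rotation actually happens, a quick throwaway spider can echo the header back via httpbin.org. This spider is a hypothetical sketch for verification only, not part of the original project:

import scrapy

class UACheckSpider(scrapy.Spider):
    # hypothetical verification spider, not from the original project
    name = "ua_check"

    def start_requests(self):
        # fetch the same URL several times; dont_filter bypasses the dupe filter
        for _ in range(3):
            yield scrapy.Request("https://httpbin.org/user-agent", dont_filter=True)

    def parse(self, response):
        # httpbin echoes back the User-Agent it received, as JSON
        self.logger.info(response.text)

If the middleware is active, the three log lines should (with high probability) show different User-Agent strings.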