# Download helper: tolerate page errors, retry on HTTP 5xx status codes, optional proxy support.
def download_page(url, num_retries=2, proxy=None, referer=None, timeout=100):
    """Fetch *url* and return the response body as a byte string.

    Retries recursively on HTTP 5xx server errors until *num_retries*
    attempts are exhausted.

    Args:
        url: address of the page to download.
        num_retries: remaining retry attempts for 5xx responses.
        proxy: optional ``'host:port'`` string; the request is routed
            through this HTTP proxy when given.
        referer: optional value for the HTTP ``Referer`` header.
        timeout: socket timeout in seconds (default 100 preserves the
            original hard-coded value).

    Returns:
        The page content on success, or ``None`` if the download failed.
    """
    # Python 2's urllib2 and Python 3's urllib.request expose the same
    # Request/ProxyHandler/build_opener API, so alias one to the other.
    try:
        import urllib2
    except ImportError:
        import urllib.request as urllib2

    print('downloading: ' + url)
    result = None
    try:
        if proxy:
            # Route the request through the given HTTP proxy.
            opener = urllib2.build_opener(
                urllib2.ProxyHandler({'http': 'http://%s/' % proxy}))
        else:
            opener = urllib2.build_opener()
        request = urllib2.Request(url)
        if referer:
            request.add_header('Referer', referer)
        # Pretend to be a desktop browser; some sites block the default UA.
        request.add_header('User-Agent',
                           'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36')
        request.add_header('Accept-Language', 'en-US,en;q=0.5')
        result = opener.open(request, timeout=timeout)
        return result.read()
    except Exception as e:
        print('download failed: %s' % e)
        # Only HTTPError-like exceptions carry a .code; retry server errors.
        if num_retries > 0 and hasattr(e, 'code') and 500 <= e.code < 600:
            return download_page(url, num_retries - 1, proxy, referer, timeout)
        return None
    finally:
        # Close the HTTP response to avoid leaking the socket
        # (the original never closed it).
        if result is not None:
            result.close()
if __name__ == '__main__':
    # Smoke test: fetch one Baidu Zhidao page through a local HTTP proxy.
    url = 'https://zhidao.baidu.com/question/2073804096754701028.html'
    page_buf = download_page(url, num_retries=2, proxy='127.0.0.1:8087')