I originally wanted to write a small web crawler in Python for fun, but it could not get through the company proxy and kept failing with a 407 (Proxy Authentication Required) error. After a lot of Googling and repeated testing it finally ran successfully. The working code is recorded below:
import urllib2
import os

# Use a doubled backslash ('domain\\username') so the backslash in the
# Windows domain login is taken literally and not as an escape sequence.
proxy_info = {'user': 'domain\\username',
              'password': 'your_password',
              'server': 'proxy-server-address:port'}
url = "http://www.baidu.com"

def url_build_proxy_opener(proxy_info):
    # Register the proxy credentials for any realm on the proxy server
    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passmgr.add_password(None, proxy_info['server'],
                         proxy_info['user'], proxy_info['password'])
    # Answer the proxy's 407 challenge with Basic authentication
    auth = urllib2.ProxyBasicAuthHandler(passmgr)
    # Route HTTP requests through the proxy and attach the auth handler
    opener = urllib2.build_opener(urllib2.ProxyHandler({'http': proxy_info['server']}), auth)
    return opener

opener = url_build_proxy_opener(proxy_info)
urllib2.install_opener(opener)
response = urllib2.urlopen(url)
print(response.read())
os.system("pause")   # keep the console window open on Windows
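If you are on Python 3, urllib2 no longer exists; the same handlers live in urllib.request. Below is a minimal sketch of the equivalent setup, assuming the same placeholder values (domain\username, password, proxy address:port) that you would replace with your own:

import urllib.request

proxy_info = {'user': 'domain\\username',
              'password': 'your_password',
              'server': 'proxy-server-address:port'}

# Same idea as above: password manager + ProxyBasicAuthHandler + ProxyHandler
passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(None, proxy_info['server'],
                     proxy_info['user'], proxy_info['password'])
auth = urllib.request.ProxyBasicAuthHandler(passmgr)
opener = urllib.request.build_opener(
    urllib.request.ProxyHandler({'http': proxy_info['server']}), auth)
urllib.request.install_opener(opener)

with urllib.request.urlopen("http://www.baidu.com") as response:
    print(response.read())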