Python爬虫 | 常用免费代理IP和免费IP池构建

常用免费IP网址:
http://www.goubanjia.com/
http://www.66ip.cn/1.html
http://www.ip3366.net/free/
http://www.kxdaili.com/dailiip.html
https://www.kuaidaili.com/free/
https://www.xicidaili.com/
http://www.iphai.com/
http://www.89ip.cn/
http://www.proxy360.cn/Region/China
http://www.ip181.com/
https://premproxy.com/
http://www.xroxy.com/
http://www.data5u.com/free/

利用西祠代理获取代理IP

import urllib.request
from bs4 import BeautifulSoup
import requests

# Scrape free-proxy listings from xicidaili and keep only the fast ones,
# then validate each candidate by fetching a known page through it.
proxys = []
for d in range(1, 10):                                  # scrape result pages 1-9
    scrapeUrl = 'http://www.xicidaili.com/nn/%d/' % d
    req = urllib.request.Request(scrapeUrl)
    req.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)')
    response = urllib.request.urlopen(req)
    html = response.read()

    bsObj = BeautifulSoup(html, "html.parser")

    # Each proxy row spans 10 <td> cells: cell 1 = IP, cell 2 = port,
    # cell 6 contains <div title="x.xx秒"> with the measured latency.
    # Hoist the select() out of the loop — the original re-parsed the
    # whole document for every cell access.
    cells = bsObj.select('td')
    for i in range(100):
        # Stop early on pages with fewer than 100 rows (the original
        # raised IndexError there).
        if 6 + i * 10 >= len(cells):
            break
        speed = float(cells[6 + i * 10].div.get('title').replace('秒', ''))
        if speed < 0.6:  # keep only proxies that answered in under 0.6 s
            ip = cells[1 + i * 10].get_text()
            port = cells[2 + i * 10].get_text()
            proxy_host = ip + ':' + port
            proxy_temp = {"http": proxy_host, "https": proxy_host}
            proxys.append(proxy_temp)

# Verify each candidate proxy actually works against a real site.
for proxy in proxys:
    try:
        url = 'https://www.cnblogs.com/Summer-skr--blog/'    # replace with the site you want to crawl
        header = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 '
                          '(KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
        }
        # Single request — the original issued the same GET twice.
        response = requests.get(url, proxies=proxy, headers=header, timeout=3)
        if response.status_code == 200:
            print(response.status_code)
            response.encoding = 'utf-8'
            if "address" in response.text:
                print(proxy)
    except requests.RequestException:
        pass  # dead or slow proxy — skip it (original bare except held only a comment: SyntaxError)

二、付费IP池构建

当公司规模比较小的时候,买代理IP;当公司规模比较大的时候,自建IP池

市面上的代理ip提供商:

  • 快代理
  • 太阳代理

提取IP-太阳HTTP代理 http://http.taiyangruanjian.com/getapi/ (原文链接域名疑有缺损,请以供应商官网为准)

第1步:把下面的都设置好

第2步:点击API链接--->复制链接

会显示到期时间

只有IP白名单列表里的IP才可以使用这些IP。白名单越多,费用越高

image

每次提取,数量会减少

脚本

# 脚本1
import requests
import redis
class IpGeneration:
    """Pull fresh proxy records from a paid proxy API (sample payload kept
    inline) and store them as JSON strings in the Redis set ``"pool1"``,
    removing duplicates already present in the pool."""

    # Shared Redis connection, created once at class-definition time.
    r = redis.Redis('127.0.0.1', 6379)

    def ip_generator(self):
        """Fetch proxy records, de-duplicate the existing pool, and add
        any proxies (ip:port) not already present."""
        import json  # stdlib; local import keeps this snippet self-contained

        # In production this would be a live API call, e.g.:
        # url = 'http://http.tiqu.qingjuhe.cn/getip?num=3&type=2&...'
        # header = {'User-Agent': 'Mozilla/5.0 ...'}
        # res = requests.get(url=url, headers=header).json()
        res = {"code":0,"success":"true","msg":"0","data":[{"ip":"180.95.169.87","port":4369,"expire_time":"2018-12-18 22:42:03"},{"ip":"182.127.84.177","port":43621,"expire_time":"2018-12-18 21:04:02"},{"ip":"182.101.230.25","port":43561,"expire_time":"2018-12-18 21:00:03"}]}

        # Build ALL candidate items (the original overwrote a single
        # variable each iteration, so only the last record survived,
        # and it referenced undefined names: xpire_time, account, ...).
        new_items = []
        for proxy in res["data"]:
            new_items.append({
                "schema": "https",
                "ip": proxy['ip'],
                "port": proxy['port'],
                "expire_time": proxy['expire_time'],
                "account": "",
                "password": "",
                "count": 2,          # remaining-use counter
            })

        # Pass 1: de-duplicate what is already stored in pool1.
        seen = []
        for raw in self.r.smembers("pool1"):
            # NOTE(review): the original used eval() on data read back from
            # Redis — unsafe if the store is ever tampered with. Entries are
            # written with json.dumps below, so json.loads is the safe inverse.
            item = json.loads(raw)
            key = item["ip"] + ":" + str(item["port"])
            if key not in seen:
                seen.append(key)
            else:
                print("duplicating:", item)
                self.r.srem("pool1", raw)

        # Pass 2: add new proxies whose ip:port is not already pooled.
        for item in new_items:
            key = item["ip"] + ":" + str(item["port"])
            if key not in seen:
                seen.append(key)
                print('Add:', json.dumps(item))
                self.r.sadd("pool1", json.dumps(item))






if __name__ == "__main__":
    # Script entry point: build the generator and refresh the pool once.
    generator = IpGeneration()
    generator.ip_generator()

参考链接
https://www.cnblogs.com/lokvahkoor/p/10828055.html
https://blog.csdn.net/qq_42206477/article/details/85551939
https://blog.csdn.net/sdu_hao/article/details/96856026

你可能感兴趣的:(Python爬虫 | 常用免费代理IP和免费IP池构建)