python爬取链家租房之获取北京所有区的网站分栏地址(第一次写,code太粗犷,欢迎提建议)

__author__ = 'Lee'
import requests
from bs4 import BeautifulSoup


url_list = 'https://bj.lianjia.com/zufang/'
url_list1 = 'https://bj.lianjia.com'
area_list = []
headers = {
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
        'Cookie':'zufang_huodong_show=1; lianjia_uuid=5382c1b6-d6ca-47d1-9d94-fa1dbc834a92; UM_distinctid=15c81d9262f1f-006143a03563a5-5393662-144000-15c81d92631225; select_city=110000; _jzqy=1.1496827112.1497081788.1.jzqsr=baidu.-; _jzqckmp=1; all-lj=1e9f8fe64a0d8d4cd8642eafcff9cfff; _jzqx=1.1497156954.1497156954.1.jzqsr=captcha%2Elianjia%2Ecom|jzqct=/.-; CNZZDATA1253477573=511845211-1496852558-https%253A%252F%252Fwww.lianjia.com%252F%7C1497181716; _smt_uid=5937c502.2a3ce711; CNZZDATA1254525948=2045919104-1496852507-https%253A%252F%252Fwww.lianjia.com%252F%7C1497182934; CNZZDATA1255633284=200718649-1496852692-https%253A%252F%252Fwww.lianjia.com%252F%7C1497180541; CNZZDATA1255604082=1396707825-1496855343-https%253A%252F%252Fwww.lianjia.com%252F%7C1497185090; _qzja=1.1476077399.1496827137642.1497140960523.1497156953782.1497156953782.1497156965945.0.0.0.21.4; _qzjb=1.1497156953781.2.0.0.0; _qzjc=1; _qzjto=5.2.0; _jzqa=1.3587254022940067300.1496827112.1497140960.1497156954.4; _jzqc=1; _jzqb=1.2.10.1497156954.1; _ga=GA1.2.118746223.1496827120; _gid=GA1.2.968075510.1497081790; lianjia_ssid=253b9cec-863e-41d4-a745-090d3a031bd7'
}
proxies = {"http": "http://119.57.105.241:8080"}
wb_data = requests.get(url_list)
soup = BeautifulSoup(wb_data.text,'lxml')
area = soup.select('#filter-options > dl > dd > div > a ' )
area_len = len(area)

for i in area:
    # area_list.append(area[i].get('href'))
    area_list.append(i.get('href'))
    #print(i.get('href'))
area_list1 = area_list[1:18]
print(area_list1)
area_list2 = area_list[18:20]
area_item1 = str(area_list2).split('m')[1].split("'")[0]
area_item2 = str(area_list2).split('m')[2].split("'")[0]
area_list2 = list(area_list1)
area_list2.append(area_item1)
area_list2.append(area_item2)

print(area_list2)

for i in area_list2:
    print(url_list1+i)

'''
https://bj.lianjia.com/zufang/dongcheng/
https://bj.lianjia.com/zufang/xicheng/
https://bj.lianjia.com/zufang/chaoyang/
https://bj.lianjia.com/zufang/haidian/
https://bj.lianjia.com/zufang/fengtai/
https://bj.lianjia.com/zufang/shijingshan/
https://bj.lianjia.com/zufang/tongzhou/
https://bj.lianjia.com/zufang/changping/
https://bj.lianjia.com/zufang/daxing/
https://bj.lianjia.com/zufang/yizhuangkaifaqu/
https://bj.lianjia.com/zufang/shunyi/
https://bj.lianjia.com/zufang/fangshan/
https://bj.lianjia.com/zufang/mentougou/
https://bj.lianjia.com/zufang/pinggu/
https://bj.lianjia.com/zufang/huairou/
https://bj.lianjia.com/zufang/miyun/
https://bj.lianjia.com/zufang/yanqing/
https://bj.lianjia.com/zufang/yanjiao/
https://bj.lianjia.com/zufang/xianghe/
'''

你可能感兴趣的:(python爬取链家租房之获取北京所有区的网站分栏地址(第一次写,code太粗犷,欢迎提建议))