安居客新房信息爬取

本文实现爬取安居客新房房源信息,并保存到本地csv文本中

爬取网址:https://tj.fang.anjuke.com/loupan/all/(天津新房)

#使用模块:requests、BeautifulSoup

import requests
import math
import time
import random
from bs4 import BeautifulSoup
import ssl
ssl._create_default_https_context = ssl._create_unverified_context

# Base listing URL: new developments ("loupan") in Tianjin on Anjuke.
uri='https://tj.fang.anjuke.com/loupan/all/'

def get_data(url):
    """Fetch *url* and parse the response HTML.

    Returns a tuple ``(soup, status)``: the ``BeautifulSoup`` document and
    the HTTP status code.  On any network/parse error it prints the
    exception and returns ``(0, 0)`` so callers can detect failure via
    ``status != 200`` instead of crashing the whole crawl.
    """
    try:
        # Desktop User-Agent plus a captured session Cookie; without these
        # the site tends to serve an anti-bot / captcha page instead of
        # the real listing markup.
        page_headers={"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240",
                      "Cookie":"isp=true; als=0; sessid=EE6C479F-383E-4532-5FBE-SX0608113249; lps=http%3A%2F%2Fuser.anjuke.com%2Fajax%2FcheckMenu%2F%3Fr%3D0.9773651958602518%26callback%3DjQuery1113019512877839682363_1559964793029%26_%3D1559964793030%7Chttps%3A%2F%2Ftj.fang.anjuke.com%2Floupan%2Fall%2F; lp_lt_ut=e9d09e6a59979c288d00ea3d7f7f7573; __xsptplusUT_8=1; _ga=GA1.2.205429121.1559658612; _gid=GA1.2.1582905499.1559979223; _gat=1; isp=true; wmda_uuid=b01457fc0c7d92a795d75c630ff6262c; wmda_new_uuid=1; wmda_visited_projects=%3B8788302075828; isp=true; aQQ_ajkguid=C8BE4FEC-9EE8-C5E6-D456-C6695B138768; twe=2; wmda_session_id_8788302075828=1559979235829-aa5a0183-f840-376c; __xsptplus8=8.7.1559979226.1559979258.3%234%7C%7C%7C%7C%7C%23%23zPjxAYBc5rYjoCxhfqsuNhjRq2XqZ05X%23; 58tj_uuid=73404522-7a8e-420a-93be-a96ac163fc6d; new_session=0; init_refer=https%253A%252F%252Ftianjin.anjuke.com%252F%253Fpi%253DPZ-baidu-pc-all-biaoti; new_uv=7; Hm_lvt_c5899c8768ebee272710c9c5f365a6d8=1559658645,1559804791,1559964795; Hm_lpvt_c5899c8768ebee272710c9c5f365a6d8=1559979258; ctid=26"
                     }
        # timeout keeps a stalled connection from hanging the crawler forever.
        res=requests.get(url,headers=page_headers,timeout=10)
        status=res.status_code
        soup=BeautifulSoup(res.text,'lxml')
        return soup,status

    except Exception as e:
        # Broad catch is deliberate: one bad request must not abort the
        # multi-page crawl loop; failure is signalled by (0, 0).
        print(str(e))
        return 0,0


def get_hourse(url):
    """Scrape one listing page and append every property card to house_anjuke.csv.

    Fields are written comma-separated; commas inside scraped text are
    replaced by ':' so they do not break the CSV columns.  A failed page
    fetch or a malformed card is reported and skipped, never fatal.
    """
    soup,status=get_data(url)
    if status!=200 or not soup:
        # get_data() returns (0, 0) on failure; the original code would
        # crash here with AttributeError on 0.find_all(...).
        print("skip %s (status=%s)" % (url,status))
        return
    # class_ because 'class' is a Python keyword; trailing space matches the site's markup.
    fang_all=soup.find_all("div",class_="item-mod ")
    with open('house_anjuke.csv','a+',encoding='utf-8') as fh:
        for fang in fang_all:
            try:
                #url链接 (detail-page link)
                fang_uri=fang.attrs.get('data-link')
                #楼盘 (development name)
                fang_name=fang.h3.span.text.replace(',',':')
                #位置 (location; \xa0 is a non-breaking space in the markup)
                fang_location=fang.find('span',class_="list-map").text.replace("\xa0"," ").replace(',',":")
                #标签 (tags, newline-separated in the markup)
                fang_tag=fang.find('div',class_="tag-panel").text.strip().replace('\n',":").replace(',',":")
                #价格 (price)
                fang_price=fang.find('a',class_="favor-pos").p.text
                #户型 (layouts) -- join the <span> texts directly; the original
                # stringified the raw tag list and its replace('','') calls were
                # no-ops that left HTML in the CSV.
                fang_huxing=':'.join(
                    s.text.replace(',',':')
                    for s in fang.find('a',class_='huxing').find_all('span'))
                fh.write(','.join([fang_name,fang_location,fang_huxing,
                                   fang_price,fang_tag,fang_uri])+"\n")
            except Exception as e:
                # One malformed card (missing field, ad block, etc.) should
                # not abort the rest of the page.
                print(e)

            
#get_hourse(uri)
            

if __name__=='__main__':
    url_start='https://tj.fang.anjuke.com/loupan/all/'
    soup,status=get_data(url_start)
    if status==200:
        # Total result count shown on the first page; the site lists 60 per page.
        num=int(soup.find('span',class_='result').em.text)
        page_num=math.ceil(num/60)
        # Original printed ceil+1 here, over-reporting the page count by one
        # (the loop itself already crawled exactly ceil pages).
        print("结果共%s页" % page_num)
        # (Re)create the CSV with a header row; get_hourse appends below.
        with open('house_anjuke.csv','w',encoding='utf-8') as fh:
            fh.write("楼盘,位置,户型,价格,标签,网页链接\n")
        for i in range(1,page_num+1):
            url=url_start+'p'+str(i)+'/'
            print("正在爬取第 %s 页..." % i)
            get_hourse(url)
            # Random 0-6 s pause between pages to look less like a bot.
            span=round(random.random()*6,1)
            time.sleep(span)
        print("ok")

你可能感兴趣的:(爬虫)