As a Python beginner, I was suddenly asked by my company to scrape housing price data, which was a bit daunting! Luckily, experienced people online have shared plenty of helpful code. The code in this post is adapted from pythoner111's crawler project for scraping second-hand housing listings from Anjuke. The scraping ran smoothly; if anything is lacking, corrections and suggestions are welcome.
import requests
import bs4
import time
import random
import pandas as pd
import openpyxl
house_info = []
for i in range(1, 100):
    url = "https://bj.anjuke.com/sale/p" + str(i) + "/#filtersort"
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.87 Safari/537.36"
    }
    print("Scraping page %s of Beijing second-hand listings on Anjuke..." % str(i))
    response = requests.get(url=url, headers=headers)
    # Build a BeautifulSoup object from the response HTML
    bsoup = bs4.BeautifulSoup(response.text, 'lxml')
    house_list = bsoup.find_all('li', class_="list-item")
    for house in house_list:
        # Parse the fields of each listing with bs4
        title = house.find('a').text.strip()
        house_type = house.find('div', class_='details-item').span.text
        area = house.find('div', class_='details-item').contents[3].text
        try:
            address = house.find('span', class_='comm-address').text.strip()
        except AttributeError:
            # Some listings have no address; use a placeholder so the loop keeps going
            address = 'error'
        price = house.find('span', class_='price-det').text.strip()
        unit_price = house.find('span', class_='unit-price').text.strip()
        pd1 = pd.DataFrame({'title': title, 'house_type': house_type,
                            'area': area, 'address': address,
                            'price': price, 'unit_price': unit_price}, index=[0])
        house_info.append(pd1)
    # Sleep 3-4 seconds between pages to reduce the risk of being blocked
    second = random.randrange(3, 5)
    time.sleep(second)
house_info2 = pd.concat(house_info)
house_info2.to_excel('beijing.xlsx', index=False)
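Once the script finishes, it can be worth loading the spreadsheet back with pandas for a quick sanity check. The snippet below is a minimal sketch, assuming beijing.xlsx was written successfully by the code above and that openpyxl is installed for reading xlsx files.

import pandas as pd

# Read the exported file back and inspect its size and first rows
# (assumes beijing.xlsx exists in the current working directory).
df = pd.read_excel('beijing.xlsx')
print(df.shape)   # number of scraped listings and columns
print(df.head())  # first rows: title, house_type, area, address, price, unit_price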