Python Web Scraping Practice Notes 2-1: Filtering Listings

Using MongoDB
  1. step1: Build the URLs. First generate the URL of each search page, use geturls() to collect the URL of every listing on that page, then parse each listing page with BeautifulSoup and store the extracted fields in the database one by one.
  2. step2: Query the stored data for listings priced above 500.
  3. As a side effect the crawl becomes resumable: the unique index on (title, address) rejects duplicate inserts, so re-running the script after an interruption simply skips records that are already stored.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Filter listings stored in MongoDB
#
from bs4 import BeautifulSoup
import requests

import pymongo
from mongoconn import mongoset


def geturls(url):
    # collect the detail-page link of every listing on one search-result page
    url_data = requests.get(url)
    soup = BeautifulSoup(url_data.text, "lxml")
    pagelist = soup.select('#page_list > ul > li > a')
    urllist = [item.get('href') for item in pagelist]
    return urllist

def getemtext(element):
    return element.get_text().strip()

def getgender(element):
    # map the host-avatar icon class to a gender string; some listings have neither
    if element.get("class") == ["member_boy_ico"]:
        return 'male'
    elif element.get("class") == ["member_girl_ico"]:
        return 'female'
    else:
        return ''

def get_target_info(url):
    # parse one listing detail page and return the fields to be stored
    url_data = requests.get(url)
    soup = BeautifulSoup(url_data.text, "lxml")
    title = soup.select('div.pho_info > h4 > em')
    address = soup.select('div.pho_info > p > span.pr5')
    image = soup.select('div.pho_show_big > div > img')
    price = soup.select('div.bg_box > div.day_top > div > span')
    lordname = soup.select('div.bg_box > div > div > h6 > a.lorder_name')
    gender = soup.select('div.bg_box > div > div > h6 > span')
    lordpic = soup.select('div.bg_box > div > div.member_pic > a > img')

    data = {
        "title": getemtext(title[0]),
        "address": getemtext(address[0]),
        "image": image[0].get('src'),
        "price": getemtext(price[0]),
        "lordname": getemtext(lordname[0]),#.get_text(),
        "gender": getgender(gender[0]),
        "lordpic": lordpic[0].get('src')
    }
    return data

if __name__ == '__main__':
    tinfo = mongoset('xiaozhu', 'info')
    # a unique compound index on (title, address) rejects duplicate inserts on re-runs
    tinfo.create_index([('title', pymongo.DESCENDING), ('address', pymongo.DESCENDING)], unique=True)
    ### step1: save the scraped data into mongodb
    urls = ["http://sh.xiaozhu.com/search-duanzufang-p{}-0/".format(pageid) for pageid in range(1, 4)]
    print(urls)
    for url in urls:
        urllist = geturls(url)
        for urlitem in urllist:
            data = get_target_info(urlitem)
            print({'title': data['title']})
            try:
                tinfo.insert_one(data)
            except pymongo.errors.DuplicateKeyError:
                # already stored on a previous run; just refresh the price
                tinfo.update_one({'title': data['title']}, {'$set': {'price': data['price']}})
            except Exception:
                # skip any other per-listing error and move on to the next listing
                pass

    ### step2: print the listings priced above 500
    # price is stored as a string, so a query like {'price': {'$gt': '500'}}
    # would compare lexicographically; convert to int and compare numerically instead
    for info in tinfo.find():
        if int(info['price']) > 500:
            print(info)
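
If you would rather have MongoDB do the filtering itself, a minimal sketch is shown below. It assumes price is converted to an int when the document is built (i.e. "price": int(getemtext(price[0])) in get_target_info), which differs from the script above, where price stays a string:

# sketch: with price stored as an int, the >500 filter can run inside MongoDB
for info in tinfo.find({'price': {'$gt': 500}}):
    print(info)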

Part of the output is shown below:

{'gender': 'female', 'lordpic': 'http://image.xiaozhustatic1.com/21/4,0,2,9380,374,374,194746bc.jpg', '_id': ObjectId('5775150484a5fd48784276e0'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/4,0,81,10040,1798,1200,a0fc0b77.jpg', 'lordname': '空白色', 'address': '上海市闸北区止园路99弄1号楼', 'title': '人民广场 外滩 地铁零距离旅游温馨便利两居室', 'price': '599'}
{'gender': 'female', 'lordpic': 'http://image.xiaozhustatic1.com/21/4,0,2,9380,374,374,194746bc.jpg', '_id': ObjectId('5775150684a5fd48784276e3'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/5,0,36,3151,1798,1200,62875638.jpg', 'lordname': '空白色', 'address': '上海市闸北区宝昌路399弄3号楼', 'title': '近外滩人民广场上海火车站地铁站的豪华公寓', 'price': '599'}
{'gender': 'female', 'lordpic': 'http://image.xiaozhustatic1.com/21/2,0,7,3130,260,260,1edf95cd.jpg', '_id': ObjectId('5775150c84a5fd48784276ed'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/2,0,74,4017,1800,1200,b920d1da.jpg', 'lordname': 'glorymedia', 'address': '上海市闵行区中春路新龙路', 'title': '近虹桥机场、高铁,花园钢琴房', 'price': '1088'}
{'gender': 'female', 'lordpic': 'http://image.xiaozhustatic1.com/21/6,0,64,1269,250,250,8cd6afee.jpg', '_id': ObjectId('5775150e84a5fd48784276f0'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/3,0,93,4657,1800,1200,38973fc6.jpg', 'lordname': '夏小G', 'address': '上海市嘉定区嘉宝梦之湾', 'title': '古镇多彩设计感北欧风格民居 电影拍摄片场', 'price': '550'}
{'gender': 'female', 'lordpic': 'http://image.xiaozhustatic1.com/21/1,0,2,3091,260,260,bd3907dd.jpg', '_id': ObjectId('5775151684a5fd48784276fd'), 'image': 'http://image.xiaozhustatic1.com/00,802,533/5,0,50,1993,1300,865,83b4ddfc.jpg', 'lordname': '介介', 'address': '上海市黄浦区中山南路398号', 'title': '独享外滩豪宅江景房 东方明珠 豫园', 'price': '799'}
{'gender': '', 'lordpic': 'http://image.xiaozhustatic1.com/21/2,0,30,4168,375,375,668e4b62.jpg', '_id': ObjectId('5775151c84a5fd4878427706'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/6,0,15,2889,1798,1200,4573a3c7.jpg', 'lordname': '王湘', 'address': '上海市徐汇区襄阳南路306弄', 'title': '法国手艺人打造/法租界花园洋房/电影拍摄地', 'price': '538'}
{'gender': 'male', 'lordpic': 'http://image.xiaozhustatic1.com/21/4,0,4,9896,329,329,b702853f.jpg', '_id': ObjectId('5775151e84a5fd4878427709'), 'image': 'http://image.xiaozhustatic1.com/00,800,533/4,0,85,10672,1798,1200,d35465af.jpg', 'lordname': 'siyan5', 'address': '上海市徐汇区淮海中路1200弄', 'title': '淮海中路靜安寺1/2/7/10号地铁两房两卫', 'price': '698'}

A screenshot of the data stored in the collection:


[Screenshot: Screen Shot 2016-06-30 at 8.48.17 PM.png]
Summary
  • Some listings do not show the host's gender; in that case the gender attribute is left as an empty string.
  • The mongoset function imported via from mongoconn import mongoset is shown below:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# connect to mongodb

import pymongo


def mongoset(db, table):
    # return a handle to the given collection, connecting to a local MongoDB instance
    client = pymongo.MongoClient('localhost', 27017)
    data = client[db]
    sheet = data[table]
    return sheet
  • Updating documents in MongoDB: update_one (see the upsert sketch after this list).
  • Enforcing a unique key in MongoDB: create_index with unique=True.
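
As an alternative to catching DuplicateKeyError after insert_one, update_one can be called with upsert=True, so one call either inserts a new record or refreshes an existing one. A minimal sketch, reusing tinfo and data from the script above:

# upsert: insert the document if no record matches (title, address), otherwise update it
tinfo.update_one(
    {'title': data['title'], 'address': data['address']},
    {'$set': data},
    upsert=True
)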
