利用scrapy爬取58同城租房信息

tc.py 代码

# -*- coding: utf-8 -*-
import scrapy
from ..items import TcItem

class Tc58Spider(scrapy.Spider):
    """Spider for 58.com Beijing rental listings (pages 1-9).

    Yields one TcItem per listing with title, room layout, address
    and price, extracted from the list page's <ul class="listUl">.
    """
    name = 'tc'
    allowed_domains = ['bj.58.com']
    start_urls = ['http://bj.58.com/chuzu/pn1/?PGTID=0d3090a7-0000-1fd7-9c9a-3a83d8c87059&ClickID=2']

    def parse(self, response):
        # Bug fix: the original only scheduled pages 2-9 and threw away the
        # already-downloaded page 1 response. Extract its items too.
        yield from self.parse_xq(response)
        for page in range(2, 10):
            next_url = ('http://bj.58.com/chuzu/pn{}/'
                        '?PGTID=0d3090a7-0000-1fd7-9c9a-3a83d8c87059&ClickID=2').format(page)
            yield scrapy.Request(next_url, callback=self.parse_xq)

    def parse_xq(self, response):
        """Extract all listings on one result page and yield a TcItem each."""
        titles = response.xpath('//ul[@class="listUl"]/li/div[2]/h2/a/text()').extract()
        rooms = response.xpath('//ul[@class="listUl"]/li/div[2]/p[1]/text()').extract()
        adds = response.xpath('//ul[@class="listUl"]/li/div[2]/p[2]/a/text()').extract()
        prices = response.xpath('//ul[@class="listUl"]/li/div[3]/div[2]/b/text()').extract()
        # zip() instead of index arithmetic: the original indexed titles[i]
        # with i bounded by len(prices), which raises IndexError whenever the
        # four lists come back with different lengths.
        for title, room, addr, price in zip(titles, rooms, adds, prices):
            # Fresh item per listing: the original reused one TcItem, so a
            # later listing could overwrite an earlier one still in flight.
            item = TcItem()
            cleaned = title.replace('\n', '').replace(' ', '')
            # Bug fix: the original assigned the 'mjx' placeholder to the
            # local variable only, never to item['title'], so empty titles
            # produced an unset (or stale) field.
            item['title'] = cleaned if cleaned else 'mjx'
            item['room'] = room.replace('\xa0', '').replace(' ', '')
            item['dizhi'] = addr.replace('.', '')
            item['price'] = price
            yield item





items.py代码

class TcItem(scrapy.Item):
    """One 58.com rental listing, plus its MySQL insert statement."""
    title = scrapy.Field()   # listing title ('mjx' placeholder when blank)
    room = scrapy.Field()    # room layout text
    dizhi = scrapy.Field()   # address / district
    price = scrapy.Field()   # monthly rent, kept as the scraped string

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this item into tc_test.

        The statement is parameterized (%s placeholders), so values are
        passed raw. Bug fix: the original wrapped dizhi in repr(), which
        stored literal Python quote characters around the address in the
        database; the driver already handles escaping.
        """
        sql = 'insert into tc_test(title,room,dizhi,price) values (%s,%s,%s,%s)'
        data = (self['title'], self['room'], self['dizhi'], self['price'])
        return (sql, data)

pipelines.py代码

class MysqlProjectPipeline(object):
    """Persists each scraped item to MySQL via MysqlHelper."""

    def process_item(self, item, spider):
        """Insert the item into the database and pass it on.

        Bug fix: Scrapy requires process_item to return the item (or raise
        DropItem); the original returned None, which silently fed None to
        any pipeline configured after this one.
        """
        insert_sql, data = item.get_insert_sql()
        # NOTE(review): a new MysqlHelper is built per item — presumably a
        # new DB connection each time; consider creating it once in
        # open_spider() instead. Confirm against MysqlHelper's semantics.
        helper = MysqlHelper()
        helper.execute_modify_sql(insert_sql, data)
        return item

 

你可能感兴趣的:(学习博客,python)