Connecting Scrapy to MySQL

Modify the pipelines.py file and add the following code:

# Write scraped items into a MySQL database
import pymysql
class MySQLPipeline(object):

    # Open the database connection when the spider starts
    def open_spider(self, spider):
        db = spider.settings.get('MYSQL_DB_NAME', 'scrapy_db')
        host = spider.settings.get('MYSQL_HOST', 'localhost')
        port = spider.settings.get('MYSQL_PORT', 3306)
        user = spider.settings.get('MYSQL_USER', 'root')
        passwd = spider.settings.get('MYSQL_PASSWORD', '123456')

        self.db_conn = pymysql.connect(host=host, port=port, db=db,
                                       user=user, passwd=passwd, charset='utf8')
        self.db_cur = self.db_conn.cursor()

    # Commit any pending data and close the connection when the spider finishes
    def close_spider(self, spider):
        self.db_conn.commit()
        self.db_conn.close()

    # Process each item by inserting it into the database
    def process_item(self, item, spider):
        self.insert_db(item)
        return item

    # Insert a single item
    def insert_db(self, item):
        values = (
            item['upc'],
            item['name'],
            item['price'],
            item['review_rating'],
            item['review_num'],
            item['stock'],
        )
        try:
            sql = 'INSERT INTO books VALUES(%s,%s,%s,%s,%s,%s)'
            self.db_cur.execute(sql, values)
            self.db_conn.commit()
            print("Insert finished")
        except Exception as e:
            # Roll back the failed insert instead of committing it,
            # and keep the connection open for subsequent items
            print("Insert to DB failed:", e)
            self.db_conn.rollback()
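
For the pipeline above to take effect, it must be registered in the project's settings.py, where the MYSQL_* settings it reads can also be defined. The following is a minimal sketch: the priority value 300 and the project name 'demo' are placeholders, and the INSERT statement assumes a books table already exists with six columns in the same order as the values tuple (upc, name, price, review_rating, review_num, stock).

# settings.py -- example configuration (names and values are placeholders)

# Register the pipeline; replace 'demo' with your actual project package name
ITEM_PIPELINES = {
    'demo.pipelines.MySQLPipeline': 300,
}

# Connection settings read by MySQLPipeline.open_spider()
MYSQL_DB_NAME = 'scrapy_db'
MYSQL_HOST = 'localhost'
MYSQL_PORT = 3306
MYSQL_USER = 'root'
MYSQL_PASSWORD = '123456'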

For more details, see:

Connecting Scrapy to various databases (SQLite, MySQL, MongoDB, Redis)
