Inserting crawler data into MySQL

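The script below pages through the m.weibo.cn container API for one user (uid 2830678474), extracts each post's id, text, and interaction counts with pyquery, and writes one row per post into a MySQL table named weibo through pymysql.
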
#!/usr/bin/python
# -*- coding: utf-8 -*-
from urllib.parse import urlencode
import requests
from pyquery import PyQuery as pq
import pymysql


def get_page(page):
    params = {
        "type": "uid",
        "value": "2830678474",
        "containerid": "1076032830678474",
        "page": page
    }
    url = base_url + urlencode(params)
    try:
        response = requests.get(url, headers=headers)
        if response.status_code == 200:
            return response.json()
    except requests.ConnectionError as e:
        print("ERROR", e.args)


def parse_page(json):
    if json:
        items = json.get('data', {}).get('cards', [])
        for item in items:
            item = item.get('mblog')
            if not item:  # skip cards that carry no status payload
                continue
            weibo = {}
            weibo['id'] = item.get('id')
            weibo['text'] = pq(item.get('text')).text()
            weibo['attitudes'] = item.get('attitudes_count')
            weibo['comments'] = item.get('comments_count')
            weibo['reposts'] = item.get('reposts_count')
            yield weibo


def save_mysql(result):
    table = "weibo"
    keys = ','.join(result.keys())            # column names come from the dict keys
    values = ','.join(["%s"] * len(result))   # one parameter placeholder per column
    sql = 'insert into {table}({keys}) values({values})'.format(table=table, keys=keys, values=values)
    try:
        if cursor.execute(sql, tuple(result.values())):
            print("Successful")
            db.commit()
    except pymysql.MySQLError as e:
        print("Failed", e)
        db.rollback()


if __name__ == "__main__":
    base_url = "https://m.weibo.cn/api/container/getIndex?"
    headers = {"Referer": "https://m.weibo.cn/u/2830678474",
               "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36",
               "X-Requested-With": "XMLHttpRequest"
               }
    # charset='utf8mb4' so emoji in post text survive the insert
    db = pymysql.connect(host='192.168.1.230', user='root', password='root', db='weibo', port=3306, charset='utf8mb4')
    cursor = db.cursor()

    for page in range(2, 11):  # crawl pages 2 through 10
        json = get_page(page)
        results = parse_page(json)
        for result in results:
            save_mysql(result)
    db.close()
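
The script assumes the weibo table already exists. Below is a minimal sketch that creates a matching schema over the same kind of pymysql connection; the column types (VARCHAR for the status id, TEXT for the body, INT for the three counts) are assumptions that fit the values save_mysql() inserts, not something the script above pins down.

import pymysql

db = pymysql.connect(host='192.168.1.230', user='root', password='root', db='weibo', port=3306, charset='utf8mb4')
with db.cursor() as cursor:
    # Column types are guesses; adjust to your data.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS weibo (
            id VARCHAR(20) NOT NULL PRIMARY KEY,
            text TEXT,
            attitudes INT,
            comments INT,
            reposts INT
        ) DEFAULT CHARSET=utf8mb4
    """)
db.close()

With id as the primary key, re-running the crawl will fail on duplicate rows; if that matters, drop the key or switch the statement in save_mysql() to INSERT ... ON DUPLICATE KEY UPDATE.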
