Scraping Toutiao comments -- 张碧晨

import requests
import pandas as pd
import time


# Fetch one page of comment data from the Toutiao comments API
def get_r(page):
    url = 'https://www.toutiao.com/article/v2/tab_comments/'
    params = {
        'aid': 24,
        'app_name': 'toutiao_web',
        'offset': (page - 1) * 50,
        'count': 50,
        'group_id': 1689580945358862,
        'item_id': 1689580945358862,
        # NOTE: _signature is generated per browser session and expires; if the API
        # stops returning data, copy a fresh value from the browser's network panel
        '_signature': '_02B4Z6wo00101IZvL-QAAIDDNp7DLQUxIpCGSitAAEGVC86zvKTfCRp-Wa5ANNYQfh2WgJumlc0EwRxHlBXpQfGU21441ffJ1RLN.qHCZ33.fOX8DklzsWQNwEzpSQU3jMrfczAcJUflkfJib6'
    }

    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
    }

    r = requests.get(url, params=params, headers=headers)
    r.raise_for_status()  # fail fast on HTTP errors

    return r
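
# Quick sanity check for get_r (illustrative only; if 'total_number' comes back
# missing or 'data' is empty, the hard-coded _signature has most likely expired):
# >>> r = get_r(1)
# >>> r.status_code, r.json().get('total_number')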


# Parse the comment fields out of one page's JSON response
def get_comment(r):
    j = r.json()

    # Each item in 'data' wraps the actual comment fields under the 'comment' key
    data_list = j.get('data') or []
    comments = []
    for data in data_list:
        content = data['comment']
        comment = {
            'create_time': content['create_time'],
            'digg_count': content['digg_count'],
            'reply_count': content['reply_count'],
            'text': content['text'],
            'user_id': content['user_id'],
            'user_name': content['user_name'],
            # verified_reason may be absent for unverified users, so default to ''
            'verified_reason': content.get('verified_reason', ''),
        }

        comments.append(comment)

    return comments
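
# Illustrative usage of the two helpers together (assumes the hard-coded
# _signature above is still valid; expect an empty list otherwise):
# >>> comments = get_comment(get_r(1))
# >>> comments[0]['text'], comments[0]['digg_count']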


# Main entry point: fetch every page, build a DataFrame, and deduplicate
def main():
    r = get_r(1)
    j = r.json()
    total_number = j['total_number']
    pages = (total_number + 49) // 50  # ceiling division: the API returns up to 50 comments per page
    print(f'{total_number} comments in total, {pages} pages')
    comments_list = []
    for page in range(1, pages + 1):
        time.sleep(0.5)  # brief pause between requests to reduce the risk of throttling
        r = get_r(page)
        comments = get_comment(r)
        comments_list.extend(comments)
        print(f'Page {page} scraped')

    df = pd.DataFrame(comments_list)
    df['create_time'] = df['create_time'].apply(lambda x: pd.Timestamp(x, unit="s", tz='Asia/Shanghai'))
    df.drop_duplicates(subset='user_id', inplace=True)  # keep at most one comment per user

    return df


if __name__ == "__main__":
    df = main()
    df.to_csv("./张碧晨.csv", index=False, encoding="utf-8-sig")  # utf-8-sig so Excel renders the Chinese text correctly
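
The exported CSV can then be explored with pandas. The snippet below is a minimal follow-up sketch, assuming the column names produced by the script above and the 张碧晨.csv file it writes; it simply lists the ten most-liked comments.

import pandas as pd

# Load the exported comments (utf-8-sig matches the encoding used by to_csv above)
df = pd.read_csv("./张碧晨.csv", encoding="utf-8-sig")

# Show the ten comments with the most likes
top = df.sort_values("digg_count", ascending=False).head(10)
print(top[["user_name", "digg_count", "text"]])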
