以百度贴吧中“美团骑手吧”为例,对页面中的帖子评论进行爬取,并将结果以json的格式保存到本地中。
from lxml import etree
import requests
import json
# 根据网页url获取评论
# Fetch one Tieba list page and harvest each thread's abstract text.
def GetComments(url):
    """Fetch the thread-list page at *url*, extract every thread's abstract
    (the one-line comment preview), and append the results to the global
    ``my_list``.

    Parameters
    ----------
    url : str
        A Tieba forum list-page URL (``...&pn=<offset>``).
    """
    global my_list
    # A timeout keeps the crawler from hanging forever on a dead connection.
    response = requests.get(url, timeout=10)
    response.encoding = 'utf-8'
    # Parse the HTML document with lxml.
    ret = etree.HTML(response.text)
    # Each thread on the list page lives in one of these <li> items.
    threads = ret.xpath('//li[@class=" j_thread_list clearfix thread_item_box"]')
    # ``thread`` instead of ``list`` — don't shadow the builtin.
    for thread in threads:
        texts = thread.xpath('.//div[@class="threadlist_abs threadlist_abs_onlyline "]/text()')
        # Some threads (e.g. pinned or ad posts) have no abstract; skip them
        # instead of crashing the whole crawl with an IndexError.
        if texts:
            my_list.append(str(texts[0]).strip())
if __name__ == '__main__':
    # List page of the "美团骑手" (Meituan rider) Tieba forum; ``pn`` is the
    # zero-based thread offset appended below.
    base_url = 'https://tieba.baidu.com/f?kw=%E7%BE%8E%E5%9B%A2%E9%AA%91%E6%89%8B&ie=utf-8&pn='
    my_list = []
    PAGE_SIZE = 50  # threads per list page (Tieba pagination step)
    # NOTE(review): the original ``range(0, 50, 50)`` visits only page 0 even
    # though the comment promised multiple pages — raise NUM_PAGES to crawl more.
    NUM_PAGES = 1
    for offset in range(0, NUM_PAGES * PAGE_SIZE, PAGE_SIZE):
        GetComments(base_url + str(offset))
    # Persist the harvested comments as pretty-printed, human-readable JSON.
    with open('list_file.json', 'w', encoding='utf-8') as file:
        json.dump(my_list, file, ensure_ascii=False, indent=4)
根据上述爬取到的评论,利用snownlp工具进行情感分析,将评论分为“积极”“中性”“消极”,并根据数量占比进行可视化。
from snownlp import SnowNLP
import matplotlib.pyplot as plt
import json
# Load the previously crawled comments from disk into ``comments``.
# (The original ``global comments`` statement was removed: ``global`` is a
# no-op at module level — the assignment below already creates a module-level
# name.)
with open('list_file.json', 'r', encoding='utf-8') as file:
    comments = json.load(file)
# Classify one comment's sentiment with SnowNLP.
def sentiment_analysis(text):
    """Return "积极" (positive), "消极" (negative) or "中性" (neutral) for *text*.

    SnowNLP yields a sentiment probability in [0, 1]; scores above 0.6 count
    as positive, below 0.4 as negative, and everything in between as neutral.
    """
    # ``sentiments`` runs the classifier on each property access, so read it
    # once into a local instead of twice as the original did.
    score = SnowNLP(text).sentiments
    if score > 0.6:
        return "积极"
    if score < 0.4:
        return "消极"
    return "中性"
# Tally the sentiment of every comment in a single pass.
tally = {"积极": 0, "消极": 0, "中性": 0}
for comment in comments:
    tally[sentiment_analysis(comment)] += 1
# Dicts preserve insertion order, so labels come out positive/negative/neutral.
labels = list(tally)
sizes = [tally[label] for label in labels]
colors = ["green", "red", "gray"]
explode = (0.1, 0, 0)  # pull the "positive" wedge out slightly for emphasis
plt.rcParams['font.sans-serif'] = ['SimHei']  # SimHei so Chinese labels render
plt.pie(sizes, explode=explode, labels=labels, colors=colors,
        autopct="%1.1f%%", shadow=True, startangle=90)
plt.axis("equal")  # equal aspect ratio keeps the pie circular
plt.show()