User-Agent (UA) crawler

The script below recursively crawls useragentstring.com, collecting the user-agent strings listed on each browser and version page and writing each page's strings to its own CSV file.

# -*- coding: utf-8 -*-

import requests
import pandas as pd
from urllib import parse
from lxml import etree

# Entry page that lists every user-agent family on useragentstring.com.
url = 'http://useragentstring.com/pages/useragentstring.php?name=All'

# Request headers; the User-Agent value is just a fixed sample string.
header = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'User-Agent': 'Mozilla/5.0 (compatible; ABrowse 0.4; Syllable)',
}



# Pages already crawled; without this guard the recursion revisits listing
# pages endlessly, since every page links back to the others.
visited = set()


def ua(url, name):
    """Scrape one listing page, save its user-agent strings, then follow sub-pages."""
    if url in visited:
        return
    visited.add(url)
    print(url, name)

    response = requests.get(url, headers=header, timeout=60)
    tree = etree.HTML(response.text)

    # The user-agent strings are the <li><a> texts; very short texts are
    # navigation links rather than real UA strings, so filter them out.
    browsers = tree.xpath('//ul/li/a/text()')
    browsers = [browser for browser in browsers if len(browser) > 20]

    # Save this page's strings to its own CSV file.
    df = pd.DataFrame({'id': range(1, len(browsers) + 1), 'ua': browsers})
    df.to_csv('user_agents_' + name + '.csv', index=False)

    # Follow every link to another listing page and crawl it recursively,
    # naming the output file after the query parameters of the link.
    for href in tree.xpath('//a'):
        link = href.attrib.get('href')
        if link and '/pages/useragentstring.php?' in link:
            full_url = 'http://useragentstring.com' + link
            params = parse.parse_qs(parse.urlparse(full_url).query)
            if params.get('name'):
                if params.get('version'):
                    ua(full_url, params['name'][0] + params['version'][0])
                elif params.get('vers'):
                    ua(full_url, params['name'][0] + params['vers'][0])
                else:
                    ua(full_url, params['name'][0])
            elif params.get('typ'):
                ua(full_url, params['typ'][0])

# Start crawling from the "All" listing page.
ua(url, 'all')
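
Once the crawl has finished, the per-page CSVs can be merged into a single pool and used to rotate the User-Agent header in later requests. The following is a minimal sketch, assuming the user_agents_*.csv files produced above are in the working directory; the pick_ua helper and the glob pattern are illustrative and not part of the original script.

import glob
import random

import pandas as pd
import requests

# Merge every user_agents_*.csv written by the crawler into one list of
# unique user-agent strings.
pool = pd.concat(
    (pd.read_csv(path) for path in glob.glob('user_agents_*.csv')),
    ignore_index=True,
)['ua'].dropna().unique().tolist()


def pick_ua():
    # Return a random user-agent string from the scraped pool.
    return random.choice(pool)


# Example: send a request with a rotated User-Agent header.
resp = requests.get('http://useragentstring.com',
                    headers={'User-Agent': pick_ua()},
                    timeout=60)
print(resp.status_code)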
