Link to the companion article on multiprocessing and multithreading
Let's start with a simple piece of code that times 100 requests to Baidu.
# coding: utf-8
import time
import requests

def get_response():
    try:
        url = 'https://www.baidu.com/'
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3883.400 QQBrowser/10.8.4559.400',
        }
        response = requests.get(url, headers=headers, timeout=2)
        print(response.status_code)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    a = time.time()
    for i in range(100):  # sequential baseline: 100 requests, one after another
        get_response()
    print(time.time() - a)
# coding: utf-8
import time
import threading
import requests

def get_response():
    try:
        url = 'https://www.baidu.com/'
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3883.400 QQBrowser/10.8.4559.400',
        }
        response = requests.get(url, headers=headers, timeout=2)
        print(response.status_code)
    except Exception as e:
        print(e)

def fun():
    for i in range(10):
        get_response()

if __name__ == '__main__':
    a = time.time()
    # 10 threads, 10 requests each: 100 requests in total
    threads = [threading.Thread(target=fun) for i in range(10)]
    for t in threads:
        t.start()
    for t in threads:  # wait for all threads so the elapsed time is meaningful
        t.join()
    print(time.time() - a)
On Windows, 100 requests with 10 threads: about 7 seconds.
# coding: utf-8
import threading
import requests

class Spider(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def get_response(self):
        try:
            url = 'https://www.baidu.com/'
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3883.400 QQBrowser/10.8.4559.400',
            }
            response = requests.get(url, headers=headers, timeout=2)
            print(response.status_code)
        except Exception as e:
            print(e)

    def run(self):
        for i in range(10):
            self.get_response()

if __name__ == '__main__':
    # Call start(), not run(): start() launches a new thread, while calling
    # run() directly would execute everything sequentially in the main thread.
    for i in range(10):
        Spider().start()
# coding: utf-8
import time
import requests
import multiprocessing

def get_response():
    try:
        url = 'https://www.baidu.com/'
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3883.400 QQBrowser/10.8.4559.400',
        }
        response = requests.get(url, headers=headers, timeout=2)
        print(response.status_code)
    except Exception as e:
        print(e)

def fun():
    for i in range(25):
        get_response()

if __name__ == '__main__':
    a = time.time()
    # 4 processes, 25 requests each: 100 requests in total
    processes = [multiprocessing.Process(target=fun) for i in range(4)]
    for p in processes:
        p.start()
    for p in processes:  # wait for all processes so the elapsed time is meaningful
        p.join()
    print(time.time() - a)
On Windows, 100 requests across 4 concurrent processes: about 12 seconds.
# coding: utf-8
import requests
import multiprocessing

class Spider(multiprocessing.Process):
    def get_response(self):
        try:
            url = 'https://www.baidu.com/'
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3883.400 QQBrowser/10.8.4559.400',
            }
            response = requests.get(url, headers=headers, timeout=2)
            print(response.status_code)
        except Exception as e:
            print(e)

    def run(self):
        for i in range(25):
            self.get_response()

if __name__ == '__main__':
    for i in range(4):
        s = Spider()
        s.start()
Note: gevent can deliver very high concurrency, but it is aimed at Unix/Linux; correct installation and operation on Windows are not guaranteed.
It depends on the greenlet library.
It supports Python 2.6+ and Python 3.3+.
pip install gevent
# coding: utf-8
# Import the monkey module and call monkey.patch_all() before importing the
# other libraries, so the patch is applied first.
from gevent import monkey  # the monkey module can turn the program into an asynchronous one
monkey.patch_all()  # the "monkey patch": replaces blocking standard-library calls so the program runs asynchronously instead of synchronously
import gevent
import requests
import time

def get_response(url):  # fetch the URL and print the response status
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36'}
    res = requests.get(url, headers=headers)  # send the request
    print(res.status_code)

if __name__ == '__main__':
    start = time.time()  # start time
    # Build 100 request tasks
    url_list = []
    for i in range(100):
        url = 'https://www.baidu.com/'
        url_list.append(url)
    # Use coroutines
    tasks_list = []
    for url in url_list:
        # gevent.spawn() creates a task that will call get_response(url):
        # the first argument is the function, followed by its arguments in order
        task = gevent.spawn(get_response, url)
        tasks_list.append(task)  # add the task to the list
    # gevent.joinall() starts and runs all the tasks in tasks_list
    gevent.joinall(tasks_list)
    end = time.time()  # end time
    print(end - start)
In addition, we can combine multiprocessing with coroutines; a sketch of that combination is shown below.
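Here is a minimal sketch of what that combination might look like (this code is not from the original article; the 4-process x 25-request split simply mirrors the earlier multiprocessing example): each process handles its own batch of requests as gevent greenlets.

# coding: utf-8
# Sketch only: 4 processes, each running 25 requests concurrently as greenlets.
# The process count and per-process batch size are illustrative assumptions.
from gevent import monkey
monkey.patch_all()  # patch before the other imports, as in the gevent example above

import time
import multiprocessing
import gevent
import requests

def get_response(url):
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36'}
    res = requests.get(url, headers=headers, timeout=2)
    print(res.status_code)

def worker():
    # Inside each process, run 25 requests concurrently as greenlets
    tasks = [gevent.spawn(get_response, 'https://www.baidu.com/') for i in range(25)]
    gevent.joinall(tasks)

if __name__ == '__main__':
    start = time.time()
    processes = [multiprocessing.Process(target=worker) for i in range(4)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    print(time.time() - start)

Each process gets its own interpreter (and its own GIL), while gevent overlaps the network waits inside each process. How monkey patching interacts with process creation differs between fork- and spawn-based platforms, so treat this as an illustration rather than a drop-in recipe.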