Efficiency Comparison of Coroutines, Multiprocessing, and Multithreading

# multiprocessing.dummy provides a thread-based Pool with the same API as multiprocessing.Pool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import Pool as ProcessPool
import asyncio
import aiohttp
import time
import requests

baseurl = 'https://www.energylabelrecord.com:12066/productpub/list.do?ec_model_no=1&type=markingTitle&typeValue=&pageNum={}&pageSize=15&_=1517580652009'

# Timing decorator: prints how long the wrapped function takes to run
def profile(f):
    def inner(*args, **kwargs):
        t1 = time.time()
        f(*args, **kwargs)
        t2 = time.time()
        print(f.__name__ + ' cost: ', t2 - t1)
    return inner

# Synchronous fetch used by both the thread pool and the process pool
def scrape(url):
    res = requests.get(url)
    print(res.text)

@profile
def thread(pool):
    # Submit one scrape task per page to the thread pool, then wait for all of them
    for i in range(100):
        num = i + 1
        url = baseurl.format(num)
        pool.apply_async(scrape, (url,))
    pool.close()
    pool.join()
    print('thread end')

@profile
def process(pool):
    # Submit one scrape task per page to the process pool, then wait for them all to finish
    for i in range(50):
        num = i + 1
        url = baseurl.format(num)
        pool.apply_async(scrape, (url,))
    pool.close()
    pool.join()
    print('process end')

# Asynchronous fetch: each task opens its own aiohttp session and reads one page
async def async_task(url):
    async with aiohttp.ClientSession(loop=loop) as session:
        async with session.get(url) as response:
            res = await response.read()
            print(res.decode())
            
@profile
def coroutine():
    # 'async' is a reserved keyword, so the coroutine test gets its own name.
    # Build 50 tasks and run them concurrently on the event loop.
    urls = [baseurl.format(num + 1) for num in range(50)]
    tasks = [async_task(url) for url in urls]
    loop.run_until_complete(asyncio.gather(*tasks))

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    tpool = ThreadPool(10)
    ppool = ProcessPool(10)
    # Enable one of the three tests at a time
    coroutine()
    # thread(tpool)
    # process(ppool)

Across three test runs:
Coroutines took 7, 6.95, and 6.76 seconds.
Multithreading took 9.86, 10.23, and 10.39 seconds.
Multiprocessing took 9.18, 9.45, and 9.23 seconds.
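
Note that the coroutine test above launches all 50 requests at once, while the thread and process pools are capped at 10 workers. If you want the coroutine version to use the same concurrency level, a minimal sketch might look like the following. It assumes the imports, baseurl, and loop from the script above; fetch_limited and run_limited are illustrative names, not part of the original code.

# Hypothetical variant: cap coroutine concurrency at 10 to mirror the pool size
async def fetch_limited(session, sem, url):
    # The semaphore keeps at most 10 requests in flight at any moment
    async with sem:
        async with session.get(url) as response:
            return await response.read()

async def run_limited():
    sem = asyncio.Semaphore(10)
    # Reuse a single ClientSession for all requests instead of opening one per task
    async with aiohttp.ClientSession() as session:
        urls = [baseurl.format(num + 1) for num in range(50)]
        await asyncio.gather(*(fetch_limited(session, sem, u) for u in urls))

# loop.run_until_complete(run_limited())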
