# High-concurrency HTTP requests with aiohttp

import asyncio
import aiohttp


class AsnycGrab(object):
    """Fetch a list of URLs concurrently with aiohttp worker coroutines.

    ``max_threads`` worker coroutines (not OS threads) drain a shared
    queue of URLs, so at most that many requests are in flight at once.

    NOTE(review): the class name preserves the original "AsnycGrab"
    misspelling so existing callers keep working.
    """

    def __init__(self, url_list, max_threads):
        self.urls = url_list
        self.max_threads = max_threads  # number of concurrent worker coroutines

    async def get_results(self, url, session=None):
        """Fetch *url*, print it, and return 1 on success.

        Args:
            url: the URL to fetch.
            session: optional shared ``aiohttp.ClientSession``; when omitted
                (the original call style) a throwaway session is created for
                this single request.

        Raises:
            aiohttp.ClientResponseError: on a non-2xx status. The original
                used ``assert response.status == 200``, which is silently
                stripped under ``python -O``; ``raise_for_status`` is not.
        """
        if session is None:
            # Backward-compatible path: callers passing only a URL still work,
            # at the cost of one session (and connection pool) per request.
            async with aiohttp.ClientSession() as own_session:
                return await self.get_results(url, own_session)
        async with session.get(url, timeout=30) as response:
            response.raise_for_status()
            await response.read()  # body is drained but intentionally discarded
        print(url)
        return 1

    async def handle_tasks(self, task_id, work_queue, session=None):
        """Worker coroutine: pull URLs off *work_queue* until it is empty.

        Args:
            task_id: worker index (kept for interface compatibility; unused).
            work_queue: ``asyncio.Queue`` pre-loaded with all URLs.
            session: optional shared ``aiohttp.ClientSession``.
        """
        while True:
            try:
                # get_nowait avoids the check-then-get race of the original
                # ``while not empty(): await get()`` loop, where another worker
                # could take the last item and leave this one blocked forever.
                current_url = work_queue.get_nowait()
            except asyncio.QueueEmpty:
                break
            try:
                await self.get_results(current_url, session)
            except Exception as e:
                # Best-effort crawl: report the failure and keep draining.
                print(e)

    async def _drain(self):
        """Build the queue, open one shared session, and run all workers."""
        work_queue = asyncio.Queue()
        for url in self.urls:
            work_queue.put_nowait(url)
        # One shared session reuses TCP connections across all requests,
        # instead of the original one-session-per-request.
        async with aiohttp.ClientSession() as session:
            await asyncio.gather(
                *(self.handle_tasks(task_id, work_queue, session)
                  for task_id in range(self.max_threads))
            )

    def eventloop(self):
        """Blocking entry point: run every fetch to completion."""
        # asyncio.run replaces the deprecated get_event_loop()/
        # run_until_complete()/close() sequence and the asyncio.wait-on-bare-
        # coroutines pattern (removed in Python 3.11); it also closes the loop.
        asyncio.run(self._drain())


if __name__ == '__main__':
    # Queue 100,000 copies of the same local URL; at most 500 worker
    # coroutines fetch concurrently.
    grabber = AsnycGrab(['http://127.0.0.1:5000'] * 100000, 500)
    grabber.eventloop()

# (blog footer) You may also be interested in: Python basics