# -*- coding:utf-8 -*-
"""Concurrently download several URLs with aiohttp.

aiohttp is an async-capable HTTP client library (pip install aiohttp).
The synchronous `requests` module cannot be awaited, which is why an
async client is needed to overlap the three downloads.
"""
import asyncio
import time

import aiohttp

urls = [
    'http://localhost:5000/bobo',
    'http://localhost:5000/tom',
    'http://localhost:5000/jay',
]


async def get_request(url):
    """Fetch *url* and return the response body as text.

    :param url: absolute URL to GET.
    :return: decoded response body (str).
    """
    # ClientSession owns the connection pool; create one per batch of requests.
    async with aiohttp.ClientSession() as sess:
        # get/post accept: url, headers, params/data, proxy="http://ip:port".
        # NOTE: sess.get(...) already returns an async context manager — it
        # must NOT be awaited first (`async with await sess.get(...)` relied
        # on deprecated aiohttp behavior and breaks on current versions).
        async with sess.get(url=url) as response:
            # response.text() -> str body; response.read() -> bytes body.
            page_text = await response.text()
            return page_text


async def _main():
    """Run all downloads concurrently and return their bodies in order."""
    return await asyncio.gather(*(get_request(u) for u in urls))


if __name__ == "__main__":
    start = time.time()
    # asyncio.run replaces the deprecated get_event_loop()/run_until_complete
    # + ensure_future pattern (asyncio.wait on bare coroutines was removed in
    # Python 3.11, and get_event_loop() without a running loop is deprecated).
    asyncio.run(_main())
    print('总耗时: ', time.time() - start)
标签:__,aiohttp,get,url,text,爬虫,time From: https://www.cnblogs.com/colin88/p/16885972.html