# Fix applied:
# 1. aiohttp.TCPConnector(limit=0) => aiohttp.TCPConnector(limit=12)
# 2. Added timeout=aiohttp.ClientTimeout(total=600)
async def get_data_main():
    """Crawl entry point: reset the UAT search database, then fetch all
    keyword pages concurrently over a single shared aiohttp session.

    The session uses a bounded connection pool (limit=12) and an overall
    600-second timeout. A ContentTypeError raised by any request is
    logged (with traceback) and swallowed rather than propagated.
    """
    # Database was previously initialised here; now we only wipe stale rows.
    # db_utils.init_db('auto_search_uat.db')
    set_logging.set_get_data_log()
    db_utils.clear_data('auto_search_uat.db')
    try:
        connector = aiohttp.TCPConnector(limit=12)
        timeout = aiohttp.ClientTimeout(total=600)
        async with aiohttp.ClientSession(connector=connector, timeout=timeout, headers=header) as session:
            api = GetDataApi()
            # Page counts for all endpoints; index 3 is the keyword endpoint.
            total_pages = api.get_total_pages()
            logger.info(f'total_pages:{total_pages}')
            keyword_tasks = [
                asyncio.ensure_future(get_keywork(page, session))
                for page in range(1, total_pages[3] + 1)
            ]
            await asyncio.gather(*keyword_tasks)
    except ContentTypeError as e:
        logger.exception(e)
# References:
# 1. https://blog.csdn.net/zhb_feng/article/details/118081444