import asyncio
import aiohttp
# Upper bound on concurrent coroutines admitted through `semaphore`.
CONCURRENCY = 99999
# Target endpoint for the scraping demo (currently unused by the visible code).
URL = 'https://www.baidu.com'
# NOTE(review): on Python < 3.10 a Semaphore created at import time binds to
# the then-current event loop, which can conflict with asyncio.run() — confirm
# the target Python version.
semaphore = asyncio.Semaphore(CONCURRENCY)
# Shared aiohttp session; assigned inside main()/main2() via `global session`.
session = None
async def scrape_api(task_id=None, delay=1):
    """Simulate one API request and return its id as a string.

    Args:
        task_id: identifier echoed back in the result; callers in this file
            pass an int. Defaults to None for backward compatibility with
            the original zero-argument signature.
        delay: seconds to sleep, simulating network latency (default 1,
            matching the original hard-coded value).

    Returns:
        str: the task id rendered via str.format, e.g. "3".
    """
    # NOTE(review): the semaphore guard was already commented out in the
    # original; kept disabled to preserve behavior.
    # async with semaphore:
    await asyncio.sleep(delay)
    # Fix: the original `"{}".format()` raised IndexError (replacement field
    # with no argument), and callers passed an argument the old signature
    # rejected with TypeError.
    return "{}".format(task_id)
async def scrape_api2(task_id=None, delay=1):
    """Simulate one API request, gated by the module-level semaphore.

    Args:
        task_id: identifier echoed back in the result; the (commented-out)
            caller in main() passes an int. Defaults to None for backward
            compatibility with the original zero-argument signature.
        delay: seconds to sleep, simulating network latency (default 1).

    Returns:
        str: the task id wrapped in '=' markers, e.g. "=3=".
    """
    async with semaphore:
        await asyncio.sleep(delay)
        # Fix: the original `"={}=".format()` raised IndexError because the
        # replacement field had no corresponding argument.
        return "={}=".format(task_id)
async def main2():
    """Fan out 99 concurrent scrape_api coroutines and print the results
    and the elapsed wall-clock time."""
    import time
    global session
    session = aiohttp.ClientSession()
    try:
        start = time.time()
        # Create 99 coroutine tasks (the original comment said 1000, but the
        # code has always created 99).
        scrape_list = [asyncio.ensure_future(scrape_api(i)) for i in range(99)]
        list_value = await asyncio.gather(*scrape_list)
        print(list_value)
        print(time.time() - start)
    finally:
        # Fix: the session was created but never closed, leaking the
        # connector and emitting an "Unclosed client session" warning.
        await session.close()
async def main():
    """Run nine scrape_api tasks concurrently (created eagerly, then awaited
    in order), printing the collected results and the elapsed time."""
    import time
    global session
    session = aiohttp.ClientSession()
    try:
        start = time.time()
        # scrape_index_tasks = [asyncio.create_task(scrape_api2(_)) for _ in range(99)]
        # list_value = await asyncio.gather(*scrape_index_tasks)
        # Same ids as the original nine hand-written create_task calls:
        # one task with id 1, then eight tasks with id 2.
        task_ids = [1] + [2] * 8
        tasks = [asyncio.create_task(scrape_api(i)) for i in task_ids]
        # b = asyncio.wait(tasks)
        # Tasks were already started by create_task, so awaiting them one by
        # one here still lets them run concurrently.
        a = [await t for t in tasks]
        print(a)
        print(time.time() - start)
    finally:
        # Fix: the session was created but never closed (resource leak).
        await session.close()
async def test2(delay=2):
    """Sleep for *delay* seconds (default 2, the original hard-coded value)
    and return the string "2".

    Args:
        delay: seconds to sleep before returning.

    Returns:
        str: always "2".
    """
    await asyncio.sleep(delay)
    return "2"
async def test_sleep(delay=2):
    """Sleep for *delay* seconds (default 2, the original hard-coded value)
    and return the string "2".

    Args:
        delay: seconds to sleep before returning.

    Returns:
        str: always "2".
    """
    await asyncio.sleep(delay)
    return "2"
async def test1():
    """Start two test_sleep coroutines concurrently and return both results
    in creation order as a list."""
    # Schedule both coroutines up front so they sleep in parallel.
    pending = [asyncio.create_task(test_sleep()) for _ in range(2)]
    # Awaiting already-running tasks just collects their results in order.
    return [await t for t in pending]
# Fix: the original guard `if name == 'main':` raised NameError (no such
# variable) and could never be true; the dunder form is the standard
# script entry-point guard.
if __name__ == '__main__':
    # a = asyncio.get_event_loop().run_until_complete(main2())
    import time
    begin = time.time()
    # Run the two-task demo and report its results and elapsed time.
    a = asyncio.run(test1())
    print("%%")
    print(a)
    end = time.time()
    print(end - begin)
    pass