多进程
import logging
import time
from multiprocessing import Pool

logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)


def f(x):
    """Simulate one second of work, log the input, and return it unchanged."""
    time.sleep(1)
    logging.info(x)
    return x


if __name__ == '__main__':
    # Pool of 5 worker processes; map() blocks until every result is in.
    with Pool(5) as p:
        # range() is already an iterable — no list comprehension needed.
        p.map(f, range(1, 100))
多线程
import logging
import time
import threading
import queue

logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)

# Shared work queue feeding the daemon worker threads.
q = queue.Queue()


def worker():
    """Consume items from the shared queue forever, ~1 s of work per item."""
    while True:
        item = q.get()
        time.sleep(1)
        logging.info(item)
        # Mark the item as processed so q.join() can unblock.
        q.task_done()


if __name__ == '__main__':
    # Number of threads to start. daemon=True lets the process exit even
    # though worker() never returns.
    for i in range(3):
        threading.Thread(target=worker, daemon=True).start()
    # Send thirty task requests to the worker.
    for item in range(30):
        q.put(item)
    # Block until all tasks are done.
    q.join()
    print('All work completed')
协程
import asyncio


def get_urls():
    """Return the list of URLs whose titles should be fetched."""
    return ['http://xxx.com', 'http://xxx.com']


async def get_title(url, sem):
    """Simulate fetching the title of *url*, bounded by semaphore *sem*."""
    async with sem:
        # await asyncio.sleep: the original time.sleep(2) would block the
        # whole event loop (and `time` was never imported in this snippet).
        await asyncio.sleep(2)
        print(url)


async def _main():
    """Run one get_title coroutine per URL, at most 2 concurrently."""
    # Limit the number of coroutines inside the critical section to 2.
    sem = asyncio.Semaphore(2)
    # gather() schedules every coroutine and waits for all of them.
    await asyncio.gather(*(get_title(url, sem) for url in get_urls()))


if __name__ == '__main__':
    # asyncio.run replaces the deprecated get_event_loop()/
    # run_until_complete(asyncio.wait(coroutines)) pattern (passing bare
    # coroutines to asyncio.wait is removed in Python 3.11) and closes
    # the loop automatically.
    asyncio.run(_main())
参考