from multiprocessing import Manager, Process
from time import sleep

def spider(url_queue):
    try:
        # Keep pulling items until the shared queue is drained
        while not url_queue.empty():
            i = url_queue.get()
            print(i)
            sleep(1)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    url_queue = Manager().Queue()
    for i in range(1, 11):
        url_queue.put(i)
    all_process = []
    # Start three worker processes that consume the same queue
    for i in range(3):
        p = Process(target=spider, args=(url_queue,))
        p.start()
        all_process.append(p)
    # Wait for every worker to finish
    for p in all_process:
        p.join()
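A caveat with this empty()/get() pattern: another worker can drain the queue between the empty() check and the get() call, leaving a process blocked forever. A minimal, more defensive sketch of the worker (the 1-second timeout is an arbitrary value for illustration), usable as a drop-in replacement for spider above:

from queue import Empty
from time import sleep

def spider(url_queue):
    while True:
        try:
            # A timed get() closes the race between empty() and get():
            # if nothing arrives within a second, treat the queue as drained
            i = url_queue.get(timeout=1)
        except Empty:
            break
        print(i)
        sleep(1)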
Process pool
from multiprocessing import Manager, Pool
from time import sleep

def spider(url_queue):
    while not url_queue.empty():
        i = url_queue.get()
        print(i)
        sleep(1)

if __name__ == '__main__':
    # Pool workers receive their arguments by pickling, so a plain
    # multiprocessing.Queue will not do; use a Manager().Queue() proxy
    url_queue = Manager().Queue()
    for i in range(1, 11):
        url_queue.put(i)
    pool = Pool(3)
    # Submit the same worker three times; all three share one queue
    pool.apply_async(func=spider, args=(url_queue,))
    pool.apply_async(func=spider, args=(url_queue,))
    pool.apply_async(func=spider, args=(url_queue,))
    pool.close()
    pool.join()
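Two details worth noting. First, the queue here must be a Manager().Queue(): passing a plain multiprocessing.Queue to a Pool raises a RuntimeError, since ordinary queues can only be shared with child processes through inheritance. Second, apply_async returns an AsyncResult and silently swallows any exception raised in the worker; calling .get() on each result re-raises it. A minimal variant of the main block that keeps those handles (it reuses the spider function above, and a loop replaces the three hand-written calls):

from multiprocessing import Manager, Pool

if __name__ == '__main__':
    url_queue = Manager().Queue()
    for i in range(1, 11):
        url_queue.put(i)
    with Pool(3) as pool:
        # Keep the AsyncResult handles so worker errors surface
        results = [pool.apply_async(spider, args=(url_queue,)) for _ in range(3)]
        for r in results:
            r.get()  # blocks until done; re-raises any worker exception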
Previous chapter: python 爬虫之多线程_宠乖仪的博客-CSDN博客