# -*- encoding:utf-8 -*-
'''
@Author : dingjiawen
@Date   : 2023/10/17 21:28
@Usage  : examples of multiprocessing Pool usage
@Desc   :
'''
import time
from multiprocessing import Lock, Pool

import requests
from requests.exceptions import ConnectionError
def scrape(url):
    """Fetch *url* and print the response object.

    Network failures are reported rather than propagated, and a
    completion message is always printed, so a Pool worker running
    this never dies on a bad URL.
    """
    try:
        # timeout added so an unreachable host cannot hang the worker
        # forever; Timeout is caught too, since a read timeout is not
        # a subclass of requests' ConnectionError
        print(requests.get(url, timeout=10))
    except (ConnectionError, requests.exceptions.Timeout):
        print('Error Occurred ', url)
    finally:
        print('URL ', url, ' Scraped')
def function(index):
    """Demo worker for Pool.apply_async: announce start, idle for
    three seconds, then announce completion."""
    pause = time.sleep
    print('Start process: ', index)
    pause(3)
    print('End process', index)
if __name__ == '__main__':

    # Asynchronous submission to a process pool: apply_async returns
    # immediately, so "Started processes" prints before the workers finish.
    pool = Pool(processes=3)
    for i in range(4):
        pool.apply_async(function, (i,))

    print("Started processes")
    pool.close()
    pool.join()
    print("Subprocess done.")

    # Convenient map() style: blocks until every URL has been scraped.
    # The context manager terminates the pool on exit — the original
    # never closed this second pool, leaking its worker processes.
    with Pool(processes=3) as pool:
        urls = [
            'https://www.baidu.com',
            'http://www.meituan.com/',
            'http://blog.csdn.net/',
            'http://xxxyxxx.net'
        ]
        pool.map(scrape, urls)