import signal
from future.builtins import str as text
+import sys
import os
import requests
-import threadpool
import time
-import multiprocessing as mp
try:
from urllib.parse import urlparse
from nhentai.logger import logger
from nhentai.parser import request
-from nhentai.utils import Singleton, signal_handler
+from nhentai.utils import Singleton
requests.packages.urllib3.disable_warnings()
-semaphore = mp.Semaphore()
+semaphore = multiprocessing.Semaphore(1)
class NHentaiImageNotExistException(Exception):
queue = [(self, url, folder) for url in queue]
pool = multiprocessing.Pool(self.size, init_worker)
-
- for item in queue:
- pool.apply_async(download_wrapper, args=item, callback=self._download_callback)
+ [pool.apply_async(download_wrapper, args=item) for item in queue]
pool.close()
pool.join()
def download_wrapper(obj, url, folder=''):
    """Module-level trampoline used by the multiprocessing pool.

    A plain function (rather than the bound method) is dispatched to the
    pool because bound methods do not pickle portably; it simply forwards
    to ``Downloader.download_``.

    :param obj: the ``Downloader`` instance to act on.
    :param url: image URL to fetch.
    :param folder: destination directory (defaults to the cwd).
    :return: whatever ``Downloader.download_`` returns, or ``(-3, None)``
        when the pool-wide semaphore has been drained (the signal that a
        keyboard interrupt asked workers to stop).
    """
    # NOTE(review): macOS is special-cased because Semaphore.get_value()
    # is not implemented there (raises NotImplementedError) — presumably
    # why the platform check short-circuits the semaphore; confirm upstream.
    if sys.platform == 'darwin' or semaphore.get_value():
        return Downloader.download_(obj, url=url, folder=folder)
    else:
        return -3, None
doujinshi['img_id'] = img_id.group(1)
doujinshi['ext'] = ext
- pages = 0
for _ in doujinshi_info.find_all('div', class_='tag-container field-name'):
if re.search('Pages:', _.text):
pages = _.find('span', class_='name').string