git.lizzy.rs Git - nhentai.git/commitdiff
Merge pull request #214 from lleene/master
author    Ricter Zheng <RicterZheng@gmail.com>
          Thu, 3 Jun 2021 00:00:18 +0000 (08:00 +0800)
committer GitHub <noreply@github.com>
          Thu, 3 Jun 2021 00:00:18 +0000 (08:00 +0800)
Add dryrun option to command line interface

nhentai/__init__.py
nhentai/constant.py
nhentai/downloader.py
nhentai/utils.py

nhentai/__init__.py
index e0d4f0abe3d4bd3ecce49ee4b7f7f903384d173e..e0baeec631184ff14f68d13ad7f7cc4e309ada86 100644 (file)
@@ -1,3 +1,3 @@
-__version__ = '0.4.15'
+__version__ = '0.4.16'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

nhentai/constant.py
index ef2edb90bdce5610bbbd58f4114ee9ff2e6c6006..10feab8e0eca695e30ce122f54d667a612cd9baa 100644 (file)
@@ -29,7 +29,6 @@ NHENTAI_HOME = os.path.join(os.getenv('HOME', tempfile.gettempdir()), '.nhentai'
 NHENTAI_HISTORY = os.path.join(NHENTAI_HOME, 'history.sqlite3')
 NHENTAI_CONFIG_FILE = os.path.join(NHENTAI_HOME, 'config.json')
 
-
 CONFIG = {
     'proxy': {'http': '', 'https': ''},
     'cookie': '',
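
For reference, the 'proxy' entry above uses the same mapping shape that requests expects for its proxies argument, which is what the downloader.py and utils.py changes below pass around. A minimal standalone sketch of how such a mapping is consumed (the proxy address is a placeholder, not part of this commit):

import requests

# Same shape as constant.CONFIG['proxy']; the address below is a
# placeholder used only for illustration.
proxies = {
    'http': 'http://127.0.0.1:8080',
    'https': 'http://127.0.0.1:8080',
}

# requests routes the call through the proxy whose key matches the URL scheme.
response = requests.get('https://example.com', proxies=proxies, timeout=10)
print(response.status_code)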

nhentai/downloader.py
index dd1e431a41cfa096de4e2e4fb381819bbba71845..f0aa9fde874f9c13c13273e3a1a7e458651ce5b7 100644 (file)
@@ -14,6 +14,7 @@ try:
 except ImportError:
     from urlparse import urlparse
 
+from nhentai import constant
 from nhentai.logger import logger
 from nhentai.parser import request
 from nhentai.utils import Singleton
@@ -34,7 +35,7 @@ class Downloader(Singleton):
         self.timeout = timeout
         self.delay = delay
 
-    def download_(self, url, folder='', filename='', retried=0):
+    def download_(self, url, folder='', filename='', retried=0, proxy=None):
         if self.delay:
             time.sleep(self.delay)
         logger.info('Starting to download {0} ...'.format(url))
@@ -51,7 +52,7 @@ class Downloader(Singleton):
                 i = 0
                 while i < 10:
                     try:
-                        response = request('get', url, stream=True, timeout=self.timeout)
+                        response = request('get', url, stream=True, timeout=self.timeout, proxies=proxy)
                         if response.status_code != 200:
                             raise NHentaiImageNotExistException
 
@@ -77,7 +78,8 @@ class Downloader(Singleton):
         except (requests.HTTPError, requests.Timeout) as e:
             if retried < 3:
                 logger.warning('Warning: {0}, retrying({1}) ...'.format(str(e), retried))
-                return 0, self.download_(url=url, folder=folder, filename=filename, retried=retried+1)
+                return 0, self.download_(url=url, folder=folder, filename=filename,
+                                         retried=retried+1, proxy=proxy)
             else:
                 return 0, None
 
@@ -128,7 +130,7 @@ class Downloader(Singleton):
         else:
             logger.warning('Path \'{0}\' already exist.'.format(folder))
 
-        queue = [(self, url, folder) for url in queue]
+        queue = [(self, url, folder, constant.CONFIG['proxy']) for url in queue]
 
         pool = multiprocessing.Pool(self.size, init_worker)
         [pool.apply_async(download_wrapper, args=item) for item in queue]
@@ -137,9 +139,9 @@ class Downloader(Singleton):
         pool.join()
 
 
-def download_wrapper(obj, url, folder=''):
+def download_wrapper(obj, url, folder='', proxy=None):
     if sys.platform == 'darwin' or semaphore.get_value():
-        return Downloader.download_(obj, url=url, folder=folder)
+        return Downloader.download_(obj, url=url, folder=folder, proxy=proxy)
     else:
         return -3, None
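
The download_wrapper change above threads the proxy mapping through the multiprocessing task tuple, so each worker receives it as an argument instead of relying on module-level state that spawned worker processes may not share. A minimal sketch of that pattern with a hypothetical fetch() worker (names are illustrative, not from the commit):

import multiprocessing

def fetch(url, proxy=None):
    # Hypothetical worker: the proxy mapping arrives via the task tuple
    # rather than being read from a module-level config inside the worker.
    return url, proxy

if __name__ == '__main__':
    proxy = {'http': '', 'https': ''}  # same shape as constant.CONFIG['proxy']
    urls = ['https://example.com/1', 'https://example.com/2']
    tasks = [(url, proxy) for url in urls]

    with multiprocessing.Pool(2) as pool:
        results = [pool.apply_async(fetch, args=item) for item in tasks]
        for result in results:
            print(result.get())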
 
nhentai/utils.py
index dd74e6690dbd096b5c62509368d913df1f0c63ef..3364d0d4b675b40b6a13141d47bf9cb170dd18c0 100644 (file)
@@ -20,7 +20,11 @@ def request(method, url, **kwargs):
         'User-Agent': 'nhentai command line client (https://github.com/RicterZ/nhentai)',
         'Cookie': constant.CONFIG['cookie']
     })
-    return getattr(session, method)(url, proxies=constant.CONFIG['proxy'], verify=False, **kwargs)
+
+    if not kwargs.get('proxies', None):
+        kwargs['proxies'] = constant.CONFIG['proxy']
+
+    return getattr(session, method)(url, verify=False, **kwargs)
 
 
 def check_cookie():
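
With the request() change above, a proxies keyword supplied by the caller (such as the per-worker proxy forwarded from downloader.py) takes precedence, and constant.CONFIG['proxy'] is used only as a fallback. A minimal standalone sketch of that fallback logic, with GLOBAL_PROXY standing in for constant.CONFIG['proxy']:

import requests

GLOBAL_PROXY = {'http': '', 'https': ''}  # stand-in for constant.CONFIG['proxy']

def request(method, url, **kwargs):
    session = requests.Session()
    # Fall back to the global proxy only when the caller did not pass one.
    if not kwargs.get('proxies', None):
        kwargs['proxies'] = GLOBAL_PROXY
    return getattr(session, method)(url, **kwargs)

# A caller-supplied proxy overrides the global default:
# request('get', 'https://example.com', proxies={'https': 'http://127.0.0.1:8080'})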