support python2.6
author     Ricter Z <ricterzheng@gmail.com>
           Mon, 2 May 2016 07:55:14 +0000 (15:55 +0800)
committer  Ricter Z <ricterzheng@gmail.com>
           Mon, 2 May 2016 07:55:14 +0000 (15:55 +0800)
nhentai/__init__.py
nhentai/cmdline.py
nhentai/command.py
nhentai/doujinshi.py
nhentai/downloader.py
nhentai/parser.py
setup.cfg [new file with mode: 0644]
setup.py
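
The bulk of this commit is a mechanical Python 2.6 compatibility pass over the str.format calls: every auto-numbered '{}' placeholder gains an explicit index, because auto-numbering was only added in Python 2.7. A minimal sketch of the difference (not part of the commit):

    print('{0} of {1}'.format(1, 2))  # explicit indices work on Python 2.6 and later
    print('{} of {}'.format(1, 2))    # ValueError on 2.6: zero length field name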

diff --git a/nhentai/__init__.py b/nhentai/__init__.py
index e89f277970159805a83838642321afecd6651365..e738d5955ed9c89b41211ba56be29b49762f3416 100644
--- a/nhentai/__init__.py
+++ b/nhentai/__init__.py
@@ -1,3 +1,3 @@
-__version__ = '0.1.4'
+__version__ = '0.1.5'
 __author__ = 'Ricter'
 __email__ = 'ricterzheng@gmail.com'
diff --git a/nhentai/cmdline.py b/nhentai/cmdline.py
index 2f0e754b902dc724c8409096ca56ae797f762edd..8b6720c54e34d539a700c0f03c171aaaf9aa85f4 100644
--- a/nhentai/cmdline.py
+++ b/nhentai/cmdline.py
@@ -46,26 +46,26 @@ def cmd_parser():
     if args.is_download and not args.id and not args.ids and not args.keyword:
         logger.critical('Doujinshi id/ids is required for downloading')
         parser.print_help()
-        raise SystemExit
+        exit(1)
 
     if args.id:
         args.ids = (args.id, ) if not args.ids else args.ids
 
     if not args.keyword and not args.ids:
         parser.print_help()
-        raise SystemExit
+        exit(1)
 
     if args.threads <= 0:
         args.threads = 1
     elif args.threads > 10:
         logger.critical('Maximum number of used threads is 10')
-        raise SystemExit
+        exit(1)
 
     if args.proxy:
         import urlparse
         proxy_url = urlparse.urlparse(args.proxy)
         if proxy_url.scheme not in ('http', 'https'):
-            logger.error('Invalid protocol \'{}\' of proxy, ignored'.format(proxy_url.scheme))
+            logger.error('Invalid protocol \'{0}\' of proxy, ignored'.format(proxy_url.scheme))
         else:
             constant.PROXY = {proxy_url.scheme: args.proxy}
 
diff --git a/nhentai/command.py b/nhentai/command.py
index a50e8ebf4c2c3a561cab107ff095217da63003a0..4c05eeaf16ebafa02a0f38c69fa98251661bc1f6 100644
--- a/nhentai/command.py
+++ b/nhentai/command.py
@@ -28,7 +28,7 @@ def main():
             doujinshi_info = doujinshi_parser(id)
             doujinshi_list.append(Doujinshi(**doujinshi_info))
     else:
-        raise SystemExit
+        exit(1)
 
     if options.is_download:
         downloader = Downloader(path=options.saved_path,
@@ -44,7 +44,7 @@ def main():
 
 def signal_handler(signal, frame):
     logger.error('Ctrl-C signal received. Quit.')
-    raise SystemExit
+    exit(1)
 
 
 signal.signal(signal.SIGINT, signal_handler)
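
The raise SystemExit → exit(1) changes in the hunks above are behavioral, not just stylistic: a bare raise SystemExit terminates with exit status 0, while exit(1) reports failure to the calling shell. A minimal sketch (not from the commit; note that exit() is injected by the site module, so sys.exit(1) is the more robust spelling in scripts):

    import subprocess
    import sys

    # a bare SystemExit carries no code, so the interpreter exits with status 0
    print(subprocess.call([sys.executable, '-c', 'raise SystemExit']))  # 0
    # exit(1) sets a nonzero status that shells and CI can detect
    print(subprocess.call([sys.executable, '-c', 'exit(1)']))           # 1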
diff --git a/nhentai/doujinshi.py b/nhentai/doujinshi.py
index 6e0436bbc6b7556663895b233c3456d004be9db6..87157ad45097afedc7f452e69ad34e3362cf4821 100644
--- a/nhentai/doujinshi.py
+++ b/nhentai/doujinshi.py
@@ -28,7 +28,7 @@ class Doujinshi(object):
         self.info = DoujinshiInfo(**kwargs)
 
     def __repr__(self):
-        return '<Doujinshi: {}>'.format(self.name)
+        return '<Doujinshi: {0}>'.format(self.name)
 
     def show(self):
         table = [
@@ -41,7 +41,7 @@ class Doujinshi(object):
             ["URL", self.url],
             ["Pages", self.pages],
         ]
-        logger.info(u'Print doujinshi information\n{}'.format(tabulate(table)))
+        logger.info(u'Print doujinshi information\n{0}'.format(tabulate(table)))
 
     def download(self):
         logger.info('Start download doujinshi: %s' % self.name)
diff --git a/nhentai/downloader.py b/nhentai/downloader.py
index 4f292b99a4a4adbe7fd1ca35ccf230f4f79e59eb..595c67e4e9d624030a30a3758de0d76d2ac927fb 100644
--- a/nhentai/downloader.py
+++ b/nhentai/downloader.py
@@ -24,7 +24,7 @@ class Downloader(object):
         self.timeout = timeout
 
     def _download(self, url, folder='', filename='', retried=False):
-        logger.info('Start downloading: {} ...'.format(url))
+        logger.info('Start downloading: {0} ...'.format(url))
         filename = filename if filename else os.path.basename(urlparse(url).path)
         try:
             with open(os.path.join(folder, filename), "wb") as f:
@@ -37,7 +37,7 @@ class Downloader(object):
                         f.write(chunk)
         except requests.HTTPError as e:
             if not retried:
-                logger.error('Error: {}, retrying'.format(str(e)))
+                logger.error('Error: {0}, retrying'.format(str(e)))
                 return self._download(url=url, folder=folder, filename=filename, retried=True)
             else:
                 return None
@@ -49,8 +49,8 @@ class Downloader(object):
     def _download_callback(self, request, result):
         if not result:
             logger.critical('Too many errors occurred, quit.')
-            raise SystemExit
-        logger.log(15, '{} download successfully'.format(result))
+            exit(1)
+        logger.log(15, '{0} download successfully'.format(result))
 
     def download(self, queue, folder=''):
         if not isinstance(folder, (str, unicode)):
@@ -60,14 +60,14 @@ class Downloader(object):
             folder = os.path.join(self.path, folder)
 
         if not os.path.exists(folder):
-            logger.warn('Path \'{}\' not exist.'.format(folder))
+            logger.warn('Path \'{0}\' not exist.'.format(folder))
             try:
                 os.makedirs(folder)
             except EnvironmentError as e:
-                logger.critical('Error: {}'.format(str(e)))
-                raise SystemExit
+                logger.critical('Error: {0}'.format(str(e)))
+                exit(1)
         else:
-            logger.warn('Path \'{}\' already exist.'.format(folder))
+            logger.warn('Path \'{0}\' already exist.'.format(folder))
 
         queue = [([url], {'folder': folder}) for url in queue]
 
diff --git a/nhentai/parser.py b/nhentai/parser.py
index f068670232884d996069e631e1e04a0d1177434e..e9cb4c56a5c085ce6d139d5046934022e3c9e6b2 100644
--- a/nhentai/parser.py
+++ b/nhentai/parser.py
@@ -11,20 +11,20 @@ from tabulate import tabulate
 
 def request(method, url, **kwargs):
     if not hasattr(requests, method):
-        raise AttributeError('\'requests\' object has no attribute \'{}\''.format(method))
+        raise AttributeError('\'requests\' object has no attribute \'{0}\''.format(method))
 
     return requests.__dict__[method](url, proxies=constant.PROXY, **kwargs)
 
 
 def doujinshi_parser(id_):
     if not isinstance(id_, (int,)) and (isinstance(id_, (str,)) and not id_.isdigit()):
-        raise Exception('Doujinshi id({}) is not valid'.format(id_))
+        raise Exception('Doujinshi id({0}) is not valid'.format(id_))
 
     id_ = int(id_)
-    logger.log(15, 'Fetching doujinshi information of id {}'.format(id_))
+    logger.log(15, 'Fetching doujinshi information of id {0}'.format(id_))
     doujinshi = dict()
     doujinshi['id'] = id_
-    url = '{}/{}/'.format(constant.DETAIL_URL, id_)
+    url = '{0}/{1}/'.format(constant.DETAIL_URL, id_)
 
     try:
         response = request('get', url).content
@@ -71,14 +71,14 @@ def doujinshi_parser(id_):
 
 
 def search_parser(keyword, page):
-    logger.debug('Searching doujinshis of keyword {}'.format(keyword))
+    logger.debug('Searching doujinshis of keyword {0}'.format(keyword))
     result = []
     try:
         response = request('get', url=constant.SEARCH_URL, params={'q': keyword, 'page': page}).content
     except requests.ConnectionError as e:
         logger.critical(e)
         logger.warn('If you are in China, please configure the proxy to fu*k GFW.')
-        raise SystemExit
+        exit(1)
 
     html = BeautifulSoup(response)
     doujinshi_search_result = html.find_all('div', attrs={'class': 'gallery'})
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..04f6b82
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[metadata]
+description-file = README.rst
+
diff --git a/setup.py b/setup.py
index 00eb63502179638f47c217396fa20320a98b955a..0f647326e5168bf005192b441ff1920c65811cbd 100644
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,15 @@
+import codecs
 from setuptools import setup, find_packages
 from nhentai import __version__, __author__, __email__
 
 with open('requirements.txt') as f:
     requirements = [l for l in f.read().splitlines() if l]
 
+
+def long_description():
+    with codecs.open('README.rst', 'r') as f:
+        return f.read()
+
 setup(
     name='nhentai',
     version=__version__,
@@ -13,7 +19,9 @@ setup(
     author_email=__email__,
     keywords='nhentai, doujinshi',
     description='nhentai.net doujinshis downloader',
+    long_description=long_description(),
     url='https://github.com/RicterZ/nhentai',
+    download_url='https://github.com/RicterZ/nhentai/tarball/master',
     include_package_data=True,
     zip_safe=False,
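
The setup.py change reads README.rst through codecs.open to populate long_description. A hedged note: codecs.open without an explicit encoding (as in the commit) behaves much like the built-in open; passing an encoding is the usual reason to reach for it. A sketch under that assumption:

    import codecs

    # encoding='utf-8' is an assumption here; the commit omits it
    with codecs.open('README.rst', 'r', encoding='utf-8') as f:
        long_description = f.read()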