def paging(page_string):
    """Expand a comma-separated page selection into a flat list of ints.

    Example: '1,3-5,14' -> [1, 3, 4, 5, 14]

    :param page_string: selection string of single pages and 'start-end'
                        ranges, separated by commas; falsy input yields [].
    :return: list of page numbers in the order they appear in the input
             (no de-duplication, no sorting — callers rely on raw expansion).
    :raises ValueError: on any token that is not a plain non-negative integer
                        or a well-formed 'start-end' range. ValueError is a
                        subclass of Exception, so existing callers that catch
                        the old bare Exception still work.
    """
    if not page_string:
        return []

    page_list = []
    for token in page_string.split(','):
        if '-' in token:
            parts = token.split('-')
            # Reject malformed ranges up front ('3-5-7', '-5', '3-') so they
            # raise the intended error instead of an unpacking ValueError.
            if len(parts) != 2 or not (parts[0].isdigit() and parts[1].isdigit()):
                raise ValueError('Invalid page number: {}'.format(token))
            start, end = int(parts[0]), int(parts[1])
            page_list.extend(range(start, end + 1))
        else:
            if not token.isdigit():
                raise ValueError('Invalid page number: {}'.format(token))
            page_list.append(int(token))

    return page_list
+
+
def generate_metadata_file(output_dir, table, doujinshi_obj=None):
    """Write a plain-text 'info.txt' metadata file for a doujinshi.

    :param output_dir: base directory for output.
    :param table: indexable rows of (label, value) pairs; only the value
                  (row[1]) is written.
    :param doujinshi_obj: object with a ``filename`` attribute; when given,
                          the file goes in output_dir/<filename>, otherwise
                          in the current directory.
    """
    logger.info('Writing Metadata Info')

    if doujinshi_obj is not None:
        doujinshi_dir = os.path.join(output_dir, doujinshi_obj.filename)
    else:
        doujinshi_dir = '.'

    logger.info(doujinshi_dir)

    fields = ['TITLE', 'ORIGINAL TITLE', 'AUTHOR', 'ARTIST', 'CIRCLE', 'SCANLATOR',
              'TRANSLATOR', 'PUBLISHER', 'DESCRIPTION', 'STATUS', 'CHAPTERS', 'PAGES',
              'TAGS', 'TYPE', 'LANGUAGE', 'RELEASED', 'READING DIRECTION', 'CHARACTERS',
              'SERIES', 'PARODY', 'URL']
    # Only these fields have a value available in `table`; the rest are
    # written as empty placeholders.
    special_fields = ['PARODY', 'TITLE', 'ORIGINAL TITLE', 'CHARACTERS', 'AUTHOR',
                      'LANGUAGE', 'TAGS', 'URL', 'PAGES']

    # 'with' guarantees the handle is closed even if a write raises,
    # unlike the previous bare open()/close() pair.
    with open(os.path.join(doujinshi_dir, 'info.txt'), 'w', encoding='utf-8') as f:
        for field in fields:
            f.write('{}: '.format(field))
            if field in special_fields:
                # NOTE(review): this indexes `table` by the field's position
                # in `special_fields`, i.e. it assumes table rows are ordered
                # exactly like that list — confirm against the caller that
                # builds `table`.
                f.write(str(table[special_fields.index(field)][1]))
            f.write('\n')
+
+
class DB(object):
    """Context-manager wrapper around the nhentai download-history table.

    Usage::

        with DB() as db:
            db.add_one('12345')

    ``__enter__`` opens the database and creates the table if needed;
    ``__exit__`` closes the connection.
    """

    # Connection/cursor are populated by __enter__; None until then.
    conn = None
    cur = None

    def __enter__(self):
        """Open the history database and ensure the table exists."""
        connection = sqlite3.connect(constant.NHENTAI_HISTORY)
        self.conn = connection
        self.cur = connection.cursor()
        self.cur.execute('CREATE TABLE IF NOT EXISTS download_history (id text)')
        connection.commit()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the connection; exceptions (if any) propagate unchanged."""
        self.conn.close()

    def clean_all(self):
        """Delete every recorded download id."""
        self.cur.execute('DELETE FROM download_history WHERE 1')
        self.conn.commit()

    def add_one(self, data):
        """Record a single download id."""
        self.cur.execute('INSERT INTO download_history VALUES (?)', [data])
        self.conn.commit()

    def get_all(self):
        """Return every recorded download id as a list of strings."""
        rows = self.cur.execute('SELECT id FROM download_history')
        return [row[0] for row in rows]