Merge branch 'pull/221' into master

commit 1de7e1f998
Ricter Zheng committed 2021-06-07 16:01:54 +08:00 (via GitHub)
6 changed files with 32 additions and 12 deletions

nhentai/__init__.py

@@ -1,3 +1,3 @@
-__version__ = '0.4.15'
+__version__ = '0.4.16'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

nhentai/cmdline.py

@@ -89,6 +89,7 @@ def cmd_parser():
     parser.add_option('--file', '-f', type='string', dest='file', action='store', help='read gallery IDs from file.')
     parser.add_option('--format', type='string', dest='name_format', action='store',
                       help='format the saved folder name', default='[%i][%a][%t]')
+    parser.add_option('--dry-run', '-r', action='store_true', dest='dryrun', help='Dry run, skip file download.')
     # generate options
     parser.add_option('--html', dest='html_viewer', action='store_true',
@@ -217,4 +218,8 @@ def cmd_parser():
         logger.critical('Maximum number of used threads is 15')
         exit(1)
+    if args.dryrun and (args.is_cbz or args.is_pdf):
+        logger.critical('Cannot generate PDF or CBZ during dry-run')
+        exit(1)
     return args
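
A hedged usage sketch, not part of this commit: how the new --dry-run option surfaces on the parsed options object and interacts with the CBZ/PDF guard added above. Only the '--dry-run' option strings and dest names are taken from the diff; the '--cbz' spelling is assumed from the dest name is_cbz, and everything else is illustrative.

# hypothetical standalone sketch of the flag and the mutual-exclusion check
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--dry-run', '-r', action='store_true', dest='dryrun',
                  help='Dry run, skip file download.')
parser.add_option('--cbz', action='store_true', dest='is_cbz', default=False)  # assumed flag name

args, _ = parser.parse_args(['--dry-run', '--cbz'])
if args.dryrun and args.is_cbz:
    # mirrors the guard in cmd_parser(): a dry run downloads nothing,
    # so there is nothing to pack into a CBZ or PDF
    print('Cannot generate PDF or CBZ during dry-run')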

nhentai/command.py

@@ -91,7 +91,9 @@ def main():
                             timeout=options.timeout, delay=options.delay)
     for doujinshi in doujinshi_list:
-        doujinshi.downloader = downloader
-        doujinshi.download()
+        if not options.dryrun:
+            doujinshi.downloader = downloader
+            doujinshi.download()
@@ -100,6 +102,7 @@ def main():
             table=doujinshi.table
             generate_metadatafile(options.output_dir,table,doujinshi)
         if options.is_save_download_history:
             with DB() as db:
                 db.add_one(doujinshi.id)

nhentai/constant.py

@@ -29,7 +29,6 @@ NHENTAI_HOME = os.path.join(os.getenv('HOME', tempfile.gettempdir()), '.nhentai')
 NHENTAI_HISTORY = os.path.join(NHENTAI_HOME, 'history.sqlite3')
 NHENTAI_CONFIG_FILE = os.path.join(NHENTAI_HOME, 'config.json')
 CONFIG = {
     'proxy': {'http': '', 'https': ''},
     'cookie': '',
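
A hedged side note, not taken from the diff: the 'proxy' entry follows the scheme-keyed mapping that the requests library accepts for its proxies argument, so the configured value can be handed down to request() unchanged. The address below is purely illustrative.

import requests

proxy = {'http': 'http://127.0.0.1:8118', 'https': 'http://127.0.0.1:8118'}  # hypothetical address
session = requests.Session()
session.proxies.update(proxy)   # the same mapping shape works per-session ...
print(session.proxies)
# ... or per-call: session.get(url, proxies=proxy)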

nhentai/downloader.py

@@ -14,6 +14,7 @@ try:
 except ImportError:
     from urlparse import urlparse
+from nhentai import constant
 from nhentai.logger import logger
 from nhentai.parser import request
 from nhentai.utils import Singleton
@@ -34,7 +35,7 @@ class Downloader(Singleton):
         self.timeout = timeout
         self.delay = delay
-    def download_(self, url, folder='', filename='', retried=0):
+    def download_(self, url, folder='', filename='', retried=0, proxy=None):
         if self.delay:
             time.sleep(self.delay)
         logger.info('Starting to download {0} ...'.format(url))
@@ -51,7 +52,7 @@ class Downloader(Singleton):
         i = 0
         while i < 10:
             try:
-                response = request('get', url, stream=True, timeout=self.timeout)
+                response = request('get', url, stream=True, timeout=self.timeout, proxies=proxy)
                 if response.status_code != 200:
                     raise NHentaiImageNotExistException
@@ -77,7 +78,8 @@ class Downloader(Singleton):
             except (requests.HTTPError, requests.Timeout) as e:
                 if retried < 3:
                     logger.warning('Warning: {0}, retrying({1}) ...'.format(str(e), retried))
-                    return 0, self.download_(url=url, folder=folder, filename=filename, retried=retried+1)
+                    return 0, self.download_(url=url, folder=folder, filename=filename,
+                                             retried=retried+1, proxy=proxy)
                 else:
                     return 0, None
@@ -128,7 +130,7 @@ class Downloader(Singleton):
         else:
             logger.warning('Path \'{0}\' already exist.'.format(folder))
-        queue = [(self, url, folder) for url in queue]
+        queue = [(self, url, folder, constant.CONFIG['proxy']) for url in queue]
         pool = multiprocessing.Pool(self.size, init_worker)
         [pool.apply_async(download_wrapper, args=item) for item in queue]
@@ -137,9 +139,9 @@ class Downloader(Singleton):
         pool.join()
-def download_wrapper(obj, url, folder=''):
+def download_wrapper(obj, url, folder='', proxy=None):
     if sys.platform == 'darwin' or semaphore.get_value():
-        return Downloader.download_(obj, url=url, folder=folder)
+        return Downloader.download_(obj, url=url, folder=folder, proxy=proxy)
     else:
         return -3, None
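
A hedged reading of the change above, not stated in the commit: the proxy mapping is packed into each task tuple so every worker process receives it explicitly, instead of each worker depending on module-level configuration state in its own interpreter. A minimal, self-contained sketch of that fan-out pattern, with hypothetical names:

import multiprocessing

def download_one(url, proxy=None):
    # stand-in for Downloader.download_(); the real method passes the mapping
    # on to request('get', url, stream=True, ..., proxies=proxy)
    return 0, url

if __name__ == '__main__':
    proxy = {'http': '', 'https': ''}           # e.g. the value of constant.CONFIG['proxy']
    urls = ['https://example.org/1.jpg', 'https://example.org/2.jpg']
    tasks = [(u, proxy) for u in urls]          # the proxy travels with every task
    pool = multiprocessing.Pool(2)
    results = [pool.apply_async(download_one, args=t) for t in tasks]
    pool.close()
    pool.join()
    print([r.get() for r in results])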

nhentai/utils.py

@@ -20,7 +20,11 @@ def request(method, url, **kwargs):
         'User-Agent': 'nhentai command line client (https://github.com/RicterZ/nhentai)',
         'Cookie': constant.CONFIG['cookie']
     })
-    return getattr(session, method)(url, proxies=constant.CONFIG['proxy'], verify=False, **kwargs)
+    if not kwargs.get('proxies', None):
+        kwargs['proxies'] = constant.CONFIG['proxy']
+    return getattr(session, method)(url, verify=False, **kwargs)
 def check_cookie():
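
A hedged sketch of the precedence the request() change above establishes: an explicit proxies keyword supplied by the caller wins, and the configured default only fills in when nothing was passed. The helper name and values below are illustrative, not part of the codebase.

def pick_proxies(config_proxy, **kwargs):
    # same fall-through logic as in request()
    if not kwargs.get('proxies', None):
        kwargs['proxies'] = config_proxy
    return kwargs['proxies']

# no explicit proxies -> fall back to the configured mapping
assert pick_proxies({'http': 'http://cfg:3128'}) == {'http': 'http://cfg:3128'}
# explicit per-call proxies -> override the configured mapping
assert pick_proxies({'http': 'http://cfg:3128'},
                    proxies={'http': 'http://127.0.0.1:8118'}) == {'http': 'http://127.0.0.1:8118'}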
@@ -70,6 +74,13 @@ def generate_html(output_dir='.', doujinshi_obj=None, template='default'):
     else:
         doujinshi_dir = '.'
+    if not os.path.exists(doujinshi_dir):
+        logger.warning('Path \'{0}\' does not exist, creating.'.format(doujinshi_dir))
+        try:
+            os.makedirs(doujinshi_dir)
+        except EnvironmentError as e:
+            logger.critical('{0}'.format(str(e)))
     file_list = os.listdir(doujinshi_dir)
     file_list.sort()