diff --git a/nhentai/command.py b/nhentai/command.py
index d5e3a8d..a375308 100644
--- a/nhentai/command.py
+++ b/nhentai/command.py
@@ -52,6 +52,9 @@ def main():
 
     page_list = paging(options.page)
 
+    if options.retry:
+        constant.RETRY_TIMES = int(options.retry)
+
     if options.favorites:
         if not options.is_download:
             logger.warning('You do not specify --download option')
@@ -87,7 +90,7 @@ def main():
     if not options.is_show:
         downloader = Downloader(path=options.output_dir, threads=options.threads,
                                 timeout=options.timeout, delay=options.delay,
-                                retry=options.retry, exit_on_fail=options.exit_on_fail,
+                                exit_on_fail=options.exit_on_fail,
                                 no_filename_padding=options.no_filename_padding)
 
     for doujinshi_id in doujinshi_ids:
diff --git a/nhentai/constant.py b/nhentai/constant.py
index b27c4fd..9e02e5c 100644
--- a/nhentai/constant.py
+++ b/nhentai/constant.py
@@ -37,6 +37,8 @@ FAV_URL = f'{BASE_URL}/favorites/'
 
 PATH_SEPARATOR = os.path.sep
 
+RETRY_TIMES = 3
+
 IMAGE_URL = f'{urlparse(BASE_URL).scheme}://i1.{urlparse(BASE_URL).hostname}/galleries'
 
 IMAGE_URL_MIRRORS = [
diff --git a/nhentai/downloader.py b/nhentai/downloader.py
index 438a994..b1208e0 100644
--- a/nhentai/downloader.py
+++ b/nhentai/downloader.py
@@ -34,13 +34,12 @@ def download_callback(result):
 
 
 class Downloader(Singleton):
-    def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False,
+    def __init__(self, path='', threads=5, timeout=30, delay=0, exit_on_fail=False,
                  no_filename_padding=False):
         self.threads = threads
         self.path = str(path)
         self.timeout = timeout
         self.delay = delay
-        self.retry = retry
         self.exit_on_fail = exit_on_fail
         self.folder = None
         self.semaphore = None
@@ -101,7 +100,7 @@ class Downloader(Singleton):
                 return -1, url
             except (httpx.HTTPStatusError, httpx.TimeoutException,
                     httpx.ConnectError) as e:
-                if retried < self.retry:
+                if retried < constant.RETRY_TIMES:
                     logger.warning(f'Download (unknown) failed, retrying({retried + 1}) times...')
                     return await self.download(
                         url=url,
@@ -111,7 +110,7 @@ class Downloader(Singleton):
                     proxy=proxy,
                 )
             else:
-                logger.warning(f'Download (unknown) failed with {self.retry} times retried, skipped')
+                logger.warning(f'Download (unknown) failed with {constant.RETRY_TIMES} times retried, skipped')
 
                 return -2, url
             except NHentaiImageNotExistException as e:
diff --git a/nhentai/parser.py b/nhentai/parser.py
index a0e1932..6fed216 100644
--- a/nhentai/parser.py
+++ b/nhentai/parser.py
@@ -92,13 +92,25 @@ def favorites_parser(page=None):
     page_range_list = range(1, pages + 1)
 
     for page in page_range_list:
-        try:
-            logger.info(f'Getting doujinshi ids of page {page}')
-            resp = request('get', f'{constant.FAV_URL}?page={page}').content
+        logger.info(f'Getting doujinshi ids of page {page}')
 
-            result.extend(_get_title_and_id(resp))
-        except Exception as e:
-            logger.error(f'Error: {e}, continue')
+        i = 0
+        while i < constant.RETRY_TIMES:
+            try:
+                resp = request('get', f'{constant.FAV_URL}?page={page}').content
+                temp_result = _get_title_and_id(resp)
+            except Exception as e:
+                i += 1
+                logger.warning(f'Error: {e}, retrying ({i} times)...')
+                continue
+
+            if temp_result:
+                result.extend(temp_result)
+                break
+
+            i += 1
+        else:
+            logger.error(f'Failed to get favorites at page {page} after {constant.RETRY_TIMES} times retried, skipped')
 
     return result
 
@@ -261,7 +273,7 @@ def search_parser(keyword, sorting, page, is_page_all=False):
     i = 0
 
     logger.info(f'Searching doujinshis using keywords "{keyword}" on page {p}{total}')
-    while i < 3:
+    while i < constant.RETRY_TIMES:
         try:
             url = request('get', url=constant.SEARCH_URL, params={'query': keyword, 'page': p,
                                                                   'sort': sorting}).url