add global retry for search, download, fetch favorites

ricterz 2025-02-09 20:02:52 +08:00
parent 29c3abbe5c
commit 023c8969eb
4 changed files with 30 additions and 12 deletions

nhentai/command.py

@@ -7,6 +7,7 @@ import platform
 import urllib
 import urllib3.exceptions
+from requests import options

 from nhentai import constant
 from nhentai.cmdline import cmd_parser, banner, write_config
@@ -52,6 +53,9 @@ def main():
     page_list = paging(options.page)

+    if options.retry:
+        constant.RETRY_TIMES = int(options.retry)
+
     if options.favorites:
         if not options.is_download:
             logger.warning('You do not specify --download option')
@@ -87,7 +91,7 @@ def main():
     if not options.is_show:
         downloader = Downloader(path=options.output_dir, threads=options.threads,
                                 timeout=options.timeout, delay=options.delay,
-                                retry=options.retry, exit_on_fail=options.exit_on_fail,
+                                exit_on_fail=options.exit_on_fail,
                                 no_filename_padding=options.no_filename_padding)

     for doujinshi_id in doujinshi_ids:

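A minimal sketch of the override added to main(), using stand-ins for the real options and constant objects (the real ones come from nhentai.cmdline and nhentai.constant). Note the truthiness guard, which leaves the default untouched when the option is unset or zero, and the int() coercion, which accepts a string value from the command line:

    from types import SimpleNamespace

    constant = SimpleNamespace(RETRY_TIMES=3)  # stands in for nhentai.constant

    def apply_retry_option(options):
        # mirrors the new lines in main(): only a truthy retry value overrides
        if options.retry:
            constant.RETRY_TIMES = int(options.retry)

    apply_retry_option(SimpleNamespace(retry=None))  # option not given: default kept
    assert constant.RETRY_TIMES == 3

    apply_retry_option(SimpleNamespace(retry='5'))   # CLI string coerced to int
    assert constant.RETRY_TIMES == 5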
nhentai/constant.py

@@ -37,6 +37,8 @@ FAV_URL = f'{BASE_URL}/favorites/'
 PATH_SEPARATOR = os.path.sep

+RETRY_TIMES = 3
+
 IMAGE_URL = f'{urlparse(BASE_URL).scheme}://i1.{urlparse(BASE_URL).hostname}/galleries'
 IMAGE_URL_MIRRORS = [

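One module-level constant works as a process-wide setting here because downloader.py and parser.py read it through the module object (constant.RETRY_TIMES) at call time, so the rebinding done in command.py is visible to every reader; a from-import would instead copy the value once at import. A small sketch of that lookup behavior, with a stand-in module object:

    import types

    constant = types.ModuleType('constant')  # stands in for nhentai.constant
    constant.RETRY_TIMES = 3                 # the default defined in this file

    def budget():
        # attribute lookup happens on every call, as in `retried < constant.RETRY_TIMES`
        return constant.RETRY_TIMES

    constant.RETRY_TIMES = 5  # what a --retry 5 override triggers in command.py
    assert budget() == 5      # all call-time readers see the new value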
nhentai/downloader.py

@@ -34,13 +34,12 @@ def download_callback(result):
 class Downloader(Singleton):
-    def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False,
+    def __init__(self, path='', threads=5, timeout=30, delay=0, exit_on_fail=False,
                  no_filename_padding=False):
         self.threads = threads
         self.path = str(path)
         self.timeout = timeout
         self.delay = delay
-        self.retry = retry
         self.exit_on_fail = exit_on_fail
         self.folder = None
         self.semaphore = None
@@ -101,7 +100,7 @@ class Downloader(Singleton):
             return -1, url
         except (httpx.HTTPStatusError, httpx.TimeoutException, httpx.ConnectError) as e:
-            if retried < self.retry:
+            if retried < constant.RETRY_TIMES:
                 logger.warning(f'Download {filename} failed, retrying({retried + 1}) times...')
                 return await self.download(
                     url=url,
@@ -111,7 +110,7 @@
                     proxy=proxy,
                 )
             else:
-                logger.warning(f'Download {filename} failed with {self.retry} times retried, skipped')
+                logger.warning(f'Download {filename} failed with {constant.RETRY_TIMES} times retried, skipped')
                 return -2, url
         except NHentaiImageNotExistException as e:

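The download path retries by having the coroutine await itself with retried + 1 until the module-level cap is hit. A condensed, self-contained sketch of that shape; the URL and return codes are illustrative, and the real method also handles proxies, filenames, and NHentaiImageNotExistException:

    import asyncio
    import httpx

    RETRY_TIMES = 3  # stands in for nhentai.constant.RETRY_TIMES

    async def download(url, retried=0):
        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(url)
                response.raise_for_status()  # turn HTTP errors into HTTPStatusError
                return 1, url
        except (httpx.HTTPStatusError, httpx.TimeoutException, httpx.ConnectError):
            if retried < RETRY_TIMES:
                # recursive retry, the same control flow as the hunk above
                return await download(url, retried=retried + 1)
            return -2, url  # give up after RETRY_TIMES attempts

    # asyncio.run(download('https://example.com/image.jpg'))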
nhentai/parser.py

@@ -92,13 +92,26 @@ def favorites_parser(page=None):
         page_range_list = range(1, pages + 1)

     for page in page_range_list:
-        try:
-            logger.info(f'Getting doujinshi ids of page {page}')
-            resp = request('get', f'{constant.FAV_URL}?page={page}').content
-            result.extend(_get_title_and_id(resp))
-        except Exception as e:
-            logger.error(f'Error: {e}, continue')
+        logger.info(f'Getting doujinshi ids of page {page}')
+
+        i = 0
+        while i < constant.RETRY_TIMES:
+            if i == 2:
+                logger.error(f'Failed to get favorites at page {page} after 3 times retried, skipped')
+                break
+
+            try:
+                resp = request('get', f'{constant.FAV_URL}?page={page}').content
+                temp_result = _get_title_and_id(resp)
+                if not temp_result:
+                    i += 1
+                    continue
+                else:
+                    result.extend(temp_result)
+                    break
+            except Exception as e:
+                logger.warning(f'Error: {e}, retrying ({i} times)...')

     return result
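The loop above treats a page that fetches but parses to nothing as a soft failure that consumes one retry. A self-contained variant of the same pattern, with hypothetical fetch_page()/parse_ids() helpers, tightened so that an exception also consumes an attempt (the committed loop only advances the counter on an empty parse):

    import logging

    logger = logging.getLogger(__name__)
    RETRY_TIMES = 3  # stands in for nhentai.constant.RETRY_TIMES

    def fetch_ids(page, fetch_page, parse_ids):
        result = []
        i = 0
        while i < RETRY_TIMES:
            try:
                ids = parse_ids(fetch_page(page))
                if ids:
                    result.extend(ids)
                    break  # success: stop retrying
            except Exception as e:
                logger.warning(f'Error: {e}, retrying ({i} times)...')
            i += 1  # an exception and an empty parse both use up one attempt
        else:
            logger.error(f'Failed to get favorites at page {page}, skipped')
        return result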
@@ -261,7 +274,7 @@ def search_parser(keyword, sorting, page, is_page_all=False):
         i = 0
         logger.info(f'Searching doujinshis using keywords "{keyword}" on page {p}{total}')
-        while i < 3:
+        while i < constant.RETRY_TIMES:
             try:
                 url = request('get', url=constant.SEARCH_URL, params={'query': keyword,
                                                                       'page': p, 'sort': sorting}).url
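The search hunk also relies on reading .url back from the response to get the final, fully encoded search URL. The request() helper is nhentai's own wrapper; a sketch of the same idea directly against the requests library, with a hypothetical endpoint:

    import requests

    SEARCH_URL = 'https://example.com/search'  # hypothetical stand-in endpoint

    response = requests.get(SEARCH_URL, params={'query': 'keyword', 'page': 1, 'sort': 'popular'})
    print(response.url)  # e.g. https://example.com/search?query=keyword&page=1&sort=popular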