fix: failure chain

sgqy 2025-01-26 22:30:55 +09:00
parent c7ff5c2c5c
commit d90fd871ef
4 changed files with 22 additions and 11 deletions

View File

@@ -185,6 +185,8 @@ Other options:
                          timeout for downloading doujinshi
   -d DELAY, --delay=DELAY
                          slow down between downloading every doujinshi
+  --retry=RETRY          retry times when downloading failed
+  --exit-on-fail         exit on fail to prevent generating incomplete files
   --proxy=PROXY          store a proxy, for example: -p "http://127.0.0.1:1080"
   -f FILE, --file=FILE   read gallery IDs from file.
   --format=NAME_FORMAT   format the saved folder name

View File

@@ -97,6 +97,10 @@ def cmd_parser():
                       help='timeout for downloading doujinshi')
     parser.add_option('--delay', '-d', type='int', dest='delay', action='store', default=0,
                       help='slow down between downloading every doujinshi')
+    parser.add_option('--retry', type='int', dest='retry', action='store', default=3,
+                      help='retry times when downloading failed')
+    parser.add_option('--exit-on-fail', dest='exit_on_fail', action='store_true', default=False,
+                      help='exit on fail to prevent generating incomplete files')
     parser.add_option('--proxy', type='string', dest='proxy', action='store',
                       help='store a proxy, for example: -p "http://127.0.0.1:1080"')
     parser.add_option('--file', '-f', type='string', dest='file', action='store',
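For reference, a minimal, self-contained sketch of how the two new options behave under optparse. The option names, destinations, and defaults are taken from the hunk above; the rest (the reduced parser, the sample argument lists) is illustrative only:

    from optparse import OptionParser

    # Reduced sketch: only the two options added in this commit.
    parser = OptionParser()
    parser.add_option('--retry', type='int', dest='retry', action='store', default=3,
                      help='retry times when downloading failed')
    parser.add_option('--exit-on-fail', dest='exit_on_fail', action='store_true', default=False,
                      help='exit on fail to prevent generating incomplete files')

    options, _ = parser.parse_args(['--retry', '5', '--exit-on-fail'])
    print(options.retry)         # 5
    print(options.exit_on_fail)  # True

    # Without the flags, the defaults keep the old behaviour: retry=3, exit_on_fail=False.
    options, _ = parser.parse_args([])
    print(options.retry, options.exit_on_fail)  # 3 False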

View File

@@ -78,7 +78,8 @@ def main():
     if not options.is_show:
         downloader = Downloader(path=options.output_dir, threads=options.threads,
-                                timeout=options.timeout, delay=options.delay)
+                                timeout=options.timeout, delay=options.delay,
+                                retry=options.retry, exit_on_fail=options.exit_on_fail)
         for doujinshi_id in doujinshi_ids:
             doujinshi_info = doujinshi_parser(doujinshi_id)

View File

@@ -34,11 +34,13 @@ def download_callback(result):
 class Downloader(Singleton):
-    def __init__(self, path='', threads=5, timeout=30, delay=0):
+    def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False):
         self.threads = threads
         self.path = str(path)
         self.timeout = timeout
         self.delay = delay
+        self.retry = retry
+        self.exit_on_fail = exit_on_fail
         self.folder = None
         self.semaphore = None
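Because both new parameters default to the old hard-coded values, existing call sites keep working unchanged. A stand-alone sketch of just the new signature (the real class also mixes in Singleton and sets threads, path, folder, and semaphore; those are omitted here):

    # Illustrative stub, not the real Downloader.
    class Downloader:
        def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False):
            self.retry = retry
            self.exit_on_fail = exit_on_fail

    d_old = Downloader(path='out', threads=5)        # existing call sites still work
    d_new = Downloader(retry=5, exit_on_fail=True)   # opt in to the new behaviour
    print(d_old.retry, d_old.exit_on_fail)           # 3 False
    print(d_new.retry, d_new.exit_on_fail)           # 5 True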
@@ -47,12 +49,14 @@ class Downloader(Singleton):
         for completed_task in asyncio.as_completed(tasks):
             try:
                 result = await completed_task
-                if result[1]:
+                if result[0] > 0:
                     logger.info(f'{result[1]} download completed')
                 else:
-                    logger.warning(f'{result[1]} download failed, return value {result[0]}')
+                    raise Exception(f'{result[1]} download failed, return value {result[0]}')
             except Exception as e:
                 logger.error(f'An error occurred: {e}')
+                if self.exit_on_fail:
+                    raise Exception('User intends to exit on fail')

     async def _semaphore_download(self, *args, **kwargs):
         async with self.semaphore:
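In effect, a non-positive return code is no longer just logged: it is converted into an exception, and with --exit-on-fail that exception escapes the download loop instead of being swallowed. A minimal, self-contained sketch of that failure chain, with the per-image download stubbed out and the logger replaced by print; only the control flow and the exit_on_fail switch mirror the diff:

    import asyncio

    exit_on_fail = True

    async def fake_download(code, url):
        # Stand-in for Downloader.download(); returns (code, url) like the real method.
        return code, url

    async def start_download():
        tasks = [fake_download(1, 'page-1.jpg'), fake_download(-2, 'page-2.jpg')]
        for completed_task in asyncio.as_completed(tasks):
            try:
                result = await completed_task
                if result[0] > 0:
                    print(f'{result[1]} download completed')
                else:
                    # A non-positive code is turned into an exception ...
                    raise Exception(f'{result[1]} download failed, return value {result[0]}')
            except Exception as e:
                # ... which is logged and, when --exit-on-fail is set, re-raised so the
                # whole gallery aborts instead of producing an incomplete folder.
                print(f'An error occurred: {e}')
                if exit_on_fail:
                    raise Exception('User intends to exit on fail')

    try:
        asyncio.run(start_download())
    except Exception as e:
        print(f'aborted: {e}')   # aborted: User intends to exit on fail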
@@ -88,10 +92,10 @@
             if not await self.save(filename, response):
                 logger.error(f'Can not download image {url}')
-                return 1, url
+                return -1, url
         except (httpx.HTTPStatusError, httpx.TimeoutException, httpx.ConnectError) as e:
-            if retried < 3:
+            if retried < self.retry:
                 logger.warning(f'Download {filename} failed, retrying({retried + 1}) times...')
                 return await self.download(
                     url=url,
@@ -101,12 +105,12 @@
                     proxy=proxy,
                 )
             else:
-                logger.warning(f'Download {filename} failed with 3 times retried, skipped')
-                return 0, url
+                logger.warning(f'Download {filename} failed with {self.retry} times retried, skipped')
+                return -2, url
         except NHentaiImageNotExistException as e:
             os.remove(save_file_path)
-            return -1, url
+            return -3, url
         except Exception as e:
             import traceback
@@ -114,10 +118,10 @@
             logger.error(f"Exception type: {type(e)}")
             traceback.print_stack()
             logger.critical(str(e))
-            return 0, url
+            return -9, url
         except KeyboardInterrupt:
-            return -3, url
+            return -4, url

         return 1, url
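The per-image return codes were also renumbered so that success is the only positive value and each failure mode gets its own negative code, which is what makes the `result[0] > 0` check above sufficient. A summary of the codes used after this commit; the symbolic grouping and descriptions below are illustrative, only the numbers and their triggering branches come from the diff:

    # Illustrative summary; these names do not exist in the source.
    RETURN_CODES = {
         1: 'download completed',
        -1: 'response received but saving the image failed',
        -2: 'gave up after the configured number of retries',
        -3: 'image does not exist (NHentaiImageNotExistException)',
        -4: 'interrupted by the user (KeyboardInterrupt)',
        -9: 'unexpected exception',
    }
    # Any code <= 0 now trips the failure chain in the Downloader loop,
    # and aborts the run when --exit-on-fail is set.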