diff --git a/README.rst b/README.rst
index 3e45bc5..ad4c388 100644
--- a/README.rst
+++ b/README.rst
@@ -185,6 +185,8 @@ Other options:
                         timeout for downloading doujinshi
   -d DELAY, --delay=DELAY
                         slow down between downloading every doujinshi
+  --retry=RETRY         retry times when downloading failed
+  --exit-on-fail        exit on fail to prevent generating incomplete files
   --proxy=PROXY         store a proxy, for example: -p "http://127.0.0.1:1080"
   -f FILE, --file=FILE  read gallery IDs from file.
   --format=NAME_FORMAT  format the saved folder name
diff --git a/nhentai/cmdline.py b/nhentai/cmdline.py
index d5f931c..1f7e096 100644
--- a/nhentai/cmdline.py
+++ b/nhentai/cmdline.py
@@ -97,6 +97,10 @@ def cmd_parser():
                       help='timeout for downloading doujinshi')
     parser.add_option('--delay', '-d', type='int', dest='delay', action='store', default=0,
                       help='slow down between downloading every doujinshi')
+    parser.add_option('--retry', type='int', dest='retry', action='store', default=3,
+                      help='retry times when downloading failed')
+    parser.add_option('--exit-on-fail', dest='exit_on_fail', action='store_true', default=False,
+                      help='exit on fail to prevent generating incomplete files')
     parser.add_option('--proxy', type='string', dest='proxy', action='store',
                       help='store a proxy, for example: -p "http://127.0.0.1:1080"')
     parser.add_option('--file', '-f', type='string', dest='file', action='store',
diff --git a/nhentai/command.py b/nhentai/command.py
index 4032a16..6b8a119 100644
--- a/nhentai/command.py
+++ b/nhentai/command.py
@@ -78,7 +78,8 @@ def main():
 
     if not options.is_show:
         downloader = Downloader(path=options.output_dir, threads=options.threads,
-                                timeout=options.timeout, delay=options.delay)
+                                timeout=options.timeout, delay=options.delay,
+                                retry=options.retry, exit_on_fail=options.exit_on_fail)
 
         for doujinshi_id in doujinshi_ids:
             doujinshi_info = doujinshi_parser(doujinshi_id)
diff --git a/nhentai/downloader.py b/nhentai/downloader.py
index d72fb0b..b17619a 100644
--- a/nhentai/downloader.py
+++ b/nhentai/downloader.py
@@ -34,11 +34,13 @@ def download_callback(result):
 
 
 class Downloader(Singleton):
-    def __init__(self, path='', threads=5, timeout=30, delay=0):
+    def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False):
         self.threads = threads
         self.path = str(path)
         self.timeout = timeout
         self.delay = delay
+        self.retry = retry
+        self.exit_on_fail = exit_on_fail
         self.folder = None
         self.semaphore = None
 
@@ -47,12 +49,14 @@ class Downloader(Singleton):
         for completed_task in asyncio.as_completed(tasks):
             try:
                 result = await completed_task
-                if result[1]:
+                if result[0] > 0:
                     logger.info(f'{result[1]} download completed')
                 else:
-                    logger.warning(f'{result[1]} download failed, return value {result[0]}')
+                    raise Exception(f'{result[1]} download failed, return value {result[0]}')
             except Exception as e:
                 logger.error(f'An error occurred: {e}')
+                if self.exit_on_fail:
+                    raise Exception('User intends to exit on fail')
 
     async def _semaphore_download(self, *args, **kwargs):
         async with self.semaphore:
@@ -88,10 +92,10 @@
 
             if not await self.save(filename, response):
                 logger.error(f'Can not download image {url}')
-                return 1, url
+                return -1, url
 
         except (httpx.HTTPStatusError, httpx.TimeoutException, httpx.ConnectError) as e:
-            if retried < 3:
+            if retried < self.retry:
                 logger.warning(f'Download {filename} failed, retrying({retried + 1}) times...')
                 return await self.download(
                     url=url,
@@ -101,12 +105,12 @@
                     proxy=proxy,
                 )
             else:
-                logger.warning(f'Download {filename} failed with 3 times retried, skipped')
-                return 0, url
+                logger.warning(f'Download {filename} failed with {self.retry} times retried, skipped')
+                return -2, url
 
         except NHentaiImageNotExistException as e:
             os.remove(save_file_path)
-            return -1, url
+            return -3, url
 
         except Exception as e:
             import traceback
@@ -114,10 +118,10 @@
             logger.error(f"Exception type: {type(e)}")
             traceback.print_stack()
             logger.critical(str(e))
-            return 0, url
+            return -9, url
 
         except KeyboardInterrupt:
-            return -3, url
+            return -4, url
 
         return 1, url
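
Note on the status convention this patch settles on: ``download()`` now returns ``(1, url)`` on success and a negative first element on every failure path (``-1`` save failed, ``-2`` retries exhausted, ``-3`` image missing, ``-4`` keyboard interrupt, ``-9`` unexpected exception), which is what lets ``start_download`` treat ``result[0] > 0`` as success. Below is a minimal sketch of how the new options reach the downloader, mirroring the updated constructor call in ``nhentai/command.py``; the argument values are illustrative, not project defaults.

.. code-block:: python

    # Hedged sketch, not project code: mirrors the Downloader construction
    # in nhentai/command.py after this patch. Only the parameter names and
    # defaults come from the diff above; the values are illustrative.
    from nhentai.downloader import Downloader

    downloader = Downloader(
        path='./downloads',   # illustrative output directory
        threads=5,            # existing option, unchanged
        timeout=30,           # existing option, unchanged
        delay=0,              # existing option, unchanged
        retry=5,              # new: per-image retry budget (patch default: 3)
        exit_on_fail=True,    # new: re-raise on failure instead of skipping
    )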