Compare commits


5 Commits

SHA1 Message Date
0a47527461 optimize logger output #390 2025-02-09 20:15:17 +08:00
023c8969eb add global retry for search, download, fetch favorites 2025-02-09 20:02:52 +08:00
29c3abbe5c Merge branch 'master' of github.com:RicterZ/nhentai 2025-02-08 16:21:08 +08:00
057fae8a83 0.5.23 2025-02-03 15:47:51 +08:00
ba59dcf4db add up/down arrow 2025-01-16 22:40:53 +08:00
8 changed files with 47 additions and 27 deletions

View File

@@ -1,3 +1,3 @@
-__version__ = '0.5.22'
+__version__ = '0.5.23'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

View File

@@ -171,22 +171,25 @@ def cmd_parser():
     # --- set config ---
     if args.cookie is not None:
-        constant.CONFIG['cookie'] = args.cookie
+        constant.CONFIG['cookie'] = args.cookie.strip()
         write_config()
         logger.info('Cookie saved.')
-        sys.exit(0)
-    elif args.useragent is not None:
-        constant.CONFIG['useragent'] = args.useragent
+
+    if args.useragent is not None:
+        constant.CONFIG['useragent'] = args.useragent.strip()
         write_config()
         logger.info('User-Agent saved.')
-        sys.exit(0)
-    elif args.language is not None:
+
+    if args.language is not None:
         constant.CONFIG['language'] = args.language
         write_config()
         logger.info(f'Default language now set to "{args.language}"')
-        sys.exit(0)
         # TODO: search without language
+
+    if any([args.cookie, args.useragent, args.language]):
+        sys.exit(0)
+
     # -- end set config

     if args.proxy is not None:
         proxy_url = urlparse(args.proxy)
         if not args.proxy == '' and proxy_url.scheme not in ('http', 'https', 'socks5', 'socks5h',
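
Review note: this change replaces a mutually exclusive elif chain, which called sys.exit(0) after the first matched option, with independent if blocks and a single exit check, so --cookie, --useragent, and --language can now all be saved in one invocation. A minimal sketch of the pattern, with hypothetical CONFIG/write_config stand-ins rather than nhentai's real module:

    import sys

    CONFIG = {}

    def write_config():
        pass  # the real code persists CONFIG to a config file

    def apply_config(args):
        # Each option is handled independently, so any combination can be saved at once.
        if args.cookie is not None:
            CONFIG['cookie'] = args.cookie.strip()  # strip() drops stray whitespace from the shell
            write_config()
        if args.useragent is not None:
            CONFIG['useragent'] = args.useragent.strip()
            write_config()
        if args.language is not None:
            CONFIG['language'] = args.language
            write_config()
        # Exit once, after every supplied option has been handled.
        if any([args.cookie, args.useragent, args.language]):
            sys.exit(0)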

View File

@@ -4,8 +4,6 @@ import shutil
 import sys
 import signal
 import platform
-import urllib
-import urllib3.exceptions

 from nhentai import constant
 from nhentai.cmdline import cmd_parser, banner, write_config
@@ -15,7 +13,6 @@ from nhentai.doujinshi import Doujinshi
 from nhentai.downloader import Downloader
 from nhentai.logger import logger
 from nhentai.constant import BASE_URL
-from nhentai.serializer import serialize_json
 from nhentai.utils import generate_html, generate_doc, generate_main_html, generate_metadata_file, \
     paging, check_cookie, signal_handler, DB, move_to_folder
@@ -52,6 +49,9 @@ def main():
     page_list = paging(options.page)

+    if options.retry:
+        constant.RETRY_TIMES = int(options.retry)
+
     if options.favorites:
         if not options.is_download:
             logger.warning('You do not specify --download option')
@@ -87,7 +87,7 @@ def main():
     if not options.is_show:
         downloader = Downloader(path=options.output_dir, threads=options.threads,
                                 timeout=options.timeout, delay=options.delay,
-                                retry=options.retry, exit_on_fail=options.exit_on_fail,
+                                exit_on_fail=options.exit_on_fail,
                                 no_filename_padding=options.no_filename_padding)

         for doujinshi_id in doujinshi_ids:
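
The retry keyword disappears from the Downloader constructor because the retry budget now lives in one shared place: --retry overwrites the module-level constant.RETRY_TIMES, which every consumer reads at call time. A runnable sketch of that pattern, using a SimpleNamespace as a stand-in for the real nhentai.constant module:

    from types import SimpleNamespace

    # Stand-in for nhentai.constant; the real module defines RETRY_TIMES = 3.
    constant = SimpleNamespace(RETRY_TIMES=3)

    def main(options):
        # --retry, when given, overrides the shared default before any work starts.
        if options.retry:
            constant.RETRY_TIMES = int(options.retry)

    main(SimpleNamespace(retry='5'))
    print(constant.RETRY_TIMES)  # 5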

View File

@@ -37,6 +37,8 @@ FAV_URL = f'{BASE_URL}/favorites/'
 PATH_SEPARATOR = os.path.sep

+RETRY_TIMES = 3
+
 IMAGE_URL = f'{urlparse(BASE_URL).scheme}://i1.{urlparse(BASE_URL).hostname}/galleries'
 IMAGE_URL_MIRRORS = [
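
One subtlety with a module-level default like this: consumers must read it as constant.RETRY_TIMES (an attribute lookup at call time), exactly as the downloader and parser hunks below do. A from-import would copy the value once at import time and miss a later --retry override. A self-contained illustration, fabricating a tiny module so the snippet runs on its own:

    import sys
    import types

    # Fabricate a module named "constant" so the example is self-contained.
    constant_mod = types.ModuleType('constant')
    constant_mod.RETRY_TIMES = 3
    sys.modules['constant'] = constant_mod

    import constant
    from constant import RETRY_TIMES   # copies the value once, at import time

    constant.RETRY_TIMES = 5           # what a --retry override effectively does

    print(RETRY_TIMES)                 # 3, the stale import-time copy
    print(constant.RETRY_TIMES)        # 5, the live attribute lookup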

View File

@@ -34,13 +34,12 @@ def download_callback(result):

 class Downloader(Singleton):
-    def __init__(self, path='', threads=5, timeout=30, delay=0, retry=3, exit_on_fail=False,
+    def __init__(self, path='', threads=5, timeout=30, delay=0, exit_on_fail=False,
                  no_filename_padding=False):
         self.threads = threads
         self.path = str(path)
         self.timeout = timeout
         self.delay = delay
-        self.retry = retry
         self.exit_on_fail = exit_on_fail
         self.folder = None
         self.semaphore = None
@@ -101,7 +100,7 @@ class Downloader(Singleton):
             return -1, url
         except (httpx.HTTPStatusError, httpx.TimeoutException, httpx.ConnectError) as e:
-            if retried < self.retry:
+            if retried < constant.RETRY_TIMES:
                 logger.warning(f'Download {filename} failed, retrying({retried + 1}) times...')
                 return await self.download(
                     url=url,
@@ -111,7 +110,7 @@ class Downloader(Singleton):
                     proxy=proxy,
                 )
             else:
-                logger.warning(f'Download {filename} failed with {self.retry} times retried, skipped')
+                logger.warning(f'Download {filename} failed with {constant.RETRY_TIMES} times retried, skipped')
                 return -2, url
         except NHentaiImageNotExistException as e:
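
The download retry itself is recursive: on a transient httpx error the coroutine re-invokes itself with retried + 1 until the shared budget is exhausted, then gives up with a -2 status. A condensed, runnable sketch of that control flow; fetch and TransientError are stand-ins for nhentai's request helper and the httpx exceptions:

    import asyncio

    RETRY_TIMES = 3  # stands in for constant.RETRY_TIMES

    class TransientError(Exception):
        """Stand-in for httpx.TimeoutException / httpx.ConnectError."""

    async def fetch(url):
        raise TransientError('simulated network failure')  # always fails in this sketch

    async def download(url, retried=0):
        try:
            return 1, await fetch(url)
        except TransientError:
            if retried < RETRY_TIMES:
                print(f'Download {url} failed, retrying({retried + 1}) times...')
                return await download(url, retried=retried + 1)
            print(f'Download {url} failed with {RETRY_TIMES} times retried, skipped')
            return -2, url

    print(asyncio.run(download('img.png')))  # (-2, 'img.png') after three retries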

View File

@@ -92,13 +92,27 @@ def favorites_parser(page=None):
     page_range_list = range(1, pages + 1)

     for page in page_range_list:
-        try:
-            logger.info(f'Getting doujinshi ids of page {page}')
-            resp = request('get', f'{constant.FAV_URL}?page={page}').content
-            result.extend(_get_title_and_id(resp))
-        except Exception as e:
-            logger.error(f'Error: {e}, continue')
+        logger.info(f'Getting doujinshi ids of page {page}')
+
+        i = 0
+        while i <= constant.RETRY_TIMES + 1:
+            i += 1
+            if i > 3:
+                logger.error(f'Failed to get favorites at page {page} after 3 times retried, skipped')
+                break
+
+            try:
+                resp = request('get', f'{constant.FAV_URL}?page={page}').content
+                temp_result = _get_title_and_id(resp)
+                if not temp_result:
+                    logger.warning(f'Failed to get favorites at page {page}, retrying ({i} times) ...')
+                    continue
+                else:
+                    result.extend(temp_result)
+                    break
+            except Exception as e:
+                logger.warning(f'Error: {e}, retrying ({i} times) ...')

     return result
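
Review note: the new favorites loop retries both on exceptions and on pages that parse to an empty result, but its bound is inconsistent: the while condition uses constant.RETRY_TIMES while the bail-out check hardcodes 3, so a --retry value above 3 still stops after three attempts here. A sketch of an equivalent loop keyed off the single constant; fetch_page and parse_ids are hypothetical stand-ins for request(...) and _get_title_and_id:

    RETRY_TIMES = 3  # stands in for constant.RETRY_TIMES

    def favorites_page_ids(page, fetch_page, parse_ids):
        for attempt in range(1, RETRY_TIMES + 1):
            try:
                ids = parse_ids(fetch_page(page))
            except Exception as e:
                print(f'Error: {e}, retrying ({attempt} times) ...')
                continue
            if ids:  # an empty page counts as a soft failure worth retrying
                return ids
            print(f'Failed to get favorites at page {page}, retrying ({attempt} times) ...')
        print(f'Failed to get favorites at page {page} after {RETRY_TIMES} times retried, skipped')
        return []

    print(favorites_page_ids(1, lambda p: '<html></html>', lambda html: []))  # retries, then []
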
@@ -261,7 +275,7 @@ def search_parser(keyword, sorting, page, is_page_all=False):
         i = 0

         logger.info(f'Searching doujinshis using keywords "{keyword}" on page {p}{total}')
-        while i < 3:
+        while i < constant.RETRY_TIMES:
             try:
                 url = request('get', url=constant.SEARCH_URL, params={'query': keyword,
                                                                       'page': p, 'sort': sorting}).url

View File

@@ -49,8 +49,8 @@ document.onkeypress = event => {
     switch (event.key.toLowerCase()) {
         // Previous Image
         case 'w':
-        scrollBy(0, -40);
-        break;
+            scrollBy(0, -40);
+            break;
         case 'a':
             changePage(currentPage - 1);
             break;
@@ -61,7 +61,7 @@ document.onkeypress = event => {
         // Next Image
         case ' ':
         case 's':
-        scrollBy(0, 40);
+            scrollBy(0, 40);
             break;
         case 'd':
             changePage(currentPage + 1);
@@ -75,11 +75,13 @@ document.onkeydown = event =>{
             changePage(currentPage - 1);
             break;
         case 38: //up
+            changePage(currentPage - 1);
+            break;
         case 39: //right
             changePage(currentPage + 1);
             break;
         case 40: //down
             changePage(currentPage + 1);
             break;
     }
 };

View File

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "nhentai"
-version = "0.5.22"
+version = "0.5.23"
 description = "nhentai doujinshi downloader"
 authors = ["Ricter Z <ricterzheng@gmail.com>"]
 license = "MIT"