optimize logger output #390

ricterz 2025-02-09 20:15:17 +08:00
parent 023c8969eb
commit 0a47527461
3 changed files with 15 additions and 15 deletions

@@ -171,22 +171,25 @@ def cmd_parser():
     # --- set config ---
     if args.cookie is not None:
-        constant.CONFIG['cookie'] = args.cookie
+        constant.CONFIG['cookie'] = args.cookie.strip()
         write_config()
         logger.info('Cookie saved.')
-        sys.exit(0)
-    elif args.useragent is not None:
-        constant.CONFIG['useragent'] = args.useragent
+
+    if args.useragent is not None:
+        constant.CONFIG['useragent'] = args.useragent.strip()
         write_config()
         logger.info('User-Agent saved.')
-        sys.exit(0)
-    elif args.language is not None:
+
+    if args.language is not None:
         constant.CONFIG['language'] = args.language
         write_config()
         logger.info(f'Default language now set to "{args.language}"')
-        sys.exit(0)
+
     # TODO: search without language
 
+    if any([args.cookie, args.useragent, args.language]):
+        sys.exit(0)
+
     # -- end set config
     if args.proxy is not None:
         proxy_url = urlparse(args.proxy)
         if not args.proxy == '' and proxy_url.scheme not in ('http', 'https', 'socks5', 'socks5h',
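
Note (not part of the commit): with the elif chain replaced by independent if blocks and the per-branch sys.exit(0) calls consolidated into a single exit guarded by any([...]), several config options can now be saved in one invocation instead of only the first one matched. A minimal standalone sketch of the new flow, with CONFIG, the saved-option list, and the example argument values standing in for the real nhentai objects:

    # Illustrative sketch only; CONFIG and save_config() are stand-ins,
    # not the actual nhentai.cmdline code.
    from types import SimpleNamespace

    CONFIG = {}

    def save_config(args):
        saved = []
        if args.cookie is not None:
            CONFIG['cookie'] = args.cookie.strip()        # whitespace stripped before saving
            saved.append('cookie')
        if args.useragent is not None:
            CONFIG['useragent'] = args.useragent.strip()
            saved.append('useragent')
        if args.language is not None:
            CONFIG['language'] = args.language
            saved.append('language')
        return saved

    args = SimpleNamespace(cookie='  token  ', useragent='Mozilla/5.0', language=None)
    print(save_config(args))   # ['cookie', 'useragent'] -- both saved in one run
    print(CONFIG['cookie'])    # 'token'
    print(any([args.cookie, args.useragent, args.language]))  # True -> one sys.exit(0) at the end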

@@ -4,10 +4,7 @@ import shutil
 import sys
 import signal
 import platform
-import urllib
-import urllib3.exceptions
-from requests import options
 from nhentai import constant
 from nhentai.cmdline import cmd_parser, banner, write_config
@@ -16,7 +13,6 @@ from nhentai.doujinshi import Doujinshi
 from nhentai.downloader import Downloader
 from nhentai.logger import logger
 from nhentai.constant import BASE_URL
-from nhentai.serializer import serialize_json
 from nhentai.utils import generate_html, generate_doc, generate_main_html, generate_metadata_file, \
     paging, check_cookie, signal_handler, DB, move_to_folder

@@ -95,8 +95,9 @@ def favorites_parser(page=None):
         logger.info(f'Getting doujinshi ids of page {page}')
 
         i = 0
-        while i < constant.RETRY_TIMES:
-            if i == 2:
+        while i <= constant.RETRY_TIMES + 1:
+            i += 1
+            if i > 3:
                 logger.error(f'Failed to get favorites at page {page} after 3 times retried, skipped')
                 break
@@ -104,14 +105,14 @@
                 resp = request('get', f'{constant.FAV_URL}?page={page}').content
                 temp_result = _get_title_and_id(resp)
                 if not temp_result:
-                    i += 1
                     logger.warning(f'Failed to get favorites at page {page}, retrying ({i} times) ...')
                     continue
                 else:
                     result.extend(temp_result)
                     break
             except Exception as e:
-                logger.warning(f'Error: {e}, retrying ({i} times)...')
+                logger.warning(f'Error: {e}, retrying ({i} times) ...')
 
     return result
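
Note (not part of the commit): the retry counter is now incremented at the top of the loop, so the warning messages count attempts starting at 1 and the i > 3 guard gives up after the third failed attempt, matching the "after 3 times retried" log line. A standalone sketch of the same pattern, assuming constant.RETRY_TIMES is 3 and with the network request replaced by a stub that always returns nothing:

    RETRY_TIMES = 3   # assumed value; the real one lives in nhentai.constant

    def fetch_with_retry(fetch):
        i = 0
        while i <= RETRY_TIMES + 1:
            i += 1                 # counted before the attempt, so logs show 1, 2, 3
            if i > 3:
                print('Failed after 3 times retried, skipped')
                break
            try:
                result = fetch()
                if not result:
                    print(f'Empty result, retrying ({i} times) ...')
                    continue
                return result
            except Exception as e:
                print(f'Error: {e}, retrying ({i} times) ...')
        return None

    fetch_with_retry(lambda: None)
    # Empty result, retrying (1 times) ...
    # Empty result, retrying (2 times) ...
    # Empty result, retrying (3 times) ...
    # Failed after 3 times retried, skipped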