diff --git a/nhentai/cmdline.py b/nhentai/cmdline.py index 0b5a421..e07467f 100644 --- a/nhentai/cmdline.py +++ b/nhentai/cmdline.py @@ -71,9 +71,9 @@ def cmd_parser(): help='all search results') parser.add_option('--page', '--page-range', type='string', dest='page', action='store', default='', help='page number of search results. e.g. 1,2-5,14') - parser.add_option('--sorting', dest='sorting', action='store', default='recent', + parser.add_option('--sorting', dest='sorting', action='store', default='date', help='sorting of doujinshi (recent / popular / popular-[today|week])', - choices=['recent', 'popular', 'popular-today', 'popular-week']) + choices=['recent', 'popular', 'popular-today', 'popular-week', 'date']) # download options parser.add_option('--output', '-o', type='string', dest='output_dir', action='store', default='./', @@ -112,7 +112,7 @@ def cmd_parser(): # nhentai options parser.add_option('--cookie', type='str', dest='cookie', action='store', help='set cookie of nhentai to bypass Cloudflare captcha') - parser.add_option('--useragent', type='str', dest='useragent', action='store', + parser.add_option('--useragent', '--user-agent', type='str', dest='useragent', action='store', help='set useragent to bypass Cloudflare captcha') parser.add_option('--language', type='str', dest='language', action='store', help='set default language to parse doujinshis') @@ -158,7 +158,7 @@ def cmd_parser(): elif args.useragent is not None: constant.CONFIG['useragent'] = args.useragent write_config() - logger.info('Useragent saved.') + logger.info('User-Agent saved.') exit(0) elif args.language is not None: constant.CONFIG['language'] = args.language diff --git a/nhentai/parser.py b/nhentai/parser.py index 4562d3e..2e7a29a 100644 --- a/nhentai/parser.py +++ b/nhentai/parser.py @@ -200,6 +200,7 @@ def print_doujinshi(doujinshi_list): def search_parser(keyword, sorting, page, is_page_all=False): # keyword = '+'.join([i.strip().replace(' ', '-').lower() for i in 
keyword.split(',')]) result = [] + response = None if not page: page = [1] @@ -217,13 +218,14 @@ try: url = request('get', url=constant.SEARCH_URL, params={'query': keyword, 'page': p, 'sort': sorting}).url + logger.debug(url) response = request('get', url.replace('%2B', '+')).json() except Exception as e: logger.critical(str(e)) - + response = None break - if 'result' not in response: + if response is None or 'result' not in response: logger.warning('No result in response in page {}'.format(p)) break