Mirror of https://github.com/RicterZ/nhentai.git (synced 2025-04-19 10:21:19 +02:00)
add tests

parent c06f3225a3
commit c7c3572811
@@ -3,16 +3,13 @@
 import os
 import sys
 import json
+
+import nhentai.constant as constant
+
+from urllib.parse import urlparse
 from optparse import OptionParser

-try:
-    from itertools import ifilter as filter
-except ImportError:
-    pass
-
-import nhentai.constant as constant
 from nhentai import __version__
-from nhentai.utils import urlparse, generate_html, generate_main_html, DB
+from nhentai.utils import generate_html, generate_main_html, DB
 from nhentai.logger import logger


@@ -142,35 +139,35 @@ def cmd_parser():

     if args.html_viewer:
         generate_html(template=constant.CONFIG['template'])
-        exit(0)
+        sys.exit(0)

     if args.main_viewer and not args.id and not args.keyword and not args.favorites:
         generate_main_html()
-        exit(0)
+        sys.exit(0)

     if args.clean_download_history:
         with DB() as db:
             db.clean_all()

         logger.info('Download history cleaned.')
-        exit(0)
+        sys.exit(0)

     # --- set config ---
     if args.cookie is not None:
         constant.CONFIG['cookie'] = args.cookie
         write_config()
         logger.info('Cookie saved.')
-        exit(0)
+        sys.exit(0)
     elif args.useragent is not None:
         constant.CONFIG['useragent'] = args.useragent
         write_config()
         logger.info('User-Agent saved.')
-        exit(0)
+        sys.exit(0)
     elif args.language is not None:
         constant.CONFIG['language'] = args.language
         write_config()
         logger.info(f'Default language now set to "{args.language}"')
-        exit(0)
+        sys.exit(0)
     # TODO: search without language

     if args.proxy is not None:
@@ -178,7 +175,7 @@ def cmd_parser():
         if not args.proxy == '' and proxy_url.scheme not in ('http', 'https', 'socks5', 'socks5h',
                                                              'socks4', 'socks4a'):
             logger.error(f'Invalid protocol "{proxy_url.scheme}" of proxy, ignored')
-            exit(0)
+            sys.exit(0)
         else:
             constant.CONFIG['proxy'] = {
                 'http': args.proxy,
@@ -186,7 +183,7 @@ def cmd_parser():
             }
             logger.info(f'Proxy now set to "{args.proxy}"')
             write_config()
-            exit(0)
+            sys.exit(0)

     if args.viewer_template is not None:
         if not args.viewer_template:
@@ -195,7 +192,7 @@ def cmd_parser():
         if not os.path.exists(os.path.join(os.path.dirname(__file__),
                                            f'viewer/{args.viewer_template}/index.html')):
             logger.error(f'Template "{args.viewer_template}" does not exists')
-            exit(1)
+            sys.exit(1)
         else:
             constant.CONFIG['template'] = args.viewer_template
             write_config()
@@ -205,7 +202,7 @@ def cmd_parser():
     if args.favorites:
         if not constant.CONFIG['cookie']:
             logger.warning('Cookie has not been set, please use `nhentai --cookie \'COOKIE\'` to set it.')
-            exit(1)
+            sys.exit(1)

     if args.file:
         with open(args.file, 'r') as f:
@@ -215,21 +212,21 @@ def cmd_parser():
     if (args.is_download or args.is_show) and not args.id and not args.keyword and not args.favorites:
         logger.critical('Doujinshi id(s) are required for downloading')
         parser.print_help()
-        exit(1)
+        sys.exit(1)

     if not args.keyword and not args.id and not args.favorites:
         parser.print_help()
-        exit(1)
+        sys.exit(1)

     if args.threads <= 0:
         args.threads = 1

     elif args.threads > 15:
         logger.critical('Maximum number of used threads is 15')
-        exit(1)
+        sys.exit(1)

     if args.dryrun and (args.is_cbz or args.is_pdf):
         logger.critical('Cannot generate PDF or CBZ during dry-run')
-        exit(1)
+        sys.exit(1)

     return args
@@ -20,7 +20,7 @@ def main():

     if sys.version_info < (3, 0, 0):
         logger.error('nhentai now only support Python 3.x')
-        exit(1)
+        sys.exit(1)

     options = cmd_parser()
     logger.info(f'Using mirror: {BASE_URL}')
@@ -1,5 +1,5 @@
 # coding: utf-8
-
+import sys
 import os
 import re
 import time
@@ -41,11 +41,11 @@ def login(username, password):

     if 'Invalid username/email or password' in resp.text:
         logger.error('Login failed, please check your username and password')
-        exit(1)
+        sys.exit(1)

     if 'You\'re loading pages way too quickly.' in resp.text or 'Really, slow down' in resp.text:
         logger.error('Using nhentai --cookie \'YOUR_COOKIE_HERE\' to save your Cookie.')
-        exit(2)
+        sys.exit(2)


 def _get_title_and_id(response):
@@ -151,7 +151,7 @@ def doujinshi_parser(id_):

     if not img_id:
         logger.critical('Tried yo get image id failed')
-        exit(1)
+        sys.exit(1)

     doujinshi['img_id'] = img_id.group(1)
     doujinshi['ext'] = ext
@@ -178,6 +178,62 @@ def doujinshi_parser(id_):
     return doujinshi


+def legacy_doujinshi_parser(id_):
+    if not isinstance(id_, (int,)) and (isinstance(id_, (str,)) and not id_.isdigit()):
+        raise Exception(f'Doujinshi id({id_}) is not valid')
+
+    id_ = int(id_)
+    logger.info(f'Fetching information of doujinshi id {id_}')
+    doujinshi = dict()
+    doujinshi['id'] = id_
+    url = f'{constant.DETAIL_URL}/{id_}'
+    i = 0
+    while 5 > i:
+        try:
+            response = request('get', url).json()
+        except Exception as e:
+            i += 1
+            if not i < 5:
+                logger.critical(str(e))
+                sys.exit(1)
+            continue
+        break
+
+    doujinshi['name'] = response['title']['english']
+    doujinshi['subtitle'] = response['title']['japanese']
+    doujinshi['img_id'] = response['media_id']
+    doujinshi['ext'] = ''.join([i['t'] for i in response['images']['pages']])
+    doujinshi['pages'] = len(response['images']['pages'])
+
+    # gain information of the doujinshi
+    needed_fields = ['character', 'artist', 'language', 'tag', 'parody', 'group', 'category']
+    for tag in response['tags']:
+        tag_type = tag['type']
+        if tag_type in needed_fields:
+            if tag_type == 'tag':
+                if tag_type not in doujinshi:
+                    doujinshi[tag_type] = {}
+
+                tag['name'] = tag['name'].replace(' ', '-')
+                tag['name'] = tag['name'].lower()
+                doujinshi[tag_type][tag['name']] = tag['id']
+            elif tag_type not in doujinshi:
+                doujinshi[tag_type] = tag['name']
+            else:
+                doujinshi[tag_type] += ', ' + tag['name']
+
+    return doujinshi
+
+
+def print_doujinshi(doujinshi_list):
+    if not doujinshi_list:
+        return
+    doujinshi_list = [(i['id'], i['title']) for i in doujinshi_list]
+    headers = ['id', 'doujinshi']
+    logger.info(f'Search Result || Found {doujinshi_list.__len__()} doujinshis')
+    print(tabulate(tabular_data=doujinshi_list, headers=headers, tablefmt='rst'))
+
+
 def legacy_search_parser(keyword, sorting, page, is_page_all=False):
     logger.debug(f'Searching doujinshis of keyword {keyword}')

@@ -214,15 +270,6 @@ def legacy_search_parser(keyword, sorting, page, is_page_all=False):
     return result


-def print_doujinshi(doujinshi_list):
-    if not doujinshi_list:
-        return
-    doujinshi_list = [(i['id'], i['title']) for i in doujinshi_list]
-    headers = ['id', 'doujinshi']
-    logger.info(f'Search Result || Found {doujinshi_list.__len__()} doujinshis')
-    print(tabulate(tabular_data=doujinshi_list, headers=headers, tablefmt='rst'))
-
-
 def search_parser(keyword, sorting, page, is_page_all=False):
     result = []
     response = None
@@ -268,52 +315,5 @@ def search_parser(keyword, sorting, page, is_page_all=False):
     return result


-def __api_suspended_doujinshi_parser(id_):
-    if not isinstance(id_, (int,)) and (isinstance(id_, (str,)) and not id_.isdigit()):
-        raise Exception(f'Doujinshi id({id_}) is not valid')
-
-    id_ = int(id_)
-    logger.info(f'Fetching information of doujinshi id {id_}')
-    doujinshi = dict()
-    doujinshi['id'] = id_
-    url = f'{constant.DETAIL_URL}/{id_}'
-    i = 0
-    while 5 > i:
-        try:
-            response = request('get', url).json()
-        except Exception as e:
-            i += 1
-            if not i < 5:
-                logger.critical(str(e))
-                exit(1)
-            continue
-        break
-
-    doujinshi['name'] = response['title']['english']
-    doujinshi['subtitle'] = response['title']['japanese']
-    doujinshi['img_id'] = response['media_id']
-    doujinshi['ext'] = ''.join([i['t'] for i in response['images']['pages']])
-    doujinshi['pages'] = len(response['images']['pages'])
-
-    # gain information of the doujinshi
-    needed_fields = ['character', 'artist', 'language', 'tag', 'parody', 'group', 'category']
-    for tag in response['tags']:
-        tag_type = tag['type']
-        if tag_type in needed_fields:
-            if tag_type == 'tag':
-                if tag_type not in doujinshi:
-                    doujinshi[tag_type] = {}
-
-                tag['name'] = tag['name'].replace(' ', '-')
-                tag['name'] = tag['name'].lower()
-                doujinshi[tag_type][tag['name']] = tag['id']
-            elif tag_type not in doujinshi:
-                doujinshi[tag_type] = tag['name']
-            else:
-                doujinshi[tag_type] += ', ' + tag['name']
-
-    return doujinshi
-
-
 if __name__ == '__main__':
     print(doujinshi_parser("32271"))
@@ -32,9 +32,9 @@ def request(method, url, **kwargs):

 def check_cookie():
     response = request('get', constant.BASE_URL)
-    if response.status_code == 503 and 'cf-browser-verification' in response.text:
+    if response.status_code == 403 and 'Just a moment...' in response.text:
         logger.error('Blocked by Cloudflare captcha, please set your cookie and useragent')
-        exit(-1)
+        sys.exit(1)

     username = re.findall('"/users/[0-9]+/(.*?)"', response.text)
     if not username:
@@ -57,15 +57,6 @@ class Singleton(_Singleton(str('SingletonMeta'), (object,), {})):
     pass


-def urlparse(url):
-    try:
-        from urlparse import urlparse
-    except ImportError:
-        from urllib.parse import urlparse
-
-    return urlparse(url)
-
-
 def readfile(path):
     loc = os.path.dirname(__file__)

@@ -108,12 +99,8 @@ def generate_html(output_dir='.', doujinshi_obj=None, template='default'):

     data = html.format(TITLE=name, IMAGES=image_html, SCRIPTS=js, STYLES=css)
     try:
-        if sys.version_info < (3, 0):
-            with open(os.path.join(doujinshi_dir, 'index.html'), 'w') as f:
-                f.write(data)
-        else:
-            with open(os.path.join(doujinshi_dir, 'index.html'), 'wb') as f:
-                f.write(data.encode('utf-8'))
+        with open(os.path.join(doujinshi_dir, 'index.html'), 'wb') as f:
+            f.write(data.encode('utf-8'))

         logger.log(16, f'HTML Viewer has been written to "{os.path.join(doujinshi_dir, "index.html")}"')
     except Exception as e:
@@ -167,12 +154,8 @@ def generate_main_html(output_dir='./'):
        return
     try:
         data = main.format(STYLES=css, SCRIPTS=js, PICTURE=image_html)
-        if sys.version_info < (3, 0):
-            with open('./main.html', 'w') as f:
-                f.write(data)
-        else:
-            with open('./main.html', 'wb') as f:
-                f.write(data.encode('utf-8'))
+        with open('./main.html', 'wb') as f:
+            f.write(data.encode('utf-8'))
         shutil.copy(os.path.dirname(__file__) + '/viewer/logo.png', './')
         set_js_database()
         logger.log(16, f'Main Viewer has been written to "{output_dir}main.html"')
@@ -269,7 +252,7 @@ def format_filename(s, length=MAX_FIELD_LENGTH, _truncate_only=False):

 def signal_handler(signal, frame):
     logger.error('Ctrl-C signal received. Stopping...')
-    exit(1)
+    sys.exit(1)


 def paging(page_string):
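The edit repeated across the hunks above swaps the builtin exit() for sys.exit(). As a brief editor's note (not part of the commit): exit() is only injected by the site module for interactive use and can be absent in some environments (for example under python -S or in frozen builds), while sys.exit() is always importable and simply raises SystemExit. A minimal sketch of the pattern the commit standardizes on, with a hypothetical helper name:

    # Editor's sketch, not code from this commit.
    import sys

    def die(message, code=1):
        # sys.exit() raises SystemExit and does not depend on the site module,
        # which is what provides the interactive exit()/quit() helpers.
        print(message, file=sys.stderr)
        sys.exit(code)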
tests/__init__.py (new file, 0 lines)

tests/test_download.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+import unittest
+import os
+import urllib3.exceptions
+
+from nhentai import constant
+from nhentai.cmdline import load_config
+from nhentai.downloader import Downloader
+from nhentai.parser import doujinshi_parser
+from nhentai.doujinshi import Doujinshi
+from nhentai.utils import generate_html, generate_cbz
+
+
+class TestDownload(unittest.TestCase):
+    def setUp(self) -> None:
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+        load_config()
+        constant.CONFIG['cookie'] = os.getenv('NHENTAI_COOKIE')
+        constant.CONFIG['useragent'] = os.getenv('NHENTAI_UA')
+
+    def test_download(self):
+        did = 440546
+        info = Doujinshi(**doujinshi_parser(did), name_format='%i')
+        info.downloader = Downloader(path='/tmp', size=5)
+        info.download()
+
+        self.assertTrue(os.path.exists(f'/tmp/{did}/001.jpg'))
+
+        generate_html('/tmp', info)
+        self.assertTrue(os.path.exists(f'/tmp/{did}/index.html'))
+
+        generate_cbz('/tmp', info)
+        self.assertTrue(os.path.exists(f'/tmp/{did}.cbz'))
+
+
+if __name__ == '__main__':
+    unittest.main()
tests/test_login.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+import os
+import unittest
+import urllib3.exceptions
+
+from nhentai import constant
+from nhentai.cmdline import load_config
+from nhentai.utils import check_cookie
+
+
+class TestLogin(unittest.TestCase):
+    def setUp(self) -> None:
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+        load_config()
+        constant.CONFIG['cookie'] = os.getenv('NHENTAI_COOKIE')
+        constant.CONFIG['useragent'] = os.getenv('NHENTAI_UA')
+
+    def test_cookie(self):
+        try:
+            check_cookie()
+            self.assertTrue(True)
+        except Exception as e:
+            self.assertIsNone(e)
+
+
+if __name__ == '__main__':
+    unittest.main()
tests/test_parser.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+import unittest
+import os
+import urllib3.exceptions
+
+from nhentai import constant
+from nhentai.cmdline import load_config
+from nhentai.parser import search_parser, doujinshi_parser, favorites_parser
+
+
+class TestParser(unittest.TestCase):
+    def setUp(self) -> None:
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+        load_config()
+        constant.CONFIG['cookie'] = os.getenv('NHENTAI_COOKIE')
+        constant.CONFIG['useragent'] = os.getenv('NHENTAI_UA')
+
+    def test_search(self):
+        result = search_parser('umaru', 'recent', [1], False)
+        self.assertTrue(len(result) > 0)
+
+    def test_doujinshi_parser(self):
+        result = doujinshi_parser(123456)
+        self.assertTrue(result['pages'] == 84)
+
+    def test_favorites_parser(self):
+        result = favorites_parser(page=[1])
+        self.assertTrue(len(result) > 0)
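The three test modules above hit the live site and read NHENTAI_COOKIE and NHENTAI_UA from the environment in their setUp() methods. A minimal way to run them locally, as an editor's sketch using standard unittest discovery (not part of the commit):

    # Editor's sketch: run the new tests with stdlib unittest discovery.
    # Assumes NHENTAI_COOKIE and NHENTAI_UA are exported beforehand, since
    # each setUp() copies them into constant.CONFIG.
    import unittest

    if __name__ == '__main__':
        suite = unittest.defaultTestLoader.discover('tests', pattern='test_*.py')
        unittest.TextTestRunner(verbosity=2).run(suite)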