Compare commits


9 Commits
0.4.2 ... 0.4.3

SHA1 Message Date
4d89b80e67 Merge branch 'dev' of github.com:RicterZ/nhentai into dev 2020-10-02 01:56:31 +08:00
4cc4f35a0d fix bug in search 2020-10-02 01:55:03 +08:00
ad86c49de9 Merge branch 'master' into dev 2020-10-02 01:47:35 +08:00
5a538fe82f add tests and new python version 2020-10-02 01:43:44 +08:00
eb35ba9848 0.4.2 2020-10-02 01:41:02 +08:00
b0902c2d58 Merge pull request #147 from fuchs2711/fix-win32-filename (Fix invalid filenames on Windows) 2020-07-19 11:12:25 +08:00
320f36c264 Fix invalid filenames on Windows 2020-07-18 15:19:41 +02:00
1dae63be39 Merge pull request #141 from RicterZ/dev (update tests) 2020-06-26 13:32:35 +08:00
8ed1b89277 Merge pull request #140 from RicterZ/dev (0.4.0) 2020-06-26 13:16:55 +08:00
3 changed files with 6 additions and 9 deletions

.travis.yml

@@ -4,13 +4,14 @@ os:
 language: python
 python:
 - 3.7
+- 3.8
 install:
 - python setup.py install
 script:
 - echo 268642 > /tmp/test.txt
-- nhentai --cookie "_ga=GA1.2.2000087053.1558179358; __cfduid=d8930f7b43d04e1b2117719e28386b2e31593148489; csrftoken=3914GQGSmmqQyfQTBswNgfXuhFiefu8sAgOnsfZWiiqS4PJpKivuTp34p2USV6xu; sessionid=be0w2lwlprlmld3ahg9i592ipsuaw840"
+- nhentai --cookie "_ga=GA1.2.1651446371.1545407218; __cfduid=d0ed34dfb81167d2a51a1d6392c1768a81601380350; csrftoken=KRN0GR1ft86m3HTefpQA99pp6R1Bo7hUs5QxNGOAIuwB5g4EcJj04fwMB8QKgLaB; sessionid=7hzoowox78c90wi5ud5ibphm4axcck7c"
 - nhentai --search umaru
 - nhentai --id=152503,146134 -t 10 --output=/tmp/ --cbz
 - nhentai -F

nhentai/__init__.py

@@ -1,3 +1,3 @@
-__version__ = '0.4.1'
+__version__ = '0.4.2'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

nhentai/parser.py

@@ -221,7 +221,7 @@ def print_doujinshi(doujinshi_list):
 def search_parser(keyword, sorting, page):
     logger.debug('Searching doujinshis using keywords {0}'.format(keyword))
-    keyword = '+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')])
+    # keyword = '+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')])
     result = []
     i = 0
     while i < 5:
@@ -229,12 +229,8 @@ def search_parser(keyword, sorting, page):
             url = request('get', url=constant.SEARCH_URL, params={'query': keyword, 'page': page, 'sort': sorting}).url
             response = request('get', url.replace('%2B', '+')).json()
         except Exception as e:
-            i += 1
-            if not i < 5:
-                logger.critical(str(e))
-                logger.warn('If you are in China, please configure the proxy to fu*k GFW.')
-                exit(1)
-            continue
+            logger.critical(str(e))
+            break
         if 'result' not in response:
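For reference, the parser.py change above (presumably the "fix bug in search" commit, 4cc4f35a0d) comments out the line that rewrote a comma-separated keyword list into a lower-case, dash-and-plus query before it was sent to constant.SEARCH_URL, and replaces the five-attempt retry in the except branch with a single logged failure and a break. A minimal standalone sketch of that transformation follows; the function name normalize_keyword is made up for illustration, while the expression itself is copied from the removed line.

# Hypothetical standalone sketch of the keyword rewrite removed in this compare.
# The expression matches the commented-out line in the diff above.
def normalize_keyword(keyword):
    # split on commas, lower-case each term, join words with '-', join terms with '+'
    return '+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')])


if __name__ == '__main__':
    print(normalize_keyword('Big Breasts, Full Color'))  # prints: big-breasts+full-color

After this change the keyword string is passed to the search endpoint as typed, and a failed request logs the error and leaves the while loop instead of retrying up to five times.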