Compare commits


13 Commits
0.4.1 ... 0.4.3

SHA1 Message Date
4d89b80e67 Merge branch 'dev' of github.com:RicterZ/nhentai into dev 2020-10-02 01:56:31 +08:00
4cc4f35a0d fix bug in search 2020-10-02 01:55:03 +08:00
ad86c49de9 Merge branch 'master' into dev 2020-10-02 01:47:35 +08:00
5a538fe82f add tests and new python version 2020-10-02 01:43:44 +08:00
eb35ba9848 0.4.2 2020-10-02 01:41:02 +08:00
14a53a0953 fix 2020-10-02 01:39:42 +08:00
c5e4b5ffa8 update 2020-10-02 01:39:14 +08:00
b3f25875d0 fix bug on mac #126 2020-10-02 01:32:18 +08:00
91053b98af 0.4.1 2020-10-02 01:02:41 +08:00
b0902c2d58 Merge pull request #147 from fuchs2711/fix-win32-filename: Fix invalid filenames on Windows 2020-07-19 11:12:25 +08:00
320f36c264 Fix invalid filenames on Windows 2020-07-18 15:19:41 +02:00
1dae63be39 Merge pull request #141 from RicterZ/dev: update tests 2020-06-26 13:32:35 +08:00
8ed1b89277 Merge pull request #140 from RicterZ/dev: 0.4.0 2020-06-26 13:16:55 +08:00
5 changed files with 12 additions and 19 deletions

View File

@@ -4,13 +4,14 @@ os:
 language: python
 python:
   - 3.7
+  - 3.8

 install:
   - python setup.py install

 script:
   - echo 268642 > /tmp/test.txt
-  - nhentai --cookie "_ga=GA1.2.2000087053.1558179358; __cfduid=d8930f7b43d04e1b2117719e28386b2e31593148489; csrftoken=3914GQGSmmqQyfQTBswNgfXuhFiefu8sAgOnsfZWiiqS4PJpKivuTp34p2USV6xu; sessionid=be0w2lwlprlmld3ahg9i592ipsuaw840"
+  - nhentai --cookie "_ga=GA1.2.1651446371.1545407218; __cfduid=d0ed34dfb81167d2a51a1d6392c1768a81601380350; csrftoken=KRN0GR1ft86m3HTefpQA99pp6R1Bo7hUs5QxNGOAIuwB5g4EcJj04fwMB8QKgLaB; sessionid=7hzoowox78c90wi5ud5ibphm4axcck7c"
   - nhentai --search umaru
   - nhentai --id=152503,146134 -t 10 --output=/tmp/ --cbz
   - nhentai -F

View File

@@ -1,3 +1,3 @@
-__version__ = '0.4.0'
+__version__ = '0.4.2'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

View File

@@ -5,11 +5,10 @@ import multiprocessing
 import signal

 from future.builtins import str as text
+import sys
 import os
 import requests
-import threadpool
 import time
-import multiprocessing as mp

 try:
     from urllib.parse import urlparse
@@ -18,10 +17,10 @@ except ImportError:
 from nhentai.logger import logger
 from nhentai.parser import request
-from nhentai.utils import Singleton, signal_handler
+from nhentai.utils import Singleton


 requests.packages.urllib3.disable_warnings()
-semaphore = mp.Semaphore()
+semaphore = multiprocessing.Semaphore(1)


 class NHentaiImageNotExistException(Exception):
@@ -133,16 +132,14 @@ class Downloader(Singleton):
         queue = [(self, url, folder) for url in queue]

         pool = multiprocessing.Pool(self.size, init_worker)
-
-        for item in queue:
-            pool.apply_async(download_wrapper, args=item, callback=self._download_callback)
+        [pool.apply_async(download_wrapper, args=item) for item in queue]

         pool.close()
         pool.join()


 def download_wrapper(obj, url, folder=''):
-    if semaphore.get_value():
+    if sys.platform == 'darwin' or semaphore.get_value():
         return Downloader.download_(obj, url=url, folder=folder)
     else:
         return -3, None
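
The sys.platform == 'darwin' short-circuit added above corresponds to the "fix bug on mac #126" commit in the list: CPython's multiprocessing semaphores do not implement sem_getvalue() on macOS, so the old bare semaphore.get_value() check raises NotImplementedError there. A minimal standalone sketch (not part of the repository) of the failure this guards against:

import multiprocessing
import sys

semaphore = multiprocessing.Semaphore(1)

try:
    # On Linux this prints the current semaphore value (1).
    print(semaphore.get_value())
except NotImplementedError:
    # On macOS sem_getvalue() is unavailable, so get_value() raises instead,
    # which is why download_wrapper() now checks sys.platform first.
    print('get_value() is not supported on', sys.platform)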

View File

@@ -178,7 +178,6 @@ def doujinshi_parser(id_):
     doujinshi['img_id'] = img_id.group(1)
     doujinshi['ext'] = ext

-    pages = 0
     for _ in doujinshi_info.find_all('div', class_='tag-container field-name'):
         if re.search('Pages:', _.text):
             pages = _.find('span', class_='name').string
@@ -222,7 +221,7 @@ def print_doujinshi(doujinshi_list):

 def search_parser(keyword, sorting, page):
     logger.debug('Searching doujinshis using keywords {0}'.format(keyword))
-    keyword = '+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')])
+    # keyword = '+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')])
     result = []
     i = 0
     while i < 5:
@@ -230,12 +229,8 @@ def search_parser(keyword, sorting, page):
             url = request('get', url=constant.SEARCH_URL, params={'query': keyword, 'page': page, 'sort': sorting}).url
             response = request('get', url.replace('%2B', '+')).json()
         except Exception as e:
-            i += 1
-            if not i < 5:
-                logger.critical(str(e))
-                logger.warn('If you are in China, please configure the proxy to fu*k GFW.')
-                exit(1)
-            continue
+            logger.critical(str(e))
+
         break

     if 'result' not in response:
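
Two of the hunks above make up the search fix: the keyword-mangling line is now commented out, so the comma-separated keyword is passed to the search API as-is instead of being collapsed into a plus-joined tag string, and the five-attempt retry/exit block in the except clause is reduced to a single logger.critical() call. For reference, the dropped transformation behaved like this (the input string here is made up for illustration):

keyword = 'big breasts, full color'
print('+'.join([i.strip().replace(' ', '-').lower() for i in keyword.split(',')]))
# prints: big-breasts+full-color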

View File

@@ -23,7 +23,7 @@ setup(
     author=__author__,
     author_email=__email__,
-    keywords='nhentai, doujinshi',
+    keywords=['nhentai', 'doujinshi', 'downloader'],
     description='nhentai.net doujinshis downloader',
     long_description=long_description(),
     url='https://github.com/RicterZ/nhentai',