Compare commits


4 Commits
0.4.1 ... 0.4.2

SHA1 Message Date
14a53a0953 fix 2020-10-02 01:39:42 +08:00
c5e4b5ffa8 update 2020-10-02 01:39:14 +08:00
b3f25875d0 fix bug on mac #126 2020-10-02 01:32:18 +08:00
91053b98af 0.4.1 2020-10-02 01:02:41 +08:00
4 changed files with 7 additions and 11 deletions

@@ -1,3 +1,3 @@
-__version__ = '0.4.0'
+__version__ = '0.4.1'
 __author__ = 'RicterZ'
 __email__ = 'ricterzheng@gmail.com'

@@ -5,11 +5,10 @@ import multiprocessing
 import signal
 
 from future.builtins import str as text
+import sys
 import os
 import requests
-import threadpool
 import time
-import multiprocessing as mp
 
 try:
     from urllib.parse import urlparse
@@ -18,10 +17,10 @@ except ImportError:
 
 from nhentai.logger import logger
 from nhentai.parser import request
-from nhentai.utils import Singleton, signal_handler
+from nhentai.utils import Singleton
 
 requests.packages.urllib3.disable_warnings()
-semaphore = mp.Semaphore()
+semaphore = multiprocessing.Semaphore(1)
 
 
 class NHentaiImageNotExistException(Exception):
@@ -133,16 +132,14 @@ class Downloader(Singleton):
         queue = [(self, url, folder) for url in queue]
 
         pool = multiprocessing.Pool(self.size, init_worker)
-
-        for item in queue:
-            pool.apply_async(download_wrapper, args=item, callback=self._download_callback)
+        [pool.apply_async(download_wrapper, args=item) for item in queue]
 
         pool.close()
         pool.join()
 
 
 def download_wrapper(obj, url, folder=''):
-    if semaphore.get_value():
+    if sys.platform == 'darwin' or semaphore.get_value():
         return Downloader.download_(obj, url=url, folder=folder)
     else:
         return -3, None
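A note on the mac fix above: `multiprocessing` semaphores on macOS do not implement `get_value()` (the platform lacks sem_getvalue, so Python raises NotImplementedError), which is presumably the crash behind #126; the new condition short-circuits on `sys.platform == 'darwin'` so the value is never queried there. A minimal sketch of that guard, using a hypothetical `can_proceed()` helper rather than the project's `download_wrapper`:

    import multiprocessing
    import sys

    semaphore = multiprocessing.Semaphore(1)

    def can_proceed():
        # On macOS, Semaphore.get_value() raises NotImplementedError,
        # so skip the check there and assume the slot is free.
        if sys.platform == 'darwin':
            return True
        return semaphore.get_value() > 0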

@@ -178,7 +178,6 @@ def doujinshi_parser(id_):
     doujinshi['img_id'] = img_id.group(1)
     doujinshi['ext'] = ext
 
-    pages = 0
     for _ in doujinshi_info.find_all('div', class_='tag-container field-name'):
         if re.search('Pages:', _.text):
             pages = _.find('span', class_='name').string
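For context, the loop above walks the page's tag containers and pulls the page count out of the "Pages:" field. A standalone sketch of the same BeautifulSoup pattern, run against made-up HTML rather than the site's real markup:

    import re
    from bs4 import BeautifulSoup

    # Hypothetical markup that mirrors the structure the parser walks.
    html = '<div class="tag-container field-name">Pages: <span class="name">25</span></div>'

    soup = BeautifulSoup(html, 'html.parser')
    pages = 0
    for tag in soup.find_all('div', class_='tag-container field-name'):
        if re.search('Pages:', tag.text):
            pages = int(tag.find('span', class_='name').string)
    print(pages)  # 25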

@@ -23,7 +23,7 @@ setup(
 
     author=__author__,
     author_email=__email__,
-    keywords='nhentai, doujinshi',
+    keywords=['nhentai', 'doujinshi', 'downloader'],
    description='nhentai.net doujinshis downloader',
     long_description=long_description(),
     url='https://github.com/RicterZ/nhentai',
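setuptools accepts `keywords` either as a single comma-separated string or as a list of strings, so the change above only switches to the list form and adds a 'downloader' keyword. A trimmed-down sketch with hypothetical package metadata:

    from setuptools import setup, find_packages

    setup(
        name='example-downloader',  # hypothetical name, not the real project metadata
        version='0.0.1',
        packages=find_packages(),
        keywords=['nhentai', 'doujinshi', 'downloader'],  # list form instead of 'a, b' string
        description='example package metadata',
    )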