Merge pull request #156 from RicterZ/dev

0.4.2
This commit is contained in:
Ricter Zheng 2020-10-02 01:56:04 +08:00 committed by GitHub
commit 0a94ef9cf1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 14 additions and 20 deletions

View File

@@ -4,13 +4,14 @@ os:
language: python
python:
- 3.7
- 3.8
install:
- python setup.py install
script:
- echo 268642 > /tmp/test.txt
- nhentai --cookie "_ga=GA1.2.2000087053.1558179358; __cfduid=d8930f7b43d04e1b2117719e28386b2e31593148489; csrftoken=3914GQGSmmqQyfQTBswNgfXuhFiefu8sAgOnsfZWiiqS4PJpKivuTp34p2USV6xu; sessionid=be0w2lwlprlmld3ahg9i592ipsuaw840"
- nhentai --cookie "_ga=GA1.2.1651446371.1545407218; __cfduid=d0ed34dfb81167d2a51a1d6392c1768a81601380350; csrftoken=KRN0GR1ft86m3HTefpQA99pp6R1Bo7hUs5QxNGOAIuwB5g4EcJj04fwMB8QKgLaB; sessionid=7hzoowox78c90wi5ud5ibphm4axcck7c"
- nhentai --search umaru
- nhentai --id=152503,146134 -t 10 --output=/tmp/ --cbz
- nhentai -F

View File

@@ -1,3 +1,3 @@
__version__ = '0.4.0'
__version__ = '0.4.2'
__author__ = 'RicterZ'
__email__ = 'ricterzheng@gmail.com'

View File

@@ -5,11 +5,10 @@ import multiprocessing
import signal
from future.builtins import str as text
import sys
import os
import requests
import threadpool
import time
import multiprocessing as mp
try:
from urllib.parse import urlparse
@@ -18,10 +17,10 @@ except ImportError:
from nhentai.logger import logger
from nhentai.parser import request
from nhentai.utils import Singleton, signal_handler
from nhentai.utils import Singleton
requests.packages.urllib3.disable_warnings()
semaphore = mp.Semaphore()
semaphore = multiprocessing.Semaphore(1)
class NHentaiImageNotExistException(Exception):
@@ -133,16 +132,14 @@ class Downloader(Singleton):
queue = [(self, url, folder) for url in queue]
pool = multiprocessing.Pool(self.size, init_worker)
for item in queue:
pool.apply_async(download_wrapper, args=item, callback=self._download_callback)
[pool.apply_async(download_wrapper, args=item) for item in queue]
pool.close()
pool.join()
def download_wrapper(obj, url, folder=''):
if semaphore.get_value():
if sys.platform == 'darwin' or semaphore.get_value():
return Downloader.download_(obj, url=url, folder=folder)
else:
return -3, None

View File

@@ -178,7 +178,6 @@ def doujinshi_parser(id_):
doujinshi['img_id'] = img_id.group(1)
doujinshi['ext'] = ext
pages = 0
for _ in doujinshi_info.find_all('div', class_='tag-container field-name'):
if re.search('Pages:', _.text):
pages = _.find('span', class_='name').string

View File

@@ -9,7 +9,6 @@ import zipfile
import shutil
import requests
import sqlite3
import img2pdf
from nhentai import constant
from nhentai.logger import logger
@@ -195,6 +194,11 @@ def generate_cbz(output_dir='.', doujinshi_obj=None, rm_origin_dir=False, write_
def generate_pdf(output_dir='.', doujinshi_obj=None, rm_origin_dir=False):
try:
import img2pdf
except ImportError:
logger.error("Please install img2pdf package by using pip.")
"""Write images to a PDF file using img2pdf."""
if doujinshi_obj is not None:
doujinshi_dir = os.path.join(output_dir, doujinshi_obj.filename)
@@ -233,12 +237,6 @@ and append a file extension like '.txt', so I avoid the potential of using
an invalid filename.
"""
if sys.platform.startswith('win32'):
invalid_chars = '\/:*?"<>|.'
for char in invalid_chars:
s = s.replace(char, '_')
return s
# maybe you can use `--format` to select a suitable filename
valid_chars = "-_.()[] %s%s" % (string.ascii_letters, string.digits)
filename = ''.join(c for c in s if c in valid_chars)

View File

@@ -4,5 +4,4 @@ BeautifulSoup4>=4.0.0
threadpool>=1.2.7
tabulate>=0.7.5
future>=0.15.2
img2pdf>=0.3.6
iso8601 >= 0.1

View File

@@ -23,7 +23,7 @@ setup(
author=__author__,
author_email=__email__,
keywords='nhentai, doujinshi',
keywords=['nhentai', 'doujinshi', 'downloader'],
description='nhentai.net doujinshis downloader',
long_description=long_description(),
url='https://github.com/RicterZ/nhentai',