support python2.6

Ricter Z 2016-05-02 15:55:14 +08:00
parent bf8205efbe
commit 3a52e8a8bc
8 changed files with 34 additions and 23 deletions
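
The diffs below make two mechanical changes for Python 2.6 compatibility: positional str.format placeholders are numbered explicitly ('{0}' instead of '{}'), since auto-numbered fields were only introduced in Python 2.7, and bare 'raise SystemExit' statements become 'exit(1)', so failures terminate with a non-zero exit status. A minimal illustration of the placeholder issue (not part of the commit, shown only for context):

    # Fine on Python 2.6, 2.7 and 3.x: the field is numbered explicitly.
    print('downloading {0}'.format('doujinshi'))

    # On Python 2.6 this raises "ValueError: zero length field name in format",
    # because auto-numbered '{}' fields only exist from Python 2.7 onward.
    print('downloading {}'.format('doujinshi'))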

@@ -1,3 +1,3 @@
-__version__ = '0.1.4'
+__version__ = '0.1.5'
 __author__ = 'Ricter'
 __email__ = 'ricterzheng@gmail.com'

@@ -46,26 +46,26 @@ def cmd_parser():
     if args.is_download and not args.id and not args.ids and not args.keyword:
         logger.critical('Doujinshi id/ids is required for downloading')
         parser.print_help()
-        raise SystemExit
+        exit(1)
     if args.id:
         args.ids = (args.id, ) if not args.ids else args.ids
     if not args.keyword and not args.ids:
         parser.print_help()
-        raise SystemExit
+        exit(1)
     if args.threads <= 0:
         args.threads = 1
     elif args.threads > 10:
         logger.critical('Maximum number of used threads is 10')
-        raise SystemExit
+        exit(1)
     if args.proxy:
         import urlparse
         proxy_url = urlparse.urlparse(args.proxy)
         if proxy_url.scheme not in ('http', 'https'):
-            logger.error('Invalid protocol \'{}\' of proxy, ignored'.format(proxy_url.scheme))
+            logger.error('Invalid protocol \'{0}\' of proxy, ignored'.format(proxy_url.scheme))
         else:
             constant.PROXY = {proxy_url.scheme: args.proxy}

@@ -28,7 +28,7 @@ def main():
             doujinshi_info = doujinshi_parser(id)
             doujinshi_list.append(Doujinshi(**doujinshi_info))
     else:
-        raise SystemExit
+        exit(1)
     if options.is_download:
         downloader = Downloader(path=options.saved_path,
@@ -44,7 +44,7 @@ def main():
 def signal_handler(signal, frame):
     logger.error('Ctrl-C signal received. Quit.')
-    raise SystemExit
+    exit(1)
 signal.signal(signal.SIGINT, signal_handler)

@@ -28,7 +28,7 @@ class Doujinshi(object):
         self.info = DoujinshiInfo(**kwargs)
     def __repr__(self):
-        return '<Doujinshi: {}>'.format(self.name)
+        return '<Doujinshi: {0}>'.format(self.name)
     def show(self):
         table = [
@@ -41,7 +41,7 @@ class Doujinshi(object):
             ["URL", self.url],
             ["Pages", self.pages],
         ]
-        logger.info(u'Print doujinshi information\n{}'.format(tabulate(table)))
+        logger.info(u'Print doujinshi information\n{0}'.format(tabulate(table)))
     def download(self):
         logger.info('Start download doujinshi: %s' % self.name)

@@ -24,7 +24,7 @@ class Downloader(object):
         self.timeout = timeout
     def _download(self, url, folder='', filename='', retried=False):
-        logger.info('Start downloading: {} ...'.format(url))
+        logger.info('Start downloading: {0} ...'.format(url))
         filename = filename if filename else os.path.basename(urlparse(url).path)
         try:
             with open(os.path.join(folder, filename), "wb") as f:
@@ -37,7 +37,7 @@ class Downloader(object):
                         f.write(chunk)
         except requests.HTTPError as e:
             if not retried:
-                logger.error('Error: {}, retrying'.format(str(e)))
+                logger.error('Error: {0}, retrying'.format(str(e)))
                 return self._download(url=url, folder=folder, filename=filename, retried=True)
             else:
                 return None
@@ -49,8 +49,8 @@ class Downloader(object):
     def _download_callback(self, request, result):
         if not result:
             logger.critical('Too many errors occurred, quit.')
-            raise SystemExit
-        logger.log(15, '{} download successfully'.format(result))
+            exit(1)
+        logger.log(15, '{0} download successfully'.format(result))
     def download(self, queue, folder=''):
         if not isinstance(folder, (str, unicode)):
@@ -60,14 +60,14 @@ class Downloader(object):
             folder = os.path.join(self.path, folder)
         if not os.path.exists(folder):
-            logger.warn('Path \'{}\' not exist.'.format(folder))
+            logger.warn('Path \'{0}\' not exist.'.format(folder))
             try:
                 os.makedirs(folder)
             except EnvironmentError as e:
-                logger.critical('Error: {}'.format(str(e)))
-                raise SystemExit
+                logger.critical('Error: {0}'.format(str(e)))
+                exit(1)
         else:
-            logger.warn('Path \'{}\' already exist.'.format(folder))
+            logger.warn('Path \'{0}\' already exist.'.format(folder))
         queue = [([url], {'folder': folder}) for url in queue]

@@ -11,20 +11,20 @@ from tabulate import tabulate
 def request(method, url, **kwargs):
     if not hasattr(requests, method):
-        raise AttributeError('\'requests\' object has no attribute \'{}\''.format(method))
+        raise AttributeError('\'requests\' object has no attribute \'{0}\''.format(method))
     return requests.__dict__[method](url, proxies=constant.PROXY, **kwargs)
 def doujinshi_parser(id_):
     if not isinstance(id_, (int,)) and (isinstance(id_, (str,)) and not id_.isdigit()):
-        raise Exception('Doujinshi id({}) is not valid'.format(id_))
+        raise Exception('Doujinshi id({0}) is not valid'.format(id_))
     id_ = int(id_)
-    logger.log(15, 'Fetching doujinshi information of id {}'.format(id_))
+    logger.log(15, 'Fetching doujinshi information of id {0}'.format(id_))
     doujinshi = dict()
     doujinshi['id'] = id_
-    url = '{}/{}/'.format(constant.DETAIL_URL, id_)
+    url = '{0}/{1}/'.format(constant.DETAIL_URL, id_)
     try:
         response = request('get', url).content
@@ -71,14 +71,14 @@ def doujinshi_parser(id_):
 def search_parser(keyword, page):
-    logger.debug('Searching doujinshis of keyword {}'.format(keyword))
+    logger.debug('Searching doujinshis of keyword {0}'.format(keyword))
     result = []
     try:
         response = request('get', url=constant.SEARCH_URL, params={'q': keyword, 'page': page}).content
     except requests.ConnectionError as e:
         logger.critical(e)
         logger.warn('If you are in China, please configure the proxy to fu*k GFW.')
-        raise SystemExit
+        exit(1)
     html = BeautifulSoup(response)
     doujinshi_search_result = html.find_all('div', attrs={'class': 'gallery'})

setup.cfg (new file)
@@ -0,0 +1,3 @@
+[metadata]
+description-file = README.rst

@@ -1,9 +1,15 @@
+import codecs
 from setuptools import setup, find_packages
 from nhentai import __version__, __author__, __email__
 with open('requirements.txt') as f:
     requirements = [l for l in f.read().splitlines() if l]
+def long_description():
+    with codecs.open('README.rst', 'r') as f:
+        return f.read()
 setup(
     name='nhentai',
     version=__version__,
@@ -13,7 +19,9 @@ setup(
     author_email=__email__,
     keywords='nhentai, doujinshi',
     description='nhentai.net doujinshis downloader',
+    long_description=long_description(),
     url='https://github.com/RicterZ/nhentai',
     download_url='https://github.com/RicterZ/nhentai/tarball/master',
     include_package_data=True,
     zip_safe=False,