Compare commits


28 Commits
0.5.5 ... 0.5.7

SHA1 Message Date
40072a8483 0.5.7 2024-09-21 00:00:04 +08:00
f97469259d fix #331 2024-09-20 23:59:34 +08:00
ec608cc741 fix workflow docker issue 2024-09-20 23:58:25 +08:00
30e2814fe2 update version number in pyproject.toml 2024-09-20 23:57:10 +08:00
da298e1fe7 Merge pull request #312 from RicterZ/dependabot/pip/idna-3.7
Bump idna from 3.4 to 3.7
2024-09-20 23:56:25 +08:00
51d43ddde0 Merge branch 'master' into dependabot/pip/idna-3.7 2024-09-20 23:56:18 +08:00
c734881fc7 Merge pull request #316 from RicterZ/dependabot/pip/requests-2.32.0
Bump requests from 2.31.0 to 2.32.0
2024-09-20 23:55:33 +08:00
8d5803a45e Merge branch 'master' into dependabot/pip/requests-2.32.0 2024-09-20 23:55:28 +08:00
b441085b45 Merge pull request #318 from RicterZ/dependabot/pip/urllib3-1.26.19
Bump urllib3 from 1.26.18 to 1.26.19
2024-09-20 23:55:08 +08:00
132b26f8c4 Merge branch 'master' into dependabot/pip/urllib3-1.26.19 2024-09-20 23:54:57 +08:00
a0dc952fd3 Merge pull request #319 from RicterZ/dependabot/pip/certifi-2024.7.4
Bump certifi from 2022.12.7 to 2024.7.4
2024-09-20 23:54:18 +08:00
2bd862777b fix #333 2024-09-20 23:53:26 +08:00
35c55503fa 0.5.6 2024-09-20 23:39:38 +08:00
29aac84d53 fix #336 2024-09-20 23:34:26 +08:00
4ed4523782 fix #341 2024-09-20 23:27:37 +08:00
4223326c13 Merge pull request #340 from vglint/patch-3
Fix gallery search for folders with underscore
2024-09-14 10:17:57 +08:00
a248ff98c4 Fix gallery search for folders with underscore
Gallery titles are generated from folder names with '_' replaced by ' ' (generate_main_html()). To match against these titles when searching, we must also replace '_' with ' ' in each folder name we add to the list of titles to unhide.
2024-09-13 15:56:01 -07:00
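A minimal Python sketch of the mismatch described above (folder names here are hypothetical; the real change lives in generate_main_html() and the generated gallery script shown in the diffs below):

# Sketch only: raw folder names never match the displayed titles.
folders = ['123456-some_gallery_name', '654321-another_one']  # hypothetical examples
# generate_main_html() renders titles with underscores turned into spaces.
titles = {f.replace('_', ' ') for f in folders}
# Before the fix: comparing raw folder names against the titles finds nothing.
assert not any(f in titles for f in folders)
# After the fix: normalizing folder names the same way makes every title match.
assert all(f.replace('_', ' ') in titles for f in folders)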
021f17d229 Merge pull request #321 from PenitentMonke/xdg-base-dir
Adhere to XDG base dir spec on Linux
2024-07-08 22:03:38 +08:00
4162eabe93 Adhere to XDG base dir spec on Linux
Change how NHENTAI_HOME is set to follow the XDG Base Directory
Specification where possible, when running on Linux.

ISSUE: 299
2024-07-07 02:40:33 -03:00
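For reference, a quick way to check which data directory the client resolves after this change (constant.NHENTAI_HOME and get_nhentai_home() are the names introduced in the constant.py diff further down; the example path is hypothetical):

# On Linux, an existing $XDG_DATA_HOME/nhentai is preferred, then an existing
# ~/.nhentai, then $XDG_DATA_HOME/nhentai if the variable is set; other
# systems keep the old ~/.nhentai default.
from nhentai import constant
print(constant.NHENTAI_HOME)  # e.g. /home/alice/.local/share/nhentai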
c75e9efb21 Bump certifi from 2022.12.7 to 2024.7.4
Bumps [certifi](https://github.com/certifi/python-certifi) from 2022.12.7 to 2024.7.4.
- [Commits](https://github.com/certifi/python-certifi/compare/2022.12.07...2024.07.04)

---
updated-dependencies:
- dependency-name: certifi
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-05 21:52:23 +00:00
f2dec5c2a3 Bump urllib3 from 1.26.18 to 1.26.19
Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.18 to 1.26.19.
- [Release notes](https://github.com/urllib3/urllib3/releases)
- [Changelog](https://github.com/urllib3/urllib3/blob/1.26.19/CHANGES.rst)
- [Commits](https://github.com/urllib3/urllib3/compare/1.26.18...1.26.19)

---
updated-dependencies:
- dependency-name: urllib3
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-18 01:35:13 +00:00
845a0d5659 Bump requests from 2.31.0 to 2.32.0
---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-21 05:39:26 +00:00
03d85c4e5d Bump idna from 3.4 to 3.7
Bumps [idna](https://github.com/kjd/idna) from 3.4 to 3.7.
- [Release notes](https://github.com/kjd/idna/releases)
- [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst)
- [Commits](https://github.com/kjd/idna/compare/v3.4...v3.7)

---
updated-dependencies:
- dependency-name: idna
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-12 02:06:40 +00:00
dc54a43610 Merge pull request #311 from RicterZ/dev
Dev merge to master
2024-03-28 17:56:28 +08:00
4ecffaff55 Merge pull request #310 from Spyridion/dev
Changed parser option checks to allow artist search
2024-03-28 17:42:42 +08:00
457f12d40d Changed parser option checks to allow artist search 2024-03-28 02:40:14 -07:00
499081a9cd Merge pull request #306 from myc1ou1d/dev
fix file not found error when cbz file exists.
2024-02-25 00:37:32 +08:00
53aa04af1e fix file not found error when cbz file exists. 2024-02-24 23:27:52 +08:00
10 changed files with 64 additions and 30 deletions

View File

@@ -1,3 +1,3 @@
-__version__ = '0.5.5'
+__version__ = '0.5.7'
__author__ = 'RicterZ'
__email__ = 'ricterzheng@gmail.com'

View File

@@ -213,7 +213,7 @@ def cmd_parser():
_ = [i.strip() for i in f.readlines()]
args.id = set(int(i) for i in _ if i.isdigit())
-if (args.is_download or args.is_show) and not args.id and not args.keyword and not args.favorites:
+if (args.is_download or args.is_show) and not args.id and not args.keyword and not args.favorites and not args.artist:
logger.critical('Doujinshi id(s) are required for downloading')
parser.print_help()
sys.exit(1)

View File

@@ -3,6 +3,23 @@ import os
import tempfile
from urllib.parse import urlparse
+from platform import system
+def get_nhentai_home() -> str:
+    home = os.getenv('HOME', tempfile.gettempdir())
+    if system() == 'Linux':
+        xdgdat = os.getenv('XDG_DATA_HOME')
+        if xdgdat and os.path.exists(os.path.join(xdgdat, 'nhentai')):
+            return os.path.join(xdgdat, 'nhentai')
+        if home and os.path.exists(os.path.join(home, '.nhentai')):
+            return os.path.join(home, '.nhentai')
+        if xdgdat:
+            return os.path.join(xdgdat, 'nhentai')
+    # Use old default path in other systems
+    return os.path.join(home, '.nhentai')
DEBUG = os.getenv('DEBUG', False)
@@ -20,8 +37,13 @@ FAV_URL = f'{BASE_URL}/favorites/'
IMAGE_URL = f'{urlparse(BASE_URL).scheme}://i.{urlparse(BASE_URL).hostname}/galleries'
IMAGE_URL_MIRRORS = [
f'{urlparse(BASE_URL).scheme}://i3.{urlparse(BASE_URL).hostname}'
f'{urlparse(BASE_URL).scheme}://i5.{urlparse(BASE_URL).hostname}'
f'{urlparse(BASE_URL).scheme}://i7.{urlparse(BASE_URL).hostname}'
]
-NHENTAI_HOME = os.path.join(os.getenv('HOME', tempfile.gettempdir()), '.nhentai')
+NHENTAI_HOME = get_nhentai_home()
NHENTAI_HISTORY = os.path.join(NHENTAI_HOME, 'history.sqlite3')
NHENTAI_CONFIG_FILE = os.path.join(NHENTAI_HOME, 'config.json')
@@ -32,7 +54,8 @@ CONFIG = {
'cookie': '',
'language': '',
'template': '',
-'useragent': 'nhentai command line client (https://github.com/RicterZ/nhentai)'
+'useragent': 'nhentai command line client (https://github.com/RicterZ/nhentai)',
+'max_filename': 85
}
LANGUAGE_ISO = {

View File

@@ -67,10 +67,14 @@ class Downloader(Singleton):
try:
    response = request('get', url, stream=True, timeout=self.timeout, proxies=proxy)
    if response.status_code != 200:
-        raise NHentaiImageNotExistException
-except NHentaiImageNotExistException as e:
-    raise e
+        path = urlparse(url).path
+        for mirror in constant.IMAGE_URL_MIRRORS:
+            print(f'{mirror}{path}')
+            mirror_url = f'{mirror}{path}'
+            response = request('get', mirror_url, stream=True,
+                               timeout=self.timeout, proxies=proxy)
+            if response.status_code == 200:
+                break
except Exception as e:
    i += 1

View File

@@ -135,7 +135,7 @@ def doujinshi_parser(id_, counter=0):
logger.warning(f'Error: {e}, ignored')
return None
-print(response)
+# print(response)
html = BeautifulSoup(response, 'html.parser')
doujinshi_info = html.find('div', attrs={'id': 'info'})
@@ -327,7 +327,9 @@ def search_parser(keyword, sorting, page, is_page_all=False):
for row in response['result']:
title = row['title']['english']
-title = title[:85] + '..' if len(title) > 85 else title
+title = title[:constant.CONFIG['max_filename']] + '..' if \
+    len(title) > constant.CONFIG['max_filename'] else title
result.append({'id': row['id'], 'title': title})
not_exists_persist = False

View File

@@ -166,6 +166,9 @@ def generate_main_html(output_dir='./'):
def generate_cbz(output_dir='.', doujinshi_obj=None, rm_origin_dir=False, write_comic_info=True, move_to_folder=False):
if doujinshi_obj is not None:
doujinshi_dir = os.path.join(output_dir, doujinshi_obj.filename)
+if os.path.exists(doujinshi_dir+".cbz"):
+    logger.warning(f'Comic Book CBZ file exists, skip "{doujinshi_dir}"')
+    return
if write_comic_info:
serialize_comic_xml(doujinshi_obj, doujinshi_dir)
cbz_filename = os.path.join(os.path.join(doujinshi_dir, '..'), f'{doujinshi_obj.filename}.cbz')
@@ -206,9 +209,10 @@ def generate_pdf(output_dir='.', doujinshi_obj=None, rm_origin_dir=False, move_t
"""Write images to a PDF file using img2pdf."""
if doujinshi_obj is not None:
doujinshi_dir = os.path.join(output_dir, doujinshi_obj.filename)
+filename = doujinshi_obj.filename.replace('/', '-')
pdf_filename = os.path.join(
os.path.join(doujinshi_dir, '..'),
-f'{doujinshi_obj.filename}.pdf'
+f'{filename}.pdf'
)
else:
pdf_filename = './doujinshi.pdf'
@@ -222,7 +226,7 @@ def generate_pdf(output_dir='.', doujinshi_obj=None, rm_origin_dir=False, move_t
full_path_list = (
[os.path.join(doujinshi_dir, image) for image in file_list]
)
-pdf_f.write(img2pdf.convert(full_path_list))
+pdf_f.write(img2pdf.convert(full_path_list, rotation=img2pdf.Rotation.ifvalid))
if rm_origin_dir:
shutil.rmtree(doujinshi_dir, ignore_errors=True)

View File

@@ -139,7 +139,7 @@ function filter_searcher(){
break
}
}
-if (verifier){doujinshi_id.push(data[i].Folder);}
+if (verifier){doujinshi_id.push(data[i].Folder.replace("_", " "));}
}
var gallery = document.getElementsByClassName("gallery-favorite");
for (var i = 0; i < gallery.length; i++){

poetry.lock generated
View File

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "beautifulsoup4"
@@ -20,13 +20,13 @@ lxml = ["lxml"]
[[package]]
name = "certifi"
version = "2022.12.7"
version = "2024.7.4"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
{file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
{file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
]
[[package]]
@@ -128,13 +128,13 @@ files = [
[[package]]
name = "idna"
version = "3.4"
version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]
[[package]]
@@ -150,13 +150,13 @@ files = [
[[package]]
name = "requests"
version = "2.31.0"
version = "2.32.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
{file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"},
{file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"},
]
[package.dependencies]
@@ -196,13 +196,13 @@ widechars = ["wcwidth"]
[[package]]
name = "urllib3"
version = "1.26.18"
version = "1.26.19"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
{file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
{file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
{file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
]
[package.extras]

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "nhentai"
version = "0.5.3"
version = "0.5.7"
description = "nhentai doujinshi downloader"
authors = ["Ricter Z <ricterzheng@gmail.com>"]
license = "MIT"

View File

@@ -1,5 +1,6 @@
requests
soupsieve
setuptools
BeautifulSoup4
tabulate
iso8601