feat: add compress option

Hellagur4225 2025-02-17 23:04:45 +08:00
parent 34c1ea8952
commit 6efbc73c10
3 changed files with 46 additions and 25 deletions
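In short: this change adds a --zip flag that packages a downloaded doujinshi into a single zip archive instead of a per-title folder. It is handled by a new CompressedDownloader class that writes each image into an open zipfile.ZipFile rather than to disk, and the flag also forces options.is_nohtml and options.no_metadata so no HTML viewer or metadata files are generated alongside the archive. A hypothetical invocation (the --id value is a placeholder) would look like nhentai --id 123456 --zip.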


@@ -138,6 +138,7 @@ def cmd_parser():
     parser.add_option('--regenerate', dest='regenerate', action='store_true', default=False,
                       help='regenerate the cbz or pdf file if exists')
+    parser.add_option('--zip', action='store_true', help='Package into a single zip file')
 
     # nhentai options
     parser.add_option('--cookie', type='str', dest='cookie', action='store',
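For context, optparse exposes a bare action='store_true' option as a boolean attribute on the parsed options object, which is how the new flag later shows up as options.zip. A minimal standalone sketch (not the project's actual parser setup):

```python
from optparse import OptionParser

# Minimal sketch: with action='store_true' the option defaults to None
# and becomes True once the flag is passed on the command line.
parser = OptionParser()
parser.add_option('--zip', action='store_true', help='Package into a single zip file')

options, args = parser.parse_args(['--zip'])
print(options.zip)  # True
```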


@@ -10,7 +10,7 @@ from nhentai import constant
 from nhentai.cmdline import cmd_parser, banner, write_config
 from nhentai.parser import doujinshi_parser, search_parser, legacy_search_parser, print_doujinshi, favorites_parser
 from nhentai.doujinshi import Doujinshi
-from nhentai.downloader import Downloader
+from nhentai.downloader import Downloader, CompressedDownloader
 from nhentai.logger import logger
 from nhentai.constant import BASE_URL
 from nhentai.utils import generate_html, generate_doc, generate_main_html, generate_metadata, \
@@ -84,8 +84,11 @@ def main():
         doujinshi_ids = list(set(map(int, doujinshi_ids)) - set(data))
 
+    if options.zip:
+        options.is_nohtml = True
+        options.no_metadata = True
+
     if not options.is_show:
-        downloader = Downloader(path=options.output_dir, threads=options.threads,
+        downloader = (CompressedDownloader if options.zip else Downloader)(path=options.output_dir, threads=options.threads,
                                 timeout=options.timeout, delay=options.delay,
                                 exit_on_fail=options.exit_on_fail,
                                 no_filename_padding=options.no_filename_padding)
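The one-line change above selects the downloader class first and then calls it, which works because both classes accept the same constructor arguments. A minimal sketch of the pattern with hypothetical stand-in classes (FolderWriter and ZipWriter are not part of the project):

```python
# Minimal sketch: pick a class by flag, then instantiate it with shared arguments.
class FolderWriter:
    def __init__(self, path):
        self.path = path


class ZipWriter(FolderWriter):
    pass


use_zip = True
writer = (ZipWriter if use_zip else FolderWriter)(path='downloads')
print(type(writer).__name__)  # ZipWriter
```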


@@ -4,6 +4,8 @@ import os
 import asyncio
 import httpx
 import urllib3.exceptions
+import zipfile
+import io
 
 from urllib.parse import urlparse
 from nhentai import constant
@@ -13,11 +15,6 @@ from nhentai.utils import Singleton, async_request
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
 class NHentaiImageNotExistException(Exception):
     pass
 
 def download_callback(result):
     result, data = result
     if result == 0:
@@ -77,13 +74,7 @@ class Downloader(Singleton):
         else:
             filename = base_filename + extension
 
-        save_file_path = os.path.join(self.folder, filename)
-
         try:
-            if os.path.exists(save_file_path):
-                logger.warning(f'Skipped download: {save_file_path} already exists')
-                return 1, url
-
             response = await async_request('GET', url, timeout=self.timeout, proxy=proxy)
 
             if response.status_code != 200:
@@ -113,10 +104,6 @@
                 logger.warning(f'Download {filename} failed with {constant.RETRY_TIMES} times retried, skipped')
                 return -2, url
 
         except NHentaiImageNotExistException as e:
-            os.remove(save_file_path)
             return -3, url
 
         except Exception as e:
             import traceback
@@ -130,11 +117,11 @@
         return 1, url
 
-    async def save(self, save_file_path, response) -> bool:
+    async def save(self, filename, response) -> bool:
         if response is None:
             logger.error('Error: Response is None')
             return False
-        save_file_path = os.path.join(self.folder, save_file_path)
+        save_file_path = os.path.join(self.folder, filename)
         with open(save_file_path, 'wb') as f:
             if response is not None:
                 length = response.headers.get('content-length')
@@ -145,6 +132,14 @@
                         f.write(chunk)
         return True
 
+    def create_storage_object(self, folder:str):
+        if not os.path.exists(folder):
+            try:
+                os.makedirs(folder)
+            except EnvironmentError as e:
+                logger.critical(str(e))
+        self.folder:str = folder
+
     def start_download(self, queue, folder='') -> bool:
         if not isinstance(folder, (str,)):
             folder = str(folder)
@@ -153,12 +148,7 @@
             folder = os.path.join(self.path, folder)
 
         logger.info(f'Doujinshi will be saved at "{folder}"')
-        if not os.path.exists(folder):
-            try:
-                os.makedirs(folder)
-            except EnvironmentError as e:
-                logger.critical(str(e))
-        self.folder = folder
+        self.create_storage_object(folder)
 
         if os.getenv('DEBUG', None) == 'NODOWNLOAD':
             # Assuming we want to continue with rest of process.
@@ -175,3 +165,30 @@
         asyncio.run(self.fiber(coroutines))
 
         return True
+
+
+class CompressedDownloader(Downloader):
+    def create_storage_object(self, folder):
+        filename = f'{folder}.zip'
+        print(filename)
+        self.zipfile = zipfile.ZipFile(filename,'w')
+
+    async def save(self, filename, response) -> bool:
+        if response is None:
+            logger.error('Error: Response is None')
+            return False
+        image_data = io.BytesIO()
+        length = response.headers.get('content-length')
+        if length is None:
+            content = await response.read()
+            image_data.write(content)
+        else:
+            async for chunk in response.aiter_bytes(2048):
+                image_data.write(chunk)
+
+        image_data.seek(0)
+        self.zipfile.writestr(filename, image_data.read())
+        return True
+
+    def __del__(self):
+        self.zipfile.close()
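The new save() buffers the response body in a BytesIO object and then adds it to the open archive with ZipFile.writestr(), which takes the whole payload at once; that is why the streamed chunks are collected into a buffer first. A standalone, synchronous sketch of that buffer-then-writestr approach with made-up data (independent of the Downloader classes):

```python
import io
import zipfile

# Stand-in for the chunks yielded by response.aiter_bytes(2048).
chunks = [b'fake-', b'image-', b'bytes']

# Accumulate the bytes in memory, then write them as a single zip entry.
image_data = io.BytesIO()
for chunk in chunks:
    image_data.write(chunk)

image_data.seek(0)
with zipfile.ZipFile('example.zip', 'w') as archive:
    archive.writestr('001.jpg', image_data.read())
```

The sketch closes the archive with a context manager; the committed code instead relies on __del__ to close self.zipfile when the downloader object is garbage-collected.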