fix: add headers, proxy to async_request

normalizedwater546 2024-11-23 13:11:25 +00:00
parent feb7f45533
commit e3410f5a9a
2 changed files with 26 additions and 10 deletions


@@ -1,19 +1,18 @@
 # coding: utf-8
 import os
+import asyncio
+import httpx
 import urllib3.exceptions
 from urllib.parse import urlparse
 from nhentai import constant
 from nhentai.logger import logger
-from nhentai.utils import Singleton
+from nhentai.utils import Singleton, async_request
-import asyncio
-import httpx

 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


 class NHentaiImageNotExistException(Exception):
     pass
@@ -68,14 +67,14 @@ class Downloader(Singleton):
             logger.warning(f'Skipped download: {save_file_path} already exists')
             return 1, url

-        response = await self.async_request(url, self.timeout)  # TODO: Add proxy
+        response = await async_request('GET', url, timeout=self.timeout, proxies=proxy)

         if response.status_code != 200:
             path = urlparse(url).path
             for mirror in constant.IMAGE_URL_MIRRORS:
                 logger.info(f"Try mirror: {mirror}{path}")
                 mirror_url = f'{mirror}{path}'
-                response = await self.async_request(mirror_url, self.timeout)
+                response = await async_request('GET', mirror_url, timeout=self.timeout, proxies=proxy)
                 if response.status_code == 200:
                     break
@@ -128,12 +127,8 @@ class Downloader(Singleton):
                     f.write(chunk)

         return True

-    async def async_request(self, url, timeout):
-        async with httpx.AsyncClient() as client:
-            return await client.get(url, timeout=timeout)
-
     def start_download(self, queue, folder='') -> bool:
-        logger.warning("Proxy temporarily unavailable, it will be fixed later. ")
         if not isinstance(folder, (str,)):
             folder = str(folder)
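
Note: the `proxy` value these call sites now pass as `proxies=proxy` is expected to be a mapping in the same shape as `constant.CONFIG['proxy']`. The sketch below is not part of the commit (the proxy addresses are illustrative assumptions); it only demonstrates the shape implied by the empty-string check that the new `async_request` helper in utils (further down) performs.

    # Sketch only: proxy mapping shape implied by the new empty-string check.
    # The addresses are illustrative assumptions, not values from this commit.
    no_proxy = {'http': '', 'https': ''}
    via_local_proxy = {'http': 'http://127.0.0.1:7890', 'https': 'http://127.0.0.1:7890'}

    for candidate in (no_proxy, via_local_proxy):
        # Mirrors the check in async_request: all-empty strings mean "connect directly".
        direct = candidate.get('http') == '' and candidate.get('https') == ''
        print('direct connection' if direct else f"proxied via {candidate['https']}")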


@@ -6,6 +6,7 @@ import os
 import zipfile
 import shutil
+import httpx
 import requests
 import sqlite3
 import urllib.parse
@@ -32,8 +33,28 @@ def request(method, url, **kwargs):
     return getattr(session, method)(url, verify=False, **kwargs)


+async def async_request(method, url, proxies=None, **kwargs):
+    headers = {
+        'Referer': constant.LOGIN_URL,
+        'User-Agent': constant.CONFIG['useragent'],
+        'Cookie': constant.CONFIG['cookie'],
+    }
+
+    if proxies is None:
+        proxies = constant.CONFIG['proxy']
+
+    if proxies.get('http') == '' and proxies.get('https') == '':
+        proxies = None
+
+    async with httpx.AsyncClient(headers=headers, verify=False, proxies=proxies, **kwargs) as client:
+        response = await client.request(method, url, **kwargs)
+
+    return response
+
+
 def check_cookie():
     response = request('get', constant.BASE_URL)
     if response.status_code == 403 and 'Just a moment...' in response.text:
         logger.error('Blocked by Cloudflare captcha, please set your cookie and useragent')
         sys.exit(1)
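
A minimal usage sketch of the new shared helper, assuming an installed nhentai with its config loaded and an httpx version that still accepts the proxies= argument (as the helper itself does). The asyncio.run driver and the choice of constant.BASE_URL as the target are illustrative, not part of the commit. Note that extra keyword arguments are forwarded both to httpx.AsyncClient and to client.request, so only options valid in both places, such as timeout, should be passed through.

    import asyncio

    from nhentai import constant
    from nhentai.utils import async_request


    async def main():
        # async_request injects Referer, User-Agent and Cookie from constant.CONFIG,
        # falls back to constant.CONFIG['proxy'] when no proxies mapping is given,
        # and treats all-empty proxy strings as a direct connection.
        response = await async_request('GET', constant.BASE_URL, timeout=30)
        print(response.status_code)


    if __name__ == '__main__':
        asyncio.run(main())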