page of search result

ricterz 2015-05-09 23:33:51 +08:00
parent 1f0d2ce724
commit d5f257c5e6
3 changed files with 9 additions and 9 deletions


@@ -15,10 +15,12 @@ def banner():
 def cmd_parser():
     parser = OptionParser()
-    parser.add_option('--search', type='string', dest='keyword', action='store', help='keyword searched')
     parser.add_option('--download', dest='is_download', action='store_true', help='download dojinshi or not')
     parser.add_option('--id', type='int', dest='id', action='store', help='dojinshi id of nhentai')
     parser.add_option('--ids', type='str', dest='ids', action='store', help='dojinshi id set, e.g. 1,2,3')
+    parser.add_option('--search', type='string', dest='keyword', action='store', help='keyword searched')
+    parser.add_option('--page', type='int', dest='page', action='store', default=1,
+                      help='page number of search result')
     parser.add_option('--path', type='string', dest='saved_path', action='store', default='',
                       help='path which save the dojinshi')
     parser.add_option('--threads', '-t', type='int', dest='threads', action='store', default=1,
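
For reference, a minimal standalone sketch of how the new --page option behaves under optparse; the simulated command-line values below are made up for illustration and are not part of this commit:

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('--search', type='string', dest='keyword', action='store', help='keyword searched')
    parser.add_option('--page', type='int', dest='page', action='store', default=1,
                      help='page number of search result')

    # simulated argv, e.g. --search "glasses" --page 2 (illustrative values)
    options, _ = parser.parse_args(['--search', 'glasses', '--page', '2'])
    print options.keyword, options.page   # glasses 2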


@@ -48,10 +48,10 @@ def dojinshi_parser(id):
     return dojinshi


-def search_parser(keyword):
+def search_parser(keyword, page):
     logger.debug('Searching dojinshis of keyword %s' % keyword)
     result = []
-    response = requests.get(SEARCH_URL, params={'q': keyword}).content
+    response = requests.get(SEARCH_URL, params={'q': keyword, 'page': page}).content
     html = BeautifulSoup(response)
     dojinshi_search_result = html.find_all('div', attrs={'class': 'preview-container'})
     for dojinshi in dojinshi_search_result:
@@ -62,12 +62,10 @@ def search_parser(keyword):
     return result


-def tag_parser(tag):
-    pass
-
-
 def print_dojinshi(dojinshi_list):
-    logger.log(15, 'Print Dojinshi list')
+    if not dojinshi_list:
+        return
+    logger.log(15, 'Print dojinshi list')
     print '-' * 60
     for dojinshi in dojinshi_list:
         print dojinshi['id'], '-', dojinshi['title']
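
For context, a rough sketch of the HTTP request search_parser now issues; the SEARCH_URL value and keyword below are assumptions for illustration, not taken from this diff:

    import requests

    SEARCH_URL = 'http://nhentai.net/search/'   # assumed endpoint; the real constant is defined elsewhere in the project
    keyword, page = 'glasses', 2                # illustrative values
    response = requests.get(SEARCH_URL, params={'q': keyword, 'page': page}).content
    # requests builds the query string, so the fetched URL is roughly .../search/?q=glasses&page=2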


@@ -19,7 +19,7 @@ def main():
     dojinshi_list = []
     if options.keyword:
-        dojinshis = search_parser(options.keyword)
+        dojinshis = search_parser(options.keyword, options.page)
         if options.is_download:
             dojinshi_ids = map(lambda d: d['id'], dojinshis)
         else: