feat: add arguments to specify the playlist first, last, page-size options
commit e37836a40b (parent 5c2bb3fa34)
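In short: script_main in common.py gains a 'Playlist optional options' argument group with --first, --last and --page-size, the parsed argparse namespace is forwarded to extractors via extra['args'], and the lrts.me extractor uses those values to limit which pages of a book's track list it fetches.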
src/you_get/common.py:
@@ -1557,6 +1557,21 @@ def script_main(download, download_playlist, **kwargs):
         '-l', '--playlist', action='store_true',
         help='Prefer to download a playlist'
     )
+
+    playlist_grp = parser.add_argument_group('Playlist optional options')
+    playlist_grp.add_argument(
+        '-first', '--first', metavar='FIRST',
+        help='the first number'
+    )
+    playlist_grp.add_argument(
+        '-last', '--last', metavar='LAST',
+        help='the last number'
+    )
+    playlist_grp.add_argument(
+        '-size', '--page-size', metavar='PAGE_SIZE',
+        help='the page size number'
+    )
     download_grp.add_argument(
         '-a', '--auto-rename', action='store_true', default=False,
         help='Auto rename same name different files'
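For reference, a minimal standalone sketch (not you-get's actual parser setup) of how the three new flags behave; since no type= is given, argparse stores the values as strings, which is why the lrts extractor converts them with int() later:

import argparse

# Minimal sketch, assuming a bare parser rather than you-get's real one; it only
# reproduces the three flags added by this commit.
parser = argparse.ArgumentParser()
playlist_grp = parser.add_argument_group('Playlist optional options')
playlist_grp.add_argument('-first', '--first', metavar='FIRST', help='the first number')
playlist_grp.add_argument('-last', '--last', metavar='LAST', help='the last number')
playlist_grp.add_argument('-size', '--page-size', metavar='PAGE_SIZE', help='the page size number')

args = parser.parse_args(['--first', '0', '--last', '5', '--page-size', '20'])
print(args.first, args.last, args.page_size)  # '0' '5' '20' -- strings, not ints

On the real CLI these are meant to be combined with -l/--playlist, e.g. a hypothetical invocation like you-get -l --first 0 --last 5 --page-size 20 <book URL>.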
@@ -1674,7 +1689,7 @@ def script_main(download, download_playlist, **kwargs):
     socket.setdefaulttimeout(args.timeout)

     try:
-        extra = {}
+        extra = {'args': args}
         if extractor_proxy:
             extra['extractor_proxy'] = extractor_proxy
         if stream_id:
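The extra dict is what script_main forwards to the per-site downloader, so putting the argparse namespace into extra['args'] makes every CLI option visible to extractors through **kwargs. A rough sketch of the receiving side, mirroring what lrts_download now does (the extractor name is made up):

# Sketch only: shows how an extractor can read the namespace that script_main now
# forwards as extra = {'args': args}; my_extractor is a hypothetical name.
def my_extractor(url, output_dir='.', merge=True, info_only=False, **kwargs):
    args = kwargs.get('args')   # argparse.Namespace when invoked via the CLI
    if not args: args = {}      # callers that never pass it still work
    # Namespace supports the 'in' operator, so this also guards against call
    # sites that predate the new flags.
    first = int(args.first) if 'first' in args and args.first is not None else 0
    return first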
src/you_get/extractors/lrts.py:
@@ -4,15 +4,18 @@ __all__ = ['lrts_download']

 import logging
 from ..common import *
+from ..util import log, term

 def lrts_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
     html = get_html(url)
+    args = kwargs.get('args')
+    if not args: args = {}
     matched = re.search(r"/book/(\d+)", url)
     if not matched:
         raise AssertionError("not found book number: %s" % url)
     book_no = matched.group(1)
     book_title = book_no
-    matched = re.search(r"<title>(.*)-(.*)</title>", html)
+    matched = re.search(r"<title>([^-]*)[-](.*)[,](.*)</title>", html)
     if matched:
         book_title = matched.group(1)

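The <title> pattern was also tightened: the old regex split on the last '-' in the title (greedy .*), while the new one captures everything before the first '-' and additionally expects a ',' later on. A quick illustration with an invented title string (real lrts.me titles may differ):

import re

html = "<title>SomeBook-Part-2,SomeSite</title>"  # invented sample

old = re.search(r"<title>(.*)-(.*)</title>", html)
new = re.search(r"<title>([^-]*)[-](.*)[,](.*)</title>", html)

print(old.group(1))  # 'SomeBook-Part'  -- greedy .* splits on the last '-'
print(new.group(1))  # 'SomeBook'       -- [^-]* stops at the first '-'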
@@ -20,15 +23,25 @@ def lrts_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
     if not matched:
         raise AssertionError("not found total count in html")
     total_count = int(matched.group(1))
-    logging.debug('total: %s' % total_count)
+    log.i('%s total: %s' % (book_title, total_count))
+    first_page = 0
+    if ('first' in args and args.first != None):
+        first_page = int(args.first)
+
     page_size = 10
-    logging.debug('total page count: %s' % ((total_count // page_size) + 1))
+    if ('page_size' in args and args.page_size != None):
+        page_size = int(args.page_size)
+    last_page = (total_count // page_size) + 1
+    if ('last' in args and args.last != None):
+        last_page = int(args.last)
+
+    log.i('page size is %s, page from %s to %s' % (page_size, first_page, last_page))
     headers = {
         'Referer': url
     }
     items = []
     if (total_count > page_size):
-        for page in range((total_count // page_size) + 1):
+        for page in range(first_page, last_page):
             page_url = 'http://www.lrts.me/ajax/book/%s/%s/%s' % (book_no, page, page_size)
             response_content = json.loads(post_content(page_url, headers))
             if response_content['status'] != 'success':
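A worked example of the paging window with assumed numbers, mirroring the arithmetic above. Note that range(first_page, last_page) excludes last_page, so --last acts as an exclusive upper bound, and the default of (total_count // page_size) + 1 keeps the final partial page in range:

# Assumed values for illustration; '12345' is a placeholder book number.
total_count = 95
page_size = 10                              # default when --page-size is absent
first_page = 0                              # default when --first is absent
last_page = (total_count // page_size) + 1  # 9 + 1 = 10

for page in range(first_page, last_page):   # pages 0..9, i.e. all 95 items
    page_url = 'http://www.lrts.me/ajax/book/%s/%s/%s' % ('12345', page, page_size)
    # each request returns one page of at most page_size items

# With --first 3 --last 5 the loop would only fetch pages 3 and 4.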
@@ -48,13 +61,14 @@ def lrts_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
     for item in items:
         i_url = 'http://www.lrts.me/ajax/path/4/%s/%s' % (item['fatherResId'], item['resId'])
         response_content = json.loads(post_content(i_url, headers))
-        # logging.debug(response_content)
+        logging.debug(response_content)
         if response_content['status'] == 'success' and response_content['data']:
             item['ok'] = True
             item['url'] = response_content['data']
+            logging.debug('ok')

     items = list(filter(lambda i: 'ok' in i and i['ok'], items))
-    print('Downloading %s: %s count ...' % (book_title, len(items)))
+    log.i('Downloading %s: %s count ...' % (book_title, len(items)))

     for item in items:
         title = item['resName']
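The remaining changes are mostly cosmetic: progress messages move from print/logging.debug to log.i from ..util so they appear in you-get's normal terminal output, and a debug line is emitted once an item's media URL has been resolved. The filter that keeps only resolved items could equally be written as a comprehension; not part of the commit, just an equivalent form with invented sample data:

items = [
    {'resName': 'ch1', 'ok': True, 'url': 'http://example.com/1.mp3'},  # resolved
    {'resName': 'ch2'},                                # never resolved, dropped
]
# Same result as list(filter(lambda i: 'ok' in i and i['ok'], items)):
items = [item for item in items if item.get('ok')]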