[facebook] use hd_src whenever possible and remove duplicates

Mort Yao 2016-07-01 16:42:04 +02:00
parent 7516028dd8
commit e84810c4d2
GPG Key ID: 07DA00CB78203251


@@ -9,17 +9,22 @@ def facebook_download(url, output_dir='.', merge=True, info_only=False, **kwargs
     html = get_html(url)
 
     title = r1(r'<title id="pageTitle">(.+)</title>', html)
-    sd_urls = [
+    sd_urls = list(set([
         unicodize(str.replace(i, '\\/', '/'))
         for i in re.findall(r'"sd_src_no_ratelimit":"([^"]*)"', html)
-    ]
+    ]))
+    hd_urls = list(set([
+        unicodize(str.replace(i, '\\/', '/'))
+        for i in re.findall(r'"hd_src_no_ratelimit":"([^"]*)"', html)
+    ]))
+    urls = hd_urls if hd_urls else sd_urls
 
-    type, ext, size = url_info(sd_urls[0], True)
-    size = urls_size(sd_urls)
+    type, ext, size = url_info(urls[0], True)
+    size = urls_size(urls)
 
     print_info(site_info, title, type, size)
     if not info_only:
-        download_urls(sd_urls, title, ext, size, output_dir, merge=False)
+        download_urls(urls, title, ext, size, output_dir, merge=False)
 
 site_info = "Facebook.com"
 download = facebook_download
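
For reference, a minimal standalone sketch of the selection logic this diff introduces: deduplicate each quality tier with set() and prefer the HD list whenever it is non-empty, falling back to SD otherwise. The html snippet and the video.example URLs below are made up, and the extractor's unicodize() helper is simplified to a plain str.replace here.

import re

# Hypothetical page fragment: one SD source and the same HD source listed twice.
html = (
    '"sd_src_no_ratelimit":"https:\\/\\/video.example\\/clip_sd.mp4",'
    '"hd_src_no_ratelimit":"https:\\/\\/video.example\\/clip_hd.mp4",'
    '"hd_src_no_ratelimit":"https:\\/\\/video.example\\/clip_hd.mp4"'
)

def extract(pattern, text):
    # Unescape "\/" and drop duplicates, mirroring list(set([...])) in the diff.
    return list(set(s.replace('\\/', '/') for s in re.findall(pattern, text)))

sd_urls = extract(r'"sd_src_no_ratelimit":"([^"]*)"', html)
hd_urls = extract(r'"hd_src_no_ratelimit":"([^"]*)"', html)
urls = hd_urls if hd_urls else sd_urls   # prefer HD when any HD source exists
print(urls)  # ['https://video.example/clip_hd.mp4'] -- HD chosen, duplicate removed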