2013-01-27 02:50:38 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
__all__ = ['facebook_download']
|
|
|
|
|
|
|
|
from ..common import *
|
2015-04-12 16:55:36 +03:00
|
|
|
import json
|
2013-01-27 02:50:38 +04:00
|
|
|
|
2015-09-26 08:45:39 +03:00
|
|
|
def facebook_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
    """Download a video from a Facebook page.

    Scrapes the page HTML for the rate-limit-free stream URLs
    (``sd_src_no_ratelimit`` / ``hd_src_no_ratelimit``), prefers HD when
    available, prints the stream info, and — unless ``info_only`` —
    downloads every part.

    Parameters follow the common extractor interface:
        url        -- Facebook video page URL
        output_dir -- directory to save the file(s) into
        merge      -- accepted for interface compatibility only; parts
                      are always downloaded with merge=False here
        info_only  -- if True, only print stream information
    """
    html = get_html(url)
    title = r1(r'<title id="pageTitle">(.+)</title>', html)

    def _stream_urls(quality):
        # De-duplicated stream URLs for one quality tag ('sd' or 'hd'),
        # with the JSON-escaped '\/' separators unescaped.
        pattern = quality + r'_src_no_ratelimit:"([^"]*)"'
        return list({
            unicodize(u.replace('\\/', '/'))
            for u in re.findall(pattern, html)
        })

    sd_urls = _stream_urls('sd')
    hd_urls = _stream_urls('hd')
    # Prefer HD streams when any were found.
    urls = hd_urls if hd_urls else sd_urls

    # Content type/extension come from the first part; the size reported
    # by url_info covers only that part, so the real total is summed
    # over all parts.  ('mime' avoids shadowing the builtin 'type'.)
    mime, ext, _ = url_info(urls[0], True)
    size = urls_size(urls)

    print_info(site_info, title, mime, size)
    if not info_only:
        # Facebook parts are independent clips; never merge them.
        download_urls(urls, title, ext, size, output_dir, merge=False)
|
2013-01-27 02:50:38 +04:00
|
|
|
|
|
|
|
# Human-readable site name shown by print_info().
site_info = "Facebook.com"

# Standard extractor-module interface expected by the dispatcher:
# a `download` entry point and a `download_playlist` handler
# (playlists are not supported for Facebook).
download = facebook_download
download_playlist = playlist_not_supported('facebook')
|