2012-09-25 02:27:28 +04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
__all__ = ['dailymotion_download']
|
|
|
|
|
|
|
|
from ..common import *
|
|
|
|
|
|
|
|
def dailymotion_download(url, output_dir = '.', merge = True, info_only = False):
    """Download a Dailymotion video given its page URL.

    Parameters (common extractor interface):
        url        -- Dailymotion video page URL.
        output_dir -- directory the file is saved into (default: cwd).
        merge      -- forwarded to download_urls (merge multi-part output).
        info_only  -- if True, only print stream info; skip the download.

    Raises:
        Exception -- if no downloadable stream URL can be extracted.
    """
    html = get_content(url)

    # The page embeds a JSON object mapping quality labels ('720', '480', ...)
    # to lists of stream descriptors; pull it out with a narrow regex.
    info = json.loads(match1(html, r'qualities":({.+?}),"'))
    title = match1(html, r'"title"\s*:\s*"(.+?)",')

    # Prefer the highest quality available, falling back to 'auto'.
    # NOTE: the original indexed info[quality] directly, which raised
    # KeyError for videos lacking a tier and left real_url unbound when
    # no tier produced a URL; guard both cases explicitly.
    real_url = None
    for quality in ['720', '480', '380', '240', 'auto']:
        streams = info.get(quality)
        if streams:
            real_url = streams[0].get("url")
            if real_url:
                break
    if not real_url:
        raise Exception('Cannot find a downloadable URL for %s' % url)

    # 'mime' instead of 'type' to avoid shadowing the builtin.
    mime, ext, size = url_info(real_url)

    print_info(site_info, title, mime, size)
    if not info_only:
        download_urls([real_url], title, ext, size, output_dir, merge = merge)
|
|
|
|
|
|
|
|
# Human-readable site name passed to print_info by the extractor above.
site_info = "Dailymotion.com"

# Standard extractor entry points expected by the surrounding project
# (presumably discovered by name — confirm against the dispatcher).
download = dailymotion_download
download_playlist = playlist_not_supported('dailymotion')