#!/usr/bin/env python
|
|
|
|
|
|
|
|
__all__ = ['dailymotion_download']
|
|
|
|
|
|
|
|
from ..common import *
|
|
|
|
|
def dailymotion_download(url, output_dir = '.', merge = True, info_only = False, **kwargs):
    """Download a Dailymotion video given its page URL.

    Scrapes the video page HTML for the embedded quality/stream JSON,
    picks the highest available quality, prints the stream info, and
    (unless info_only is set) downloads the stream.

    Args:
        url: Dailymotion video page URL.
        output_dir: directory the downloaded file is written to.
        merge: passed through to download_urls (merge multi-part streams).
        info_only: when True, only print stream info; do not download.
        **kwargs: ignored; accepted for interface uniformity with the
            other site extractors.

    Raises:
        ValueError: if no playable stream URL can be extracted from the page.
    """
    html = get_content(url)

    # The page embeds a JSON object mapping quality labels to stream lists,
    # e.g. ... "qualities":{"720":[{"url": ...}], ...},"...
    info = json.loads(match1(html, r'qualities":({.+?}),"'))
    title = match1(html, r'"video_title"\s*:\s*"(.+?)",')

    # Prefer the highest explicit quality; 'auto' is the last resort.
    # BUG FIX: previously real_url could be left unbound (NameError) when no
    # quality matched, and an empty stream list raised an uncaught IndexError.
    real_url = None
    for quality in ['720', '480', '380', '240', 'auto']:
        try:
            candidate = info[quality][0]["url"]
        except (KeyError, IndexError):
            continue
        if candidate:
            real_url = candidate
            break
    if not real_url:
        raise ValueError('Cannot find a stream URL for %s' % url)

    # `mime` instead of `type` to avoid shadowing the builtin.
    mime, ext, size = url_info(real_url)

    print_info(site_info, title, mime, size)
    if not info_only:
        download_urls([real_url], title, ext, size, output_dir, merge=merge)
|
|
|
|
|
|
|
|
# Site label shown by print_info for streams from this extractor.
site_info = "Dailymotion.com"

# Standard extractor entry points expected by the dispatcher in common:
# single-video download is supported, playlist download is not.
download = dailymotion_download
download_playlist = playlist_not_supported('dailymotion')
|