Mirror of https://github.com/soimort/you-get.git (synced 2025-02-02 16:24:00 +03:00)
Add metacafe support
This commit is contained in:
parent 25c769df2e
commit f0826dd1e9
README.md
@@ -43,7 +43,7 @@ Fork me on GitHub: <https://github.com/soimort/you-get>
 * DouyuTV (斗鱼) <http://www.douyutv.com>
 * eHow <http://www.ehow.com>
 * Facebook <http://facebook.com>
-* Fun.tv (风行, Funshion) <http://www.fun.tv/>
+* Fun.tv (风行, Funshion) <http://www.fun.tv>
 * Google Drive <http://docs.google.com>
 * ifeng (凤凰视频) <http://v.ifeng.com>
 * iQIYI (爱奇艺) <http://www.iqiyi.com>
@@ -54,15 +54,16 @@ Fork me on GitHub: <https://github.com/soimort/you-get>
 * Kuwo (酷我音乐) <http://www.kuwo.cn>
 * LeTV (乐视网) <http://www.letv.com>
 * Lizhi.fm (荔枝FM) <http://www.lizhi.fm>
+* Metacafe <http://www.metacafe.com>
 * MioMio <http://www.miomio.tv>
 * MTV 81 <http://www.mtv81.com>
 * NetEase (网易视频) <http://v.163.com>
 * NetEase Music (网易云音乐) <http://music.163.com>
 * PPTV <http://www.pptv.com>
-* QianMo (阡陌视频) <http://qianmo.com/>
+* QianMo (阡陌视频) <http://qianmo.com>
 * QQ (腾讯视频) <http://v.qq.com>
 * Sina (新浪视频) <http://video.sina.com.cn>
-* Weibo Miaopai (新浪微博秒拍视频) <http://video.weibo.com/>
+* Weibo Miaopai (新浪微博秒拍视频) <http://video.weibo.com>
 * Sohu (搜狐视频) <http://tv.sohu.com>
 * SongTaste <http://www.songtaste.com>
 * SoundCloud <http://soundcloud.com>
src/you_get/common.py
@@ -922,7 +922,7 @@ def script_main(script_name, download, download_playlist = None):
     sys.exit(1)

 def url_to_module(url):
-    from .extractors import netease, w56, acfun, baidu, baomihua, bilibili, blip, catfun, cntv, cbs, coursera, dailymotion, dongting, douban, douyutv, ehow, facebook, freesound, funshion, google, sina, ifeng, alive, instagram, iqiyi, joy, jpopsuki, khan, ku6, kugou, kuwo, letv, lizhi, magisto, miaopai, miomio, mixcloud, mtv81, nicovideo, pptv, qianmo, qq, sohu, songtaste, soundcloud, ted, theplatform, tudou, tucao, tumblr, twitter, vid48, videobam, vidto, vimeo, vine, vk, xiami, yinyuetai, youku, youtube, zhanqi
+    from .extractors import netease, w56, acfun, baidu, baomihua, bilibili, blip, catfun, cntv, cbs, coursera, dailymotion, dongting, douban, douyutv, ehow, facebook, freesound, funshion, google, sina, ifeng, alive, instagram, iqiyi, joy, jpopsuki, khan, ku6, kugou, kuwo, letv, lizhi, magisto, metacafe, miaopai, miomio, mixcloud, mtv81, nicovideo, pptv, qianmo, qq, sohu, songtaste, soundcloud, ted, theplatform, tudou, tucao, tumblr, twitter, vid48, videobam, vidto, vimeo, vine, vk, xiami, yinyuetai, youku, youtube, zhanqi

     video_host = r1(r'https?://([^/]+)/', url)
     video_url = r1(r'https?://[^/]+(.*)', url)
@@ -970,6 +970,7 @@ def url_to_module(url):
         'letv': letv,
         'lizhi':lizhi,
         'magisto': magisto,
+        'metacafe': metacafe,
         'miomio': miomio,
         'mixcloud': mixcloud,
         'mtv81': mtv81,
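Note (not part of the commit): the table above is what url_to_module consults after it has split the URL into video_host and video_url, so adding the 'metacafe' key is what routes Metacafe links to the new module. A minimal, illustrative sketch of that kind of keyword dispatch; the helper name pick_extractor and the key-derivation rule are assumptions for clarity, not you-get's actual implementation:

import re

def pick_extractor(url, table):
    # Reduce the host to its second-level-domain keyword, e.g.
    # 'www.metacafe.com' -> 'metacafe', 'v.qq.com' -> 'qq', then look it up.
    host = re.match(r'https?://([^/]+)', url).group(1)
    key = host.split('.')[-2] if '.' in host else host
    return table.get(key)

# With the new entry in place, a watch URL resolves to the metacafe module:
# pick_extractor('http://www.metacafe.com/watch/some-id', {'metacafe': metacafe, ...})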
src/you_get/extractors/__init__.py
@@ -28,6 +28,7 @@ from .kuwo import *
 from .letv import *
 from .lizhi import *
 from .magisto import *
+from .metacafe import *
 from .miaopai import *
 from .miomio import *
 from .mixcloud import *
src/you_get/extractors/metacafe.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+__all__ = ['metacafe_download']
+
+from ..common import *
+import urllib.error
+from urllib.parse import unquote
+
+def metacafe_download(url, output_dir = '.', merge = True, info_only = False):
+    if re.match(r'http://www.metacafe.com/watch/\w+', url):
+        html = get_content(url)
+        title = r1(r'<meta property="og:title" content="([^"]*)"', html)
+
+        for i in html.split('&'):  # won't bother to use re
+            if 'videoURL' in i:
+                url_raw = i[9:]
+
+        url = unquote(url_raw)
+
+        type, ext, size = url_info(url)
+        print_info(site_info, title, type, size)
+        if not info_only:
+            download_urls([url], title, ext, size, output_dir, merge=merge)
+
+site_info = "metacafe"
+download = metacafe_download
+download_playlist = playlist_not_supported('metacafe')
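Note (not part of the commit): the new module follows the usual you-get extractor shape (download, download_playlist, site_info), so once the imports above are in place it can be driven like any other extractor. A minimal usage sketch, assuming a you-get install that includes this commit; the watch URL is a made-up example:

from you_get.extractors import metacafe

# metacafe.download is metacafe_download: it fetches the watch page, reads
# the og:title meta tag, pulls the percent-encoded 'videoURL' value out of
# the page, unquotes it, and (unless info_only is set) downloads the file.
metacafe.download('http://www.metacafe.com/watch/12345/example/', info_only=True)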