#!/usr/bin/env python
# -*- coding: utf-8 -*-

__all__ = ['xiami_download']

from ..common import *

from xml.dom.minidom import parseString
from urllib import parse

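# Extractor for Xiami.com. The helpers below handle a single song, a user
# collection ("showcollect") and a full album; xiami_download() at the bottom
# of the file picks the right one from the URL.
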
def location_dec(str):
    # The first character of the <location> value is the number of rows in a
    # simple transposition grid; the rest is the percent-encoded URL written
    # out row by row (the first len % rows rows are one character longer).
    head = int(str[0])
    str = str[1:]
    rows = head
    cols = int(len(str)/rows) + 1

    out = ""
    full_row = len(str) % head
    for c in range(cols):
        for r in range(rows):
            # The last column only exists in the longer rows.
            if c == (cols - 1) and r >= full_row:
                continue
            if r < full_row:
                char = str[r*cols+c]
            else:
                char = str[cols*full_row+(r-full_row)*(cols-1)+c]
            out += char
    # Reading the grid column by column restores the original order; '^' is
    # Xiami's stand-in for '0'.
    return parse.unquote(out).replace("^", "0")

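# A small illustration of the decoding above, with a made-up (non-Xiami) input:
# "3adgbehcf" has head 3, so "adgbehcf" splits into the rows "adg", "beh" and
# "cf" (only the first 8 % 3 = 2 rows get a third cell); reading down the
# columns gives "abcdefgh", which would then be percent-unquoted.
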
def xiami_download_lyric(lrc_url, file_name, output_dir):
    # Fetch the LRC lyrics and save them next to the audio track.
    lrc = get_html(lrc_url, faker = True)
    if len(lrc) > 0:
        with open(output_dir + "/" + file_name.replace('/', '-') + '.lrc', 'w', encoding='utf-8') as x:
            x.write(lrc)

def xiami_download_pic(pic_url, file_name, output_dir):
    # Strip the '_1' thumbnail suffix to get the full-size cover image.
    pic_url = pic_url.replace('_1', '')
    pos = pic_url.rfind('.')
    ext = pic_url[pos:]
    pic = get_response(pic_url, faker = True).data
    if len(pic) > 0:
        with open(output_dir + "/" + file_name.replace('/', '-') + ext, 'wb') as x:
            x.write(pic)

def xiami_download_song(sid, output_dir = '.', merge = True, info_only = False):
    xml = get_html('http://www.xiami.com/song/playlist/id/%s/object_name/default/object_id/0' % sid, faker = True)
    doc = parseString(xml)
    # A single-song playlist contains exactly one <track> element.
    i = doc.getElementsByTagName("track")[0]
    artist = i.getElementsByTagName("artist")[0].firstChild.nodeValue
    album_name = i.getElementsByTagName("album_name")[0].firstChild.nodeValue
    song_title = i.getElementsByTagName("title")[0].firstChild.nodeValue
    url = location_dec(i.getElementsByTagName("location")[0].firstChild.nodeValue)
    lrc_url = i.getElementsByTagName("lyric")[0].firstChild.nodeValue
    type, ext, size = url_info(url, faker = True)
    if not ext:
        ext = 'mp3'

    print_info(site_info, song_title, type, size)
    if not info_only:
        file_name = "%s - %s - %s" % (song_title, album_name, artist)
        download_urls([url], file_name, ext, size, output_dir, merge = merge, faker = True)
        xiami_download_lyric(lrc_url, file_name, output_dir)

def xiami_download_showcollect(cid, output_dir = '.', merge = True, info_only = False):
    html = get_html('http://www.xiami.com/song/showcollect/id/' + cid, faker = True)
    collect_name = r1(r'<title>(.*)</title>', html)

    xml = get_html('http://www.xiami.com/song/playlist/id/%s/type/3' % cid, faker = True)
    doc = parseString(xml)
    # Put all tracks of the collection into their own "[collection name]" folder.
    output_dir = output_dir + "/" + "[" + collect_name + "]"
    tracks = doc.getElementsByTagName("track")
    track_nr = 1
    for i in tracks:
        artist = i.getElementsByTagName("artist")[0].firstChild.nodeValue
        album_name = i.getElementsByTagName("album_name")[0].firstChild.nodeValue
        song_title = i.getElementsByTagName("title")[0].firstChild.nodeValue
        url = location_dec(i.getElementsByTagName("location")[0].firstChild.nodeValue)
        lrc_url = i.getElementsByTagName("lyric")[0].firstChild.nodeValue
        type, ext, size = url_info(url, faker = True)
        if not ext:
            ext = 'mp3'

        print_info(site_info, song_title, type, size)
        if not info_only:
            file_name = "%02d.%s - %s - %s" % (track_nr, song_title, artist, album_name)
            download_urls([url], file_name, ext, size, output_dir, merge = merge, faker = True)
            xiami_download_lyric(lrc_url, file_name, output_dir)

        track_nr += 1

def xiami_download_album(aid, output_dir = '.', merge = True, info_only = False):
    xml = get_html('http://www.xiami.com/song/playlist/id/%s/type/1' % aid, faker = True)
    album_name = r1(r'<album_name><!\[CDATA\[(.*)\]\]>', xml)
    artist = r1(r'<artist><!\[CDATA\[(.*)\]\]>', xml)
    doc = parseString(xml)
    output_dir = output_dir + "/%s - %s" % (artist, album_name)
    tracks = doc.getElementsByTagName("track")
    track_nr = 1
    pic_exist = False
    for i in tracks:
        song_title = i.getElementsByTagName("title")[0].firstChild.nodeValue
        url = location_dec(i.getElementsByTagName("location")[0].firstChild.nodeValue)
        lrc_url = i.getElementsByTagName("lyric")[0].firstChild.nodeValue
        if not pic_exist:
            pic_url = i.getElementsByTagName("pic")[0].firstChild.nodeValue
        type, ext, size = url_info(url, faker = True)
        if not ext:
            ext = 'mp3'

        print_info(site_info, song_title, type, size)
        if not info_only:
            file_name = "%02d.%s" % (track_nr, song_title)
            download_urls([url], file_name, ext, size, output_dir, merge = merge, faker = True)
            xiami_download_lyric(lrc_url, file_name, output_dir)
            # Save the album cover only once, alongside the first track.
            if not pic_exist:
                xiami_download_pic(pic_url, 'cover', output_dir)
                pic_exist = True

        track_nr += 1

def xiami_download(url, output_dir = '.', stream_type = None, merge = True, info_only = False):
    # Dispatch on the URL shape: album, collection ("showcollect") or single song.
    if re.match(r'http://www.xiami.com/album/\d+', url):
        id = r1(r'http://www.xiami.com/album/(\d+)', url)
        xiami_download_album(id, output_dir, merge, info_only)

    if re.match(r'http://www.xiami.com/song/showcollect/id/\d+', url):
        id = r1(r'http://www.xiami.com/song/showcollect/id/(\d+)', url)
        xiami_download_showcollect(id, output_dir, merge, info_only)

    if re.match(r'http://www.xiami.com/song/\d+', url):
        id = r1(r'http://www.xiami.com/song/(\d+)', url)
        xiami_download_song(id, output_dir, merge, info_only)

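# Minimal usage sketch (hypothetical song id, assuming this module is loaded as
# part of the you-get package so that get_html, url_info, download_urls and the
# other common helpers are available):
#
#   xiami_download('http://www.xiami.com/song/123456', output_dir = '.',
#                  info_only = True)
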
site_info = "Xiami.com"
download = xiami_download
download_playlist = playlist_not_supported("xiami")