Merge branch 'develop' of https://github.com/ljhcage/you-get into ljhcage-develop
Some checks failed
develop / build (3.10) (push) Has been cancelled
develop / build (3.11) (push) Has been cancelled
develop / build (3.12) (push) Has been cancelled
develop / build (3.13) (push) Has been cancelled
develop / build (3.7) (push) Has been cancelled
develop / build (3.8) (push) Has been cancelled
develop / build (3.9) (push) Has been cancelled
develop / build (pypy-3.10) (push) Has been cancelled
develop / build (pypy-3.8) (push) Has been cancelled
develop / build (pypy-3.9) (push) Has been cancelled

This commit is contained in:
Mort Yao 2024-12-11 00:33:07 +01:00
commit ce1f930c43
No known key found for this signature in database
GPG Key ID: 07DA00CB78203251
17 changed files with 4 additions and 26 deletions

View File

@@ -43,7 +43,7 @@ class AcFun(VideoExtractor):
currentVideoInfo = json_data.get('currentVideoInfo') currentVideoInfo = json_data.get('currentVideoInfo')
else: else:
raise NotImplemented raise NotImplementedError()
if 'ksPlayJson' in currentVideoInfo: if 'ksPlayJson' in currentVideoInfo:
durationMillis = currentVideoInfo['durationMillis'] durationMillis = currentVideoInfo['durationMillis']
@@ -193,7 +193,7 @@ class AcFun(VideoExtractor):
m3u8_url = getM3u8UrlFromCurrentVideoInfo(currentVideoInfo) m3u8_url = getM3u8UrlFromCurrentVideoInfo(currentVideoInfo)
else: else:
raise NotImplemented raise NotImplementedError()
assert title and m3u8_url assert title and m3u8_url
title = unescape_html(title) title = unescape_html(title)

View File

@@ -185,7 +185,7 @@ def baidu_pan_download(url):
isprotected = False isprotected = False
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse( sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse(
html) html)
if sign == None: if sign is None:
if re.findall(r'\baccess-code\b', html): if re.findall(r'\baccess-code\b', html):
isprotected = True isprotected = True
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk, fake_headers, psk = baidu_pan_protected_share( sign, timestamp, bdstoken, appid, primary_id, fs_id, uk, fake_headers, psk = baidu_pan_protected_share(
@@ -193,7 +193,7 @@ def baidu_pan_download(url):
# raise NotImplementedError("Password required!") # raise NotImplementedError("Password required!")
if isprotected != True: if isprotected != True:
raise AssertionError("Share not found or canceled: %s" % url) raise AssertionError("Share not found or canceled: %s" % url)
if bdstoken == None: if bdstoken is None:
bdstoken = "" bdstoken = ""
if isprotected != True: if isprotected != True:
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse( sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse(

View File

@@ -3,7 +3,6 @@
__all__ = ['facebook_download'] __all__ = ['facebook_download']
from ..common import * from ..common import *
import json
def facebook_download(url, output_dir='.', merge=True, info_only=False, **kwargs): def facebook_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
url = re.sub(r'//.*?facebook.com','//facebook.com',url) url = re.sub(r'//.*?facebook.com','//facebook.com',url)

View File

@@ -5,7 +5,6 @@ __all__ = ['fc2video_download']
from ..common import * from ..common import *
from hashlib import md5 from hashlib import md5
from urllib.parse import urlparse from urllib.parse import urlparse
import re
#---------------------------------------------------------------------- #----------------------------------------------------------------------
def makeMimi(upid): def makeMimi(upid):

View File

@@ -3,7 +3,6 @@
__all__ = ['giphy_download'] __all__ = ['giphy_download']
from ..common import * from ..common import *
import json
def giphy_download(url, output_dir='.', merge=True, info_only=False, **kwargs): def giphy_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
html = get_html(url) html = get_html(url)

View File

@@ -10,7 +10,6 @@ from uuid import uuid4
from random import random,randint from random import random,randint
import json import json
from math import floor from math import floor
from zlib import decompress
import hashlib import hashlib
import time import time

View File

@@ -1,12 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
import base64 import base64
import binascii
from ..common import * from ..common import *
import random
import string
import ctypes
from json import loads from json import loads
from urllib import request from urllib import request

View File

@@ -2,10 +2,8 @@
import urllib.request import urllib.request
import urllib.parse import urllib.parse
import json
import re import re
from ..util import log
from ..common import get_content, download_urls, print_info, playlist_not_supported, url_size from ..common import get_content, download_urls, print_info, playlist_not_supported, url_size
__all__ = ['kuaishou_download_by_url'] __all__ = ['kuaishou_download_by_url']

View File

@@ -6,7 +6,6 @@ from ..common import *
from json import loads from json import loads
from base64 import b64decode from base64 import b64decode
import re import re
import hashlib
def kugou_download(url, output_dir=".", merge=True, info_only=False, **kwargs): def kugou_download(url, output_dir=".", merge=True, info_only=False, **kwargs):

View File

@@ -3,7 +3,6 @@
__all__ = ['pixnet_download'] __all__ = ['pixnet_download']
from ..common import * from ..common import *
import urllib.error
from time import time from time import time
from urllib.parse import quote from urllib.parse import quote
from json import loads from json import loads

View File

@@ -5,9 +5,6 @@ __all__ = ['sohu_download']
from ..common import * from ..common import *
import json import json
import time
from random import random
from urllib.parse import urlparse
''' '''
Changelog: Changelog:

View File

@@ -5,7 +5,6 @@ __all__ = ['sndcd_download']
from ..common import * from ..common import *
import re import re
import json import json
import urllib.error
def get_sndcd_apikey(): def get_sndcd_apikey():

View File

@@ -27,7 +27,6 @@ def suntv_download(url, output_dir = '.', merge = True, info_only = False, **kwa
html = html.decode('gbk') html = html.decode('gbk')
title = match1(html, '<title>([^<]+)').strip() #get rid of \r\n s title = match1(html, '<title>([^<]+)').strip() #get rid of \r\n s
type_ = ''
size = 0 size = 0
type, ext, size = url_info(video_url) type, ext, size = url_info(video_url)

View File

@@ -3,7 +3,6 @@
__all__ = ['ucas_download', 'ucas_download_single', 'ucas_download_playlist'] __all__ = ['ucas_download', 'ucas_download_single', 'ucas_download_playlist']
from ..common import * from ..common import *
import urllib.error
import http.client import http.client
from time import time from time import time
from random import random from random import random

View File

@@ -3,7 +3,6 @@
__all__ = ['veoh_download'] __all__ = ['veoh_download']
from ..common import * from ..common import *
import urllib.error
def veoh_download(url, output_dir = '.', merge = False, info_only = False, **kwargs): def veoh_download(url, output_dir = '.', merge = False, info_only = False, **kwargs):
'''Get item_id''' '''Get item_id'''

View File

@@ -4,7 +4,6 @@ __all__ = ['yizhibo_download']
from ..common import * from ..common import *
import json import json
import time
def yizhibo_download(url, output_dir = '.', merge = True, info_only = False, **kwargs): def yizhibo_download(url, output_dir = '.', merge = True, info_only = False, **kwargs):
video_id = url[url.rfind('/')+1:].split(".")[0] video_id = url[url.rfind('/')+1:].split(".")[0]

View File

@@ -5,7 +5,6 @@ from ..common import *
from ..extractor import VideoExtractor from ..extractor import VideoExtractor
import time import time
import traceback
import json import json
import urllib.request import urllib.request
import urllib.parse import urllib.parse