Merge branch 'develop' of https://github.com/ljhcage/you-get into ljhcage-develop

Commit ce1f930c43 by Mort Yao, 2024-12-11 00:33:07 +01:00
17 changed files with 4 additions and 26 deletions

@@ -43,7 +43,7 @@ class AcFun(VideoExtractor):
currentVideoInfo = json_data.get('currentVideoInfo')
else:
- raise NotImplemented
+ raise NotImplementedError()
if 'ksPlayJson' in currentVideoInfo:
durationMillis = currentVideoInfo['durationMillis']
@@ -193,7 +193,7 @@ class AcFun(VideoExtractor):
m3u8_url = getM3u8UrlFromCurrentVideoInfo(currentVideoInfo)
else:
- raise NotImplemented
+ raise NotImplementedError()
assert title and m3u8_url
title = unescape_html(title)
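
Note: both AcFun hunks above replace "raise NotImplemented" with "raise NotImplementedError()". NotImplemented is the sentinel value that rich-comparison methods return; it is not an exception class, so raising it fails with an unrelated TypeError instead of signalling the unsupported case. A minimal standalone sketch of the difference (not code from the extractor):

    # Raising the NotImplemented sentinel never produces the intended error.
    try:
        raise NotImplemented
    except TypeError as exc:
        print(exc)  # "exceptions must derive from BaseException"

    # The correct way to flag an unsupported code path is the exception class.
    def pick_play_info(video_info):
        # Hypothetical helper; only the 'ksPlayJson' key name is taken from the diff.
        if 'ksPlayJson' in video_info:
            return video_info['ksPlayJson']
        raise NotImplementedError()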

@@ -185,7 +185,7 @@ def baidu_pan_download(url):
isprotected = False
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse(
html)
- if sign == None:
+ if sign is None:
if re.findall(r'\baccess-code\b', html):
isprotected = True
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk, fake_headers, psk = baidu_pan_protected_share(
@@ -193,7 +193,7 @@ def baidu_pan_download(url):
# raise NotImplementedError("Password required!")
if isprotected != True:
raise AssertionError("Share not found or canceled: %s" % url)
- if bdstoken == None:
+ if bdstoken is None:
bdstoken = ""
if isprotected != True:
sign, timestamp, bdstoken, appid, primary_id, fs_id, uk = baidu_pan_parse(
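
Note: the two baidu_pan_download hunks swap "== None" for "is None", the comparison PEP 8 recommends for None checks. Equality can be redefined by a class's __eq__ method, while "is" tests identity against the single None object and cannot be fooled. A minimal sketch of why that matters (assumed rationale, not repository code):

    class AlwaysEqual:
        # A pathological __eq__ makes equality against None meaningless.
        def __eq__(self, other):
            return True

    sign = AlwaysEqual()
    print(sign == None)  # True  -- __eq__ is consulted and misleads
    print(sign is None)  # False -- identity check is unambiguous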

@@ -3,7 +3,6 @@
__all__ = ['facebook_download']
from ..common import *
import json
def facebook_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
url = re.sub(r'//.*?facebook.com','//facebook.com',url)
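
Note: this hunk, and most of the remaining ones in the merge, appear to simply drop imports that the modules no longer reference (hence the 26 deletions against only 4 additions). The deletions are mechanical, but they can be double-checked by comparing the names a module imports with the names it actually uses; pyflakes, or flake8's F401 check, reports exactly this. A rough standard-library-only sketch of the same idea (illustrative, with known false positives for names used only in strings or __all__):

    import ast
    import sys

    def unused_imports(path):
        """Collect top-level imported names that never appear as a plain Name."""
        with open(path, encoding='utf-8') as f:
            tree = ast.parse(f.read(), filename=path)
        imported = {}   # bound name -> line number of the import
        used = set()    # every bare name referenced anywhere in the module
        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    imported[alias.asname or alias.name.split('.')[0]] = node.lineno
            elif isinstance(node, ast.ImportFrom):
                for alias in node.names:
                    if alias.name != '*':
                        imported[alias.asname or alias.name] = node.lineno
            elif isinstance(node, ast.Name):
                used.add(node.id)
        return sorted((line, name) for name, line in imported.items() if name not in used)

    if __name__ == '__main__':
        for line, name in unused_imports(sys.argv[1]):
            print('%s:%d: %s imported but unused' % (sys.argv[1], line, name))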

@@ -5,7 +5,6 @@ __all__ = ['fc2video_download']
from ..common import *
from hashlib import md5
from urllib.parse import urlparse
import re
#----------------------------------------------------------------------
def makeMimi(upid):

@@ -3,7 +3,6 @@
__all__ = ['giphy_download']
from ..common import *
import json
def giphy_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
html = get_html(url)

@@ -10,7 +10,6 @@ from uuid import uuid4
from random import random,randint
import json
from math import floor
from zlib import decompress
import hashlib
import time

@@ -1,12 +1,7 @@
#!/usr/bin/env python
import base64
import binascii
from ..common import *
import random
import string
import ctypes
from json import loads
from urllib import request

@@ -2,10 +2,8 @@
import urllib.request
import urllib.parse
import json
import re
from ..util import log
from ..common import get_content, download_urls, print_info, playlist_not_supported, url_size
__all__ = ['kuaishou_download_by_url']

@@ -6,7 +6,6 @@ from ..common import *
from json import loads
from base64 import b64decode
import re
import hashlib
def kugou_download(url, output_dir=".", merge=True, info_only=False, **kwargs):

@@ -3,7 +3,6 @@
__all__ = ['pixnet_download']
from ..common import *
import urllib.error
from time import time
from urllib.parse import quote
from json import loads

@@ -5,9 +5,6 @@ __all__ = ['sohu_download']
from ..common import *
import json
import time
from random import random
from urllib.parse import urlparse
'''
Changelog:

@@ -5,7 +5,6 @@ __all__ = ['sndcd_download']
from ..common import *
import re
import json
import urllib.error
def get_sndcd_apikey():

@@ -27,7 +27,6 @@ def suntv_download(url, output_dir = '.', merge = True, info_only = False, **kwa
html = html.decode('gbk')
title = match1(html, '<title>([^<]+)').strip() #get rid of \r\n s
type_ = ''
size = 0
type, ext, size = url_info(video_url)

@@ -3,7 +3,6 @@
__all__ = ['ucas_download', 'ucas_download_single', 'ucas_download_playlist']
from ..common import *
import urllib.error
import http.client
from time import time
from random import random

@@ -3,7 +3,6 @@
__all__ = ['veoh_download']
from ..common import *
import urllib.error
def veoh_download(url, output_dir = '.', merge = False, info_only = False, **kwargs):
'''Get item_id'''

@@ -4,7 +4,6 @@ __all__ = ['yizhibo_download']
from ..common import *
import json
import time
def yizhibo_download(url, output_dir = '.', merge = True, info_only = False, **kwargs):
video_id = url[url.rfind('/')+1:].split(".")[0]

@@ -5,7 +5,6 @@ from ..common import *
from ..extractor import VideoExtractor
import time
import traceback
import json
import urllib.request
import urllib.parse