fix sending all cookies without checking

root 2022-11-24 03:49:48 +08:00
parent e674bfbc2b
commit 6e866121a1


@@ -22,6 +22,7 @@ from .util.strings import get_filename, unescape_html
from . import json_output as json_output_
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')
# fmt: off
SITES = {
'163' : 'netease',
'56' : 'w56',
@@ -125,6 +126,7 @@ SITES = {
'zhibo' : 'zhibo',
'zhihu' : 'zhihu',
}
# fmt: on
dry_run = False
json_output = False
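
The "# fmt: off" / "# fmt: on" pair added in these two hunks fences the hand-aligned SITES table off from automatic formatters: tools such as Black honor these markers and leave everything between them untouched. A small illustration (the table below is made up):

# fmt: off
TABLE = {
    'aa'   : 'alpha',   # hand-aligned columns survive a formatter run
    'bbbb' : 'beta',
}
# fmt: on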
@@ -459,17 +461,7 @@ def get_content(url, headers={}, decoded=True):
req = request.Request(url, headers=headers)
if cookies:
# NOTE: Do not use cookies.add_cookie_header(req)
# #HttpOnly_ cookies were not supported by CookieJar and MozillaCookieJar properly until python 3.10
# See also:
# - https://github.com/python/cpython/pull/17471
# - https://bugs.python.org/issue2190
# Here we add cookies to the request headers manually
cookie_strings = []
for cookie in list(cookies):
cookie_strings.append(cookie.name + '=' + cookie.value)
cookie_headers = {'Cookie': '; '.join(cookie_strings)}
req.headers.update(cookie_headers)
cookies.add_cookie_header(req)
response = urlopen_with_retry(req)
data = response.read()
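
This hunk (and its twin in post_content just below) is the core of the fix: the removed block joined every cookie in the jar into one Cookie header, so every request carried all loaded cookies regardless of the target host. CookieJar.add_cookie_header(req) instead runs each cookie through the jar's policy and attaches only those whose domain, path, secure flag, and expiry match the request. A minimal sketch of the difference, assuming a jar already populated (the file name and URL are hypothetical; load() is used here only for brevity, while load_cookies below parses cookies.txt by hand because load() drops '#HttpOnly_' lines before Python 3.10):

from http import cookiejar
from urllib import request

jar = cookiejar.MozillaCookieJar('cookies.txt')  # hypothetical file
jar.load(ignore_discard=True, ignore_expires=True)

# Old behaviour: every cookie in the jar is sent, whatever the host.
req = request.Request('https://example.com/video')
req.add_header('Cookie', '; '.join('%s=%s' % (c.name, c.value) for c in jar))

# New behaviour: only cookies that pass the domain/path/secure/expiry
# checks for this particular request are attached.
req = request.Request('https://example.com/video')
jar.add_cookie_header(req)
print(req.get_header('Cookie'))  # None if nothing matched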
@@ -512,17 +504,7 @@ def post_content(url, headers={}, post_data={}, decoded=True, **kwargs):
req = request.Request(url, headers=headers)
if cookies:
# NOTE: Do not use cookies.add_cookie_header(req)
# #HttpOnly_ cookies were not supported by CookieJar and MozillaCookieJar properly until python 3.10
# See also:
# - https://github.com/python/cpython/pull/17471
# - https://bugs.python.org/issue2190
# Here we add cookies to the request headers manually
cookie_strings = []
for cookie in list(cookies):
cookie_strings.append(cookie.name + '=' + cookie.value)
cookie_headers = {'Cookie': '; '.join(cookie_strings)}
req.headers.update(cookie_headers)
cookies.add_cookie_header(req)
if kwargs.get('post_data_raw'):
post_data_enc = bytes(kwargs['post_data_raw'], 'utf-8')
else:
@@ -972,11 +954,13 @@ def get_output_filename(urls, title, ext, output_dir, merge, **kwargs):
result = '%s.%s' % (result, merged_ext)
return result.replace("'", "_")
def print_user_agent(faker=False):
urllib_default_user_agent = 'Python-urllib/%d.%d' % sys.version_info[:2]
user_agent = fake_headers['User-Agent'] if faker else urllib_default_user_agent
print('User Agent: %s' % user_agent)
def download_urls(
urls, title, ext, total_size, output_dir='.', refer=None, merge=True,
faker=False, headers={}, **kwargs
@@ -1016,7 +1000,7 @@ def download_urls(
if total_size:
if not force and os.path.exists(output_filepath) and not auto_rename \
and (os.path.getsize(output_filepath) >= total_size * 0.9\
and (os.path.getsize(output_filepath) >= total_size * 0.9
or skip_existing_file_size_check):
if skip_existing_file_size_check:
log.w('Skipping %s without checking size: file already exists' % output_filepath)
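
The one-character change above drops a stray line-continuation backslash: the condition already sits inside parentheses, where Python continues lines implicitly, so the backslash was redundant (and brittle, since any trailing whitespace after it is a syntax error). For instance:

total_size, existing_size, skip_check = 100, 95, False
ok = (existing_size >= total_size * 0.9   # no backslash needed inside parens
      or skip_check)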
@@ -1381,6 +1365,7 @@ def download_main(download, download_playlist, urls, playlist, **kwargs):
def load_cookies(cookiefile):
global cookies
HTTPONLY_PREFIX, HTTPONLY_ATTR = "#HttpOnly_", "HTTPOnly"
if cookiefile.endswith('.txt'):
# MozillaCookieJar treats prefix '#HttpOnly_' as comments incorrectly!
# do not use its load()
@@ -1396,13 +1381,19 @@ def load_cookies(cookiefile):
ignore_discard, ignore_expires = False, False
with open(cookiefile, 'r', encoding='utf-8') as f:
for line in f:
rest = {}
if line.startswith(HTTPONLY_PREFIX):
rest[HTTPONLY_ATTR] = ""
line = line[len(HTTPONLY_PREFIX):]
# last field may be absent, so keep any trailing tab
if line.endswith("\n"): line = line[:-1]
if line.endswith("\n"):
line = line[:-1]
# skip comments and blank lines XXX what is $ for?
if (line.strip().startswith(("#", "$")) or
line.strip() == ""):
if not line.strip().startswith('#HttpOnly_'): # skip for #HttpOnly_
continue
domain, domain_specified, path, secure, expires, name, value = \
@@ -1417,7 +1408,6 @@ def load_cookies(cookiefile):
value = None
initial_dot = domain.startswith(".")
if not line.strip().startswith('#HttpOnly_'): # skip for #HttpOnly_
assert domain_specified == initial_dot
discard = False
@@ -1435,7 +1425,7 @@ def load_cookies(cookiefile):
discard,
None,
None,
{})
rest)
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
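
The hunks above rework the hand-rolled cookies.txt parser: a '#HttpOnly_' prefix marks data, not a comment (MozillaCookieJar.load() mis-skipped such lines before Python 3.10, per bpo-2190 and cpython PR 17471 cited in the removed NOTE), so the loader now strips the prefix, records the flag in a per-cookie rest dict, and hands that dict to cookiejar.Cookie in place of the old {}. A condensed sketch of the parsing step (helper name is made up; field handling follows the diff):

HTTPONLY_PREFIX, HTTPONLY_ATTR = '#HttpOnly_', 'HTTPOnly'

def parse_cookies_txt_line(line):
    """Parse one Netscape cookies.txt line, preserving the HttpOnly flag."""
    rest = {}
    if line.startswith(HTTPONLY_PREFIX):
        rest[HTTPONLY_ATTR] = ''            # remember the flag as an attribute
        line = line[len(HTTPONLY_PREFIX):]
    if line.endswith('\n'):
        line = line[:-1]                    # keep a trailing tab: value may be empty
    if line.strip() == '' or line.strip().startswith(('#', '$')):
        return None                         # genuine comment or blank line
    domain, domain_specified, path, secure, expires, name, value = line.split('\t')
    return domain, name, value, rest

# parse_cookies_txt_line('#HttpOnly_.example.com\tTRUE\t/\tTRUE\t0\tsid\tabc\n')
# -> ('.example.com', 'sid', 'abc', {'HTTPOnly': ''})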
@@ -1443,24 +1433,25 @@ def load_cookies(cookiefile):
cookies.set_cookie(c)
elif cookiefile.endswith(('.sqlite', '.sqlite3')):
import sqlite3, shutil, tempfile
temp_dir = tempfile.gettempdir()
temp_cookiefile = os.path.join(temp_dir, 'temp_cookiefile.sqlite')
shutil.copy2(cookiefile, temp_cookiefile)
import sqlite3
cookies = cookiejar.MozillaCookieJar()
con = sqlite3.connect(temp_cookiefile)
con = sqlite3.connect(cookiefile)
cur = con.cursor()
cur.execute("""SELECT host, path, isSecure, expiry, name, value
cur.execute("""SELECT host, path, isSecure, expiry, name, value, isHttpOnly
FROM moz_cookies""")
for item in cur.fetchall():
for item in cur:
rest = {}
if item[6] == 1:
rest[HTTPONLY_ATTR] = ""
c = cookiejar.Cookie(
0, item[4], item[5], None, False, item[0],
item[0].startswith('.'), item[0].startswith('.'),
item[1], False, item[2], item[3], item[3] == '', None,
None, {},
None, rest,
)
cookies.set_cookie(c)
cur.close()
con.close()
else:
log.e('[error] unsupported cookies format')
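
For Firefox databases the flag comes straight from the isHttpOnly column of moz_cookies, and each row feeds the same rest mechanism before being wrapped in a cookiejar.Cookie. A standalone sketch under those assumptions (the path is hypothetical; note that a live Firefox profile may keep the database locked, which is what the temp-copy lines visible in the hunk work around):

import sqlite3
from http import cookiejar

HTTPONLY_ATTR = 'HTTPOnly'
jar = cookiejar.MozillaCookieJar()

con = sqlite3.connect('cookies.sqlite')   # hypothetical Firefox cookie DB
cur = con.cursor()
cur.execute("""SELECT host, path, isSecure, expiry, name, value, isHttpOnly
               FROM moz_cookies""")
for host, path, is_secure, expiry, name, value, is_httponly in cur:
    rest = {HTTPONLY_ATTR: ''} if is_httponly == 1 else {}
    jar.set_cookie(cookiejar.Cookie(
        0, name, value, None, False, host,
        host.startswith('.'), host.startswith('.'),
        path, False, is_secure, expiry, expiry == '',  # discard flag as in the diff
        None, None, rest,
    ))
cur.close()
con.close()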
@@ -1656,8 +1647,7 @@ def script_main(download, download_playlist, **kwargs):
download_grp.add_argument('--itag', help=argparse.SUPPRESS)
download_grp.add_argument('-m', '--m3u8', action='store_true', default=False,
help = 'download video using an m3u8 url')
help='download video using an m3u8 url') # nofmt
parser.add_argument('URL', nargs='*', help=argparse.SUPPRESS)