[common] fix urls_size (support faker and headers)

Mort Yao 2015-12-13 04:55:50 +01:00
parent f1e7885140
commit 944a166c02


@@ -322,11 +322,8 @@ def url_size(url, faker = False, headers = {}):
     size = response.headers['content-length']
     return int(size) if size!=None else float('inf')
 
-# TO BE DEPRECATED
-# urls_size() does not have a faker
-# also it takes too long time
-def urls_size(urls):
-    return sum(map(url_size, urls))
+def urls_size(urls, faker = False, headers = {}):
+    return sum([url_size(url, faker=faker, headers=headers) for url in urls])
 
 def get_head(url, headers = {}):
     if headers:
@@ -665,7 +662,7 @@ def download_urls(urls, title, ext, total_size, output_dir='.', refer=None, merg
     if not total_size:
         try:
-            total_size = urls_size(urls)
+            total_size = urls_size(urls, faker=faker, headers=headers)
         except:
            import traceback
            traceback.print_exc(file=sys.stdout)
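
In short, urls_size() now takes the same faker and headers keyword arguments as url_size() and forwards them to every per-URL size probe, and the call site in download_urls() passes them through. Below is a minimal standalone sketch of that pattern, assuming a urllib-based url_size() and a placeholder FAKE_HEADERS user agent; both the constant value and the request logic are illustrative, not the project's exact code.

# Sketch of the pattern this commit adopts: thread the faker/headers
# keyword arguments from urls_size() down to each url_size() call.
import urllib.request

FAKE_HEADERS = {'User-Agent': 'Mozilla/5.0'}  # assumed placeholder value

def url_size(url, faker=False, headers={}):
    # Pick request headers: a fake User-Agent when faker is set,
    # the caller's custom headers if given, otherwise none.
    if faker:
        req = urllib.request.Request(url, headers=FAKE_HEADERS)
    elif headers:
        req = urllib.request.Request(url, headers=headers)
    else:
        req = urllib.request.Request(url)
    response = urllib.request.urlopen(req)
    size = response.headers['content-length']
    return int(size) if size is not None else float('inf')

def urls_size(urls, faker=False, headers={}):
    # Forward both keyword arguments to every per-URL probe.
    return sum([url_size(url, faker=faker, headers=headers) for url in urls])

Without the forwarding, sites that reject requests lacking a browser-like User-Agent (or that require custom headers) would fail the size probe even though the actual download, which already supported faker and headers, would succeed.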