PEP8: W503
commit 8fb3ac3649
parent 77b2986b5b
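For context: W503 is the pycodestyle warning "line break before binary operator". Every hunk in this commit makes the same change: an `and`, `or`, `+`, `/` or `%` that used to open a continuation line is moved to the end of the preceding line, with no change in behaviour. A minimal sketch of the pattern follows, using invented variable names rather than code taken from the diff:

# Hypothetical illustration only; the dict and variable names are not from this commit.
info = {'age_limit': 18}

# Before: the binary operator begins the continuation line,
# which pycodestyle reports as W503.
flagged = ('age_limit' in info
           and info['age_limit'] >= 18)

# After: the operator ends the previous line instead, the style
# applied throughout the hunks below.
clean = ('age_limit' in info and
         info['age_limit'] >= 18)

assert flagged == clean  # purely a formatting change; evaluation is identical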
@@ -45,12 +45,12 @@

         RESULT = ('.' + domain + '\n' in LIST or '\n' + domain + '\n' in LIST)

-    if RESULT and ('info_dict' not in test or 'age_limit' not in test['info_dict']
-                   or test['info_dict']['age_limit'] != 18):
+    if RESULT and ('info_dict' not in test or 'age_limit' not in test['info_dict'] or
+                   test['info_dict']['age_limit'] != 18):
         print('\nPotential missing age_limit check: {0}'.format(test['name']))

-    elif not RESULT and ('info_dict' in test and 'age_limit' in test['info_dict']
-                         and test['info_dict']['age_limit'] == 18):
+    elif not RESULT and ('info_dict' in test and 'age_limit' in test['info_dict'] and
+                         test['info_dict']['age_limit'] == 18):
         print('\nPotential false negative: {0}'.format(test['name']))

     else:
@@ -34,8 +34,8 @@ def _make_testfunc(testfile):
     def test_func(self):
         as_file = os.path.join(TEST_DIR, testfile)
         swf_file = os.path.join(TEST_DIR, test_id + '.swf')
-        if ((not os.path.exists(swf_file))
-                or os.path.getmtime(swf_file) < os.path.getmtime(as_file)):
+        if ((not os.path.exists(swf_file)) or
+                os.path.getmtime(swf_file) < os.path.getmtime(as_file)):
             # Recompile
             try:
                 subprocess.check_call([
@@ -308,8 +308,8 @@ def __init__(self, params=None, auto_init=True):
                 raise

         if (sys.version_info >= (3,) and sys.platform != 'win32' and
-                sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968']
-                and not params.get('restrictfilenames', False)):
+                sys.getfilesystemencoding() in ['ascii', 'ANSI_X3.4-1968'] and
+                not params.get('restrictfilenames', False)):
             # On Python 3, the Unicode filesystem API will throw errors (#1474)
             self.report_warning(
                 'Assuming --restrict-filenames since file system encoding '
@@ -1366,8 +1366,8 @@ def download(self, url_list):
         """Download a given list of URLs."""
         outtmpl = self.params.get('outtmpl', DEFAULT_OUTTMPL)
         if (len(url_list) > 1 and
-                '%' not in outtmpl
-                and self.params.get('max_downloads') != 1):
+                '%' not in outtmpl and
+                self.params.get('max_downloads') != 1):
             raise SameFileError(outtmpl)

         for url in url_list:
@@ -189,14 +189,14 @@ def _real_main(argv=None):
     # In Python 2, sys.argv is a bytestring (also note http://bugs.python.org/issue2128 for Windows systems)
     if opts.outtmpl is not None:
         opts.outtmpl = opts.outtmpl.decode(preferredencoding())
-    outtmpl = ((opts.outtmpl is not None and opts.outtmpl)
-               or (opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s')
-               or (opts.format == '-1' and '%(id)s-%(format)s.%(ext)s')
-               or (opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s')
-               or (opts.usetitle and '%(title)s-%(id)s.%(ext)s')
-               or (opts.useid and '%(id)s.%(ext)s')
-               or (opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s')
-               or DEFAULT_OUTTMPL)
+    outtmpl = ((opts.outtmpl is not None and opts.outtmpl) or
+               (opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s') or
+               (opts.format == '-1' and '%(id)s-%(format)s.%(ext)s') or
+               (opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s') or
+               (opts.usetitle and '%(title)s-%(id)s.%(ext)s') or
+               (opts.useid and '%(id)s.%(ext)s') or
+               (opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s') or
+               DEFAULT_OUTTMPL)
     if not os.path.splitext(outtmpl)[1] and opts.extractaudio:
         parser.error('Cannot download a video and extract audio into the same'
                      ' file! Use "{0}.%(ext)s" instead of "{0}" as the output'
@@ -311,14 +311,14 @@ def download(self, filename, info_dict):
         """

         nooverwrites_and_exists = (
-            self.params.get('nooverwrites', False)
-            and os.path.exists(encodeFilename(filename))
+            self.params.get('nooverwrites', False) and
+            os.path.exists(encodeFilename(filename))
         )

         continuedl_and_exists = (
-            self.params.get('continuedl', False)
-            and os.path.isfile(encodeFilename(filename))
-            and not self.params.get('nopart', False)
+            self.params.get('continuedl', False) and
+            os.path.isfile(encodeFilename(filename)) and
+            not self.params.get('nopart', False)
         )

         # Check file already present
@@ -325,8 +325,8 @@ def frag_progress_hook(s):
             state['frag_index'] += 1

             estimated_size = (
-                (state['downloaded_bytes'] + frag_total_bytes)
-                / (state['frag_index'] + 1) * total_frags)
+                (state['downloaded_bytes'] + frag_total_bytes) /
+                (state['frag_index'] + 1) * total_frags)
             time_now = time.time()
             state['total_bytes_estimate'] = estimated_size
             state['elapsed'] = time_now - start
@@ -28,7 +28,6 @@ class AdobeTVIE(InfoExtractor):

     def _real_extract(self, url):
         video_id = self._match_id(url)

         webpage = self._download_webpage(url, video_id)

         player = self._parse_json(
@@ -44,8 +43,10 @@ def _real_extract(self, url):
             self._html_search_meta('datepublished', webpage, 'upload date'))

         duration = parse_duration(
-            self._html_search_meta('duration', webpage, 'duration')
-            or self._search_regex(r'Runtime:\s*(\d{2}:\d{2}:\d{2})', webpage, 'duration'))
+            self._html_search_meta('duration', webpage, 'duration') or
+            self._search_regex(
+                r'Runtime:\s*(\d{2}:\d{2}:\d{2})',
+                webpage, 'duration', fatal=False))

         view_count = str_to_int(self._search_regex(
             r'<div class="views">\s*Views?:\s*([\d,.]+)\s*</div>',
@@ -808,8 +808,8 @@ def _extract_f4m_formats(self, manifest_url, video_id, preference=None, f4m_id=N
         media_nodes = manifest.findall('{http://ns.adobe.com/f4m/2.0}media')
         for i, media_el in enumerate(media_nodes):
             if manifest_version == '2.0':
-                manifest_url = ('/'.join(manifest_url.split('/')[:-1]) + '/'
-                                + (media_el.attrib.get('href') or media_el.attrib.get('url')))
+                manifest_url = ('/'.join(manifest_url.split('/')[:-1]) + '/' +
+                                (media_el.attrib.get('href') or media_el.attrib.get('url')))
             tbr = int_or_none(media_el.attrib.get('bitrate'))
             formats.append({
                 'format_id': '-'.join(filter(None, [f4m_id, 'f4m-%d' % (i if tbr is None else tbr)])),
@@ -25,8 +25,9 @@ def _real_extract(self, url):
             r"flashvars.pvg_id=\"(\d+)\";",
             webpage, 'ID')

-        json_url = ('http://static.videos.gouv.fr/brightcovehub/export/json/'
-                    + video_id)
+        json_url = (
+            'http://static.videos.gouv.fr/brightcovehub/export/json/%s' %
+            video_id)
         info = self._download_json(json_url, title, 'Downloading JSON config')
         video_url = info['renditions'][0]['url']

@@ -900,8 +900,8 @@ def _windows_write_string(s, out):
     def not_a_console(handle):
         if handle == INVALID_HANDLE_VALUE or handle is None:
             return True
-        return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
-                or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
+        return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR or
+                GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)

     if not_a_console(h):
         return False