
[brightcove] update policy key on failing requests

Remita Amine 2019-12-31 16:42:56 +01:00
parent 0a02732b56
commit 0606808746
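
In short, the patch stops trusting the cached Brightcove policy key unconditionally: the Playback API request now runs inside a two-attempt loop, and when an attempt fails with HTTP 401/403 and an INVALID_POLICY_KEY error, the cached key is dropped, a fresh one is scraped from the player script, and the request is retried once. The sketch below is a minimal, self-contained illustration of that retry-after-refresh pattern, not the extractor code itself; load_cached_key, extract_fresh_key and call_api are hypothetical stand-ins, and the JSON error-body inspection from the real patch is simplified to a status-code check.

    # Illustrative sketch only: retry a request once after refreshing a
    # cached credential, mirroring the policy-key handling in the diff below.
    import json
    import urllib.error
    import urllib.request

    _CACHE = {}  # stands in for youtube-dl's on-disk cache


    def load_cached_key(cache_id):
        # Return a previously stored policy key, or None on a cache miss.
        return _CACHE.get(cache_id)


    def extract_fresh_key(cache_id):
        # Placeholder for scraping the player JS for a new policy key.
        key = 'BCpk-example'
        _CACHE[cache_id] = key
        return key


    def call_api(api_url, cache_id):
        policy_key = load_cached_key(cache_id)
        key_refreshed = False
        for _ in range(2):  # at most one retry with a refreshed key
            if not policy_key:
                policy_key = extract_fresh_key(cache_id)
                key_refreshed = True
            request = urllib.request.Request(
                api_url, headers={'Accept': 'application/json;pk=%s' % policy_key})
            try:
                with urllib.request.urlopen(request) as response:
                    return json.load(response)
            except urllib.error.HTTPError as e:
                # A stale cached key typically surfaces as HTTP 401/403: drop
                # it and loop once more so a freshly extracted key is tried.
                if e.code in (401, 403) and not key_refreshed:
                    policy_key = None
                    continue
                raise

Extracting the key on demand and re-caching it means a stale key costs only one extra request, while a valid cached key still avoids the player-page download entirely.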


@@ -588,11 +588,15 @@ def _real_extract(self, url):

         policy_key_id = '%s_%s' % (account_id, player_id)
         policy_key = self._downloader.cache.load('brightcove', policy_key_id)
-        if not policy_key:
+        policy_key_extracted = False
+
+        def extract_policy_key():
             webpage = self._download_webpage(
                 'http://players.brightcove.net/%s/%s_%s/index.min.js'
                 % (account_id, player_id, embed), video_id)

+            policy_key = None
+
             catalog = self._search_regex(
                 r'catalog\(({.+?})\);', webpage, 'catalog', default=None)
             if catalog:
@@ -605,28 +609,38 @@ def _real_extract(self, url):
                 policy_key = self._search_regex(
                     r'policyKey\s*:\s*(["\'])(?P<pk>.+?)\1',
                     webpage, 'policy key', group='pk')

             self._downloader.cache.store('brightcove', policy_key_id, policy_key)
+            return policy_key

         api_url = 'https://edge.api.brightcove.com/playback/v1/accounts/%s/%ss/%s' % (account_id, content_type, video_id)
-        headers = {
-            'Accept': 'application/json;pk=%s' % policy_key,
-        }
+        headers = {}
         referrer = smuggled_data.get('referrer')
         if referrer:
             headers.update({
                 'Referer': referrer,
                 'Origin': re.search(r'https?://[^/]+', referrer).group(0),
             })
-        try:
-            json_data = self._download_json(api_url, video_id, headers=headers)
-        except ExtractorError as e:
-            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
-                json_data = self._parse_json(e.cause.read().decode(), video_id)[0]
-                message = json_data.get('message') or json_data['error_code']
-                if json_data.get('error_subcode') == 'CLIENT_GEO':
-                    self.raise_geo_restricted(msg=message)
-                raise ExtractorError(message, expected=True)
-            raise
+
+        for _ in range(2):
+            if not policy_key:
+                policy_key = extract_policy_key()
+                policy_key_extracted = True
+            headers['Accept'] = 'application/json;pk=%s' % policy_key
+            try:
+                json_data = self._download_json(api_url, video_id, headers=headers)
+                break
+            except ExtractorError as e:
+                if isinstance(e.cause, compat_HTTPError) and e.cause.code in (401, 403):
+                    json_data = self._parse_json(e.cause.read().decode(), video_id)[0]
+                    message = json_data.get('message') or json_data['error_code']
+                    if json_data.get('error_subcode') == 'CLIENT_GEO':
+                        self.raise_geo_restricted(msg=message)
+                    elif json_data.get('error_code') == 'INVALID_POLICY_KEY' and not policy_key_extracted:
+                        policy_key = None
+                        continue
+                    raise ExtractorError(message, expected=True)
+                raise

         errors = json_data.get('errors')
         if errors and errors[0].get('error_subcode') == 'TVE_AUTH':