Compare commits

10 commits: 7f26bf0251...d35e4ea080

- d35e4ea080
- 4d05f84325
- e0094e63c3
- fd8242e3ef
- ad01fa6cca
- 2eac0fa379
- ab5617be9e
- b028b2fa27
- d1dbd37b09
- bcb8143f1d
```diff
@@ -577,9 +577,11 @@ class TestJSInterpreter(unittest.TestCase):
     def test_unary_operators(self):
         jsi = JSInterpreter('function f(){return 2 - - - 2;}')
         self.assertEqual(jsi.call_function('f'), 0)
-        # fails
-        # jsi = JSInterpreter('function f(){return 2 + - + - - 2;}')
-        # self.assertEqual(jsi.call_function('f'), 0)
+        jsi = JSInterpreter('function f(){return 2 + - + - - 2;}')
+        self.assertEqual(jsi.call_function('f'), 0)
+        # https://github.com/ytdl-org/youtube-dl/issues/32815
+        jsi = JSInterpreter('function f(){return 0 - 7 * - 6;}')
+        self.assertEqual(jsi.call_function('f'), 42)
 
     """ # fails so far
     def test_packed(self):
```
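For quick reference, the behaviour the updated test now asserts can be reproduced from a Python shell. This is illustrative only and assumes the jsinterp changes later in this comparison are applied:

```python
# JSInterpreter is the pure-Python JS evaluator in youtube_dl/jsinterp.py
from youtube_dl.jsinterp import JSInterpreter

# chained unary minus/plus collapse to their arithmetic sign
assert JSInterpreter('function f(){return 2 - - - 2;}').call_function('f') == 0
assert JSInterpreter('function f(){return 2 + - + - - 2;}').call_function('f') == 0
# issue 32815: the unary minus binds to the operand of `*` before the subtraction
assert JSInterpreter('function f(){return 0 - 7 * - 6;}').call_function('f') == 42
```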
```diff
@@ -158,6 +158,10 @@ _NSIG_TESTS = [
         'https://www.youtube.com/s/player/b7910ca8/player_ias.vflset/en_US/base.js',
         '_hXMCwMt9qE310D', 'LoZMgkkofRMCZQ',
     ),
+    (
+        'https://www.youtube.com/s/player/590f65a6/player_ias.vflset/en_US/base.js',
+        '1tm7-g_A9zsI8_Lay_', 'xI4Vem4Put_rOg',
+    ),
 ]
 
 
```
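Each `_NSIG_TESTS` tuple pairs a player JS URL with a sample input and the expected output of the n-parameter challenge. The sketch below only illustrates the shape of the assertion; `solve_n_challenge` is a hypothetical stand-in for whatever callable the test harness derives from the player URL, not a real helper in this repository:

```python
# Hypothetical stand-in: solve_n_challenge(player_js_url, n) is assumed to
# download the player JS, extract the challenge function and run it on n.
def check_nsig_case(solve_n_challenge):
    player = 'https://www.youtube.com/s/player/590f65a6/player_ias.vflset/en_US/base.js'
    assert solve_n_challenge(player, '1tm7-g_A9zsI8_Lay_') == 'xI4Vem4Put_rOg'
```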
```diff
@@ -3033,7 +3033,6 @@ class InfoExtractor(object):
             transform_source=transform_source, default=None)
 
     def _extract_jwplayer_data(self, webpage, video_id, *args, **kwargs):
-
         # allow passing `transform_source` through to _find_jwplayer_data()
         transform_source = kwargs.pop('transform_source', None)
         kwfind = compat_kwargs({'transform_source': transform_source}) if transform_source else {}
```
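The pattern above only forwards `transform_source` when the caller actually supplied one, so the callee keeps its own default otherwise. A minimal, self-contained sketch of that idiom (plain dict forwarding; the real code additionally wraps the dict in `compat_kwargs` for Python 2, and `_find_jwplayer_data` here is just a stand-in recorder):

```python
def _find_jwplayer_data(webpage, video_id, transform_source=None):
    # stand-in for the real helper; just records what it received
    return {'video_id': video_id, 'transform_source': transform_source}

def extract(webpage, video_id, **kwargs):
    transform_source = kwargs.pop('transform_source', None)
    # build the forwarding dict only when the option was given
    kwfind = {'transform_source': transform_source} if transform_source else {}
    return _find_jwplayer_data(webpage, video_id, **kwfind)

assert extract('<html/>', '42')['transform_source'] is None
assert extract('<html/>', '42', transform_source=str.strip)['transform_source'] is str.strip
```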
```diff
@@ -8,7 +8,7 @@ from ..compat import compat_str
 from ..utils import (
     int_or_none,
     str_or_none,
-    try_get,
+    traverse_obj,
 )
 
 
```
```diff
@@ -109,7 +109,7 @@ class PalcoMP3ArtistIE(PalcoMP3BaseIE):
     }
     name'''
 
-    @ classmethod
+    @classmethod
     def suitable(cls, url):
         return False if re.match(PalcoMP3IE._VALID_URL, url) else super(PalcoMP3ArtistIE, cls).suitable(url)
 
```
```diff
@@ -118,7 +118,8 @@ class PalcoMP3ArtistIE(PalcoMP3BaseIE):
         artist = self._call_api(artist_slug, self._ARTIST_FIELDS_TMPL)['artist']
 
         def entries():
-            for music in (try_get(artist, lambda x: x['musics']['nodes'], list) or []):
+            for music in traverse_obj(artist, (
+                    'musics', 'nodes', lambda _, m: m['musicID'])):
                 yield self._parse_music(music)
 
         return self.playlist_result(
```
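For context on the swap above: `try_get` needs one lambda per access path, while `traverse_obj` takes a declarative key tuple and can filter branches with a callable. A small illustrative comparison on sample data (both helpers are in `youtube_dl.utils`; the sample dict is made up):

```python
from youtube_dl.utils import try_get, traverse_obj

artist = {'musics': {'nodes': [{'musicID': 101, 'title': 'a'},
                               {'musicID': 102, 'title': 'b'}]}}

# old style: a lambda per path, returning the raw list (or None on any miss)
nodes_old = try_get(artist, lambda x: x['musics']['nodes'], list) or []
# new style: walk the path; the trailing callable keeps nodes with a musicID
nodes_new = traverse_obj(artist, ('musics', 'nodes', lambda _, m: m['musicID']))

assert [n['title'] for n in nodes_old] == [n['title'] for n in nodes_new] == ['a', 'b']
```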
```diff
@@ -137,7 +138,7 @@ class PalcoMP3VideoIE(PalcoMP3BaseIE):
             'title': 'Maiara e Maraisa - Você Faz Falta Aqui - DVD Ao Vivo Em Campo Grande',
             'description': 'md5:7043342c09a224598e93546e98e49282',
             'upload_date': '20161107',
-            'uploader_id': 'maiaramaraisaoficial',
+            'uploader_id': '@maiaramaraisaoficial',
             'uploader': 'Maiara e Maraisa',
         }
     }]
```
```diff
@@ -2,32 +2,62 @@
 from __future__ import unicode_literals
 
 from .common import InfoExtractor
+from ..compat import (
+    compat_kwargs,
+    compat_str,
+)
 from ..utils import (
     smuggle_url,
     ExtractorError,
+    HEADRequest,
+    float_or_none,
+    int_or_none,
+    merge_dicts,
+    parse_duration,
+    parse_iso8601,
+    traverse_obj,
+    update_url_query,
+    url_or_none,
 )
 
 
 class SBSIE(InfoExtractor):
     IE_DESC = 'sbs.com.au'
-    _VALID_URL = r'https?://(?:www\.)?sbs\.com\.au/(?:ondemand(?:/video/(?:single/)?|.*?\bplay=|/watch/)|news/(?:embeds/)?video/)(?P<id>[0-9]+)'
+    _VALID_URL = r'''(?x)
+        https?://(?:www\.)?sbs\.com\.au/(?:
+            ondemand(?:
+                /video/(?:single/)?|
+                /(?:movie|tv-program)/[^/]+/|
+                /(?:tv|news)-series/(?:[^/]+/){3}|
+                .*?\bplay=|/watch/
+            )|news/(?:embeds/)?video/
+        )(?P<id>[0-9]+)'''
+    _EMBED_REGEX = [r'''(?x)
+        (?:
+            <meta\s+property="og:video"\s+content=|
+            <iframe[^>]+?src=
+        )
+        (["\'])(?P<url>https?://(?:www\.)?sbs\.com\.au/ondemand/video/.+?)\1''']
 
     _TESTS = [{
-        # Original URL is handled by the generic IE which finds the iframe:
-        # http://www.sbs.com.au/thefeed/blog/2014/08/21/dingo-conservation
+        # Exceptional unrestricted show for testing, thanks SBS,
+        # from an iframe of this page, handled by the generic IE, now 404:
+        # http://www.sbs.com.au/thefeed/blog/2014/08/21/dingo-conservation, but replaced by
+        # https://www.sbs.com.au/programs/video/320403011771/Dingo-Conservation-The-Feed
         'url': 'http://www.sbs.com.au/ondemand/video/single/320403011771/?source=drupal&vertical=thefeed',
-        'md5': '3150cf278965eeabb5b4cea1c963fe0a',
+        'md5': 'e49d0290cb4f40d893b8dfe760dce6b0',
         'info_dict': {
-            'id': '_rFBPRPO4pMR',
+            'id': '320403011771',  # formerly '_rFBPRPO4pMR', no longer found
             'ext': 'mp4',
             'title': 'Dingo Conservation (The Feed)',
             'description': 'md5:f250a9856fca50d22dec0b5b8015f8a5',
-            'thumbnail': r're:http://.*\.jpg',
+            'thumbnail': r're:https?://.*\.jpg',
             'duration': 308,
             'timestamp': 1408613220,
             'upload_date': '20140821',
             'uploader': 'SBSC',
+            'tags': None,
+            'categories': None,
         },
+        'expected_warnings': ['Unable to download JSON metadata'],
     }, {
         'url': 'http://www.sbs.com.au/ondemand/video/320403011771/Dingo-Conservation-The-Feed',
         'only_matching': True,
```
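A quick, illustrative way to confirm the widened `_VALID_URL` accepts the new URL shapes exercised by the tests added further down (assumes the updated extractor is importable):

```python
import re
from youtube_dl.extractor.sbs import SBSIE

for url in (
        'https://www.sbs.com.au/ondemand/movie/coherence/1469404227931',
        'https://www.sbs.com.au/ondemand/tv-series/the-handmaids-tale/season-5/the-handmaids-tale-s5-ep1/2065631811776',
        'https://www.sbs.com.au/ondemand/tv-program/autun-romes-forgotten-sister/2116212803602',
):
    m = re.match(SBSIE._VALID_URL, url)
    # every new shape must still yield the numeric video id
    assert m and m.group('id').isdigit()
```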
```diff
@@ -46,33 +76,160 @@ class SBSIE(InfoExtractor):
     }, {
         'url': 'https://www.sbs.com.au/ondemand/watch/1698704451971',
         'only_matching': True,
+    }, {
+        'url': 'https://www.sbs.com.au/ondemand/movie/coherence/1469404227931',
+        'only_matching': True,
+    }, {
+        'note': 'Live stream',
+        'url': 'https://www.sbs.com.au/ondemand/video/1726824003663/sbs-24x7-live-stream-nsw',
+        'only_matching': True,
+    }, {
+        'url': 'https://www.sbs.com.au/ondemand/news-series/dateline/dateline-2022/dateline-s2022-ep26/2072245827515',
+        'only_matching': True,
+    }, {
+        'url': 'https://www.sbs.com.au/ondemand/tv-series/the-handmaids-tale/season-5/the-handmaids-tale-s5-ep1/2065631811776',
+        'only_matching': True,
+    }, {
+        'url': 'https://www.sbs.com.au/ondemand/tv-program/autun-romes-forgotten-sister/2116212803602',
+        'only_matching': True,
     }]
 
+    def _extract_m3u8_formats(self, m3u8_url, video_id, *args, **kwargs):
+        # ext, entry_protocol, preference, m3u8_id, note, errnote, fatal,
+        # live, data, headers, query
+        entry_protocol = args[1] if len(args) > 1 else kwargs.get('entry_protocol')
+        if not entry_protocol:
+            entry_protocol = 'm3u8_native'
+            if len(args) > 1:
+                args = list(args)
+                args[1] = entry_protocol
+            else:
+                kwargs['entry_protocol'] = entry_protocol
+                kwargs = compat_kwargs(kwargs)
+
+        return super(SBSIE, self)._extract_m3u8_formats(m3u8_url, video_id, *args, **kwargs)
+
+    _GEO_COUNTRIES = ['AU']
+    # naming for exportability
+    AUS_TV_PARENTAL_GUIDELINES = {
+        'P': 0,
+        'C': 7,
+        'G': 0,
+        'PG': 0,
+        'M': 14,
+        'MA15+': 15,
+        'AV15+': 15,
+        'MAV15+': 15,
+        'R18+': 18,
+        'NC': 0,  # not classified (unofficial, used by SBS)
+    }
+    _PLAYER_API = 'https://www.sbs.com.au/api/v3'
+    _CATALOGUE_API = 'https://catalogue.pr.sbsod.com/'
+    _VOD_BASE_URL = 'https://sbs-vod-prod-01.akamaized.net/'
+
+    def _call_api(self, video_id, path, query=None, data=None, headers=None, fatal=True):
+        return self._download_json(update_url_query(
+            self._CATALOGUE_API + path, query),
+            video_id, headers=headers or {}, fatal=fatal) or {}
+
+    def _get_smil_url(self, video_id):
+        return update_url_query(
+            self._PLAYER_API + 'video_smil', {'id': video_id})
+
+    def _get_player_data(self, video_id, headers=None, fatal=False):
+        return self._download_json(update_url_query(
+            self._PLAYER_API + 'video_stream', {'id': video_id, 'context': 'tv'}),
+            video_id, headers=headers or {}, fatal=fatal) or {}
+
     def _real_extract(self, url):
         video_id = self._match_id(url)
-        player_params = self._download_json(
-            'http://www.sbs.com.au/api/video_pdkvars/id/%s?form=json' % video_id, video_id)
+        # get media links directly though later metadata may contain contentUrl
+        smil_url = self._get_smil_url(video_id)
+        formats = self._extract_smil_formats(smil_url, video_id, fatal=False) or []
 
-        error = player_params.get('error')
-        if error:
-            error_message = 'Sorry, The video you are looking for does not exist.'
-            video_data = error.get('results') or {}
-            error_code = error.get('errorCode')
-            if error_code == 'ComingSoon':
-                error_message = '%s is not yet available.' % video_data.get('title', '')
-            elif error_code in ('Forbidden', 'intranetAccessOnly'):
-                error_message = 'Sorry, This video cannot be accessed via this website'
-            elif error_code == 'Expired':
-                error_message = 'Sorry, %s is no longer available.' % video_data.get('title', '')
-            raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
+        if not formats:
+            urlh = self._request_webpage(
+                HEADRequest(self._VOD_BASE_URL), video_id,
+                note='Checking geo-restriction', fatal=False, expected_status=403)
+            if urlh:
+                error_reasons = urlh.headers.get_all('x-error-reason') or []
+                if 'geo-blocked' in error_reasons:
+                    self.raise_geo_restricted(countries=self._GEO_COUNTRIES)
 
-        urls = player_params['releaseUrls']
-        theplatform_url = (urls.get('progressive') or urls.get('html')
-                           or urls.get('standard') or player_params['relatedItemsURL'])
+        self._sort_formats(formats)
 
-        return {
-            '_type': 'url_transparent',
-            'ie_key': 'ThePlatform',
+        # try for metadata from the same source
+        player_data = self._get_player_data(video_id, fatal=False)
+        media = traverse_obj(player_data, 'video_object', expected_type=dict) or {}
+        # get, or add, metadata from catalogue
+        media.update(self._call_api(video_id, 'mpx-media/' + video_id, fatal=not media))
+
+        # utils candidate for use with traverse_obj()
+        def txt_or_none(s):
+            return (s.strip() or None) if isinstance(s, compat_str) else None
+
+        # expected_type fn for thumbs
+        def xlate_thumb(t):
+            u = url_or_none(t.get('contentUrl'))
+            return u and {
+                'id': t.get('name'),
+                'url': u,
+                'width': int_or_none(t.get('width')),
+                'height': int_or_none(t.get('height')),
+            }
+
+        # may be numeric or timecoded
+        def really_parse_duration(d):
+            result = float_or_none(d)
+            if result is None:
+                result = parse_duration(d)
+            return result
+
+        def traverse_media(*args, **kwargs):
+            nkwargs = None
+            if 'expected_type' not in kwargs:
+                kwargs['expected_type'] = txt_or_none
+                nkwargs = kwargs
+            if 'get_all' not in kwargs:
+                kwargs['get_all'] = False
+                nkwargs = kwargs
+            if nkwargs:
+                kwargs = compat_kwargs(nkwargs)
+            return traverse_obj(media, *args, **kwargs)
+
+        # For named episodes, use the catalogue's title to set episode, rather than generic 'Episode N'.
+        if traverse_media('partOfSeries', expected_type=dict):
+            media['epName'] = traverse_media('title')
+
+        return merge_dicts(*reversed(({
             'id': video_id,
-            'url': smuggle_url(self._proto_relative_url(theplatform_url), {'force_smil_url': True}),
-        }
+        }, dict((k, traverse_media(v)) for k, v in {
+            'title': 'name',
+            'description': 'description',
+            'channel': ('taxonomy', 'channel', 'name'),
+            'series': ((('partOfSeries', 'name'), 'seriesTitle')),
+            'series_id': ((('partOfSeries', 'uuid'), 'seriesID')),
+            'episode': 'epName',
+        }.items()), {
+            'season_number': traverse_media((('partOfSeries', None), 'seasonNumber'), expected_type=int_or_none),
+            'episode_number': traverse_media('episodeNumber', expected_type=int_or_none),
+            'timestamp': traverse_media('datePublished', ('publication', 'startDate'),
+                                        expected_type=parse_iso8601),
+            'release_year': traverse_media('releaseYear', expected_type=int_or_none),
+            'duration': traverse_media('duration', expected_type=really_parse_duration),
+            'is_live': traverse_media('liveStream', expected_type=bool),
+            'age_limit': self.AUS_TV_PARENTAL_GUIDELINES.get(traverse_media(
+                'classificationID', 'contentRating', default='').upper()),
+            'categories': traverse_media(
+                ('genres', Ellipsis), ('taxonomy', ('genre', 'subgenre'), 'name'),
+                get_all=True) or None,
+            'tags': traverse_media(
+                (('consumerAdviceTexts', ('sbsSubCertification', 'consumerAdvice')), Ellipsis),
+                get_all=True) or None,
+            'thumbnails': traverse_media(('thumbnails', Ellipsis),
+                                         expected_type=xlate_thumb, get_all=True),
+            'formats': formats,
+            # TODO: _extract_smil_formats_and_subtitles()
+            # 'subtitles': subtitles,
+            'uploader': 'SBSC',
+        })))
```
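Two of the helpers above, shown in isolation as a runnable illustration (`float_or_none` and `parse_duration` are existing `youtube_dl.utils` functions; the guidelines table is copied from the new class attribute; the sample values are made up):

```python
from youtube_dl.utils import float_or_none, parse_duration

def really_parse_duration(d):
    # catalogue durations may be numeric ('308') or timecoded ('00:05:08')
    result = float_or_none(d)
    if result is None:
        result = parse_duration(d)
    return result

assert really_parse_duration('308') == 308.0
assert really_parse_duration('00:05:08') == 308.0

AUS_TV_PARENTAL_GUIDELINES = {
    'P': 0, 'C': 7, 'G': 0, 'PG': 0, 'M': 14,
    'MA15+': 15, 'AV15+': 15, 'MAV15+': 15, 'R18+': 18, 'NC': 0,
}
# classification IDs from the API are upper-cased, then mapped to an age limit
assert AUS_TV_PARENTAL_GUIDELINES.get('m'.upper()) == 14
assert AUS_TV_PARENTAL_GUIDELINES.get('') is None  # unknown -> no age_limit set
```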
```diff
@@ -14,6 +14,7 @@ from .utils import (
     remove_quotes,
     unified_timestamp,
     variadic,
+    write_string,
 )
 from .compat import (
     compat_basestring,
```
```diff
@@ -53,15 +54,16 @@ def wraps_op(op):
 
 # NB In principle NaN cannot be checked by membership.
 # Here all NaN values are actually this one, so _NaN is _NaN,
-# although _NaN != _NaN.
+# although _NaN != _NaN. Ditto Infinity.
 
 _NaN = float('nan')
+_Infinity = float('inf')
 
 
 def _js_bit_op(op):
 
     def zeroise(x):
-        return 0 if x in (None, JS_Undefined, _NaN) else x
+        return 0 if x in (None, JS_Undefined, _NaN, _Infinity) else x
 
     @wraps_op(op)
     def wrapped(a, b):
```
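A sketch of what the extended `zeroise()` buys, using the interpreter itself (it assumes the `Infinity` literal support added later in this comparison): JavaScript bitwise operators coerce `NaN` and `Infinity` to 0 rather than failing.

```python
from youtube_dl.jsinterp import JSInterpreter

# NaN and Infinity are zeroised before the bitwise operation is applied
assert JSInterpreter('function f(){return NaN | 0;}').call_function('f') == 0
assert JSInterpreter('function f(){return Infinity >> 1;}').call_function('f') == 0
```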
```diff
@@ -84,7 +86,7 @@ def _js_arith_op(op):
 def _js_div(a, b):
     if JS_Undefined in (a, b) or not (a or b):
         return _NaN
-    return operator.truediv(a or 0, b) if b else float('inf')
+    return operator.truediv(a or 0, b) if b else _Infinity
 
 
 def _js_mod(a, b):
```
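A short illustration of the division semantics this preserves while reusing the shared constant: any non-zero value divided by 0 is `Infinity`, whereas `0 / 0` (or an undefined operand) stays `NaN`.

```python
import math
from youtube_dl.jsinterp import JSInterpreter

assert JSInterpreter('function f(){return 7 / 0;}').call_function('f') == float('inf')
assert math.isnan(JSInterpreter('function f(){return 0 / 0;}').call_function('f'))
```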
```diff
@@ -220,6 +222,42 @@ class LocalNameSpace(ChainMap):
         return 'LocalNameSpace%s' % (self.maps, )
 
 
+class Debugger(object):
+    ENABLED = False
+
+    @staticmethod
+    def write(*args, **kwargs):
+        level = kwargs.get('level', 100)
+
+        def truncate_string(s, left, right=0):
+            if s is None or len(s) <= left + right:
+                return s
+            return '...'.join((s[:left - 3], s[-right:] if right else ''))
+
+        write_string('[debug] JS: {0}{1}\n'.format(
+            ' ' * (100 - level),
+            ' '.join(truncate_string(compat_str(x), 50, 50) for x in args)))
+
+    @classmethod
+    def wrap_interpreter(cls, f):
+        def interpret_statement(self, stmt, local_vars, allow_recursion, *args, **kwargs):
+            if cls.ENABLED and stmt.strip():
+                cls.write(stmt, level=allow_recursion)
+            try:
+                ret, should_ret = f(self, stmt, local_vars, allow_recursion, *args, **kwargs)
+            except Exception as e:
+                if cls.ENABLED:
+                    if isinstance(e, ExtractorError):
+                        e = e.orig_msg
+                    cls.write('=> Raises:', e, '<-|', stmt, level=allow_recursion)
+                raise
+            if cls.ENABLED and stmt.strip():
+                if should_ret or not repr(ret) == stmt:
+                    cls.write(['->', '=>'][should_ret], repr(ret), '<-|', stmt, level=allow_recursion)
+            return ret, should_ret
+        return interpret_statement
+
+
 class JSInterpreter(object):
     __named_object_counter = 0
 
```
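A minimal usage sketch for the new `Debugger` (assuming the post-merge `jsinterp` module): flip `ENABLED` on and every interpreted statement, plus any raised error, is written out via `write_string()`, indented by the remaining recursion budget. The same channel also receives `console.debug(...)` calls once the change further down is in place.

```python
from youtube_dl import jsinterp

jsinterp.Debugger.ENABLED = True
jsi = jsinterp.JSInterpreter('function f(x){ return x * 2 + 1; }')
print(jsi.call_function('f', 20))  # prints 41 after the '[debug] JS:' trace lines
jsinterp.Debugger.ENABLED = False
```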
```diff
@@ -307,8 +345,7 @@ class JSInterpreter(object):
     def __op_chars(cls):
         op_chars = set(';,[')
         for op in cls._all_operators():
-            for c in op[0]:
-                op_chars.add(c)
+            op_chars.update(op[0])
         return op_chars
 
     def _named_object(self, namespace, obj):
```
```diff
@@ -326,9 +363,8 @@ class JSInterpreter(object):
         # collections.Counter() is ~10% slower in both 2.7 and 3.9
         counters = dict((k, 0) for k in _MATCHING_PARENS.values())
         start, splits, pos, delim_len = 0, 0, 0, len(delim) - 1
-        in_quote, escaping, skipping = None, False, 0
-        after_op, in_regex_char_group = True, False
-
+        in_quote, escaping, after_op, in_regex_char_group = None, False, True, False
+        skipping = 0
         for idx, char in enumerate(expr):
             paren_delta = 0
             if not in_quote:
```
```diff
@@ -382,10 +418,12 @@ class JSInterpreter(object):
         return separated[0][1:].strip(), separated[1].strip()
 
     @staticmethod
-    def _all_operators():
-        return itertools.chain(
-            # Ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence
-            _SC_OPERATORS, _LOG_OPERATORS, _COMP_OPERATORS, _OPERATORS)
+    def _all_operators(_cached=[]):
+        if not _cached:
+            _cached.extend(itertools.chain(
+                # Ref: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence
+                _SC_OPERATORS, _LOG_OPERATORS, _COMP_OPERATORS, _OPERATORS))
+        return _cached
 
     def _operator(self, op, left_val, right_expr, expr, local_vars, allow_recursion):
         if op in ('||', '&&'):
```
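The rewritten `_all_operators` memoizes its result through a mutable default argument, which Python creates once per function object, so the operator chain is built only on the first call. The same trick in isolation (the operator tables here are placeholders, not the real ones):

```python
import itertools

def _all_operators(_cached=[]):
    if not _cached:
        # stands in for chaining _SC_OPERATORS, _LOG_OPERATORS, _COMP_OPERATORS, _OPERATORS
        _cached.extend(itertools.chain([('&&', None)], [('<', None)], [('+', None)]))
    return _cached

first = _all_operators()
assert _all_operators() is first  # built once, reused on every later call
```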
```diff
@@ -416,7 +454,7 @@ class JSInterpreter(object):
         except Exception as e:
             if allow_undefined:
                 return JS_Undefined
-            raise self.Exception('Cannot get index {idx:.100}'.format(**locals()), expr=repr(obj), cause=e)
+            raise self.Exception('Cannot get index {idx!r:.100}'.format(**locals()), expr=repr(obj), cause=e)
 
     def _dump(self, obj, namespace):
         try:
```
```diff
@@ -438,6 +476,7 @@ class JSInterpreter(object):
     _FINALLY_RE = re.compile(r'finally\s*\{')
     _SWITCH_RE = re.compile(r'switch\s*\(')
 
+    @Debugger.wrap_interpreter
     def interpret_statement(self, stmt, local_vars, allow_recursion=100):
         if allow_recursion < 0:
             raise self.Exception('Recursion limit reached')
```
```diff
@@ -511,7 +550,6 @@ class JSInterpreter(object):
             expr = self._dump(inner, local_vars) + outer
 
         if expr.startswith('('):
-
             m = re.match(r'\((?P<d>[a-z])%(?P<e>[a-z])\.length\+(?P=e)\.length\)%(?P=e)\.length', expr)
             if m:
                 # short-cut eval of frequently used `(d%e.length+e.length)%e.length`, worth ~6% on `pytest -k test_nsig`
```
```diff
@@ -693,7 +731,7 @@ class JSInterpreter(object):
                 (?P<op>{_OPERATOR_RE})?
                 =(?!=)(?P<expr>.*)$
             )|(?P<return>
-                (?!if|return|true|false|null|undefined)(?P<name>{_NAME_RE})$
+                (?!if|return|true|false|null|undefined|NaN|Infinity)(?P<name>{_NAME_RE})$
             )|(?P<indexing>
                 (?P<in>{_NAME_RE})\[(?P<idx>.+)\]$
             )|(?P<attribute>
```
```diff
@@ -727,11 +765,12 @@ class JSInterpreter(object):
             raise JS_Break()
         elif expr == 'continue':
             raise JS_Continue()
-
         elif expr == 'undefined':
             return JS_Undefined, should_return
         elif expr == 'NaN':
             return _NaN, should_return
+        elif expr == 'Infinity':
+            return _Infinity, should_return
 
         elif md.get('return'):
             return local_vars[m.group('name')], should_return
```
```diff
@@ -760,18 +799,28 @@ class JSInterpreter(object):
             right_expr = separated.pop()
             # handle operators that are both unary and binary, minimal BODMAS
             if op in ('+', '-'):
+                # simplify/adjust consecutive instances of these operators
                 undone = 0
                 while len(separated) > 1 and not separated[-1].strip():
                     undone += 1
                     separated.pop()
                 if op == '-' and undone % 2 != 0:
                     right_expr = op + right_expr
+                elif op == '+':
+                    while len(separated) > 1 and separated[-1].strip() in self.OP_CHARS:
+                        right_expr = separated.pop() + right_expr
+                # hanging op at end of left => unary + (strip) or - (push right)
                 left_val = separated[-1]
                 for dm_op in ('*', '%', '/', '**'):
                     bodmas = tuple(self._separate(left_val, dm_op, skip_delims=skip_delim))
                     if len(bodmas) > 1 and not bodmas[-1].strip():
                         expr = op.join(separated) + op + right_expr
-                        right_expr = None
+                        if len(separated) > 1:
+                            separated.pop()
+                            right_expr = op.join((left_val, right_expr))
+                        else:
+                            separated = [op.join((left_val, right_expr))]
+                            right_expr = None
                         break
             if right_expr is None:
                 continue
```
```diff
@@ -797,6 +846,8 @@ class JSInterpreter(object):
 
         def eval_method():
             if (variable, member) == ('console', 'debug'):
+                if Debugger.ENABLED:
+                    Debugger.write(self.interpret_expression('[{}]'.format(arg_str), local_vars, allow_recursion))
                 return
             types = {
                 'String': compat_str,
```
```diff
@@ -2406,7 +2406,7 @@ class ExtractorError(YoutubeDLError):
         """ tb, if given, is the original traceback (so that it can be printed out).
         If expected is set, this is a normal error message and most likely not a bug in youtube-dl.
         """
-
+        self.orig_msg = msg
         if sys.exc_info()[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
             expected = True
         if video_id is not None:
```
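What the new attribute is for: callers such as the jsinterp `Debugger` can recover the undecorated message even after `ExtractorError` has formatted it. A small illustration, assuming this change is applied (the URL is just sample data):

```python
from youtube_dl.utils import ExtractorError

try:
    raise ExtractorError('Unsupported URL: https://example.com', expected=True)
except ExtractorError as e:
    assert e.orig_msg == 'Unsupported URL: https://example.com'
    assert e.orig_msg in e.args[0]  # the formatted message still contains the original
```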