Compare commits
6 Commits: dcde3c9f4d ... 3432f0a00a

Author | SHA1 | Date
---|---|---
 | 3432f0a00a |
 | 37cea84f77 |
 | 4652109643 |
 | 3c466186a8 |
 | c85000591b |
 | 1b599af1db |

test/test_utils.py

@@ -14,9 +14,11 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import io
 import itertools
 import json
+import types
 import xml.etree.ElementTree
 
 from youtube_dl.utils import (
+    _UnsafeExtensionError,
     age_restricted,
     args_to_str,
     base_url,
@@ -270,6 +272,27 @@ class TestUtil(unittest.TestCase):
             expand_path('~/%s' % env('YOUTUBE_DL_EXPATH_PATH')),
             '%s/expanded' % compat_getenv('HOME'))
 
+    _uncommon_extensions = [
+        ('exe', 'abc.exe.ext'),
+        ('de', 'abc.de.ext'),
+        ('../.mp4', None),
+        ('..\\.mp4', None),
+    ]
+
+    def assertUnsafeExtension(self, ext=None):
+        assert_raises = self.assertRaises(_UnsafeExtensionError)
+        assert_raises.ext = ext
+        orig_exit = assert_raises.__exit__
+
+        def my_exit(self_, exc_type, exc_val, exc_tb):
+            did_raise = orig_exit(exc_type, exc_val, exc_tb)
+            if did_raise and assert_raises.ext is not None:
+                self.assertEqual(assert_raises.ext, assert_raises.exception.extension, 'Unsafe extension not as unexpected')
+            return did_raise
+
+        assert_raises.__exit__ = types.MethodType(my_exit, assert_raises)
+        return assert_raises
+
     def test_prepend_extension(self):
         self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
         self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
@@ -278,6 +301,19 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
         self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
 
+        # Test uncommon extensions
+        self.assertEqual(prepend_extension('abc.ext', 'bin'), 'abc.bin.ext')
+        for ext, result in self._uncommon_extensions:
+            with self.assertUnsafeExtension(ext):
+                prepend_extension('abc', ext)
+            if result:
+                self.assertEqual(prepend_extension('abc.ext', ext, 'ext'), result)
+            else:
+                with self.assertUnsafeExtension(ext):
+                    prepend_extension('abc.ext', ext, 'ext')
+            with self.assertUnsafeExtension(ext):
+                prepend_extension('abc.unexpected_ext', ext, 'ext')
+
     def test_replace_extension(self):
         self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
         self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
@@ -286,6 +322,16 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
         self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
 
+        # Test uncommon extensions
+        self.assertEqual(replace_extension('abc.ext', 'bin'), 'abc.unknown_video')
+        for ext, _ in self._uncommon_extensions:
+            with self.assertUnsafeExtension(ext):
+                replace_extension('abc', ext)
+            with self.assertUnsafeExtension(ext):
+                replace_extension('abc.ext', ext, 'ext')
+            with self.assertUnsafeExtension(ext):
+                replace_extension('abc.unexpected_ext', ext, 'ext')
+
     def test_subtitles_filename(self):
         self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt'), 'abc.en.vtt')
         self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt', 'ext'), 'abc.en.vtt')
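The new tests above pin down one guarantee: an extension containing path separators, or one outside the allow-list, makes prepend_extension/replace_extension raise _UnsafeExtensionError instead of ending up in a filename. A minimal sketch of that behaviour, assuming the youtube_dl package from this branch is importable:

from youtube_dl.utils import _UnsafeExtensionError, prepend_extension, replace_extension

print(prepend_extension('abc.ext', 'temp'))   # 'abc.temp.ext' (unchanged behaviour)
print(replace_extension('abc.ext', 'bin'))    # 'abc.unknown_video' ('bin' is rewritten)

try:
    prepend_extension('abc.ext', '../.mp4')   # path traversal attempt
except _UnsafeExtensionError as error:
    print('rejected:', error.extension)       # rejected: ../.mp4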
youtube_dl/YoutubeDL.py

@@ -7,6 +7,7 @@ import collections
 import copy
 import datetime
 import errno
+import functools
 import io
 import itertools
 import json
@@ -53,6 +54,7 @@ from .compat import (
     compat_urllib_request_DataHandler,
 )
 from .utils import (
+    _UnsafeExtensionError,
     age_restricted,
     args_to_str,
     bug_reports_message,
@@ -129,6 +131,20 @@ if compat_os_name == 'nt':
     import ctypes
 
 
+def _catch_unsafe_file_extension(func):
+    @functools.wraps(func)
+    def wrapper(self, *args, **kwargs):
+        try:
+            return func(self, *args, **kwargs)
+        except _UnsafeExtensionError as error:
+            self.report_error(
+                '{0} found; to avoid damaging your system, this value is disallowed.'
+                ' If you believe this is an error{1}'.format(
+                    error.message, bug_reports_message(',')))
+
+    return wrapper
+
+
 class YoutubeDL(object):
     """YoutubeDL class.
 
@@ -1925,6 +1941,7 @@ class YoutubeDL(object):
         if self.params.get('forcejson', False):
            self.to_stdout(json.dumps(self.sanitize_info(info_dict)))
 
+    @_catch_unsafe_file_extension
     def process_info(self, info_dict):
         """Process a single resolved IE result."""
 
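_catch_unsafe_file_extension converts the exception into a normal report_error() call, so a rejected extension aborts only the current item instead of crashing the run. The same wrap-and-report pattern in isolation, with a made-up FakeDownloader standing in for YoutubeDL (illustration only; report_error here simply prints):

import functools

from youtube_dl.utils import _UnsafeExtensionError


def catch_unsafe_extension(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except _UnsafeExtensionError as error:
            self.report_error('unsafe file extension %r; this value is disallowed' % error.extension)
    return wrapper


class FakeDownloader(object):
    def report_error(self, message):
        print('ERROR:', message)

    @catch_unsafe_extension
    def process_info(self, info_dict):
        raise _UnsafeExtensionError(info_dict['ext'])


FakeDownloader().process_info({'ext': '../.mp4'})  # prints the error instead of raising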
@@ -2371,60 +2388,38 @@ class YoutubeDL(object):
         return res
 
     def _format_note(self, fdict):
-        res = ''
-        if fdict.get('ext') in ['f4f', 'f4m']:
-            res += '(unsupported) '
+        note_parts = []
+        if fdict.get('ext') in ('f4f', 'f4m'):
+            note_parts.append('(unsupported)')
         if fdict.get('language'):
-            if res:
-                res += ' '
-            res += '[%s] ' % fdict['language']
-        if fdict.get('format_note') is not None:
-            res += fdict['format_note'] + ' '
+            note_parts.append(f'[{fdict["language"]}]')
+        if fdict.get('format_note'):
+            note_parts.append(fdict['format_note'])
         if fdict.get('tbr') is not None:
-            res += '%4dk ' % fdict['tbr']
+            note_parts.append('%4dk' % fdict['tbr'])
         if fdict.get('container') is not None:
-            if res:
-                res += ', '
-            res += '%s container' % fdict['container']
-        if (fdict.get('vcodec') is not None
-                and fdict.get('vcodec') != 'none'):
-            if res:
-                res += ', '
-            res += fdict['vcodec']
-            if fdict.get('vbr') is not None:
-                res += '@'
-        elif fdict.get('vbr') is not None and fdict.get('abr') is not None:
-            res += 'video@'
+            note_parts.append('%s container' % fdict['container'])
+        if fdict.get('vcodec') not in (None, 'none'):
+            note_parts.append(fdict['vcodec'] + ('@' if fdict.get('vbr') else ''))
+        elif fdict.get('vbr') is not None:
+            note_parts.append('video@')
         if fdict.get('vbr') is not None:
-            res += '%4dk' % fdict['vbr']
+            note_parts.append('%4dk' % fdict['vbr'])
         if fdict.get('fps') is not None:
-            if res:
-                res += ', '
-            res += '%sfps' % fdict['fps']
+            note_parts.append('%sfps' % fdict['fps'])
         if fdict.get('acodec') is not None:
-            if res:
-                res += ', '
-            if fdict['acodec'] == 'none':
-                res += 'video only'
-            else:
-                res += '%-5s' % fdict['acodec']
+            note_parts.append('video only' if fdict['acodec'] == 'none' else '%-5s' % fdict['acodec'])
         elif fdict.get('abr') is not None:
-            if res:
-                res += ', '
-            res += 'audio'
+            note_parts.append('audio')
         if fdict.get('abr') is not None:
-            res += '@%3dk' % fdict['abr']
+            note_parts.append('@%3dk' % fdict['abr'])
         if fdict.get('asr') is not None:
-            res += ' (%5dHz)' % fdict['asr']
+            note_parts.append('(%5dHz)' % fdict['asr'])
         if fdict.get('filesize') is not None:
-            if res:
-                res += ', '
-            res += format_bytes(fdict['filesize'])
+            note_parts.append(format_bytes(fdict['filesize']))
         elif fdict.get('filesize_approx') is not None:
-            if res:
-                res += ', '
-            res += '~' + format_bytes(fdict['filesize_approx'])
-        return res
+            note_parts.append('~' + format_bytes(fdict['filesize_approx']))
+        return ' '.join(note_parts)
 
     def list_formats(self, info_dict):
         formats = info_dict.get('formats', [info_dict])
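The note is now collected into note_parts and joined with single spaces, so the separators differ slightly from the old comma-based concatenation. A quick sketch with a made-up format dict (the private _format_note method is called here purely for illustration):

from youtube_dl import YoutubeDL

fdict = {'ext': 'mp4', 'format_note': 'DASH video', 'tbr': 1155,
         'vcodec': 'avc1.4d401f', 'fps': 30, 'acodec': 'none'}
# With the note_parts rewrite this should yield:
#   'DASH video 1155k avc1.4d401f 30fps video only'
print(YoutubeDL({'quiet': True})._format_note(fdict))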
youtube_dl/__init__.py

@@ -21,6 +21,7 @@ from .compat import (
     workaround_optparse_bug9161,
 )
 from .utils import (
+    _UnsafeExtensionError,
     DateRange,
     decodeOption,
     DEFAULT_OUTTMPL,
@@ -173,6 +174,9 @@ def _real_main(argv=None):
     if opts.ap_mso and opts.ap_mso not in MSO_INFO:
         parser.error('Unsupported TV Provider, use --ap-list-mso to get a list of supported TV Providers')
 
+    if opts.no_check_extensions:
+        _UnsafeExtensionError.lenient = True
+
     def parse_retries(retries):
         if retries in ('inf', 'infinite'):
             parsed_retries = float('inf')
youtube_dl/extractor/youtube.py

@@ -87,6 +87,7 @@ class YoutubeBaseInfoExtractor(InfoExtractor):
         If _LOGIN_REQUIRED is set and no authentication was provided, an error is raised.
         """
         username, password = self._get_login_info()
+
         # No authentication to be performed
         if username is None:
             if self._LOGIN_REQUIRED and self._downloader.params.get('cookiefile') is None:
@@ -129,22 +130,21 @@ class YoutubeBaseInfoExtractor(InfoExtractor):
             self._downloader.report_warning(message)
 
         lookup_req = [
-            username,
-            None, [], None, 'US', None, None, 2, False, True,
-            [
-                None, None,
-                [2, 1, None, 1,
-                 'https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn',
-                 None, [], 4],
-                1, [None, None, []], None, None, None, True
-            ],
+            username, None, [], None, 'US', None, None, 2, False, True,
+            [None, None, [2, 1, None, 1,
+             'https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn',
+             None, [], 4], 1, [None, None, []], None, None, None, True],
             username,
         ]
 
-        lookup_results = req(
-            self._LOOKUP_URL, lookup_req,
-            'Looking up account info', 'Unable to look up account info')
+        # --- Change 1: extract a function to improve readability ---
+        def perform_lookup(req):
+            return self._download_json(
+                self._LOOKUP_URL, req,
+                'Looking up account info', 'Unable to look up account info')
+
+        lookup_results = perform_lookup(lookup_req)
 
         if lookup_results is False:
             return False
@@ -154,12 +154,10 @@ class YoutubeBaseInfoExtractor(InfoExtractor):
             return False
 
         challenge_req = [
-            user_hash,
-            None, 1, None, [1, None, None, None, [password, None, True]],
-            [
-                None, None, [2, 1, None, 1, 'https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn', None, [], 4],
-                1, [None, None, []], None, None, None, True
-            ]]
+            user_hash, None, 1, None, [1, None, None, None, [password, None, True]],
+            [None, None, [2, 1, None, 1,
+             'https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn',
+             None, [], 4], 1, [None, None, []], None, None, None, True]]
 
         challenge_results = req(
             self._CHALLENGE_URL, challenge_req,
youtube_dl/options.py

@@ -533,6 +533,10 @@ def parseOpts(overrideArguments=None):
         '--no-check-certificate',
         action='store_true', dest='no_check_certificate', default=False,
         help='Suppress HTTPS certificate validation')
+    workarounds.add_option(
+        '--no-check-extensions',
+        action='store_true', dest='no_check_extensions', default=False,
+        help='Suppress file extension validation')
     workarounds.add_option(
         '--prefer-insecure',
         '--prefer-unsecure', action='store_true', dest='prefer_insecure',
youtube_dl/utils.py

@@ -1717,21 +1717,6 @@ TIMEZONE_NAMES = {
     'PST': -8, 'PDT': -7  # Pacific
 }
 
-
-KNOWN_EXTENSIONS = (
-    'mp4', 'm4a', 'm4p', 'm4b', 'm4r', 'm4v', 'aac',
-    'flv', 'f4v', 'f4a', 'f4b',
-    'webm', 'ogg', 'ogv', 'oga', 'ogx', 'spx', 'opus',
-    'mkv', 'mka', 'mk3d',
-    'avi', 'divx',
-    'mov',
-    'asf', 'wmv', 'wma',
-    '3gp', '3g2',
-    'mp3',
-    'flac',
-    'ape',
-    'wav',
-    'f4f', 'f4m', 'm3u8', 'smil')
-
 # needed for sanitizing filenames in restricted mode
 ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
                         itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
@@ -3959,19 +3944,22 @@ def parse_duration(s):
     return duration
 
 
-def prepend_extension(filename, ext, expected_real_ext=None):
+def _change_extension(prepend, filename, ext, expected_real_ext=None):
     name, real_ext = os.path.splitext(filename)
-    return (
-        '{0}.{1}{2}'.format(name, ext, real_ext)
-        if not expected_real_ext or real_ext[1:] == expected_real_ext
-        else '{0}.{1}'.format(filename, ext))
+    sanitize_extension = _UnsafeExtensionError.sanitize_extension
+
+    if not expected_real_ext or real_ext.partition('.')[0::2] == ('', expected_real_ext):
+        filename = name
+        if prepend and real_ext:
+            sanitize_extension(ext, prepend=prepend)
+            return ''.join((filename, '.', ext, real_ext))
+
+    # Mitigate path traversal and file impersonation attacks
+    return '.'.join((filename, sanitize_extension(ext)))
 
 
-def replace_extension(filename, ext, expected_real_ext=None):
-    name, real_ext = os.path.splitext(filename)
-    return '{0}.{1}'.format(
-        name if not expected_real_ext or real_ext[1:] == expected_real_ext else filename,
-        ext)
+prepend_extension = functools.partial(_change_extension, True)
+replace_extension = functools.partial(_change_extension, False)
 
 
 def check_executable(exe, args=[]):
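prepend_extension and replace_extension are now thin functools.partial wrappers over _change_extension, which routes every extension through _UnsafeExtensionError.sanitize_extension. A sketch of the resulting behaviour, with values worked out from the code above (assuming this branch is importable):

from youtube_dl.utils import prepend_extension, replace_extension

# expected_real_ext matches: the real extension is kept (prepend) or dropped (replace)
print(prepend_extension('abc.ext', 'temp', 'ext'))   # 'abc.temp.ext'
print(replace_extension('abc.ext', 'temp', 'ext'))   # 'abc.temp'

# expected_real_ext does not match: the full name is kept and ext is appended
print(prepend_extension('abc.unexpected_ext', 'temp', 'ext'))  # 'abc.unexpected_ext.temp'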
@@ -6002,81 +5990,46 @@ def parse_m3u8_attributes(attrib):
 def urshift(val, n):
     return val >> n if val >= 0 else (val + 0x100000000) >> n
 
 
 # Based on png2str() written by @gdkchan and improved by @yokrysty
 # Originally posted at https://github.com/ytdl-org/youtube-dl/issues/9706
 def decode_png(png_data):
     # Reference: https://www.w3.org/TR/PNG/
-    header = png_data[8:]
-
-    if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a' or header[4:8] != b'IHDR':
+    if png_data[:8] != b'\x89PNG\x0d\x0a\x1a\x0a':
         raise IOError('Not a valid PNG file.')
 
-    int_map = {1: '>B', 2: '>H', 4: '>I'}
-    unpack_integer = lambda x: compat_struct_unpack(int_map[len(x)], x)[0]
+    def unpack_integer(data):
+        return compat_struct_unpack(f'>{int_map[len(data)]}', data)[0]
 
+    int_map = {1: 'B', 2: 'H', 4: 'I'}
+    header = png_data[8:]
     chunks = []
 
     while header:
         length = unpack_integer(header[:4])
-        header = header[4:]
-
-        chunk_type = header[:4]
-        header = header[4:]
-
-        chunk_data = header[:length]
-        header = header[length:]
-
-        header = header[4:]  # Skip CRC
-
-        chunks.append({
-            'type': chunk_type,
-            'length': length,
-            'data': chunk_data
-        })
-
-    ihdr = chunks[0]['data']
-
-    width = unpack_integer(ihdr[:4])
-    height = unpack_integer(ihdr[4:8])
-
-    idat = b''
-
-    for chunk in chunks:
-        if chunk['type'] == b'IDAT':
-            idat += chunk['data']
+        chunk_type, chunk_data, header = header[4:8], header[8:8 + length], header[8 + length + 4:]
+        chunks.append({'type': chunk_type, 'data': chunk_data})
+
+    if not (ihdr := next((c["data"] for c in chunks if c["type"] == b'IHDR'), None)):
+        raise IOError("Unable to read PNG header.")
+
+    width, height = unpack_integer(ihdr[:4]), unpack_integer(ihdr[4:8])
+    idat = b''.join(c['data'] for c in chunks if c['type'] == b'IDAT')
 
     if not idat:
         raise IOError('Unable to read PNG data.')
 
     decompressed_data = bytearray(zlib.decompress(idat))
 
     stride = width * 3
-    pixels = []
+    pixels = [[] for _ in range(height)]
 
-    def _get_pixel(idx):
-        x = idx % stride
-        y = idx // stride
-        return pixels[y][x]
+    def _get_pixel(x, y):
+        return pixels[y][x] if x >= 0 and y >= 0 else 0
 
     for y in range(height):
-        basePos = y * (1 + stride)
-        filter_type = decompressed_data[basePos]
-
-        current_row = []
-
-        pixels.append(current_row)
-
+        filter_type = decompressed_data[y * (1 + stride)]
         for x in range(stride):
-            color = decompressed_data[1 + basePos + x]
-            basex = y * stride + x
-            left = 0
-            up = 0
-
-            if x > 2:
-                left = _get_pixel(basex - 3)
-            if y > 0:
-                up = _get_pixel(basex - stride)
+            color = decompressed_data[1 + y * (1 + stride) + x]
+            left, up = _get_pixel(x - 3, y), _get_pixel(x, y - 1)
 
             if filter_type == 1:  # Sub
                 color = (color + left) & 0xff
@@ -6085,31 +6038,15 @@ def decode_png(png_data):
             elif filter_type == 3:  # Average
                 color = (color + ((left + up) >> 1)) & 0xff
             elif filter_type == 4:  # Paeth
-                a = left
-                b = up
-                c = 0
-
-                if x > 2 and y > 0:
-                    c = _get_pixel(basex - stride - 3)
-
+                a, b, c = left, up, _get_pixel(x - 3, y - 1)
                 p = a + b - c
+                pa, pb, pc = abs(p - a), abs(p - b), abs(p - c)
+                color = (color + (a if pa <= pb and pa <= pc else b if pb <= pc else c)) & 0xff
 
-                pa = abs(p - a)
-                pb = abs(p - b)
-                pc = abs(p - c)
-
-                if pa <= pb and pa <= pc:
-                    color = (color + a) & 0xff
-                elif pb <= pc:
-                    color = (color + b) & 0xff
-                else:
-                    color = (color + c) & 0xff
-
-            current_row.append(color)
+            pixels[y].append(color)
 
     return width, height, pixels
 
 
 def write_xattr(path, key, value):
     # This mess below finds the best xattr tool for the job
     try:
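The Paeth branch above is the standard PNG Paeth predictor collapsed into one expression: pick whichever neighbour (left, up, upper-left) is closest to p = left + up - upper_left, preferring left, then up. Restated stand-alone for illustration (paeth_predict is a hypothetical helper, not part of the diff):

def paeth_predict(a, b, c):  # a = left, b = up, c = upper-left
    p = a + b - c
    pa, pb, pc = abs(p - a), abs(p - b), abs(p - c)
    return a if pa <= pb and pa <= pc else b if pb <= pc else c


assert paeth_predict(10, 0, 0) == 10   # left is closest to p, so left wins
assert paeth_predict(0, 20, 5) == 20   # otherwise up, then upper-left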
@@ -6561,3 +6498,138 @@ def join_nonempty(*values, **kwargs):
     if from_dict is not None:
         values = (traverse_obj(from_dict, variadic(v)) for v in values)
     return delim.join(map(compat_str, filter(None, values)))
+
+
+class Namespace(object):
+    """Immutable namespace"""
+
+    def __init__(self, **kw_attr):
+        self.__dict__.update(kw_attr)
+
+    def __iter__(self):
+        return iter(self.__dict__.values())
+
+    @property
+    def items_(self):
+        return self.__dict__.items()
+
+
+MEDIA_EXTENSIONS = Namespace(
+    common_video=('avi', 'flv', 'mkv', 'mov', 'mp4', 'webm'),
+    video=('3g2', '3gp', 'f4v', 'mk3d', 'divx', 'mpg', 'ogv', 'm4v', 'wmv'),
+    common_audio=('aiff', 'alac', 'flac', 'm4a', 'mka', 'mp3', 'ogg', 'opus', 'wav'),
+    audio=('aac', 'ape', 'asf', 'f4a', 'f4b', 'm4b', 'm4p', 'm4r', 'oga', 'ogx', 'spx', 'vorbis', 'wma', 'weba'),
+    thumbnails=('jpg', 'png', 'webp'),
+    # storyboards=('mhtml', ),
+    subtitles=('srt', 'vtt', 'ass', 'lrc', 'ttml'),
+    manifests=('f4f', 'f4m', 'm3u8', 'smil', 'mpd'),
+)
+MEDIA_EXTENSIONS.video = MEDIA_EXTENSIONS.common_video + MEDIA_EXTENSIONS.video
+MEDIA_EXTENSIONS.audio = MEDIA_EXTENSIONS.common_audio + MEDIA_EXTENSIONS.audio
+
+KNOWN_EXTENSIONS = (
+    MEDIA_EXTENSIONS.video + MEDIA_EXTENSIONS.audio
+    + MEDIA_EXTENSIONS.manifests
+)
+
+
+class _UnsafeExtensionError(Exception):
+    """
+    Mitigation exception for unwanted file overwrite/path traversal
+
+    Ref: https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-79w7-vh3h-8g4j
+    """
+    _ALLOWED_EXTENSIONS = frozenset(itertools.chain(
+        (  # internal
+            'description',
+            'json',
+            'meta',
+            'orig',
+            'part',
+            'temp',
+            'uncut',
+            'unknown_video',
+            'ytdl',
+        ),
+        # video
+        MEDIA_EXTENSIONS.video, (
+            'avif',
+            'ismv',
+            'm2ts',
+            'm4s',
+            'mng',
+            'mpeg',
+            'qt',
+            'swf',
+            'ts',
+            'vp9',
+            'wvm',
+        ),
+        # audio
+        MEDIA_EXTENSIONS.audio, (
+            'isma',
+            'mid',
+            'mpga',
+            'ra',
+        ),
+        # image
+        MEDIA_EXTENSIONS.thumbnails, (
+            'bmp',
+            'gif',
+            'ico',
+            'heic',
+            'jng',
+            'jpeg',
+            'jxl',
+            'svg',
+            'tif',
+            'wbmp',
+        ),
+        # subtitle
+        MEDIA_EXTENSIONS.subtitles, (
+            'dfxp',
+            'fs',
+            'ismt',
+            'sami',
+            'scc',
+            'ssa',
+            'tt',
+        ),
+        # others
+        MEDIA_EXTENSIONS.manifests,
+        (
+            # not used in yt-dl
+            # *MEDIA_EXTENSIONS.storyboards,
+            # 'desktop',
+            # 'ism',
+            # 'm3u',
+            # 'sbv',
+            # 'swp',
+            # 'url',
+            # 'webloc',
+            # 'xml',
+        )))
+
+    def __init__(self, extension):
+        super(_UnsafeExtensionError, self).__init__('unsafe file extension: {0!r}'.format(extension))
+        self.extension = extension
+
+    # support --no-check-extensions
+    lenient = False
+
+    @classmethod
+    def sanitize_extension(cls, extension, **kwargs):
+        # ... /, *, prepend=False
+        prepend = kwargs.get('prepend', False)
+
+        if '/' in extension or '\\' in extension:
+            raise cls(extension)
+
+        if not prepend:
+            last = extension.rpartition('.')[-1]
+            if last == 'bin':
+                extension = last = 'unknown_video'
+            if not (cls.lenient or last.lower() in cls._ALLOWED_EXTENSIONS):
+                raise cls(extension)
+
+        return extension
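sanitize_extension is the single choke point: path separators are always rejected, 'bin' is rewritten to 'unknown_video' when not prepending, and anything outside _ALLOWED_EXTENSIONS raises unless the lenient flag (set by --no-check-extensions) is on. A quick sketch, assuming this branch of youtube_dl is importable:

from youtube_dl.utils import _UnsafeExtensionError

sanitize = _UnsafeExtensionError.sanitize_extension

print(sanitize('mp4'))    # 'mp4' (allowed media extension)
print(sanitize('bin'))    # 'unknown_video' ('bin' is rewritten when not prepending)

try:
    sanitize('../.mp4')   # path separators are always rejected
except _UnsafeExtensionError as error:
    print('rejected:', error.extension)

_UnsafeExtensionError.lenient = True   # what --no-check-extensions sets
print(sanitize('weird_ext'))           # now passes the allow-list check unchanged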