2015-01-23 18:38:48 -06:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2023-07-04 16:40:56 -05:00
|
|
|
import os
|
2018-03-24 04:29:03 -05:00
|
|
|
import re
|
2015-01-23 18:38:48 -06:00
|
|
|
import subprocess
|
2016-02-19 12:29:24 -06:00
|
|
|
import sys
|
2023-07-04 16:40:56 -05:00
|
|
|
import tempfile
|
2018-03-24 04:29:03 -05:00
|
|
|
import time
|
2015-01-23 18:38:48 -06:00
|
|
|
|
|
|
|
from .common import FileDownloader
|
2017-03-04 21:24:29 -06:00
|
|
|
from ..compat import (
|
|
|
|
compat_setenv,
|
|
|
|
compat_str,
|
2024-03-02 09:17:09 -06:00
|
|
|
compat_subprocess_Popen,
|
2017-03-04 21:24:29 -06:00
|
|
|
)
|
2024-03-03 06:38:00 -06:00
|
|
|
|
|
|
|
try:
|
|
|
|
from ..postprocessor.ffmpeg import FFmpegPostProcessor, EXT_TO_OUT_FORMATS
|
|
|
|
except ImportError:
|
|
|
|
FFmpegPostProcessor = None
|
|
|
|
|
2015-01-23 18:38:48 -06:00
|
|
|
from ..utils import (
|
2015-09-04 16:06:28 -05:00
|
|
|
cli_option,
|
|
|
|
cli_valueless_option,
|
|
|
|
cli_bool_option,
|
|
|
|
cli_configuration_args,
|
2015-01-23 18:38:48 -06:00
|
|
|
encodeFilename,
|
2015-04-25 17:33:43 -05:00
|
|
|
encodeArgument,
|
2016-02-19 12:29:24 -06:00
|
|
|
handle_youtubedl_headers,
|
2016-02-20 00:58:25 -06:00
|
|
|
check_executable,
|
2017-02-02 01:05:16 -06:00
|
|
|
is_outdated_version,
|
2021-01-09 06:26:12 -06:00
|
|
|
process_communicate_or_kill,
|
2023-07-04 16:40:56 -05:00
|
|
|
T,
|
|
|
|
traverse_obj,
|
2015-01-23 18:38:48 -06:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
class ExternalFD(FileDownloader):
    """Base class for downloaders that delegate to an external program.

    Subclasses either implement _make_cmd() (command-line construction,
    consumed by the default _call_downloader()) or override
    _call_downloader() entirely.
    """

    def real_download(self, filename, info_dict):
        """Download info_dict['url'] to filename via the external program.

        Returns True on success, False otherwise.  Any temporary cookies
        file created by _write_cookies() is removed afterwards.
        """
        self.report_destination(filename)
        tmpfilename = self.temp_name(filename)
        # _write_cookies() may set this to the path of a temporary cookies
        # file; reset it for each download so stale paths are never reused
        self._cookies_tempfile = None

        try:
            started = time.time()
            retval = self._call_downloader(tmpfilename, info_dict)
        except KeyboardInterrupt:
            if not info_dict.get('is_live'):
                raise
            # Live stream downloading cancellation should be considered as
            # correct and expected termination thus all postprocessing
            # should take place
            retval = 0
            self.to_screen('[%s] Interrupted by user' % self.get_basename())
        finally:
            # best-effort cleanup of the temporary cookies file
            if self._cookies_tempfile and os.path.isfile(self._cookies_tempfile):
                try:
                    os.remove(self._cookies_tempfile)
                except OSError:
                    self.report_warning(
                        'Unable to delete temporary cookies file "{0}"'.format(self._cookies_tempfile))

        if retval == 0:
            status = {
                'filename': filename,
                'status': 'finished',
                'elapsed': time.time() - started,
            }
            if filename != '-':
                fsize = os.path.getsize(encodeFilename(tmpfilename))
                self.to_screen('\r[%s] Downloaded %s bytes' % (self.get_basename(), fsize))
                self.try_rename(tmpfilename, filename)
                status.update({
                    'downloaded_bytes': fsize,
                    'total_bytes': fsize,
                })
            self._hook_progress(status)
            return True
        else:
            self.to_stderr('\n')
            self.report_error('%s exited with code %d' % (
                self.get_basename(), retval))
            return False

    @classmethod
    def get_basename(cls):
        # CurlFD -> 'curl', Aria2cFD -> 'aria2c', ...
        return cls.__name__[:-2].lower()

    @property
    def exe(self):
        # name/path of the external program, as passed by the user
        return self.params.get('external_downloader')

    @classmethod
    def available(cls):
        return check_executable(cls.get_basename(), [cls.AVAILABLE_OPT])

    @classmethod
    def supports(cls, info_dict):
        return info_dict['protocol'] in ('http', 'https', 'ftp', 'ftps')

    @classmethod
    def can_download(cls, info_dict):
        return cls.available() and cls.supports(info_dict)

    def _option(self, command_option, param):
        return cli_option(self.params, command_option, param)

    def _bool_option(self, command_option, param, true_value='true', false_value='false', separator=None):
        return cli_bool_option(self.params, command_option, param, true_value, false_value, separator)

    def _valueless_option(self, command_option, param, expected_value=True):
        return cli_valueless_option(self.params, command_option, param, expected_value)

    def _configuration_args(self, default=None):
        # None sentinel instead of a mutable default argument ([] would be
        # shared between calls); behavior is unchanged for all callers
        if default is None:
            default = []
        return cli_configuration_args(self.params, 'external_downloader_args', default)

    def _write_cookies(self):
        """Persist the cookie jar to disk for the external program.

        Returns the path of the cookies file (a temporary file when the
        jar has no filename of its own).
        """
        if not self.ydl.cookiejar.filename:
            tmp_cookies = tempfile.NamedTemporaryFile(suffix='.cookies', delete=False)
            tmp_cookies.close()
            self._cookies_tempfile = tmp_cookies.name
            self.to_screen('[download] Writing temporary cookies file to "{0}"'.format(self._cookies_tempfile))
        # real_download resets _cookies_tempfile; if it's None, save() will write to cookiejar.filename
        self.ydl.cookiejar.save(self._cookies_tempfile, ignore_discard=True, ignore_expires=True)
        return self.ydl.cookiejar.filename or self._cookies_tempfile

    def _call_downloader(self, tmpfilename, info_dict):
        """ Either overwrite this or implement _make_cmd """
        cmd = [encodeArgument(a) for a in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        p = subprocess.Popen(
            cmd, stderr=subprocess.PIPE)
        _, stderr = process_communicate_or_kill(p)
        if p.returncode != 0:
            # surface the program's error output only on failure
            self.to_stderr(stderr.decode('utf-8', 'replace'))
        return p.returncode

    @staticmethod
    def _header_items(info_dict):
        # (key, value) pairs from info_dict['http_headers'], if present
        return traverse_obj(
            info_dict, ('http_headers', T(dict.items), Ellipsis))
|
|
|
|
|
2015-01-23 18:38:48 -06:00
|
|
|
|
2015-01-24 06:33:45 -06:00
|
|
|
class CurlFD(ExternalFD):
    """Download via curl(1)."""
    AVAILABLE_OPT = '-V'

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the curl command line for this download."""
        args = [self.exe, '--location', '-o', tmpfilename, '--compressed']
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            args += ['--cookie', cookie_header]
        for name, value in self._header_items(info_dict):
            args += ['--header', '%s: %s' % (name, value)]

        args += self._bool_option('--continue-at', 'continuedl', '-', '0')
        args += self._valueless_option('--silent', 'noprogress')
        args += self._valueless_option('--verbose', 'verbose')
        args += self._option('--limit-rate', 'ratelimit')

        retry_args = self._option('--retry', 'retries')
        if len(retry_args) == 2:
            # curl wants a number; map "infinite" retries to INT32_MAX
            if retry_args[1] in ('inf', 'infinite'):
                retry_args[1] = '2147483647'
        args += retry_args

        args += self._option('--max-filesize', 'max_filesize')
        args += self._option('--interface', 'source_address')
        args += self._option('--proxy', 'proxy')
        args += self._valueless_option('--insecure', 'nocheckcertificate')
        args += self._configuration_args()
        args += ['--', info_dict['url']]
        return args

    def _call_downloader(self, tmpfilename, info_dict):
        """Run curl, leaving stderr attached to the terminal."""
        cmd = [encodeArgument(part) for part in self._make_cmd(tmpfilename, info_dict)]

        self._debug_cmd(cmd)

        # curl writes the progress to stderr so don't capture it.
        proc = subprocess.Popen(cmd)
        process_communicate_or_kill(proc)
        return proc.returncode
|
|
|
|
|
2015-01-24 06:33:45 -06:00
|
|
|
|
2015-08-06 12:12:58 -05:00
|
|
|
class AxelFD(ExternalFD):
    """Download via axel(1)."""
    AVAILABLE_OPT = '-V'

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the axel command line for this download."""
        args = [self.exe, '-o', tmpfilename]
        for name, value in self._header_items(info_dict):
            args += ['-H', '%s: %s' % (name, value)]
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            # NOTE(review): --max-redirect=0 presumably keeps the Cookie
            # header from being replayed across redirects — confirm
            args += ['-H', 'Cookie: {0}'.format(cookie_header), '--max-redirect=0']
        args += self._configuration_args()
        args += ['--', info_dict['url']]
        return args
|
|
|
|
|
|
|
|
|
2015-01-23 18:38:48 -06:00
|
|
|
class WgetFD(ExternalFD):
    """Download via wget(1)."""
    AVAILABLE_OPT = '--version'

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the wget command line for this download."""
        args = [self.exe, '-O', tmpfilename, '-nv', '--compression=auto']
        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            args += ['--load-cookies', self._write_cookies()]
        for name, value in self._header_items(info_dict):
            args += ['--header', '%s: %s' % (name, value)]
        args += self._option('--limit-rate', 'ratelimit')

        retry_args = self._option('--tries', 'retries')
        if len(retry_args) == 2:
            # wget uses 0 to mean "retry forever"
            if retry_args[1] in ('inf', 'infinite'):
                retry_args[1] = '0'
        args += retry_args

        args += self._option('--bind-address', 'source_address')
        proxy = self.params.get('proxy')
        if proxy:
            # wget has no single proxy flag; set both env-style variables
            for var in ('http_proxy', 'https_proxy'):
                args += ['--execute', '%s=%s' % (var, proxy)]
        args += self._valueless_option('--no-check-certificate', 'nocheckcertificate')
        args += self._configuration_args()
        args += ['--', info_dict['url']]
        return args
|
|
|
|
|
|
|
|
|
2015-01-24 06:33:45 -06:00
|
|
|
class Aria2cFD(ExternalFD):
    """Download via aria2c(1)."""
    AVAILABLE_OPT = '-v'

    @staticmethod
    def _aria2c_filename(fn):
        # aria2c strips leading/trailing spaces from bare names; prefixing
        # "./" to relative paths protects them (see links in _make_cmd)
        return fn if os.path.isabs(fn) else os.path.join('.', fn)

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the aria2c command line for this download.

        For fragmented downloads ('fragments' in info_dict) an URL list
        file named tmpfilename + '.frag.urls' is written and passed to
        aria2c via -i; otherwise the URL is passed directly.
        """
        cmd = [self.exe, '-c',
               '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
               '--http-accept-gzip=true', '--file-allocation=none', '-x16', '-j16', '-s16']
        if 'fragments' in info_dict:
            cmd += ['--allow-overwrite=true', '--allow-piece-length-change=true']
        else:
            cmd += ['--min-split-size', '1M']

        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            cmd += ['--load-cookies={0}'.format(self._write_cookies())]
        for key, val in self._header_items(info_dict):
            cmd += ['--header', '%s: %s' % (key, val)]
        cmd += self._configuration_args(['--max-connection-per-server', '4'])
        cmd += ['--out', os.path.basename(tmpfilename)]
        cmd += self._option('--max-overall-download-limit', 'ratelimit')
        cmd += self._option('--interface', 'source_address')
        cmd += self._option('--all-proxy', 'proxy')
        cmd += self._bool_option('--check-certificate', 'nocheckcertificate', 'false', 'true', '=')
        cmd += self._bool_option('--remote-time', 'updatetime', 'true', 'false', '=')
        cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
        # NOTE: _configuration_args() was previously called a second time
        # here (without a default), which duplicated any user-supplied
        # external_downloader_args on the command line; the call above
        # (with the '--max-connection-per-server 4' default) suffices.

        # aria2c strips out spaces from the beginning/end of filenames and paths.
        # We work around this issue by adding a "./" to the beginning of the
        # filename and relative path, and adding a "/" at the end of the path.
        # See: https://github.com/yt-dlp/yt-dlp/issues/276
        # https://github.com/ytdl-org/youtube-dl/issues/20312
        # https://github.com/aria2/aria2/issues/1373
        dn = os.path.dirname(tmpfilename)
        if dn:
            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
        if 'fragments' not in info_dict:
            # later option wins in aria2c: this "./"-protected --out
            # supersedes the plain one added earlier
            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
        cmd += ['--auto-file-renaming=false']
        if 'fragments' in info_dict:
            cmd += ['--file-allocation=none', '--uri-selector=inorder']
            url_list_file = '%s.frag.urls' % (tmpfilename, )
            url_list = []
            for frag_index, fragment in enumerate(info_dict['fragments']):
                fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index)
                url_list.append('%s\n\tout=%s' % (fragment['url'], self._aria2c_filename(fragment_filename)))
            stream, _ = self.sanitize_open(url_list_file, 'wb')
            stream.write('\n'.join(url_list).encode())
            stream.close()
            cmd += ['-i', self._aria2c_filename(url_list_file)]
        else:
            cmd += ['--', info_dict['url']]
        return cmd
|
|
|
|
|
2015-06-25 08:48:04 -05:00
|
|
|
|
2023-02-09 14:19:27 -06:00
|
|
|
class Aria2pFD(ExternalFD):
    ''' Aria2pFD class
    This class support to use aria2p as downloader.
    (Aria2p, a command-line tool and Python library to interact with an aria2c daemon process
    through JSON-RPC.)
    It can help you to get download progress more easily.
    To use aria2p as downloader, you need to install aria2c and aria2p, aria2p can download with pip.
    Then run aria2c in the background and enable with the --enable-rpc option.
    '''
    try:
        import aria2p
        __avail = True
    except ImportError:
        __avail = False

    @classmethod
    def available(cls):
        # availability is decided by whether aria2p imported at class
        # definition time
        return cls.__avail

    def _call_downloader(self, tmpfilename, info_dict):
        """Hand the URL to a running aria2c daemon and poll its progress."""
        rpc = self.aria2p.API(
            self.aria2p.Client(
                host='http://localhost',
                port=6800,
                secret=''
            )
        )

        options = {
            'min-split-size': '1M',
            'max-connection-per-server': 4,
            'auto-file-renaming': 'false',
        }
        options['dir'] = os.path.dirname(tmpfilename) or os.path.abspath('.')
        options['out'] = os.path.basename(tmpfilename)
        if self.ydl.cookiejar.get_cookie_header(info_dict['url']):
            options['load-cookies'] = self._write_cookies()
        options['header'] = ['{0}: {1}'.format(name, value)
                             for name, value in self._header_items(info_dict)]

        download = rpc.add_uris([info_dict['url']], options)
        status = {
            'status': 'downloading',
            'tmpfilename': tmpfilename,
        }
        started = time.time()
        while download.status in ['active', 'waiting']:
            # refresh the handle each iteration to get current counters
            download = rpc.get_download(download.gid)
            status.update({
                'downloaded_bytes': download.completed_length,
                'total_bytes': download.total_length,
                'elapsed': time.time() - started,
                'eta': download.eta.total_seconds(),
                'speed': download.download_speed,
            })
            self._hook_progress(status)
            time.sleep(.5)
        # falsy (success) exactly when the daemon reports 'complete'
        return download.status != 'complete'
|
|
|
|
|
|
|
|
|
2015-06-25 08:48:04 -05:00
|
|
|
class HttpieFD(ExternalFD):
    """Download via the httpie CLI client (executable name: "http")."""

    @classmethod
    def available(cls):
        # the executable is called "http", not "httpie"
        return check_executable('http', ['--version'])

    def _make_cmd(self, tmpfilename, info_dict):
        """Assemble the httpie command line for this download."""
        args = ['http', '--download', '--output', tmpfilename, info_dict['url']]
        for name, value in self._header_items(info_dict):
            args += ['%s:%s' % (name, value)]

        # httpie 3.1.0+ removes the Cookie header on redirect, so this should be safe for now. [1]
        # If we ever need cookie handling for redirects, we can export the cookiejar into a session. [2]
        # 1: https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq
        # 2: https://httpie.io/docs/cli/sessions
        cookie_header = self.ydl.cookiejar.get_cookie_header(info_dict['url'])
        if cookie_header:
            args += ['Cookie:%s' % cookie_header]
        return args
|
|
|
|
|
2016-02-19 12:29:24 -06:00
|
|
|
|
|
|
|
class FFmpegFD(ExternalFD):
    """Download via ffmpeg (or avconv), used e.g. for HLS/RTMP streams."""

    @classmethod
    def supports(cls, info_dict):
        # broader protocol support than the plain-HTTP downloaders above
        return info_dict['protocol'] in ('http', 'https', 'ftp', 'ftps', 'm3u8', 'rtsp', 'rtmp', 'mms', 'http_dash_segments')

    @classmethod
    def available(cls):
        # actual availability can only be confirmed for an instance
        return bool(FFmpegPostProcessor)

    def _call_downloader(self, tmpfilename, info_dict):
        """Build and run an ffmpeg/avconv command downloading to tmpfilename.

        Returns the process exit code (0 on success), or False when no
        ffmpeg/avconv executable is found.
        """
        # `downloader` means the parent `YoutubeDL`
        ffpp = FFmpegPostProcessor(downloader=self.ydl)
        if not ffpp.available:
            self.report_error('ffmpeg required for download but no ffmpeg (nor avconv) executable could be found. Please install one.')
            return False
        ffpp.check_version()

        # -y: overwrite the (already reserved) output file without asking
        args = [ffpp.executable, '-y']

        # forward at most one verbosity flag, 'quiet' taking precedence
        for log_level in ('quiet', 'verbose'):
            if self.params.get(log_level, False):
                args += ['-loglevel', log_level]
                break

        seekable = info_dict.get('_seekable')
        if seekable is not None:
            # setting -seekable prevents ffmpeg from guessing if the server
            # supports seeking(by adding the header `Range: bytes=0-`), which
            # can cause problems in some cases
            # https://github.com/ytdl-org/youtube-dl/issues/11800#issuecomment-275037127
            # http://trac.ffmpeg.org/ticket/6125#comment:10
            args += ['-seekable', '1' if seekable else '0']

        args += self._configuration_args()

        # start_time = info_dict.get('start_time') or 0
        # if start_time:
        #     args += ['-ss', compat_str(start_time)]
        # end_time = info_dict.get('end_time')
        # if end_time:
        #     args += ['-t', compat_str(end_time - start_time)]

        url = info_dict['url']
        # pass matching cookies to ffmpeg in Set-Cookie-like format
        cookies = self.ydl.cookiejar.get_cookies_for_url(url)
        if cookies:
            args.extend(['-cookies', ''.join(
                '{0}={1}; path={2}; domain={3};\r\n'.format(
                    cookie.name, cookie.value, cookie.path, cookie.domain)
                for cookie in cookies)])

        if info_dict.get('http_headers') and re.match(r'^https?://', url):
            # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
            # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
            headers = handle_youtubedl_headers(info_dict['http_headers'])
            args += [
                '-headers',
                ''.join('%s: %s\r\n' % (key, val) for key, val in headers.items())]

        env = None
        proxy = self.params.get('proxy')
        if proxy:
            # default to an http:// scheme when none was given
            if not re.match(r'^[\da-zA-Z]+://', proxy):
                proxy = 'http://%s' % proxy

            if proxy.startswith('socks'):
                self.report_warning(
                    '%s does not support SOCKS proxies. Downloading is likely to fail. '
                    'Consider adding --hls-prefer-native to your command.' % self.get_basename())

            # Since December 2015 ffmpeg supports -http_proxy option (see
            # http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
            # We could switch to the following code if we are able to detect version properly
            # args += ['-http_proxy', proxy]
            env = os.environ.copy()
            compat_setenv('HTTP_PROXY', proxy, env=env)
            compat_setenv('http_proxy', proxy, env=env)

        protocol = info_dict.get('protocol')

        if protocol == 'rtmp':
            # translate each RTMP field present in info_dict to the
            # corresponding librtmp option
            player_url = info_dict.get('player_url')
            page_url = info_dict.get('page_url')
            app = info_dict.get('app')
            play_path = info_dict.get('play_path')
            tc_url = info_dict.get('tc_url')
            flash_version = info_dict.get('flash_version')
            live = info_dict.get('rtmp_live', False)
            conn = info_dict.get('rtmp_conn')
            if player_url is not None:
                args += ['-rtmp_swfverify', player_url]
            if page_url is not None:
                args += ['-rtmp_pageurl', page_url]
            if app is not None:
                args += ['-rtmp_app', app]
            if play_path is not None:
                args += ['-rtmp_playpath', play_path]
            if tc_url is not None:
                args += ['-rtmp_tcurl', tc_url]
            if flash_version is not None:
                args += ['-rtmp_flashver', flash_version]
            if live:
                args += ['-rtmp_live', 'live']
            # rtmp_conn may be a single string or a list of strings
            if isinstance(conn, list):
                for entry in conn:
                    args += ['-rtmp_conn', entry]
            elif isinstance(conn, compat_str):
                args += ['-rtmp_conn', conn]

        # stream copy: no re-encoding
        args += ['-i', url, '-c', 'copy']

        if self.params.get('test', False):
            # limit output size in test mode
            args += ['-fs', compat_str(self._TEST_FILE_SIZE)]

        # choose the output container format
        if protocol in ('m3u8', 'm3u8_native'):
            if self.params.get('hls_use_mpegts', False) or tmpfilename == '-':
                args += ['-f', 'mpegts']
            else:
                args += ['-f', 'mp4']
                # old ffmpeg needs the ADTS->ASC bitstream filter for AAC in mp4
                if (ffpp.basename == 'ffmpeg' and is_outdated_version(ffpp._versions['ffmpeg'], '3.2', False)) and (not info_dict.get('acodec') or info_dict['acodec'].split('.')[0] in ('aac', 'mp4a')):
                    args += ['-bsf:a', 'aac_adtstoasc']
        elif protocol == 'rtmp':
            args += ['-f', 'flv']
        else:
            args += ['-f', EXT_TO_OUT_FORMATS.get(info_dict['ext'], info_dict['ext'])]

        args = [encodeArgument(opt) for opt in args]
        args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))

        self._debug_cmd(args)

        # From [1], a PIPE opened in Popen() should be closed, unless
        # .communicate() is called. Avoid leaking any PIPEs by using Popen
        # as a context manager (newer Python 3.x and compat)
        # Fixes "Resource Warning" in test/test_downloader_external.py
        # [1] https://devpress.csdn.net/python/62fde12d7e66823466192e48.html
        with compat_subprocess_Popen(args, stdin=subprocess.PIPE, env=env) as proc:
            try:
                retval = proc.wait()
            except BaseException as e:
                # subprocess.run would send the SIGKILL signal to ffmpeg and the
                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
                # produces a file that is playable (this is mostly useful for live
                # streams). Note that Windows is not affected and produces playable
                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32':
                    process_communicate_or_kill(proc, b'q')
                else:
                    proc.kill()
                raise
        return retval
|
|
|
|
|
|
|
|
|
|
|
|
class AVconvFD(FFmpegFD):
    # Same code path as FFmpegFD; the subclass exists only so that
    # --external-downloader avconv resolves to this downloader by name.
    pass
|
|
|
|
|
2016-11-17 05:42:56 -06:00
|
|
|
|
2015-01-23 18:38:48 -06:00
|
|
|
# Map downloader basename ('curl', 'wget', ...) to its class, collecting
# every *FD class defined above except the abstract base.
_BY_NAME = {
    klass.get_basename(): klass
    for name, klass in globals().items()
    if name.endswith('FD') and name != 'ExternalFD'
}
|
|
|
|
|
|
|
|
|
|
|
|
def list_external_downloaders():
    """Return the names of all supported external downloaders, sorted."""
    return sorted(_BY_NAME)
|
|
|
|
|
|
|
|
|
|
|
|
def get_external_downloader(external_downloader):
    """Return the downloader class for the given executable name.

    Raises KeyError if the downloader is not supported.
    """
    # Drop .exe extension on Windows
    basename = os.path.splitext(os.path.basename(external_downloader))[0]
    return _BY_NAME[basename]
|