[compat] Use compat_open()
parent bcd5c4e152
commit d38c9addfb
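
This commit replaces direct io.open() calls (and the remaining 'wt'/'rt' mode
strings) with compat_open, imported under the name open, so that call sites get
encoding-aware text file handling on both Python 2 and Python 3. As a rough
orientation only (the real definition lives in youtube_dl/compat.py and may
differ in detail), such a shim typically looks like the sketch below.

    # Sketch of a compat_open shim; the actual youtube_dl/compat.py definition
    # may differ. On Python 2, io.open() already behaves like the Python 3
    # built-in open(): it accepts an encoding= argument and returns unicode
    # text, so call sites can simply bind it to the name `open`.
    import io
    import sys

    if sys.version_info[0] < 3:
        compat_open = io.open   # Python 2: encoding-aware text I/O
    else:
        compat_open = open      # Python 3: the built-in open() is io.open()

    # Usage mirrors the pattern applied throughout this commit:
    #     from youtube_dl.compat import compat_open as open
    #     with open('README.md', 'w', encoding='utf-8') as f:
    #         f.write('example text')
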
@@ -4,6 +4,8 @@ import io
 import sys
 import re
 
+from youtube_dl.compat import compat_open as open
+
 README_FILE = 'README.md'
 helptext = sys.stdin.read()
 
@@ -20,7 +22,7 @@ options = helptext[helptext.index(' General Options:') + 19:]
 options = re.sub(r'(?m)^ (\w.+)$', r'## \1', options)
 options = '# OPTIONS\n' + options + '\n'
 
-with io.open(README_FILE, 'w', encoding='utf-8') as f:
+with open(README_FILE, 'w', encoding='utf-8') as f:
     f.write(header)
     f.write(options)
     f.write(footer)
@@ -1,7 +1,6 @@
 from __future__ import unicode_literals
 
 import errno
-import io
 import hashlib
 import json
 import os.path
@@ -14,6 +13,7 @@ import unittest
 import youtube_dl.extractor
 from youtube_dl import YoutubeDL
 from youtube_dl.compat import (
+    compat_open as open,
     compat_os_name,
     compat_str,
 )
@@ -29,10 +29,10 @@ def get_params(override=None):
                                    "parameters.json")
     LOCAL_PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                          "local_parameters.json")
-    with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
+    with open(PARAMETERS_FILE, encoding='utf-8') as pf:
         parameters = json.load(pf)
     if os.path.exists(LOCAL_PARAMETERS_FILE):
-        with io.open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
+        with open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
             parameters.update(json.load(pf))
     if override:
         parameters.update(override)
@@ -3,7 +3,6 @@
 from __future__ import unicode_literals
 
 # Allow direct execution
-import io
 import os
 import sys
 import unittest
@@ -21,6 +20,7 @@ from test.helper import (
 from youtube_dl.compat import (
     compat_etree_fromstring,
     compat_http_server,
+    compat_open as open,
 )
 from youtube_dl.extractor.common import InfoExtractor
 from youtube_dl.extractor import (
@@ -902,8 +902,8 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
         ]
 
         for m3u8_file, m3u8_url, expected_formats in _TEST_CASES:
-            with io.open('./test/testdata/m3u8/%s.m3u8' % m3u8_file,
+            with open('./test/testdata/m3u8/%s.m3u8' % m3u8_file,
                       mode='r', encoding='utf-8') as f:
                 formats = self.ie._parse_m3u8_formats(
                     f.read(), m3u8_url, ext='mp4')
                 self.ie._sort_formats(formats)
@@ -1127,8 +1127,8 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
         ]
 
         for mpd_file, mpd_url, mpd_base_url, expected_formats in _TEST_CASES:
-            with io.open('./test/testdata/mpd/%s.mpd' % mpd_file,
+            with open('./test/testdata/mpd/%s.mpd' % mpd_file,
                       mode='r', encoding='utf-8') as f:
                 formats = self.ie._parse_mpd_formats(
                     compat_etree_fromstring(f.read().encode('utf-8')),
                     mpd_base_url=mpd_base_url, mpd_url=mpd_url)
@@ -1154,8 +1154,8 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
         ]
 
         for f4m_file, f4m_url, expected_formats in _TEST_CASES:
-            with io.open('./test/testdata/f4m/%s.f4m' % f4m_file,
+            with open('./test/testdata/f4m/%s.f4m' % f4m_file,
                       mode='r', encoding='utf-8') as f:
                 formats = self.ie._parse_f4m_formats(
                     compat_etree_fromstring(f.read().encode('utf-8')),
                     f4m_url, None)
@@ -1202,8 +1202,8 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
         ]
 
         for xspf_file, xspf_url, expected_entries in _TEST_CASES:
-            with io.open('./test/testdata/xspf/%s.xspf' % xspf_file,
+            with open('./test/testdata/xspf/%s.xspf' % xspf_file,
                       mode='r', encoding='utf-8') as f:
                 entries = self.ie._parse_xspf(
                     compat_etree_fromstring(f.read().encode('utf-8')),
                     xspf_file, xspf_url=xspf_url, xspf_base_url=xspf_url)
@@ -22,6 +22,7 @@ from youtube_dl.compat import (
     compat_http_cookiejar_Cookie,
     compat_http_cookies_SimpleCookie,
     compat_kwargs,
+    compat_open as open,
     compat_str,
     compat_urllib_error,
 )
@@ -701,12 +702,12 @@ class TestYoutubeDL(unittest.TestCase):
 
         class SimplePP(PostProcessor):
             def run(self, info):
-                with open(audiofile, 'wt') as f:
+                with open(audiofile, 'w') as f:
                     f.write('EXAMPLE')
                 return [info['filepath']], info
 
         def run_pp(params, PP):
-            with open(filename, 'wt') as f:
+            with open(filename, 'w') as f:
                 f.write('EXAMPLE')
             ydl = YoutubeDL(params)
             ydl.add_post_processor(PP())
@@ -725,7 +726,7 @@ class TestYoutubeDL(unittest.TestCase):
 
         class ModifierPP(PostProcessor):
             def run(self, info):
-                with open(info['filepath'], 'wt') as f:
+                with open(info['filepath'], 'w') as f:
                     f.write('MODIFIED')
                 return [], info
 
@@ -20,15 +20,15 @@ from test.helper import (
 
 
 import hashlib
-import io
 import json
 import socket
 
 import youtube_dl.YoutubeDL
 from youtube_dl.compat import (
     compat_http_client,
-    compat_urllib_error,
     compat_HTTPError,
+    compat_open as open,
+    compat_urllib_error,
 )
 from youtube_dl.utils import (
     DownloadError,
@@ -245,7 +245,7 @@ def generator(test_case, tname):
                 self.assertTrue(
                     os.path.exists(info_json_fn),
                     'Missing info file %s' % info_json_fn)
-                with io.open(info_json_fn, encoding='utf-8') as infof:
+                with open(info_json_fn, encoding='utf-8') as infof:
                     info_dict = json.load(infof)
                 expect_info_dict(self, info_dict, tc.get('info_dict', {}))
         finally:
@@ -5,16 +5,18 @@ from __future__ import unicode_literals
 import os
 import sys
 import unittest
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
+dirn = os.path.dirname
 
+sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
+
 import errno
-import io
 import json
 import re
 import subprocess
 
 from youtube_dl.swfinterp import SWFInterpreter
+from youtube_dl.compat import compat_open as open
 
 
 TEST_DIR = os.path.join(
@@ -43,7 +45,7 @@ def _make_testfunc(testfile):
                     '-static-link-runtime-shared-libraries', as_file])
             except OSError as ose:
                 if ose.errno == errno.ENOENT:
-                    print('mxmlc not found! Skipping test.')
+                    self.skipTest('mxmlc not found!')
                     return
                 raise
 
@@ -51,7 +53,7 @@ def _make_testfunc(testfile):
             swf_content = swf_f.read()
         swfi = SWFInterpreter(swf_content)
 
-        with io.open(as_file, 'r', encoding='utf-8') as as_f:
+        with open(as_file, 'r', encoding='utf-8') as as_f:
             as_content = as_f.read()
 
         def _find_spec(key):
@@ -2,14 +2,15 @@ from __future__ import unicode_literals
 
 # Allow direct execution
 import os
+import re
 import sys
 import unittest
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
-import io
-import re
+dirn = os.path.dirname
 
-rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+rootDir = dirn(dirn(os.path.abspath(__file__)))
+
+sys.path.insert(0, rootDir)
 
 IGNORED_FILES = [
     'setup.py',  # http://bugs.python.org/issue13943
@@ -24,6 +25,7 @@ IGNORED_DIRS = [
 ]
 
 from test.helper import assertRegexpMatches
+from youtube_dl.compat import compat_open as open
 
 
 class TestUnicodeLiterals(unittest.TestCase):
@@ -41,7 +43,7 @@ class TestUnicodeLiterals(unittest.TestCase):
                     continue
 
                 fn = os.path.join(dirpath, basename)
-                with io.open(fn, encoding='utf-8') as inf:
+                with open(fn, encoding='utf-8') as inf:
                     code = inf.read()
 
                 if "'" not in code and '"' not in code:
@@ -11,12 +11,11 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from test.helper import get_params, try_rm
 
 
-import io
-
 import xml.etree.ElementTree
 
 import youtube_dl.YoutubeDL
 import youtube_dl.extractor
+from youtube_dl.compat import compat_open as open
 
 
 class YoutubeDL(youtube_dl.YoutubeDL):
@@ -51,7 +50,7 @@ class TestAnnotations(unittest.TestCase):
         ydl.download([TEST_ID])
         self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
         annoxml = None
-        with io.open(ANNOTATIONS_FILE, 'r', encoding='utf-8') as annof:
+        with open(ANNOTATIONS_FILE, 'r', encoding='utf-8') as annof:
             annoxml = xml.etree.ElementTree.parse(annof)
         self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
         root = annoxml.getroot()
@@ -8,11 +8,14 @@ import sys
 import unittest
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
-import io
 import re
 import string
 
-from youtube_dl.compat import compat_str, compat_urlretrieve
+from youtube_dl.compat import (
+    compat_open as open,
+    compat_str,
+    compat_urlretrieve,
+)
 
 from test.helper import FakeYDL
 from youtube_dl.extractor import YoutubeIE
@@ -208,7 +211,7 @@ def t_factory(name, sig_func, url_pattern):
 
             if not os.path.exists(fn):
                 compat_urlretrieve(url, fn)
-            with io.open(fn, encoding='utf-8') as testf:
+            with open(fn, encoding='utf-8') as testf:
                 jscode = testf.read()
             self.assertEqual(sig_func(jscode, sig_input), expected_sig)
 
@@ -4,11 +4,9 @@
 from __future__ import absolute_import, unicode_literals
 
 import collections
-import contextlib
 import copy
 import datetime
 import errno
-import fileinput
 import io
 import itertools
 import json
@@ -45,6 +43,7 @@ from .compat import (
     compat_kwargs,
     compat_map as map,
     compat_numeric_types,
+    compat_open as open,
     compat_os_name,
     compat_str,
     compat_tokenize_tokenize,
@@ -1977,7 +1976,7 @@ class YoutubeDL(object):
             else:
                 try:
                     self.to_screen('[info] Writing video description to: ' + descfn)
-                    with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
+                    with open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile:
                         descfile.write(info_dict['description'])
                 except (OSError, IOError):
                     self.report_error('Cannot write description file ' + descfn)
@@ -1992,7 +1991,7 @@ class YoutubeDL(object):
             else:
                 try:
                     self.to_screen('[info] Writing video annotations to: ' + annofn)
-                    with io.open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
+                    with open(encodeFilename(annofn), 'w', encoding='utf-8') as annofile:
                         annofile.write(info_dict['annotations'])
                 except (KeyError, TypeError):
                     self.report_warning('There are no annotations to write.')
@@ -2019,7 +2018,7 @@ class YoutubeDL(object):
                         try:
                             # Use newline='' to prevent conversion of newline characters
                             # See https://github.com/ytdl-org/youtube-dl/issues/10268
-                            with io.open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
+                            with open(encodeFilename(sub_filename), 'w', encoding='utf-8', newline='') as subfile:
                                 subfile.write(sub_info['data'])
                         except (OSError, IOError):
                             self.report_error('Cannot write subtitles file ' + sub_filename)
@@ -2028,7 +2027,7 @@ class YoutubeDL(object):
                         try:
                             sub_data = ie._request_webpage(
                                 sub_info['url'], info_dict['id'], note=False).read()
-                            with io.open(encodeFilename(sub_filename), 'wb') as subfile:
+                            with open(encodeFilename(sub_filename), 'wb') as subfile:
                                 subfile.write(sub_data)
                         except (ExtractorError, IOError, OSError, ValueError) as err:
                             self.report_warning('Unable to download subtitle for "%s": %s' %
@@ -2232,12 +2231,8 @@ class YoutubeDL(object):
         return self._download_retcode
 
     def download_with_info_file(self, info_filename):
-        with contextlib.closing(fileinput.FileInput(
-                [info_filename], mode='r',
-                openhook=fileinput.hook_encoded('utf-8'))) as f:
-            # FileInput doesn't have a read method, we can't call json.load
-            # TODO: let's use io.open(), then
-            info = self.filter_requested_info(json.loads('\n'.join(f)))
+        with open(info_filename, encoding='utf-8') as f:
+            info = self.filter_requested_info(json.load(f))
         try:
             self.process_ie_result(info, download=True)
         except DownloadError:
@@ -1,14 +1,16 @@
 from __future__ import unicode_literals
 
 import errno
-import io
 import json
 import os
 import re
 import shutil
 import traceback
 
-from .compat import compat_getenv
+from .compat import (
+    compat_getenv,
+    compat_open as open,
+)
 from .utils import (
     error_to_compat_str,
     expand_path,
@@ -83,7 +85,7 @@ class Cache(object):
         cache_fn = self._get_cache_fn(section, key, dtype)
         try:
             try:
-                with io.open(cache_fn, 'r', encoding='utf-8') as cachef:
+                with open(cache_fn, 'r', encoding='utf-8') as cachef:
                     return self._validate(json.load(cachef), min_ver)
             except ValueError:
                 try:
@@ -25,6 +25,7 @@ from ..compat import (
     compat_integer_types,
     compat_http_client,
     compat_map as map,
+    compat_open as open,
     compat_os_name,
     compat_str,
     compat_urllib_error,
@@ -7,6 +7,7 @@ import subprocess
 import tempfile
 
 from ..compat import (
+    compat_open as open,
     compat_urlparse,
     compat_kwargs,
 )
@@ -18,6 +18,8 @@ from ..utils import (
     shell_quote,
 )
 
+from ..compat import compat_open as open
+
 
 class EmbedThumbnailPPError(PostProcessingError):
     pass
@@ -1,6 +1,5 @@
 from __future__ import unicode_literals
 
-import io
 import os
 import subprocess
 import time
@@ -9,6 +8,7 @@ import re
 
 from .common import AudioConversionError, PostProcessor
 
+from ..compat import compat_open as open
 from ..utils import (
     encodeArgument,
     encodeFilename,
@@ -493,7 +493,7 @@ class FFmpegMetadataPP(FFmpegPostProcessor):
         chapters = info.get('chapters', [])
         if chapters:
             metadata_filename = replace_extension(filename, 'meta')
-            with io.open(metadata_filename, 'wt', encoding='utf-8') as f:
+            with open(metadata_filename, 'w', encoding='utf-8') as f:
                 def ffmpeg_escape(text):
                     return re.sub(r'(=|;|#|\\|\n)', r'\\\1', text)
 
@@ -636,7 +636,7 @@ class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
                 with open(dfxp_file, 'rb') as f:
                     srt_data = dfxp2srt(f.read())
 
-                with io.open(srt_file, 'wt', encoding='utf-8') as f:
+                with open(srt_file, 'w', encoding='utf-8') as f:
                     f.write(srt_data)
                 old_file = srt_file
 
@@ -652,7 +652,7 @@ class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
 
             self.run_ffmpeg(old_file, new_file, ['-f', new_format])
 
-            with io.open(new_file, 'rt', encoding='utf-8') as f:
+            with open(new_file, 'r', encoding='utf-8') as f:
                 subs[lang] = {
                     'ext': new_ext,
                     'data': f.read(),
@@ -9,7 +9,10 @@ import subprocess
 import sys
 from zipimport import zipimporter
 
-from .compat import compat_realpath
+from .compat import (
+    compat_open as open,
+    compat_realpath,
+)
 from .utils import encode_compat_str
 
 from .version import __version__
@@ -127,7 +130,7 @@ def update_self(to_screen, verbose, opener):
 
         try:
             bat = os.path.join(directory, 'youtube-dl-updater.bat')
-            with io.open(bat, 'w') as batfile:
+            with open(bat, 'w') as batfile:
                 batfile.write('''
 @echo off
 echo Waiting for file handle to be closed ...
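
Apart from the mechanical io.open() to open() swaps, the one behavioural
simplification above is in YoutubeDL.download_with_info_file(): the old
contextlib/fileinput workaround (fileinput.FileInput has no read() method, so
the code had to json.loads() a joined string) is replaced by a plain open()
plus json.load(). A minimal standalone sketch of the new code path follows;
load_info_json is a hypothetical helper name used here for illustration, and
the filter_requested_info() step from the real method is omitted.

    import json

    from youtube_dl.compat import compat_open as open


    def load_info_json(info_filename):
        # compat_open returns a real file object with read(), so json.load()
        # can parse the .info.json directly, using an explicit UTF-8 encoding.
        with open(info_filename, encoding='utf-8') as f:
            return json.load(f)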