about summary refs log tree commit diff
path: root/youtube_dl/downloader
diff options
context:
space:
mode:
Diffstat (limited to 'youtube_dl/downloader')
-rw-r--r--  youtube_dl/downloader/dash.py      35
-rw-r--r--  youtube_dl/downloader/external.py   3
-rw-r--r--  youtube_dl/downloader/fragment.py  15
-rw-r--r--  youtube_dl/downloader/hls.py       41
4 files changed, 70 insertions, 24 deletions
diff --git a/youtube_dl/downloader/dash.py b/youtube_dl/downloader/dash.py
index 8bbab9dbc..41fc9cfc2 100644
--- a/youtube_dl/downloader/dash.py
+++ b/youtube_dl/downloader/dash.py
@@ -38,8 +38,10 @@ class DashSegmentsFD(FragmentFD):
segments_filenames = []
fragment_retries = self.params.get('fragment_retries', 0)
+ skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
- def append_url_to_file(target_url, tmp_filename, segment_name):
+ def process_segment(segment, tmp_filename, fatal):
+ target_url, segment_name = segment
target_filename = '%s-%s' % (tmp_filename, segment_name)
count = 0
while count <= fragment_retries:
@@ -52,26 +54,35 @@ class DashSegmentsFD(FragmentFD):
down.close()
segments_filenames.append(target_sanitized)
break
- except (compat_urllib_error.HTTPError, ) as err:
+ except compat_urllib_error.HTTPError as err:
# YouTube may often return 404 HTTP error for a fragment causing the
# whole download to fail. However if the same fragment is immediately
# retried with the same request data this usually succeeds (1-2 attempts
# is usually enough) thus allowing to download the whole file successfully.
- # So, we will retry all fragments that fail with 404 HTTP error for now.
- if err.code != 404:
- raise
- # Retry fragment
+ # To be future-proof we will retry all fragments that fail with any
+ # HTTP error.
count += 1
if count <= fragment_retries:
- self.report_retry_fragment(segment_name, count, fragment_retries)
+ self.report_retry_fragment(err, segment_name, count, fragment_retries)
if count > fragment_retries:
+ if not fatal:
+ self.report_skip_fragment(segment_name)
+ return True
self.report_error('giving up after %s fragment retries' % fragment_retries)
return False
-
- if initialization_url:
- append_url_to_file(initialization_url, ctx['tmpfilename'], 'Init')
- for i, segment_url in enumerate(segment_urls):
- append_url_to_file(segment_url, ctx['tmpfilename'], 'Seg%d' % i)
+ return True
+
+ segments_to_download = [(initialization_url, 'Init')] if initialization_url else []
+ segments_to_download.extend([
+ (segment_url, 'Seg%d' % i)
+ for i, segment_url in enumerate(segment_urls)])
+
+ for i, segment in enumerate(segments_to_download):
+ # In DASH, the first segment contains necessary headers to
+ # generate a valid MP4 file, so always abort for the first segment
+ fatal = i == 0 or not skip_unavailable_fragments
+ if not process_segment(segment, ctx['tmpfilename'], fatal):
+ return False
self._finish_frag_download(ctx)
diff --git a/youtube_dl/downloader/external.py b/youtube_dl/downloader/external.py
index 17f12e970..0aeae3b8f 100644
--- a/youtube_dl/downloader/external.py
+++ b/youtube_dl/downloader/external.py
@@ -223,7 +223,8 @@ class FFmpegFD(ExternalFD):
if proxy.startswith('socks'):
self.report_warning(
- '%s does not support SOCKS proxies. Downloading may fail.' % self.get_basename())
+ '%s does not support SOCKS proxies. Downloading is likely to fail. '
+ 'Consider adding --hls-prefer-native to your command.' % self.get_basename())
# Since December 2015 ffmpeg supports -http_proxy option (see
# http://git.videolan.org/?p=ffmpeg.git;a=commit;h=b4eb1f29ebddd60c41a2eb39f5af701e38e0d3fd)
diff --git a/youtube_dl/downloader/fragment.py b/youtube_dl/downloader/fragment.py
index ba903ae10..84aacf7db 100644
--- a/youtube_dl/downloader/fragment.py
+++ b/youtube_dl/downloader/fragment.py
@@ -6,6 +6,7 @@ import time
from .common import FileDownloader
from .http import HttpFD
from ..utils import (
+ error_to_compat_str,
encodeFilename,
sanitize_open,
)
@@ -22,13 +23,19 @@ class FragmentFD(FileDownloader):
Available options:
- fragment_retries: Number of times to retry a fragment for HTTP error (DASH only)
+ fragment_retries: Number of times to retry a fragment for HTTP error (DASH
+ and hlsnative only)
+ skip_unavailable_fragments:
+ Skip unavailable fragments (DASH and hlsnative only)
"""
- def report_retry_fragment(self, fragment_name, count, retries):
+ def report_retry_fragment(self, err, fragment_name, count, retries):
self.to_screen(
- '[download] Got server HTTP error. Retrying fragment %s (attempt %d of %s)...'
- % (fragment_name, count, self.format_retries(retries)))
+ '[download] Got server HTTP error: %s. Retrying fragment %s (attempt %d of %s)...'
+ % (error_to_compat_str(err), fragment_name, count, self.format_retries(retries)))
+
+ def report_skip_fragment(self, fragment_name):
+ self.to_screen('[download] Skipping fragment %s...' % fragment_name)
def _prepare_and_start_frag_download(self, ctx):
self._prepare_frag_download(ctx)
diff --git a/youtube_dl/downloader/hls.py b/youtube_dl/downloader/hls.py
index baaff44d5..5d70abf62 100644
--- a/youtube_dl/downloader/hls.py
+++ b/youtube_dl/downloader/hls.py
@@ -13,6 +13,7 @@ from .fragment import FragmentFD
from .external import FFmpegFD
from ..compat import (
+ compat_urllib_error,
compat_urlparse,
compat_struct_pack,
)
@@ -83,6 +84,10 @@ class HlsFD(FragmentFD):
self._prepare_and_start_frag_download(ctx)
+ fragment_retries = self.params.get('fragment_retries', 0)
+ skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
+ test = self.params.get('test', False)
+
extra_query = None
extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
if extra_param_to_segment_url:
@@ -99,15 +104,37 @@ class HlsFD(FragmentFD):
line
if re.match(r'^https?://', line)
else compat_urlparse.urljoin(man_url, line))
- frag_filename = '%s-Frag%d' % (ctx['tmpfilename'], i)
+ frag_name = 'Frag%d' % i
+ frag_filename = '%s-%s' % (ctx['tmpfilename'], frag_name)
if extra_query:
frag_url = update_url_query(frag_url, extra_query)
- success = ctx['dl'].download(frag_filename, {'url': frag_url})
- if not success:
+ count = 0
+ while count <= fragment_retries:
+ try:
+ success = ctx['dl'].download(frag_filename, {'url': frag_url})
+ if not success:
+ return False
+ down, frag_sanitized = sanitize_open(frag_filename, 'rb')
+ frag_content = down.read()
+ down.close()
+ break
+ except compat_urllib_error.HTTPError as err:
+ # Unavailable (possibly temporary) fragments may be served.
+ # First we try to retry then either skip or abort.
+ # See https://github.com/rg3/youtube-dl/issues/10165,
+            # https://github.com/rg3/youtube-dl/issues/10448.
+ count += 1
+ if count <= fragment_retries:
+ self.report_retry_fragment(err, frag_name, count, fragment_retries)
+ if count > fragment_retries:
+ if skip_unavailable_fragments:
+ i += 1
+ media_sequence += 1
+ self.report_skip_fragment(frag_name)
+ continue
+ self.report_error(
+ 'giving up after %s fragment retries' % fragment_retries)
return False
- down, frag_sanitized = sanitize_open(frag_filename, 'rb')
- frag_content = down.read()
- down.close()
if decrypt_info['METHOD'] == 'AES-128':
iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence)
frag_content = AES.new(
@@ -115,7 +142,7 @@ class HlsFD(FragmentFD):
ctx['dest_stream'].write(frag_content)
frags_filenames.append(frag_sanitized)
# We only download the first fragment during the test
- if self.params.get('test', False):
+ if test:
break
i += 1
media_sequence += 1