Diffstat (limited to 'hypervideo_dl/downloader/http.py')
-rw-r--r--  hypervideo_dl/downloader/http.py  98
1 file changed, 50 insertions(+), 48 deletions(-)
diff --git a/hypervideo_dl/downloader/http.py b/hypervideo_dl/downloader/http.py
index 591a9b0..95c870e 100644
--- a/hypervideo_dl/downloader/http.py
+++ b/hypervideo_dl/downloader/http.py
@@ -1,29 +1,33 @@
-from __future__ import unicode_literals
-
+import http.client
import os
+import random
+import socket
import ssl
import time
-import random
+import urllib.error
from .common import FileDownloader
-from ..compat import (
- compat_urllib_error,
- compat_http_client
-)
from ..utils import (
ContentTooShortError,
+ RetryManager,
+ ThrottledDownload,
+ XAttrMetadataError,
+ XAttrUnavailableError,
encodeFilename,
int_or_none,
parse_http_range,
sanitized_Request,
- ThrottledDownload,
try_call,
write_xattr,
- XAttrMetadataError,
- XAttrUnavailableError,
)
-RESPONSE_READ_EXCEPTIONS = (TimeoutError, ConnectionError, ssl.SSLError, compat_http_client.HTTPException)
+RESPONSE_READ_EXCEPTIONS = (
+ TimeoutError,
+ socket.timeout, # compat: py < 3.10
+ ConnectionError,
+ ssl.SSLError,
+ http.client.HTTPException
+)
class HttpFD(FileDownloader):
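A note on the rebuilt RESPONSE_READ_EXCEPTIONS tuple above: on Python 3.10+ socket.timeout is an alias of TimeoutError, so the extra entry only matters on older interpreters. A minimal check of that assumption (illustrative only, not part of the patch):

import socket
import sys

if sys.version_info >= (3, 10):
    # Alias: the socket.timeout entry is redundant here, but harmless.
    print(socket.timeout is TimeoutError)       # True
else:
    # Distinct OSError subclass, not caught by TimeoutError, so it must be listed separately.
    print(issubclass(socket.timeout, OSError))  # True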
@@ -69,9 +73,6 @@ class HttpFD(FileDownloader):
ctx.is_resume = ctx.resume_len > 0
- count = 0
- retries = self.params.get('retries', 0)
-
class SucceedDownload(Exception):
pass
@@ -134,19 +135,18 @@ class HttpFD(FileDownloader):
if has_range:
content_range = ctx.data.headers.get('Content-Range')
content_range_start, content_range_end, content_len = parse_http_range(content_range)
- if content_range_start is not None and range_start == content_range_start:
- # Content-Range is present and matches requested Range, resume is possible
- accept_content_len = (
+ # Content-Range is present and matches requested Range, resume is possible
+ if range_start == content_range_start and (
# Non-chunked download
not ctx.chunk_size
# Chunked download and requested piece or
# its part is promised to be served
or content_range_end == range_end
- or content_len < range_end)
- if accept_content_len:
- ctx.content_len = content_len
- ctx.data_len = min(content_len, req_end or content_len) - (req_start or 0)
- return
+ or content_len < range_end):
+ ctx.content_len = content_len
+ if content_len or req_end:
+ ctx.data_len = min(content_len or req_end, req_end or content_len) - (req_start or 0)
+ return
# Content-Range is either not present or invalid. Assuming remote webserver is
# trying to send the whole file, resume is not possible, so wiping the local file
# and performing entire redownload
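For context on the rewritten resume check in the hunk above: parse_http_range turns a Content-Range header into a (start, end, total) triple, and the added "if content_len or req_end:" guard keeps min() away from two None values when the server reports neither a total size nor an end offset. A small sketch with hypothetical values (the parse_http_range behaviour is assumed from the ..utils import earlier in the patch):

# Assumed result of parse_http_range('bytes 500-999/1000'): (start, end, total)
content_range_start, content_range_end, content_len = 500, 999, 1000
req_start, req_end = 500, None  # overall requested span (open-ended)

# Resume is accepted when content_range_start matches the requested range start.
# The new guard: only compute data_len when at least one bound is known,
# which avoids min(None, None) on servers that omit the total size.
if content_len or req_end:
    data_len = min(content_len or req_end, req_end or content_len) - (req_start or 0)
    print(data_len)  # 500 -> bytes still expected for this download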
@@ -154,7 +154,7 @@ class HttpFD(FileDownloader):
ctx.resume_len = 0
ctx.open_mode = 'wb'
ctx.data_len = ctx.content_len = int_or_none(ctx.data.info().get('Content-length', None))
- except (compat_urllib_error.HTTPError, ) as err:
+ except urllib.error.HTTPError as err:
if err.code == 416:
# Unable to resume (requested range not satisfiable)
try:
@@ -162,7 +162,7 @@ class HttpFD(FileDownloader):
ctx.data = self.ydl.urlopen(
sanitized_Request(url, request_data, headers))
content_length = ctx.data.info()['Content-Length']
- except (compat_urllib_error.HTTPError, ) as err:
+ except urllib.error.HTTPError as err:
if err.code < 500 or err.code >= 600:
raise
else:
@@ -195,7 +195,7 @@ class HttpFD(FileDownloader):
# Unexpected HTTP error
raise
raise RetryDownload(err)
- except compat_urllib_error.URLError as err:
+ except urllib.error.URLError as err:
if isinstance(err.reason, ssl.CertificateError):
raise
raise RetryDownload(err)
@@ -204,6 +204,12 @@ class HttpFD(FileDownloader):
except RESPONSE_READ_EXCEPTIONS as err:
raise RetryDownload(err)
+ def close_stream():
+ if ctx.stream is not None:
+ if not ctx.tmpfilename == '-':
+ ctx.stream.close()
+ ctx.stream = None
+
def download():
data_len = ctx.data.info().get('Content-length', None)
@@ -220,10 +226,12 @@ class HttpFD(FileDownloader):
min_data_len = self.params.get('min_filesize')
max_data_len = self.params.get('max_filesize')
if min_data_len is not None and data_len < min_data_len:
- self.to_screen('\r[download] File is smaller than min-filesize (%s bytes < %s bytes). Aborting.' % (data_len, min_data_len))
+ self.to_screen(
+ f'\r[download] File is smaller than min-filesize ({data_len} bytes < {min_data_len} bytes). Aborting.')
return False
if max_data_len is not None and data_len > max_data_len:
- self.to_screen('\r[download] File is larger than max-filesize (%s bytes > %s bytes). Aborting.' % (data_len, max_data_len))
+ self.to_screen(
+ f'\r[download] File is larger than max-filesize ({data_len} bytes > {max_data_len} bytes). Aborting.')
return False
byte_counter = 0 + ctx.resume_len
@@ -235,12 +243,9 @@ class HttpFD(FileDownloader):
before = start # start measuring
def retry(e):
- to_stdout = ctx.tmpfilename == '-'
- if ctx.stream is not None:
- if not to_stdout:
- ctx.stream.close()
- ctx.stream = None
- ctx.resume_len = byte_counter if to_stdout else os.path.getsize(encodeFilename(ctx.tmpfilename))
+ close_stream()
+ ctx.resume_len = (byte_counter if ctx.tmpfilename == '-'
+ else os.path.getsize(encodeFilename(ctx.tmpfilename)))
raise RetryDownload(e)
while True:
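The retry() helper above now delegates stream cleanup to close_stream() and recomputes resume_len from the partial file on disk, falling back to the in-memory byte counter when writing to stdout, where there is no file to stat. A standalone sketch of that fallback (the function name is hypothetical):

import os

def resume_length(tmpfilename, byte_counter):
    # '-' means the download is streamed to stdout: nothing on disk to measure,
    # so resume from the number of bytes already emitted.
    if tmpfilename == '-':
        return byte_counter
    return os.path.getsize(tmpfilename)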
@@ -264,19 +269,19 @@ class HttpFD(FileDownloader):
assert ctx.stream is not None
ctx.filename = self.undo_temp_name(ctx.tmpfilename)
self.report_destination(ctx.filename)
- except (OSError, IOError) as err:
+ except OSError as err:
self.report_error('unable to open for writing: %s' % str(err))
return False
if self.params.get('xattr_set_filesize', False) and data_len is not None:
try:
- write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode('utf-8'))
+ write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())
except (XAttrUnavailableError, XAttrMetadataError) as err:
self.report_error('unable to set filesize xattr: %s' % str(err))
try:
ctx.stream.write(data_block)
- except (IOError, OSError) as err:
+ except OSError as err:
self.to_stderr('\n')
self.report_error('unable to write data: %s' % str(err))
return False
@@ -342,9 +347,7 @@ class HttpFD(FileDownloader):
if data_len is not None and byte_counter != data_len:
err = ContentTooShortError(byte_counter, int(data_len))
- if count <= retries:
- retry(err)
- raise err
+ retry(err)
self.try_rename(ctx.tmpfilename, ctx.filename)
@@ -363,21 +366,20 @@ class HttpFD(FileDownloader):
return True
- while count <= retries:
+ for retry in RetryManager(self.params.get('retries'), self.report_retry):
try:
establish_connection()
return download()
- except RetryDownload as e:
- count += 1
- if count <= retries:
- self.report_retry(e.source_error, count, retries)
- else:
- self.to_screen(f'[download] Got server HTTP error: {e.source_error}')
+ except RetryDownload as err:
+ retry.error = err.source_error
continue
except NextFragment:
+ retry.error = None
+ retry.attempt -= 1
continue
except SucceedDownload:
return True
-
- self.report_error('giving up after %s retries' % retries)
+ except: # noqa: E722
+ close_stream()
+ raise
return False
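The rewritten loop at the bottom follows the RetryManager pattern imported at the top of the patch: the manager yields one attempt object per pass, setting retry.error schedules another attempt and reports it through the callback, and leaving the error unset ends the iteration. A rough sketch of that shape, with the request call and reporter being hypothetical and the RetryManager semantics assumed from how this patch uses it:

from hypervideo_dl.utils import RetryManager

def report(err, count, retries):
    # Stand-in for self.report_retry; this print-only version means the sketch
    # simply gives up quietly once the retries run out.
    print(f'[download] Got error: {err}. Retrying ({count}/{retries})...')

def fetch_with_retries(do_request, retries=3):
    for retry in RetryManager(retries, report):
        try:
            return do_request()   # hypothetical network call
        except OSError as err:
            retry.error = err     # recorded error triggers the next attempt
            continue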