Fix streamTrack

Fixes several cases:
- When a connection error was thrown during a retry, the subsequent retry
  started from the beginning instead of from the partial start
- Clone the headers instead of adding the Range header to the shared
  dz.http_headers dict - this was causing errors on other requests
- Use 'with self.dz.session.get(...)' so the request is always cleaned up
  on failure
- Fix the downloadPercentage calculation for retries: complete is now only
  the size of the retried range, so the percentage needs to account for
  the start value (see the sketch below)
kermit 2020-09-29 19:39:17 +01:00
parent 04eb63b4cd
commit bb98953963
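
For context, below is a minimal, self-contained sketch of the resume pattern
the diff implements, written against plain requests. The names
(stream_to_file, on_progress), the URL, and the 8 KiB chunk size are
illustrative only, not deemix's API; the Blowfish decryption and queue
bookkeeping are left out.

    import requests

    def stream_to_file(session, url, out_file, start=0, on_progress=None):
        # Clone the session's default headers so the Range header never
        # leaks into other requests made with the same session.
        headers = dict(session.headers)
        if start != 0:
            headers['Range'] = f'bytes={start}-'  # resume mid-file
        downloaded = start
        try:
            # The context manager closes the response even if writing fails.
            with session.get(url, headers=headers, stream=True, timeout=10) as response:
                response.raise_for_status()
                # Content-Length covers only the requested range, so the
                # full file size is that value plus the resume offset.
                total = int(response.headers['Content-Length']) + start
                if total == 0:
                    raise ValueError('empty download')
                for chunk in response.iter_content(8192):
                    out_file.write(chunk)
                    downloaded += len(chunk)
                    if on_progress:
                        # Progress must also account for the resume offset.
                        on_progress(downloaded / total * 100)
        except requests.exceptions.SSLError:
            # Retry from the last byte actually written, not from byte 0.
            return stream_to_file(session, url, out_file, downloaded, on_progress)
        except requests.exceptions.ConnectionError:
            # Keep the original offset so a failed retry does not restart the file.
            return stream_to_file(session, url, out_file, start, on_progress)
        return downloaded

A hypothetical call, with print standing in for the percentage callback:

    with requests.Session() as session, open('track.enc', 'wb') as out:
        stream_to_file(session, 'https://example.com/big-file', out, on_progress=print)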

@@ -642,43 +642,54 @@ class DownloadJob:
         return error_num # fallback is enabled and loop went through all formats

-    def streamTrack(self, stream, track, range=None):
+    def streamTrack(self, stream, track, start=0):
         if self.queueItem.cancel: raise DownloadCancelled
+        headers=dict(self.dz.http_headers)
+        if range != 0:
+            headers['Range'] = f'bytes={start}-'
+        chunkLength = start
+        percentage = 0
         try:
-            headers=self.dz.http_headers
-            if range is not None:
-                headers['Range'] = range
-            request = self.dz.session.get(track.downloadUrl, headers=self.dz.http_headers, stream=True, timeout=10)
-        except request_exception.ConnectionError:
-            eventlet.sleep(2)
-            return self.streamTrack(stream, track)
-        request.raise_for_status()
-        blowfish_key = str.encode(self.dz._get_blowfish_key(str(track.id)))
-        complete = int(request.headers["Content-Length"])
-        if complete == 0:
-            raise DownloadEmpty
-        chunkLength = 0
-        percentage = 0
-        try:
-            for chunk in request.iter_content(2048 * 3):
-                if self.queueItem.cancel: raise DownloadCancelled
-                if len(chunk) >= 2048:
-                    chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk[0:2048]) + chunk[2048:]
-                stream.write(chunk)
-                chunkLength += len(chunk)
-                if isinstance(self.queueItem, QISingle):
-                    percentage = (chunkLength / complete) * 100
-                    self.downloadPercentage = percentage
-                else:
-                    chunkProgres = (len(chunk) / complete) / self.queueItem.size * 100
-                    self.downloadPercentage += chunkProgres
-                self.updatePercentage()
-        except SSLError:
-            range = f'bytes={chunkLength}-'
-            logger.info(f'retrying {track.title} with range {range}')
-            return self.streamTrack(stream, track, range)
+            with self.dz.session.get(track.downloadUrl, headers=headers, stream=True, timeout=10) as request:
+                request.raise_for_status()
+                blowfish_key = str.encode(self.dz._get_blowfish_key(str(track.id)))
+                complete = int(request.headers["Content-Length"])
+                if complete == 0:
+                    raise DownloadEmpty
+                if start != 0:
+                    responseRange = request.headers["Content-Range"]
+                    logger.info(f'{track.title} downloading range {responseRange}')
+                else:
+                    logger.info(f'{track.title} downloading {complete} bytes')
+                for chunk in request.iter_content(2048 * 3):
+                    eventlet.sleep(0)
+                    if self.queueItem.cancel: raise DownloadCancelled
+                    if len(chunk) >= 2048:
+                        chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk[0:2048]) + chunk[2048:]
+                    stream.write(chunk)
+                    chunkLength += len(chunk)
+                    if isinstance(self.queueItem, QISingle):
+                        percentage = (chunkLength / (complete + start)) * 100
+                        self.downloadPercentage = percentage
+                    else:
+                        chunkProgres = (len(chunk) / (complete + start)) / self.queueItem.size * 100
+                        self.downloadPercentage += chunkProgres
+                    self.updatePercentage()
+        except SSLError as e:
+            logger.info(f'retrying {track.title} from byte {chunkLength}')
+            return self.streamTrack(stream, track, chunkLength)
+        except (request_exception.ConnectionError, requests.exceptions.ReadTimeout):
+            eventlet.sleep(2)
+            return self.streamTrack(stream, track, start)

     def updatePercentage(self):
         if round(self.downloadPercentage) != self.lastPercentage and round(self.downloadPercentage) % 2 == 0: