Merge pull request 'Fix possible corruption of download files on retry, assorted other HTTP fixes' (#66) from kermit/deemix:fix-corruption-hang into main
Reviewed-on: https://codeberg.org/RemixDev/deemix/pulls/66
commit 72e3316d57
@@ -81,7 +81,9 @@ class Deezer:
     def get_track_filesizes(self, sng_id):
         try:
-            response = requests.post("https://www.deezer.com/", headers=self.http_headers)
+            response = requests.post("https://www.deezer.com/",
+                                     headers=self.http_headers,
+                                     timeout=30)
             guest_sid = response.cookies.get('sid')
             site = requests.post(
                 "https://api.deezer.com/1.0/gateway.php",
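The change above adds an explicit request timeout. As a minimal sketch of the same idea outside deemix (the helper name fetch_homepage and the retry count are assumptions, not part of this change), a bounded timeout turns a stalled connection into an exception the caller can retry:

import requests

def fetch_homepage(headers, retries=3):
    # Hypothetical helper: POST with a hard timeout and a simple retry loop.
    for _ in range(retries):
        try:
            # timeout=30 bounds both connecting and waiting for data
            response = requests.post("https://www.deezer.com/",
                                     headers=headers, timeout=30)
            response.raise_for_status()
            return response.cookies.get('sid')
        except (requests.exceptions.Timeout,
                requests.exceptions.ConnectionError):
            continue  # transient network problem, try the request again
    return None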
@@ -616,7 +616,7 @@ class DownloadJob:
             if int(track.filesizes[f"FILESIZE_{format}"]) != 0:
                 return format_num
             elif not track.filesizes[f"FILESIZE_{format}_TESTED"]:
-                request = get(self.dz.get_track_stream_url(track.id, track.MD5, track.mediaVersion, format_num), stream=True)
+                request = requests.head(self.dz.get_track_stream_url(track.id, track.MD5, track.mediaVersion, format_num), headers={'User-Agent': USER_AGENT_HEADER}, timeout=30)
                 try:
                     request.raise_for_status()
                     return format_num
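This hunk swaps a streamed GET for a HEAD request when only the status of the stream URL matters. A minimal sketch of that idea (url_is_available and the USER_AGENT value are illustrative assumptions, not deemix identifiers): HEAD returns the status and headers without transferring the body, so nothing has to be read or closed afterwards.

import requests

USER_AGENT = "Mozilla/5.0"  # placeholder value, not the real deemix user agent

def url_is_available(url):
    # Hypothetical helper: True if the server answers HEAD with a 2xx status.
    try:
        response = requests.head(url,
                                 headers={'User-Agent': USER_AGENT},
                                 timeout=30)
        response.raise_for_status()
        return True
    except requests.exceptions.RequestException:
        return False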
@@ -642,43 +642,54 @@ class DownloadJob:
 
         return error_num # fallback is enabled and loop went through all formats
 
-    def streamTrack(self, stream, track, range=None):
-        if self.queueItem.cancel: raise DownloadCancelled
-
-        try:
-            headers=self.dz.http_headers
-            if range is not None:
-                headers['Range'] = range
-            request = self.dz.session.get(track.downloadUrl, headers=self.dz.http_headers, stream=True, timeout=10)
-        except request_exception.ConnectionError:
-            eventlet.sleep(2)
-            return self.streamTrack(stream, track)
-        request.raise_for_status()
-
-        blowfish_key = str.encode(self.dz._get_blowfish_key(str(track.id)))
-
-        complete = int(request.headers["Content-Length"])
-        if complete == 0:
-            raise DownloadEmpty
-        chunkLength = 0
-        percentage = 0
-        try:
-            for chunk in request.iter_content(2048 * 3):
-                eventlet.sleep(0)
-                if self.queueItem.cancel: raise DownloadCancelled
-
-                if len(chunk) >= 2048:
-                    chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk[0:2048]) + chunk[2048:]
-
-                stream.write(chunk)
-                chunkLength += len(chunk)
-
-                if isinstance(self.queueItem, QISingle):
-                    percentage = (chunkLength / complete) * 100
-                    self.downloadPercentage = percentage
-                else:
-                    chunkProgres = (len(chunk) / complete) / self.queueItem.size * 100
-                    self.downloadPercentage += chunkProgres
-
-                self.updatePercentage()
-        except SSLError:
-            range = f'bytes={chunkLength}-'
-            logger.info(f'retrying {track.title} with range {range}')
-            return self.streamTrack(stream, track, range)
+    def streamTrack(self, stream, track, start=0):
+        if self.queueItem.cancel: raise DownloadCancelled
+
+        headers = dict(self.dz.http_headers)
+        if start != 0:
+            headers['Range'] = f'bytes={start}-'
+        chunkLength = start
+        percentage = 0
+
+        try:
+            with self.dz.session.get(track.downloadUrl, headers=headers, stream=True, timeout=10) as request:
+                request.raise_for_status()
+
+                blowfish_key = str.encode(self.dz._get_blowfish_key(str(track.id)))
+
+                complete = int(request.headers["Content-Length"])
+                if complete == 0:
+                    raise DownloadEmpty
+                if start != 0:
+                    responseRange = request.headers["Content-Range"]
+                    logger.info(f'{track.title} downloading range {responseRange}')
+                else:
+                    logger.info(f'{track.title} downloading {complete} bytes')
+
+                for chunk in request.iter_content(2048 * 3):
+                    if self.queueItem.cancel: raise DownloadCancelled
+
+                    if len(chunk) >= 2048:
+                        chunk = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07").decrypt(chunk[0:2048]) + chunk[2048:]
+
+                    stream.write(chunk)
+                    chunkLength += len(chunk)
+
+                    if isinstance(self.queueItem, QISingle):
+                        percentage = (chunkLength / (complete + start)) * 100
+                        self.downloadPercentage = percentage
+                    else:
+                        chunkProgres = (len(chunk) / (complete + start)) / self.queueItem.size * 100
+                        self.downloadPercentage += chunkProgres
+
+                    self.updatePercentage()
+        except SSLError:
+            logger.info(f'retrying {track.title} from byte {chunkLength}')
+            return self.streamTrack(stream, track, chunkLength)
+        except (request_exception.ConnectionError, requests.exceptions.ReadTimeout):
+            eventlet.sleep(2)
+            return self.streamTrack(stream, track, start)
 
     def updatePercentage(self):
         if round(self.downloadPercentage) != self.lastPercentage and round(self.downloadPercentage) % 2 == 0:
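The rewritten streamTrack carries the resume offset (start / chunkLength) through every retry and asks the server for only the missing bytes with a Range header, so a retry appends to the stream instead of re-downloading data that was already written. A minimal standalone sketch of the same technique (download_resumable and its parameters are assumptions for illustration, not deemix code), resuming from the size of a partial file on disk rather than from an in-memory counter:

import os
import requests

def download_resumable(url, path, chunk_size=8192):
    # Hypothetical helper: (re)start a download from wherever 'path' stopped.
    start = os.path.getsize(path) if os.path.exists(path) else 0
    headers = {}
    if start != 0:
        headers['Range'] = f'bytes={start}-'  # request only the missing tail
    with requests.get(url, headers=headers, stream=True, timeout=10) as response:
        response.raise_for_status()
        # 206 Partial Content means the server honoured the Range request;
        # appending is only safe in that case, otherwise start over.
        mode = 'ab' if response.status_code == 206 else 'wb'
        with open(path, mode) as f:
            for chunk in response.iter_content(chunk_size):
                f.write(chunk)

The diff keeps the offset in memory (chunkLength) and recurses into streamTrack on SSLError, ConnectionError, or ReadTimeout, which is also why the progress calculation now divides by complete + start.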