[downloader/ism] Prevent writing the header again when resuming an interrupted download
parent 4d49884c58
commit d4553567d2
1 changed file with 6 additions and 3 deletions
@@ -221,10 +221,13 @@ class IsmFD(FragmentFD):
 
         self._prepare_and_start_frag_download(ctx)
 
+        extra_state = ctx.setdefault('extra_state', {
+            'ism_track_written': False,
+        })
+
         fragment_retries = self.params.get('fragment_retries', 0)
         skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
 
-        track_written = False
         frag_index = 0
         for i, segment in enumerate(segments):
             frag_index += 1
@@ -236,11 +239,11 @@ class IsmFD(FragmentFD):
                     success, frag_content = self._download_fragment(ctx, segment['url'], info_dict)
                     if not success:
                         return False
-                    if not track_written:
+                    if not extra_state['ism_track_written']:
                         tfhd_data = extract_box_data(frag_content, [b'moof', b'traf', b'tfhd'])
                         info_dict['_download_params']['track_id'] = u32.unpack(tfhd_data[4:8])[0]
                         write_piff_header(ctx['dest_stream'], info_dict['_download_params'])
-                        track_written = True
+                        extra_state['ism_track_written'] = True
                     self._append_fragment(ctx, frag_content)
                     break
                 except compat_urllib_error.HTTPError as err:
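Before this change the "PIFF header already written" flag lived in a local variable (track_written), so a download resumed after an interruption started with the flag reset and wrote the header a second time into the middle of the output file. Moving the flag into ctx['extra_state'] lets the fragment downloader carry it along with the rest of the resume state, so the header is written exactly once per file. The sketch below illustrates the pattern outside of youtube-dl; the control-file layout and the helpers load_state/save_state are assumptions for illustration, not the real FragmentFD API.

# Minimal, self-contained sketch of the resume-safe flag pattern, assuming a
# JSON control file similar in spirit to the downloader's .ytdl state file.
# Helper names and the b'PIFF-HEADER' placeholder are hypothetical.
import json
import os


def load_state(control_file):
    # Return previously persisted download state, or an empty dict on a fresh start.
    if os.path.exists(control_file):
        with open(control_file, 'r', encoding='utf-8') as f:
            return json.load(f)
    return {}


def save_state(control_file, state):
    with open(control_file, 'w', encoding='utf-8') as f:
        json.dump(state, f)


def download(segments, dest_file, control_file):
    state = load_state(control_file)
    # Counterpart of ctx.setdefault('extra_state', {'ism_track_written': False}):
    # the flag only defaults to False when no earlier state exists.
    extra_state = state.setdefault('extra_state', {'ism_track_written': False})
    frag_index = state.get('fragment_index', 0)

    with open(dest_file, 'ab') as out:
        for i, segment in enumerate(segments):
            if i < frag_index:
                continue  # fragment was already appended before the interruption
            if not extra_state['ism_track_written']:
                out.write(b'PIFF-HEADER')  # stand-in for write_piff_header()
                extra_state['ism_track_written'] = True
            out.write(segment)
            state['fragment_index'] = i + 1
            save_state(control_file, state)

With a local flag, re-running download() after a crash would prepend the header bytes again before the remaining fragments; persisting the flag next to fragment_index keeps the two pieces of resume state consistent, which is the point of the commit.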