mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-28 09:31:01 +01:00)
[downloader/fragment] use the documented names for fragment progress_hooks fields
commit 3e0304fe6e
parent fbf56be213
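
Context for the rename (not part of the diff): progress hooks are documented to receive 'fragment_index' and 'fragment_count' for fragmented downloads, and this commit makes the fragment downloaders emit exactly those names. Below is a minimal sketch of a hook that consumes them, assuming the current yt-dlp package; the hook body, options and URL are illustrative, not taken from the repository.

import yt_dlp

def show_fragment_progress(d):
    # 'fragment_index' / 'fragment_count' are only set for fragmented formats,
    # so read them defensively with .get().
    if d['status'] == 'downloading' and d.get('fragment_index') is not None:
        print('fragment %s of %s' % (d['fragment_index'], d.get('fragment_count')))

with yt_dlp.YoutubeDL({'progress_hooks': [show_fragment_progress]}) as ydl:
    ydl.download(['https://example.com/playlist.m3u8'])  # made-up URL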
@@ -28,7 +28,7 @@ def real_download(self, filename, info_dict):
         frag_index = 0
         for i, segment in enumerate(segments):
             frag_index += 1
-            if frag_index <= ctx['frag_index']:
+            if frag_index <= ctx['fragment_index']:
                 continue
             # In DASH, the first segment contains necessary headers to
             # generate a valid MP4 file, so always abort for the first segment
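
The same resume check recurs in each downloader hunk of this commit. A standalone sketch of the pattern, with made-up segment names and an assumed ctx value:

segments = ['seg1.m4s', 'seg2.m4s', 'seg3.m4s', 'seg4.m4s']  # illustrative manifest
ctx = {'fragment_index': 2}  # e.g. restored from the .ytdl control file

frag_index = 0
for segment in segments:
    frag_index += 1
    if frag_index <= ctx['fragment_index']:
        continue  # already downloaded in an earlier run
    print('downloading', segment)  # stand-in for the real fragment download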
@@ -376,7 +376,7 @@ def real_download(self, filename, info_dict):
         while fragments_list:
             seg_i, frag_i = fragments_list.pop(0)
             frag_index += 1
-            if frag_index <= ctx['frag_index']:
+            if frag_index <= ctx['fragment_index']:
                 continue
             name = 'Seg%d-Frag%d' % (seg_i, frag_i)
             query = []
@@ -66,7 +66,9 @@ def _append_fragment(self, ctx, frag_content):
         if not (ctx.get('live') or ctx['tmpfilename'] == '-'):
             frag_index_stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
             frag_index_stream.write(json.dumps({
-                'frag_index': ctx['frag_index']
+                'download': {
+                    'last_fragment_index': ctx['fragment_index']
+                },
             }))
             frag_index_stream.close()
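
For illustration only, the new shape of the .ytdl control file written above, rebuilt as a standalone snippet; the file name and index value are made up:

import json

resume_state = {
    'download': {
        'last_fragment_index': 42,  # last fragment fully appended to the temp file
    },
}
with open('video.mp4.ytdl', 'w') as f:
    f.write(json.dumps(resume_state))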
@@ -100,7 +102,7 @@ def _prepare_frag_download(self, ctx):
             ytdl_filename = encodeFilename(self.ytdl_filename(ctx['filename']))
             if os.path.isfile(ytdl_filename):
                 frag_index_stream, _ = sanitize_open(ytdl_filename, 'r')
-                frag_index = json.loads(frag_index_stream.read())['frag_index']
+                frag_index = json.loads(frag_index_stream.read())['download']['last_fragment_index']
                 frag_index_stream.close()
         dest_stream, tmpfilename = sanitize_open(tmpfilename, open_mode)
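
And the matching read path, a sketch that mirrors the updated lookup ['download']['last_fragment_index'] (same hypothetical file name):

import json

with open('video.mp4.ytdl') as f:
    frag_index = json.loads(f.read())['download']['last_fragment_index']
print('resuming after fragment', frag_index)  # -> resuming after fragment 42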
@@ -108,7 +110,7 @@ def _prepare_frag_download(self, ctx):
             'dl': dl,
             'dest_stream': dest_stream,
             'tmpfilename': tmpfilename,
-            'frag_index': frag_index,
+            'fragment_index': frag_index,
             # Total complete fragments downloaded so far in bytes
             'complete_frags_downloaded_bytes': resume_len,
         })
@@ -120,8 +122,8 @@ def _start_frag_download(self, ctx):
         state = {
             'status': 'downloading',
             'downloaded_bytes': ctx['complete_frags_downloaded_bytes'],
-            'frag_index': ctx['frag_index'],
-            'frag_count': total_frags,
+            'fragment_index': ctx['fragment_index'],
+            'fragment_count': total_frags,
             'filename': ctx['filename'],
             'tmpfilename': ctx['tmpfilename'],
         }
@@ -144,12 +146,12 @@ def frag_progress_hook(s):
             if not ctx['live']:
                 estimated_size = (
                     (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes) /
-                    (state['frag_index'] + 1) * total_frags)
+                    (state['fragment_index'] + 1) * total_frags)
                 state['total_bytes_estimate'] = estimated_size

             if s['status'] == 'finished':
-                state['frag_index'] += 1
-                ctx['frag_index'] = state['frag_index']
+                state['fragment_index'] += 1
+                ctx['fragment_index'] = state['fragment_index']
                 state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes']
                 ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes']
                 ctx['prev_frag_downloaded_bytes'] = 0
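
A worked example of the total_bytes_estimate formula above, under assumed numbers: average the bytes seen so far over the fragments seen so far, then scale to the total fragment count.

complete_frags_downloaded_bytes = 9000000  # assumed: 9 MB across 9 finished fragments
frag_total_bytes = 1000000                 # assumed: current fragment is 1 MB
fragment_index = 9                         # fragments already finished
total_frags = 100

estimated_size = (
    (complete_frags_downloaded_bytes + frag_total_bytes) /
    (fragment_index + 1) * total_frags)
print(estimated_size)  # 100000000.0, i.e. roughly 100 MB for the whole download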
@@ -106,7 +106,7 @@ def real_download(self, filename, info_dict):
             if line:
                 if not line.startswith('#'):
                     frag_index += 1
-                    if frag_index <= ctx['frag_index']:
+                    if frag_index <= ctx['fragment_index']:
                         continue
                     frag_url = (
                         line
@@ -227,7 +227,7 @@ def real_download(self, filename, info_dict):
         frag_index = 0
         for i, segment in enumerate(segments):
             frag_index += 1
-            if frag_index <= ctx['frag_index']:
+            if frag_index <= ctx['fragment_index']:
                 continue
             count = 0
             while count <= fragment_retries: