parent 06dfe0a0a2
commit f304da8a29
16 changed files with 87 additions and 79 deletions
@@ -34,7 +34,7 @@ body:
       label: Example URLs
       description: |
         Provide all kinds of example URLs for which support should be added
-      value: |
+      placeholder: |
         - Single video: https://www.youtube.com/watch?v=BaW_jenozKc
         - Single video: https://youtu.be/BaW_jenozKc
         - Playlist: https://www.youtube.com/playlist?list=PL4lCao7KL_QFVb7Iudeipvc2BCavECqzc
.gitignore (vendored): 56 changes
@@ -6,41 +6,47 @@ cookies
.netrc

# Downloaded
*.srt
*.ttml
*.sbv
*.vtt
*.flv
*.mp4
*.m4a
*.m4v
*.mp3
*.3gp
*.webm
*.wav
*.annotations.xml
*.ape
*.mkv
*.flac
*.aria2
*.avi
*.swf
*.part
*.part-*
*.ytdl
*.description
*.desktop
*.dump
*.flac
*.flv
*.frag
*.frag.urls
*.aria2
*.swp
*.info.json
*.jpeg
*.jpg
*.live_chat.json
*.m4a
*.m4v
*.mhtml
*.mkv
*.mov
*.mp3
*.mp4
*.ogg
*.opus
*.info.json
*.live_chat.json
*.jpg
*.jpeg
*.part
*.part-*
*.png
*.sbv
*.srt
*.swf
*.swp
*.ttml
*.unknown_video
*.url
*.vtt
*.wav
*.webloc
*.webm
*.webp
*.annotations.xml
*.description
*.ytdl
.cache/

# Allow config/media files in testdata
Makefile: 7 changes
@@ -15,9 +15,11 @@ pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites com
 clean-test:
 	rm -rf *.3gp *.annotations.xml *.ape *.avi *.description *.dump *.flac *.flv *.frag *.frag.aria2 *.frag.urls \
 	*.info.json *.jpeg *.jpg *.live_chat.json *.m4a *.m4v *.mkv *.mp3 *.mp4 *.ogg *.opus *.part* *.png *.sbv *.srt \
-	*.swf *.swp *.ttml *.vtt *.wav *.webm *.webp *.ytdl test/testdata/player-*.js
+	*.swf *.swp *.ttml *.vtt *.wav *.webm *.webp *.mhtml *.mov *.unknown_video *.desktop *.url *.webloc *.ytdl \
+	test/testdata/player-*.js tmp/
 clean-dist:
-	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
+	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
+	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
 clean-cache:
 	find . -name "*.pyc" -o -name "*.class" -delete
 
@@ -31,7 +33,6 @@ DESTDIR ?= .
 BINDIR ?= $(PREFIX)/bin
 MANDIR ?= $(PREFIX)/man
 SHAREDIR ?= $(PREFIX)/share
-# make_supportedsites.py doesnot work correctly in python2
 PYTHON ?= /usr/bin/env python3
 
 # set SYSCONFDIR to /etc if PREFIX=/usr or PREFIX=/usr/local
@@ -1204,7 +1204,7 @@ To use percent literals in an output template use `%%`. To output to stdout use
 
 The current default template is `%(title)s [%(id)s].%(ext)s`.
 
-In some cases, you don't want special characters such as 中, spaces, or &, such as when transferring the downloaded filename to a Windows system or the filename through an 8bit-unsafe channel. In these cases, add the `--restrict-filenames` flag to get a shorter title:
+In some cases, you don't want special characters such as 中, spaces, or &, such as when transferring the downloaded filename to a Windows system or the filename through an 8bit-unsafe channel. In these cases, add the `--restrict-filenames` flag to get a shorter title.
 
 #### Output template and Windows batch files
 
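The paragraph changed above describes what `--restrict-filenames` does to titles; internally the output-template fields are cleaned by `yt_dlp.utils.sanitize_filename`. A minimal sketch, assuming that helper and its `restricted` keyword (the sample title is made up):

```python
# Hedged sketch: compare default vs restricted filename sanitization.
# `sanitize_filename` and `restricted` are assumed from yt_dlp.utils;
# the sample title is illustrative only.
from yt_dlp.utils import sanitize_filename

title = 'Some video: 中文 & spaces'
print(sanitize_filename(title))                   # default mode keeps spaces and most Unicode
print(sanitize_filename(title, restricted=True))  # ASCII-only; spaces and '&' become '_'
```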
@@ -1614,7 +1614,7 @@ with YoutubeDL(ydl_opts) as ydl:
     ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])
 ```
 
-Most likely, you'll want to use various options. For a list of options available, have a look at [`yt_dlp/YoutubeDL.py`](yt_dlp/YoutubeDL.py#L154-L452).
+Most likely, you'll want to use various options. For a list of options available, have a look at [`yt_dlp/YoutubeDL.py`](yt_dlp/YoutubeDL.py#L162).
 
 Here's a more complete example demonstrating various functionality:
 
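Since the README line above now points at a single anchor in `yt_dlp/YoutubeDL.py` for the full option list, a minimal embedding sketch with a couple of documented `ydl_opts` keys may be useful; the chosen values are illustrative, not a recommendation:

```python
# Minimal embedding sketch: options are passed to YoutubeDL as a plain dict.
# 'format' and 'outtmpl' are documented ydl_opts keys; the values are examples.
from yt_dlp import YoutubeDL

ydl_opts = {
    'format': 'bestvideo*+bestaudio/best',    # format selection expression
    'outtmpl': '%(title)s [%(id)s].%(ext)s',  # same pattern as the default output template
}
with YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])
```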
@@ -1785,7 +1785,7 @@ These are aliases that are no longer documented for various reasons
     --yes-overwrites                 --force-overwrites
 
 #### Sponskrub Options
-Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of `--sponsorblock`
+Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
 
     --sponskrub                      --sponsorblock-mark all
     --no-sponskrub                   --no-sponsorblock
@@ -848,7 +848,7 @@ class YoutubeDL(object):
 
     class Styles(Enum):
         HEADERS = 'yellow'
-        EMPHASIS = 'blue'
+        EMPHASIS = 'light blue'
         ID = 'green'
         DELIM = 'blue'
         ERROR = 'red'
@@ -863,7 +863,7 @@ class YoutubeDL(object):
         if fallback is not None and text != original_text:
             text = fallback
         if isinstance(f, self.Styles):
-            f = f._value_
+            f = f.value
         return format_text(text, f) if allow_colors else text if fallback is None else fallback
 
     def _format_screen(self, *args, **kwargs):
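The `f._value_` to `f.value` change here (and the matching ones in the metadata parser further down) only switches to the public enum accessor; both names resolve to the same data:

```python
# Both spellings return the member's assigned value; `.value` is the
# documented public accessor, `._value_` the underlying attribute.
from enum import Enum

class Styles(Enum):
    HEADERS = 'yellow'
    EMPHASIS = 'light blue'

assert Styles.EMPHASIS.value == 'light blue'
assert Styles.EMPHASIS.value == Styles.EMPHASIS._value_
```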
@@ -3229,8 +3229,12 @@ class YoutubeDL(object):
                 res += ' '
             res += '[%s]' % fdict['language']
         if fdict.get('format_note') is not None:
-            res += fdict['format_note'] + ' '
+            if res:
+                res += ' '
+            res += fdict['format_note']
         if fdict.get('tbr') is not None:
+            if res:
+                res += ', '
             res += '%4dk' % fdict['tbr']
         if fdict.get('container') is not None:
             if res:
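The reshuffled `format_note` handling above avoids a stray trailing space and only adds a separator when something already precedes the note. A standalone sketch of that joining pattern (not the real method):

```python
# Illustrative only: join note fragments with a separator, but never
# produce leading or trailing whitespace.
def join_notes(*parts):
    res = ''
    for part in parts:
        if not part:
            continue
        if res:
            res += ' '
        res += part
    return res

assert join_notes('[en]', 'DASH video') == '[en] DASH video'
assert join_notes(None, 'DASH video') == 'DASH video'    # no leading space
assert join_notes('[en]', None) == '[en]'                # no trailing space
```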
@@ -25,18 +25,16 @@ from .cookies import SUPPORTED_BROWSERS
 from .utils import (
     DateRange,
     decodeOption,
+    DownloadCancelled,
     DownloadError,
     error_to_compat_str,
-    ExistingVideoReached,
     expand_path,
     float_or_none,
     int_or_none,
     match_filter_func,
-    MaxDownloadsReached,
     parse_duration,
     preferredencoding,
     read_batch_urls,
-    RejectedVideoReached,
     render_table,
     SameFileError,
     setproctitle,
@@ -195,7 +193,7 @@ def _real_main(argv=None):
     if opts.overwrites: # --yes-overwrites implies --no-continue
         opts.continue_dl = False
     if opts.concurrent_fragment_downloads <= 0:
-        raise ValueError('Concurrent fragments must be positive')
+        parser.error('Concurrent fragments must be positive')
     if opts.wait_for_video is not None:
         mobj = re.match(r'(?P<min>\d+)(?:-(?P<max>\d+))?$', opts.wait_for_video)
         if not mobj:
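Switching from `raise ValueError(...)` to `parser.error(...)` changes what the user sees: optparse prints the usage line plus the message to stderr and exits with status 2 instead of dumping a traceback. A small demonstration:

```python
# OptionParser.error() prints usage + message to stderr, then raises
# SystemExit(2); a plain ValueError would surface as a traceback instead.
import optparse

parser = optparse.OptionParser(prog='yt-dlp')
try:
    parser.error('Concurrent fragments must be positive')
except SystemExit as exc:
    print('exited with status', exc.code)  # exited with status 2
```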
@@ -231,9 +229,9 @@ def _real_main(argv=None):
         parser.error('invalid http chunk size specified')
     opts.http_chunk_size = numeric_chunksize
     if opts.playliststart <= 0:
-        raise ValueError('Playlist start must be positive')
+        raise parser.error('Playlist start must be positive')
     if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
-        raise ValueError('Playlist end must be greater than playlist start')
+        raise parser.error('Playlist end must be greater than playlist start')
     if opts.extractaudio:
         opts.audioformat = opts.audioformat.lower()
         if opts.audioformat not in ['best'] + list(FFmpegExtractAudioPP.SUPPORTED_EXTS):
@@ -762,7 +760,7 @@ def _real_main(argv=None):
     }
 
     with YoutubeDL(ydl_opts) as ydl:
-        actual_use = len(all_urls) or opts.load_info_filename
+        actual_use = all_urls or opts.load_info_filename
 
         # Remove cache dir
         if opts.rm_cachedir:
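`actual_use` is only ever tested for truthiness, so dropping the `len()` call changes nothing observable: an empty URL list is already falsy and a non-empty one is truthy.

```python
# Same boolean outcome with or without len(); an empty list is already falsy.
for all_urls in ([], ['https://www.youtube.com/watch?v=BaW_jenozKc']):
    load_info_filename = None
    assert bool(len(all_urls) or load_info_filename) == bool(all_urls or load_info_filename)
```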
@@ -791,7 +789,7 @@ def _real_main(argv=None):
                 retcode = ydl.download_with_info_file(expand_path(opts.load_info_filename))
             else:
                 retcode = ydl.download(all_urls)
-        except (MaxDownloadsReached, ExistingVideoReached, RejectedVideoReached):
+        except DownloadCancelled:
             ydl.to_screen('Aborting remaining downloads')
             retcode = 101
 
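Catching only `DownloadCancelled` works because exception handlers also match subclasses; presumably the three removed names now derive from it (see the `yt_dlp.utils` hunk near the end). A generic sketch of the pattern, with stand-in classes rather than the real ones:

```python
# Stand-in classes: an `except BaseError` clause also catches subclasses,
# which is what lets one DownloadCancelled handler replace three names.
class DownloadCancelled(Exception):
    """Raised when the download queue should be interrupted."""

class MaxDownloadsReached(DownloadCancelled):
    pass

try:
    raise MaxDownloadsReached('limit hit')
except DownloadCancelled:
    print('Aborting remaining downloads')
```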
@@ -41,6 +41,7 @@ from .external import (

PROTOCOL_MAP = {
    'rtmp': RtmpFD,
    'rtmpe': RtmpFD,
    'rtmp_ffmpeg': FFmpegFD,
    'm3u8_native': HlsFD,
    'm3u8': FFmpegFD,
@@ -93,6 +93,8 @@ class FileDownloader(object):
     def format_percent(percent):
         if percent is None:
             return '---.-%'
+        elif percent == 100:
+            return '100%'
         return '%6s' % ('%3.1f%%' % percent)
 
     @staticmethod
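The new branch gives 100 a compact rendering; the generic path below it would print `100.0%` padded to six characters. Re-running the method body standalone shows the difference:

```python
# Same logic as the static method above, runnable standalone for illustration.
def format_percent(percent):
    if percent is None:
        return '---.-%'
    elif percent == 100:
        return '100%'
    return '%6s' % ('%3.1f%%' % percent)

assert format_percent(None) == '---.-%'
assert format_percent(100) == '100%'            # new early return
assert format_percent(42.25) == ' 42.2%'        # width-6, one decimal place
assert ('%6s' % ('%3.1f%%' % 100)) == '100.0%'  # what 100 used to render as
```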
@@ -298,7 +300,7 @@ class FileDownloader(object):
                 s['_elapsed_str'] = self.format_seconds(s['elapsed'])
                 msg_template += ' in %(_elapsed_str)s'
             s['_percent_str'] = self.format_percent(100)
-            self._report_progress_status(s)
+            self._report_progress_status(s, msg_template)
             return
 
         if s['status'] != 'downloading':
@@ -307,7 +309,7 @@ class FileDownloader(object):
         if s.get('eta') is not None:
             s['_eta_str'] = self.format_eta(s['eta'])
         else:
-            s['_eta_str'] = 'Unknown ETA'
+            s['_eta_str'] = 'Unknown'
 
         if s.get('total_bytes') and s.get('downloaded_bytes') is not None:
             s['_percent_str'] = self.format_percent(100 * s['downloaded_bytes'] / s['total_bytes'])
@@ -339,7 +341,7 @@ class FileDownloader(object):
             else:
                 msg_template = '%(_downloaded_bytes_str)s at %(_speed_str)s'
         else:
-            msg_template = '%(_percent_str)s % at %(_speed_str)s ETA %(_eta_str)s'
+            msg_template = '%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s'
         if s.get('fragment_index') and s.get('fragment_count'):
             msg_template += ' (frag %(fragment_index)s/%(fragment_count)s)'
         elif s.get('fragment_index'):
@@ -472,8 +472,7 @@ class BBCCoUkIE(InfoExtractor):
                         f['language_preference'] = -10
                 formats += version_formats
                 for tag, subformats in (version_subtitles or {}).items():
-                    subtitles.setdefault(tag, [])
-                    subtitles[tag] += subformats
+                    subtitles.setdefault(tag, []).extend(subformats)
 
             return programme_id, title, description, duration, formats, subtitles
         except ExtractorError as ee:
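The two-line pattern collapses because `dict.setdefault()` returns the stored (or newly inserted) list, so `.extend()` can be chained onto it:

```python
# setdefault() hands back the list living in the dict, so extending its
# return value mutates the stored entry in place.
subtitles = {}
subtitles.setdefault('en', []).extend([{'url': 'a.vtt'}])
subtitles.setdefault('en', []).extend([{'url': 'b.vtt'}])
assert [s['url'] for s in subtitles['en']] == ['a.vtt', 'b.vtt']
```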
@@ -1538,10 +1538,10 @@ class InfoExtractor(object):
 
         default = ('hidden', 'aud_or_vid', 'hasvid', 'ie_pref', 'lang', 'quality',
                    'res', 'fps', 'hdr:12', 'codec:vp9.2', 'size', 'br', 'asr',
-                   'proto', 'ext', 'hasaud', 'source', 'format_id') # These must not be aliases
+                   'proto', 'ext', 'hasaud', 'source', 'id') # These must not be aliases
         ytdl_default = ('hasaud', 'lang', 'quality', 'tbr', 'filesize', 'vbr',
                         'height', 'width', 'proto', 'vext', 'abr', 'aext',
-                        'fps', 'fs_approx', 'source', 'format_id')
+                        'fps', 'fs_approx', 'source', 'id')
 
         settings = {
             'vcodec': {'type': 'ordered', 'regex': True,
@@ -1551,7 +1551,7 @@ class InfoExtractor(object):
             'hdr': {'type': 'ordered', 'regex': True, 'field': 'dynamic_range',
                     'order': ['dv', '(hdr)?12', r'(hdr)?10\+', '(hdr)?10', 'hlg', '', 'sdr', None]},
             'proto': {'type': 'ordered', 'regex': True, 'field': 'protocol',
-                      'order': ['(ht|f)tps', '(ht|f)tp$', 'm3u8.+', '.*dash', 'ws|websocket', '', 'mms|rtsp', 'none', 'f4']},
+                      'order': ['(ht|f)tps', '(ht|f)tp$', 'm3u8.*', '.*dash', 'websocket_frag', 'rtmpe?', '', 'mms|rtsp', 'ws|websocket', 'f4']},
             'vext': {'type': 'ordered', 'field': 'video_ext',
                      'order': ('mp4', 'webm', 'flv', '', 'none'),
                      'order_free': ('webm', 'mp4', 'flv', '', 'none')},
@@ -1615,7 +1615,12 @@ class InfoExtractor(object):
             'format_id': {'type': 'alias', 'field': 'id'},
         }
 
-        _order = []
+        def __init__(self, ie, field_preference):
+            self._order = []
+            self.ydl = ie._downloader
+            self.evaluate_params(self.ydl.params, field_preference)
+            if ie.get_param('verbose'):
+                self.print_verbose_info(self.ydl.write_debug)
 
         def _get_field_setting(self, field, key):
             if field not in self.settings:
@@ -1805,10 +1810,7 @@ class InfoExtractor(object):
     def _sort_formats(self, formats, field_preference=[]):
         if not formats:
             return
-        format_sort = self.FormatSort() # params and to_screen are taken from the downloader
-        format_sort.evaluate_params(self._downloader.params, field_preference)
-        if self.get_param('verbose', False):
-            format_sort.print_verbose_info(self._downloader.write_debug)
+        format_sort = self.FormatSort(self, field_preference)
         formats.sort(key=lambda f: format_sort.calculate_preference(f))
 
     def _check_formats(self, formats, video_id):
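With `FormatSort.__init__` now doing the parameter evaluation and verbose logging itself, every call site shrinks to one constructor call plus the sort. A generic sketch of that refactor pattern (names and fields are illustrative, not yt-dlp's API):

```python
# Illustrative refactor pattern: setup that every caller repeated moves
# into the sorter's constructor, so sorting becomes a one-liner.
class FieldSorter:
    def __init__(self, owner, field_preference):
        self._fields = list(field_preference)
        if owner.params.get('verbose'):
            print('Sort order:', self._fields)

    def key(self, fmt):
        return tuple(fmt.get(f, 0) for f in self._fields)

class FakeExtractor:
    params = {'verbose': False}

formats = [{'tbr': 1200, 'height': 720}, {'tbr': 900, 'height': 1080}]
formats.sort(key=FieldSorter(FakeExtractor(), ['height', 'tbr']).key)
assert formats[0]['height'] == 720  # ascending by the first preference field
```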
@@ -499,7 +499,7 @@ class InstagramUserIE(InstagramPlaylistBaseIE):
 
 class InstagramTagIE(InstagramPlaylistBaseIE):
     _VALID_URL = r'https?://(?:www\.)?instagram\.com/explore/tags/(?P<id>[^/]+)'
-    IE_DESC = 'Instagram hashtag search'
+    IE_DESC = 'Instagram hashtag search URLs'
     IE_NAME = 'instagram:tag'
     _TESTS = [{
         'url': 'https://instagram.com/explore/tags/lolcats',
@@ -21,7 +21,6 @@ class MLSSoccerIE(InfoExtractor):
             'uploader_id': '5530036772001',
             'tags': ['club/canada'],
-            'is_live': False,
             'duration_string': '5:50',
             'upload_date': '20211007',
             'filesize_approx': 255193528.83200002
         },
@@ -703,7 +703,7 @@ class NicovideoSearchURLIE(InfoExtractor):
 
 
 class NicovideoSearchIE(SearchInfoExtractor, NicovideoSearchURLIE):
-    IE_DESC = 'Nico video searches'
+    IE_DESC = 'Nico video search'
     IE_NAME = NicovideoSearchIE_NAME
     _SEARCH_KEY = 'nicosearch'
     _TESTS = []
@@ -714,7 +714,7 @@ class NicovideoSearchIE(SearchInfoExtractor, NicovideoSearchURLIE):
 
 
 class NicovideoSearchDateIE(NicovideoSearchIE):
-    IE_DESC = 'Nico video searches, newest first'
+    IE_DESC = 'Nico video search, newest first'
     IE_NAME = f'{NicovideoSearchIE_NAME}:date'
     _SEARCH_KEY = 'nicosearchdate'
     _TESTS = [{
@@ -377,10 +377,6 @@ def parseOpts(overrideArguments=None):
         '--reject-title',
         dest='rejecttitle', metavar='REGEX',
         help=optparse.SUPPRESS_HELP)
-    selection.add_option(
-        '--max-downloads',
-        dest='max_downloads', metavar='NUMBER', type=int, default=None,
-        help='Abort after downloading NUMBER files')
     selection.add_option(
         '--min-filesize',
         metavar='SIZE', dest='min_filesize', default=None,
@@ -451,6 +447,14 @@ def parseOpts(overrideArguments=None):
         '--download-archive', metavar='FILE',
         dest='download_archive',
         help='Download only videos not listed in the archive file. Record the IDs of all downloaded videos in it')
+    selection.add_option(
+        '--no-download-archive',
+        dest='download_archive', action="store_const", const=None,
+        help='Do not use archive file (default)')
+    selection.add_option(
+        '--max-downloads',
+        dest='max_downloads', metavar='NUMBER', type=int, default=None,
+        help='Abort after downloading NUMBER files')
     selection.add_option(
         '--break-on-existing',
         action='store_true', dest='break_on_existing', default=False,
@@ -471,10 +475,6 @@ def parseOpts(overrideArguments=None):
         '--skip-playlist-after-errors', metavar='N',
         dest='skip_playlist_after_errors', default=None, type=int,
         help='Number of allowed failures until the rest of the playlist is skipped')
-    selection.add_option(
-        '--no-download-archive',
-        dest='download_archive', action="store_const", const=None,
-        help='Do not use archive file (default)')
     selection.add_option(
         '--include-ads',
         dest='include_ads', action='store_true',
@@ -1154,7 +1154,7 @@ def parseOpts(overrideArguments=None):
     filesystem.add_option(
         '--cookies',
         dest='cookiefile', metavar='FILE',
-        help='File to read cookies from and dump cookie jar in')
+        help='Netscape formatted file to read cookies from and dump cookie jar in')
     filesystem.add_option(
         '--no-cookies',
         action='store_const', const=None, dest='cookiefile', metavar='FILE',
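"Netscape formatted" refers to the classic cookies.txt layout; Python's standard library reads and writes it, which gives a quick way to check whether a file looks like what `--cookies` expects (the path below is a placeholder):

```python
# MozillaCookieJar speaks the Netscape cookies.txt format; if load() succeeds,
# the file should be usable with --cookies. 'cookies.txt' is a placeholder path.
from http.cookiejar import MozillaCookieJar

jar = MozillaCookieJar('cookies.txt')
jar.load(ignore_discard=True, ignore_expires=True)
print(f'{len(jar)} cookies loaded')
```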
@@ -1354,7 +1354,7 @@ def parseOpts(overrideArguments=None):
             'Automatically correct known faults of the file. '
             'One of never (do nothing), warn (only emit a warning), '
             'detect_or_warn (the default; fix file if we can, warn otherwise), '
-            'force (try fixing even if file already exists'))
+            'force (try fixing even if file already exists)'))
     postproc.add_option(
         '--prefer-avconv', '--no-prefer-ffmpeg',
         action='store_false', dest='prefer_ffmpeg',
@@ -16,7 +16,7 @@ class MetadataParserPP(PostProcessor):
         for f in actions:
             action = f[0]
             assert isinstance(action, self.Actions)
-            self._actions.append(getattr(self, action._value_)(*f[1:]))
+            self._actions.append(getattr(self, action.value)(*f[1:]))
 
     @classmethod
     def validate_action(cls, action, *data):
@@ -26,7 +26,7 @@ class MetadataParserPP(PostProcessor):
         '''
         if not isinstance(action, cls.Actions):
             raise ValueError(f'{action!r} is not a valid action')
-        getattr(cls, action._value_)(cls, *data)
+        getattr(cls, action.value)(cls, *data)
 
     @staticmethod
     def field_to_template(tmpl):
@@ -2575,10 +2575,6 @@ class PostProcessingError(YoutubeDLError):
     indicate an error in the postprocessing task.
     """
 
-    def __init__(self, msg):
-        super(PostProcessingError, self).__init__(msg)
-        self.msg = msg
-
 
 class DownloadCancelled(YoutubeDLError):
     """ Exception raised when the download queue should be interrupted """
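Dropping the subclass `__init__` only makes sense if the base exception already records the message; a sketch under that assumption (the real `YoutubeDLError` may differ):

```python
# Assumption for illustration: the base class stores .msg itself, so the
# subclass no longer needs to duplicate those two lines.
class YoutubeDLError(Exception):
    def __init__(self, msg=None):
        super().__init__(msg)
        self.msg = msg

class PostProcessingError(YoutubeDLError):
    """Raised by postprocessors to indicate an error in the postprocessing task."""

err = PostProcessingError('audio conversion failed')
assert err.msg == 'audio conversion failed'
```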