mirror of https://github.com/LucBerge/yt-dlp.git
synced 2025-03-17 19:57:52 +03:00
[cleanup] Misc
Closes #4710, Closes #4754, Closes #4723
Authored by: pukkandan, MrRawes, DavidH-2022
parent 1ac7f46184
commit d2c8aadf79
12 changed files with 67 additions and 71 deletions
@@ -1044,7 +1044,7 @@ class YoutubeDL:
     def get_output_path(self, dir_type='', filename=None):
         paths = self.params.get('paths', {})
-        assert isinstance(paths, dict)
+        assert isinstance(paths, dict), '"paths" parameter must be a dictionary'
         path = os.path.join(
             expand_path(paths.get('home', '').strip()),
             expand_path(paths.get(dir_type, '').strip()) if dir_type else '',
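The second argument of the assert now becomes the AssertionError message, so a misconfigured embedder gets an explanation rather than a bare traceback. A minimal standalone sketch of the difference (not the yt-dlp call site itself):

    # Standalone sketch: the message only changes what the raised error says
    paths = ['/downloads']  # deliberately the wrong type; 'paths' must be a dict

    try:
        assert isinstance(paths, dict), '"paths" parameter must be a dictionary'
    except AssertionError as err:
        print(err)  # -> "paths" parameter must be a dictionary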
@@ -2745,9 +2745,9 @@ class YoutubeDL:
                 if lang not in available_subs:
                     available_subs[lang] = cap_info

-        if (not self.params.get('writesubtitles') and not
-                self.params.get('writeautomaticsub') or not
-                available_subs):
+        if not available_subs or (
+                not self.params.get('writesubtitles')
+                and not self.params.get('writeautomaticsub')):
             return None

         all_sub_langs = tuple(available_subs.keys())
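The old condition leaned on `and` binding tighter than `or`, which obscured the intent; the rewrite groups the same logic explicitly. A quick standalone check of the equivalence, using stand-in booleans for the three values involved:

    from itertools import product

    # Stand-ins: subs = writesubtitles, auto = writeautomaticsub,
    # available = bool(available_subs)
    for subs, auto, available in product([False, True], repeat=3):
        old = (not subs and not auto or not available)   # old implicit grouping
        new = not available or (not subs and not auto)   # new explicit grouping
        assert old == new
    print('old and new conditions agree for all eight combinations')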
@@ -2764,7 +2764,7 @@ class YoutubeDL:
         else:
             requested_langs = ['en'] if 'en' in all_sub_langs else all_sub_langs[:1]
         if requested_langs:
-            self.write_debug('Downloading subtitles: %s' % ', '.join(requested_langs))
+            self.to_screen(f'[info] {video_id}: Downloading subtitles: {", ".join(requested_langs)}')

         formats_query = self.params.get('subtitlesformat', 'best')
         formats_preference = formats_query.split('/') if formats_query else []
@@ -365,7 +365,7 @@ def validate_options(opts):
             if keyring not in SUPPORTED_KEYRINGS:
                 raise ValueError(f'unsupported keyring specified for cookies: "{keyring}". '
                                  f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
-        opts.cookiesfrombrowser = (browser_name, profile or None, keyring, container or None)
+        opts.cookiesfrombrowser = (browser_name, profile, keyring, container)

     # MetadataParser
     def metadataparser_actions(f):
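The `or None` guards were redundant because these values come from optional regex groups, and an unmatched group is already `None`. A simplified sketch of why (this is not the exact pattern yt-dlp uses, just an illustration of the BROWSER[+KEYRING][:PROFILE][::CONTAINER] idea):

    import re

    # Hypothetical, simplified pattern for illustration only
    pattern = r'(?P<name>[^+:]+)(?:\+(?P<keyring>[^:]+))?(?::(?P<profile>[^:]+))?(?:::(?P<container>.+))?'
    mobj = re.fullmatch(pattern, 'firefox')

    print(mobj.group('name'))       # 'firefox'
    print(mobj.group('profile'))    # None - unmatched groups are already None,
    print(mobj.group('container'))  # so "profile or None" adds nothing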
@@ -25,7 +25,13 @@ from .dependencies import (
     sqlite3,
 )
 from .minicurses import MultilinePrinter, QuietMultilinePrinter
-from .utils import Popen, YoutubeDLCookieJar, error_to_str, expand_path, try_call
+from .utils import (
+    Popen,
+    YoutubeDLCookieJar,
+    error_to_str,
+    expand_path,
+    try_call,
+)

 CHROMIUM_BASED_BROWSERS = {'brave', 'chrome', 'chromium', 'edge', 'opera', 'vivaldi'}
 SUPPORTED_BROWSERS = CHROMIUM_BASED_BROWSERS | {'firefox', 'safari'}
@@ -138,7 +144,7 @@ def _extract_firefox_cookies(profile, container, logger):
         containers_path = os.path.join(os.path.dirname(cookie_database_path), 'containers.json')
         if not os.path.isfile(containers_path) or not os.access(containers_path, os.R_OK):
             raise FileNotFoundError(f'could not read containers.json in {search_root}')
-        with open(containers_path, 'r') as containers:
+        with open(containers_path) as containers:
             identities = json.load(containers).get('identities', [])
         container_id = next((context.get('userContextId') for context in identities if container in (
             context.get('name'),
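For context: Firefox keeps its container definitions in `containers.json` next to `cookies.sqlite`, and the code looks the requested container up by name to get its `userContextId`. A rough, hedged sketch of that lookup with made-up data (the field names follow Firefox's format as far as I know; treat the sample values as illustrative only):

    import json

    # Illustrative containers.json content (structure only; values are invented)
    sample = json.loads('''{
        "identities": [
            {"userContextId": 1, "name": "Personal"},
            {"userContextId": 2, "name": "Work"}
        ]
    }''')

    container = 'Work'
    container_id = next(
        (ctx.get('userContextId') for ctx in sample.get('identities', [])
         if ctx.get('name') == container),
        None)
    print(container_id)  # 2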
@@ -1,5 +1,28 @@
 # flake8: noqa: F401

+from .youtube import (  # Youtube is moved to the top to improve performance
+    YoutubeIE,
+    YoutubeClipIE,
+    YoutubeFavouritesIE,
+    YoutubeNotificationsIE,
+    YoutubeHistoryIE,
+    YoutubeTabIE,
+    YoutubeLivestreamEmbedIE,
+    YoutubePlaylistIE,
+    YoutubeRecommendedIE,
+    YoutubeSearchDateIE,
+    YoutubeSearchIE,
+    YoutubeSearchURLIE,
+    YoutubeMusicSearchURLIE,
+    YoutubeSubscriptionsIE,
+    YoutubeStoriesIE,
+    YoutubeTruncatedIDIE,
+    YoutubeTruncatedURLIE,
+    YoutubeYtBeIE,
+    YoutubeYtUserIE,
+    YoutubeWatchLaterIE,
+)
+
 from .abc import (
     ABCIE,
     ABCIViewIE,
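As I understand it, moving the YouTube imports to the top matters because URL dispatch tries extractors in list order, so the most common site being first means fewer `suitable()` checks for typical URLs. A small hedged sketch of that dispatch (assumes yt-dlp is installed; internals may differ from this simplification):

    from yt_dlp.extractor import gen_extractor_classes

    url = 'https://www.youtube.com/watch?v=BaW_jenozKc'
    for ie in gen_extractor_classes():
        if ie.suitable(url):
            print('matched by', ie.ie_key())  # with YouTube first, this exits early
            break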
@@ -2191,28 +2214,6 @@ from .younow import (
 from .youporn import YouPornIE
 from .yourporn import YourPornIE
 from .yourupload import YourUploadIE
-from .youtube import (
-    YoutubeIE,
-    YoutubeClipIE,
-    YoutubeFavouritesIE,
-    YoutubeNotificationsIE,
-    YoutubeHistoryIE,
-    YoutubeTabIE,
-    YoutubeLivestreamEmbedIE,
-    YoutubePlaylistIE,
-    YoutubeRecommendedIE,
-    YoutubeSearchDateIE,
-    YoutubeSearchIE,
-    YoutubeSearchURLIE,
-    YoutubeMusicSearchURLIE,
-    YoutubeSubscriptionsIE,
-    YoutubeStoriesIE,
-    YoutubeTruncatedIDIE,
-    YoutubeTruncatedURLIE,
-    YoutubeYtBeIE,
-    YoutubeYtUserIE,
-    YoutubeWatchLaterIE,
-)
 from .zapiks import ZapiksIE
 from .zattoo import (
     BBVTVIE,
@@ -3874,7 +3874,7 @@ class InfoExtractor:
     def _extract_from_webpage(cls, url, webpage):
         for embed_url in orderedSet(
                 cls._extract_embed_urls(url, webpage) or [], lazy=True):
-            yield cls.url_result(embed_url, cls)
+            yield cls.url_result(embed_url, None if cls._VALID_URL is False else cls)

     @classmethod
     def _extract_embed_urls(cls, url, webpage):
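`url_result` only pins the result to a specific extractor when an `ie` is passed; embed helpers whose `_VALID_URL` is `False` should instead let the extracted URL go back through normal dispatch. A hedged sketch of the difference (field name per my understanding of the API; the exact return shape may vary):

    from yt_dlp.extractor.common import InfoExtractor
    from yt_dlp.extractor.youtube import YoutubeIE

    pinned = InfoExtractor.url_result('https://www.youtube.com/watch?v=BaW_jenozKc', YoutubeIE)
    free = InfoExtractor.url_result('https://www.youtube.com/watch?v=BaW_jenozKc', None)

    print(pinned.get('ie_key'))  # 'Youtube' - forces this extractor to handle the URL
    print(free.get('ie_key'))    # None     - URL is re-dispatched normally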
@@ -5,7 +5,7 @@ from ..utils import ExtractorError


 class NewsPicksIE(InfoExtractor):
-    _VALID_URL = r'https://newspicks.com/movie-series/(?P<channel_id>\d+)\?movieId=(?P<id>\d+)'
+    _VALID_URL = r'https://newspicks\.com/movie-series/(?P<channel_id>\d+)\?movieId=(?P<id>\d+)'

     _TESTS = [{
         'url': 'https://newspicks.com/movie-series/11?movieId=1813',
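The fix escapes the dot in the domain: in a regex an unescaped `.` matches any character, so the old pattern would also accept look-alike hosts. A quick standalone illustration:

    import re

    loose = r'https://newspicks.com/'    # '.' matches any character
    strict = r'https://newspicks\.com/'  # '\.' matches only a literal dot

    url = 'https://newspicksXcom/movie-series/11'
    print(bool(re.match(loose, url)))   # True - look-alike host slips through
    print(bool(re.match(strict, url)))  # False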
@@ -3,13 +3,13 @@ import json

 from .common import InfoExtractor
 from ..utils import (
+    ExtractorError,
     int_or_none,
     str_or_none,
     traverse_obj,
     unified_strdate,
     unified_timestamp,
     url_basename,
-    ExtractorError,
 )


@@ -442,9 +442,9 @@ def create_parser():
             'allowed_values': {
                 'filename', 'filename-sanitization', 'format-sort', 'abort-on-error', 'format-spec', 'no-playlist-metafiles',
                 'multistreams', 'no-live-chat', 'playlist-index', 'list-formats', 'no-direct-merge',
-                'no-youtube-channel-redirect', 'no-youtube-unavailable-videos', 'no-attach-info-json', 'embed-metadata',
-                'embed-thumbnail-atomicparsley', 'seperate-video-versions', 'no-clean-infojson', 'no-keep-subs', 'no-certifi',
-                'no-youtube-prefer-utc-upload-date'
+                'no-attach-info-json', 'embed-metadata', 'embed-thumbnail-atomicparsley',
+                'seperate-video-versions', 'no-clean-infojson', 'no-keep-subs', 'no-certifi',
+                'no-youtube-channel-redirect', 'no-youtube-unavailable-videos', 'no-youtube-prefer-utc-upload-date',
             }, 'aliases': {
                 'youtube-dl': ['all', '-multistreams'],
                 'youtube-dlc': ['all', '-no-youtube-channel-redirect', '-no-live-chat'],
@@ -634,7 +634,7 @@ def create_parser():
     selection.add_option(
         '--break-per-input',
         action='store_true', dest='break_per_url', default=False,
-        help='Make --break-on-existing, --break-on-reject, --max-downloads and autonumber reset per input URL')
+        help='--break-on-existing, --break-on-reject, --max-downloads, and autonumber resets per input URL')
     selection.add_option(
         '--no-break-per-input',
         action='store_false', dest='break_per_url',
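For context, the same behaviour should be reachable when embedding, since the option's `dest` is `break_per_url`. A hedged sketch of roughly what `--break-per-input --max-downloads 3` would look like as YoutubeDL params (param names mirror the option dests above; URLs are placeholders):

    import yt_dlp

    ydl_opts = {
        'break_per_url': True,   # reset the limit below for every input URL
        'max_downloads': 3,      # stop after 3 downloads (per URL, with the flag above)
    }
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://example.com/playlist1', 'https://example.com/playlist2'])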