Diffstat (limited to 'yt_dlp')
-rw-r--r--  yt_dlp/YoutubeDL.py           2
-rw-r--r--  yt_dlp/__init__.py          169
-rw-r--r--  yt_dlp/downloader/hls.py      4
-rw-r--r--  yt_dlp/extractor/iqiyi.py   174
-rw-r--r--  yt_dlp/extractor/ivi.py       2
-rw-r--r--  yt_dlp/options.py            11
-rw-r--r--  yt_dlp/update.py            347
-rw-r--r--  yt_dlp/utils.py               7
8 files changed, 71 insertions, 645 deletions
diff --git a/yt_dlp/YoutubeDL.py b/yt_dlp/YoutubeDL.py
index b1d009280..9125e4ea6 100644
--- a/yt_dlp/YoutubeDL.py
+++ b/yt_dlp/YoutubeDL.py
@@ -47,8 +47,6 @@ from .postprocessor import (
MoveFilesAfterDownloadPP,
get_postprocessor,
)
-from .postprocessor.ffmpeg import resolve_mapping as resolve_recode_mapping
-from .update import REPOSITORY, current_git_head, detect_variant
from .utils import (
DEFAULT_OUTTMPL,
IDENTITY,
diff --git a/yt_dlp/__init__.py b/yt_dlp/__init__.py
index f1a347514..0c68f8571 100644
--- a/yt_dlp/__init__.py
+++ b/yt_dlp/__init__.py
@@ -1,12 +1,8 @@
-try:
- import contextvars # noqa: F401
-except Exception:
- raise Exception(
- f'You are using an unsupported version of Python. Only Python versions 3.7 and above are supported by yt-dlp') # noqa: F541
+#!/usr/bin/python
+f'You are using an unsupported version of Python. Only Python versions 3.6 and above are supported by yt-dlp' # noqa: F541
-__license__ = 'Public Domain'
+__license__ = 'CC0-1.0'
-import collections
import getpass
import itertools
import optparse
@@ -16,14 +12,14 @@ import sys
from .compat import compat_shlex_quote
from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS
+from .downloader import FileDownloader
from .downloader.external import get_external_downloader
from .extractor import list_extractor_classes
from .extractor.adobepass import MSO_INFO
+from .extractor.common import InfoExtractor
from .options import parseOpts
from .postprocessor import (
FFmpegExtractAudioPP,
- FFmpegMergerPP,
- FFmpegPostProcessor,
FFmpegSubtitlesConvertorPP,
FFmpegThumbnailsConvertorPP,
FFmpegVideoConvertorPP,
@@ -38,7 +34,6 @@ from .utils import (
DateRange,
DownloadCancelled,
DownloadError,
- FormatSorter,
GeoUtils,
PlaylistEntries,
SameFileError,
@@ -49,7 +44,6 @@ from .utils import (
format_field,
int_or_none,
match_filter_func,
- parse_bytes,
parse_duration,
preferredencoding,
read_batch_urls,
@@ -63,8 +57,6 @@ from .utils import (
)
from .YoutubeDL import YoutubeDL
-_IN_CLI = False
-
def _exit(status=0, *args):
for msg in args:
@@ -97,27 +89,28 @@ def print_extractor_information(opts, urls):
out = ''
if opts.list_extractors:
- urls = dict.fromkeys(urls, False)
- for ie in list_extractor_classes(opts.age_limit):
- out += ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie.working() else '') + '\n'
- if ie == GenericIE:
- matched_urls = [url for url, matched in urls.items() if not matched]
- else:
- matched_urls = tuple(filter(ie.suitable, urls.keys()))
- urls.update(dict.fromkeys(matched_urls, True))
- out += ''.join(f' {url}\n' for url in matched_urls)
+ for ie in list_extractors(opts.age_limit):
+ write_string(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie.working() else '') + '\n', out=sys.stdout)
+ matchedUrls = [url for url in urls if ie.suitable(url)]
+ for mu in matchedUrls:
+ write_string(' ' + mu + '\n', out=sys.stdout)
elif opts.list_extractor_descriptions:
- _SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
- out = '\n'.join(
- ie.description(markdown=False, search_examples=_SEARCHES)
- for ie in list_extractor_classes(opts.age_limit) if ie.working() and ie.IE_DESC is not False)
+ for ie in list_extractors(opts.age_limit):
+ if not ie.working():
+ continue
+ if ie.IE_DESC is False:
+ continue
+ desc = ie.IE_DESC or ie.IE_NAME
+ if getattr(ie, 'SEARCH_KEY', None) is not None:
+ _SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
+ _COUNTS = ('', '5', '10', 'all')
+ desc += f'; "{ie.SEARCH_KEY}:" prefix (Example: "{ie.SEARCH_KEY}{random.choice(_COUNTS)}:{random.choice(_SEARCHES)}")'
+ write_string(desc + '\n', out=sys.stdout)
elif opts.ap_list_mso:
- out = 'Supported TV Providers:\n%s\n' % render_table(
- ['mso', 'mso name'],
- [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()])
+ table = [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]
+ write_string('Supported TV Providers:\n' + render_table(['mso', 'mso name'], table) + '\n', out=sys.stdout)
else:
return False
- write_string(out, out=sys.stdout)
return True
@@ -152,7 +145,7 @@ def set_compat_opts(opts):
else:
opts.embed_infojson = False
if 'format-sort' in opts.compat_opts:
- opts.format_sort.extend(FormatSorter.ytdl_default)
+ opts.format_sort.extend(InfoExtractor.FormatSort.ytdl_default)
_video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
_audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
if _video_multistreams_set is False and _audio_multistreams_set is False:
@@ -227,11 +220,9 @@ def validate_options(opts):
# Format sort
for f in opts.format_sort:
- validate_regex('format sorting', f, FormatSorter.regex)
+ validate_regex('format sorting', f, InfoExtractor.FormatSort.regex)
# Postprocessor formats
- validate_regex('merge output format', opts.merge_output_format,
- r'({0})(/({0}))*'.format('|'.join(map(re.escape, FFmpegMergerPP.SUPPORTED_EXTS))))
validate_regex('audio format', opts.audioformat, FFmpegExtractAudioPP.FORMAT_RE)
validate_in('subtitle format', opts.convertsubtitles, FFmpegSubtitlesConvertorPP.SUPPORTED_EXTS)
validate_regex('thumbnail format', opts.convertthumbnails, FFmpegThumbnailsConvertorPP.FORMAT_RE)
@@ -281,19 +272,19 @@ def validate_options(opts):
raise ValueError(f'invalid {key} retry sleep expression {expr!r}')
# Bytes
- def validate_bytes(name, value):
+ def parse_bytes(name, value):
if value is None:
return None
- numeric_limit = parse_bytes(value)
+ numeric_limit = FileDownloader.parse_bytes(value)
validate(numeric_limit is not None, 'rate limit', value)
return numeric_limit
- opts.ratelimit = validate_bytes('rate limit', opts.ratelimit)
- opts.throttledratelimit = validate_bytes('throttled rate limit', opts.throttledratelimit)
- opts.min_filesize = validate_bytes('min filesize', opts.min_filesize)
- opts.max_filesize = validate_bytes('max filesize', opts.max_filesize)
- opts.buffersize = validate_bytes('buffer size', opts.buffersize)
- opts.http_chunk_size = validate_bytes('http chunk size', opts.http_chunk_size)
+ opts.ratelimit = parse_bytes('rate limit', opts.ratelimit)
+ opts.throttledratelimit = parse_bytes('throttled rate limit', opts.throttledratelimit)
+ opts.min_filesize = parse_bytes('min filesize', opts.min_filesize)
+ opts.max_filesize = parse_bytes('max filesize', opts.max_filesize)
+ opts.buffersize = parse_bytes('buffer size', opts.buffersize)
+ opts.http_chunk_size = parse_bytes('http chunk size', opts.http_chunk_size)
# Output templates
def validate_outtmpl(tmpl, msg):
@@ -326,15 +317,14 @@ def validate_options(opts):
def parse_chapters(name, value):
chapters, ranges = [], []
- parse_timestamp = lambda x: float('inf') if x in ('inf', 'infinite') else parse_duration(x)
for regex in value or []:
if regex.startswith('*'):
- for range_ in map(str.strip, regex[1:].split(',')):
- mobj = range_ != '-' and re.fullmatch(r'([^-]+)?\s*-\s*([^-]+)?', range_)
- dur = mobj and (parse_timestamp(mobj.group(1) or '0'), parse_timestamp(mobj.group(2) or 'inf'))
- if None in (dur or [None]):
+ for range in regex[1:].split(','):
+ dur = tuple(map(parse_duration, range.strip().split('-')))
+ if len(dur) == 2 and all(t is not None for t in dur):
+ ranges.append(dur)
+ else:
raise ValueError(f'invalid {name} time range "{regex}". Must be of the form *start-end')
- ranges.append(dur)
continue
try:
chapters.append(re.compile(regex))
@@ -347,16 +337,10 @@ def validate_options(opts):
# Cookies from browser
if opts.cookiesfrombrowser:
- container = None
- mobj = re.fullmatch(r'''(?x)
- (?P<name>[^+:]+)
- (?:\s*\+\s*(?P<keyring>[^:]+))?
- (?:\s*:\s*(?P<profile>.+?))?
- (?:\s*::\s*(?P<container>.+))?
- ''', opts.cookiesfrombrowser)
+ mobj = re.match(r'(?P<name>[^+:]+)(\s*\+\s*(?P<keyring>[^:]+))?(\s*:(?P<profile>.+))?', opts.cookiesfrombrowser)
if mobj is None:
raise ValueError(f'invalid cookies from browser arguments: {opts.cookiesfrombrowser}')
- browser_name, keyring, profile, container = mobj.group('name', 'keyring', 'profile', 'container')
+ browser_name, keyring, profile = mobj.group('name', 'keyring', 'profile')
browser_name = browser_name.lower()
if browser_name not in SUPPORTED_BROWSERS:
raise ValueError(f'unsupported browser specified for cookies: "{browser_name}". '
@@ -366,7 +350,7 @@ def validate_options(opts):
if keyring not in SUPPORTED_KEYRINGS:
raise ValueError(f'unsupported keyring specified for cookies: "{keyring}". '
f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
- opts.cookiesfrombrowser = (browser_name, profile, keyring, container)
+ opts.cookiesfrombrowser = (browser_name, profile, keyring)
# MetadataParser
def metadataparser_actions(f):
@@ -411,9 +395,6 @@ def validate_options(opts):
if opts.download_archive is not None:
opts.download_archive = expand_path(opts.download_archive)
- if opts.ffmpeg_location is not None:
- opts.ffmpeg_location = expand_path(opts.ffmpeg_location)
-
if opts.user_agent is not None:
opts.headers.setdefault('User-Agent', opts.user_agent)
if opts.referer is not None:
@@ -478,24 +459,22 @@ def validate_options(opts):
report_conflict('--playlist-random', 'playlist_random', '--lazy-playlist', 'lazy_playlist')
report_conflict('--dateafter', 'dateafter', '--date', 'date', default=None)
report_conflict('--datebefore', 'datebefore', '--date', 'date', default=None)
- report_conflict('--exec-before-download', 'exec_before_dl_cmd',
- '"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
+ report_conflict('--exec-before-download', 'exec_before_dl_cmd', '"--exec before_dl:"', 'exec_cmd', opts.exec_cmd.get('before_dl'))
report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
- report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
- val1=opts.sponskrub and opts.sponskrub_cut)
+ report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters', val1=opts.sponskrub and opts.sponskrub_cut)
# Conflicts with --allow-unplayable-formats
- report_conflict('--embed-metadata', 'addmetadata')
+ report_conflict('--add-metadata', 'addmetadata')
report_conflict('--embed-chapters', 'addchapters')
report_conflict('--embed-info-json', 'embed_infojson')
report_conflict('--embed-subs', 'embedsubtitles')
report_conflict('--embed-thumbnail', 'embedthumbnail')
report_conflict('--extract-audio', 'extractaudio')
- report_conflict('--fixup', 'fixup', val1=opts.fixup not in (None, 'never', 'ignore'), default='never')
+ report_conflict('--fixup', 'fixup', val1=(opts.fixup or '').lower() in ('', 'never', 'ignore'), default='never')
report_conflict('--recode-video', 'recodevideo')
report_conflict('--remove-chapters', 'remove_chapters', default=[])
report_conflict('--remux-video', 'remuxvideo')
@@ -537,7 +516,7 @@ def validate_options(opts):
# Do not unnecessarily download audio
opts.format = 'bestaudio/best'
- if opts.getcomments and opts.writeinfojson is None and not opts.embed_infojson:
+ if opts.getcomments and opts.writeinfojson is None:
# If JSON is not printed anywhere, but comments are requested, save it to file
if not opts.dumpjson or opts.print_json or opts.dump_single_json:
opts.writeinfojson = True
@@ -686,11 +665,8 @@ def get_postprocessors(opts):
}
-ParsedOptions = collections.namedtuple('ParsedOptions', ('parser', 'options', 'urls', 'ydl_opts'))
-
-
def parse_options(argv=None):
- """@returns ParsedOptions(parser, opts, urls, ydl_opts)"""
+ """ @returns (parser, opts, urls, ydl_opts) """
parser, opts, urls = parseOpts(argv)
urls = get_urls(urls, opts.batchfile, opts.verbose)
@@ -702,26 +678,11 @@ def parse_options(argv=None):
postprocessors = list(get_postprocessors(opts))
- print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[2:])
- any_getting = any(getattr(opts, k) for k in (
- 'dumpjson', 'dump_single_json', 'getdescription', 'getduration', 'getfilename',
- 'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl'
- ))
-
- playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
- write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
- and opts.allow_playlist_files and opts.outtmpl.get('pl_infojson') != '')
- if not any((
- opts.extract_flat,
- opts.dump_single_json,
- opts.forceprint.get('playlist'),
- opts.print_to_file.get('playlist'),
- write_playlist_infojson,
- )):
- if not playlist_pps:
- opts.extract_flat = 'discard'
- elif playlist_pps == [{'key': 'FFmpegConcat', 'only_multi_video': True, 'when': 'playlist'}]:
- opts.extract_flat = 'discard_in_playlist'
+ any_getting = (any(opts.forceprint.values()) or opts.dumpjson or opts.dump_single_json
+ or opts.geturl or opts.gettitle or opts.getid or opts.getthumbnail
+ or opts.getdescription or opts.getfilename or opts.getformat or opts.getduration)
+
+ any_printing = opts.print_json
final_ext = (
opts.recodevideo if opts.recodevideo in FFmpegVideoConvertorPP.SUPPORTED_EXTS
@@ -729,7 +690,7 @@ def parse_options(argv=None):
else opts.audioformat if (opts.extractaudio and opts.audioformat in FFmpegExtractAudioPP.SUPPORTED_EXTS)
else None)
- return ParsedOptions(parser, opts, urls, {
+ return parser, opts, urls, {
'usenetrc': opts.usenetrc,
'netrc_location': opts.netrc_location,
'username': opts.username,
@@ -739,10 +700,7 @@ def parse_options(argv=None):
'ap_mso': opts.ap_mso,
'ap_username': opts.ap_username,
'ap_password': opts.ap_password,
- 'client_certificate': opts.client_certificate,
- 'client_certificate_key': opts.client_certificate_key,
- 'client_certificate_password': opts.client_certificate_password,
- 'quiet': opts.quiet or any_getting or opts.print_json or bool(opts.forceprint),
+ 'quiet': (opts.quiet or any_getting or any_printing),
'no_warnings': opts.no_warnings,
'forceurl': opts.geturl,
'forcetitle': opts.gettitle,
@@ -757,7 +715,7 @@ def parse_options(argv=None):
'forcejson': opts.dumpjson or opts.print_json,
'dump_single_json': opts.dump_single_json,
'force_write_download_archive': opts.force_write_download_archive,
- 'simulate': (print_only or any_getting or None) if opts.simulate is None else opts.simulate,
+ 'simulate': (any_getting or None) if opts.simulate is None else opts.simulate,
'skip_download': opts.skip_download,
'format': opts.format,
'allow_unplayable_formats': opts.allow_unplayable_formats,
@@ -778,7 +736,6 @@ def parse_options(argv=None):
'windowsfilenames': opts.windowsfilenames,
'ignoreerrors': opts.ignoreerrors,
'force_generic_extractor': opts.force_generic_extractor,
- 'allowed_extractors': opts.allowed_extractors or ['default'],
'ratelimit': opts.ratelimit,
'throttledratelimit': opts.throttledratelimit,
'overwrites': opts.overwrites,
@@ -903,10 +860,17 @@ def parse_options(argv=None):
'_warnings': warnings,
'_deprecation_warnings': deprecation_warnings,
'compat_opts': opts.compat_opts,
- })
+ }
def _real_main(argv=None):
+ # Compatibility fixes for Windows
+ if sys.platform == 'win32':
+ # https://github.com/ytdl-org/youtube-dl/issues/820
+ codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)
+
+ workaround_optparse_bug9161()
+
setproctitle('yt-dlp')
parser, opts, all_urls, ydl_opts = parse_options(argv)
@@ -920,11 +884,6 @@ def _real_main(argv=None):
if print_extractor_information(opts, all_urls):
return
- # We may need ffmpeg_location without having access to the YoutubeDL instance
- # See https://github.com/yt-dlp/yt-dlp/issues/2191
- if opts.ffmpeg_location:
- FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)
-
with YoutubeDL(ydl_opts) as ydl:
pre_process = opts.update_self or opts.rm_cachedir
actual_use = all_urls or opts.load_info_filename
@@ -962,8 +921,6 @@ def _real_main(argv=None):
def main(argv=None):
- global _IN_CLI
- _IN_CLI = True
try:
_exit(*variadic(_real_main(argv)))
except DownloadError:
diff --git a/yt_dlp/downloader/hls.py b/yt_dlp/downloader/hls.py
index 2010f3dc9..4520edcd1 100644
--- a/yt_dlp/downloader/hls.py
+++ b/yt_dlp/downloader/hls.py
@@ -65,9 +65,9 @@ class HlsFD(FragmentFD):
has_ffmpeg = FFmpegFD.available()
no_crypto = not Cryptodome_AES and '#EXT-X-KEY:METHOD=AES-128' in s
if no_crypto and has_ffmpeg:
- can_download, message = False, 'The stream has AES-128 encryption and pycryptodomex is not available'
+ can_download, message = False, 'The stream has AES-128 encryption and pycryptodome is not available'
elif no_crypto:
- message = ('The stream has AES-128 encryption and neither ffmpeg nor pycryptodomex are available; '
+ message = ('The stream has AES-128 encryption and neither ffmpeg nor pycryptodome are available; '
'Decryption will be performed natively, but will be extremely slow')
elif info_dict.get('extractor_key') == 'Generic' and re.search(r'(?m)#EXT-X-MEDIA-SEQUENCE:(?!0$)', s):
install_ffmpeg = '' if has_ffmpeg else 'install ffmpeg and '
diff --git a/yt_dlp/extractor/iqiyi.py b/yt_dlp/extractor/iqiyi.py
index dbc688fb9..c41f6db40 100644
--- a/yt_dlp/extractor/iqiyi.py
+++ b/yt_dlp/extractor/iqiyi.py
@@ -6,13 +6,11 @@ import time
from .common import InfoExtractor
from ..compat import (
compat_str,
- compat_urllib_parse_urlencode,
compat_urllib_parse_unquote
)
from .openload import PhantomJSwrapper
from ..utils import (
clean_html,
- decode_packed_codes,
ExtractorError,
float_or_none,
format_field,
@@ -37,135 +35,6 @@ def md5_text(text):
return hashlib.md5(text.encode('utf-8')).hexdigest()
-class IqiyiSDK:
- def __init__(self, target, ip, timestamp):
- self.target = target
- self.ip = ip
- self.timestamp = timestamp
-
- @staticmethod
- def split_sum(data):
- return compat_str(sum(map(lambda p: int(p, 16), list(data))))
-
- @staticmethod
- def digit_sum(num):
- if isinstance(num, int):
- num = compat_str(num)
- return compat_str(sum(map(int, num)))
-
- def even_odd(self):
- even = self.digit_sum(compat_str(self.timestamp)[::2])
- odd = self.digit_sum(compat_str(self.timestamp)[1::2])
- return even, odd
-
- def preprocess(self, chunksize):
- self.target = md5_text(self.target)
- chunks = []
- for i in range(32 // chunksize):
- chunks.append(self.target[chunksize * i:chunksize * (i + 1)])
- if 32 % chunksize:
- chunks.append(self.target[32 - 32 % chunksize:])
- return chunks, list(map(int, self.ip.split('.')))
-
- def mod(self, modulus):
- chunks, ip = self.preprocess(32)
- self.target = chunks[0] + ''.join(map(lambda p: compat_str(p % modulus), ip))
-
- def split(self, chunksize):
- modulus_map = {
- 4: 256,
- 5: 10,
- 8: 100,
- }
-
- chunks, ip = self.preprocess(chunksize)
- ret = ''
- for i in range(len(chunks)):
- ip_part = compat_str(ip[i] % modulus_map[chunksize]) if i < 4 else ''
- if chunksize == 8:
- ret += ip_part + chunks[i]
- else:
- ret += chunks[i] + ip_part
- self.target = ret
-
- def handle_input16(self):
- self.target = md5_text(self.target)
- self.target = self.split_sum(self.target[:16]) + self.target + self.split_sum(self.target[16:])
-
- def handle_input8(self):
- self.target = md5_text(self.target)
- ret = ''
- for i in range(4):
- part = self.target[8 * i:8 * (i + 1)]
- ret += self.split_sum(part) + part
- self.target = ret
-
- def handleSum(self):
- self.target = md5_text(self.target)
- self.target = self.split_sum(self.target) + self.target
-
- def date(self, scheme):
- self.target = md5_text(self.target)
- d = time.localtime(self.timestamp)
- strings = {
- 'y': compat_str(d.tm_year),
- 'm': '%02d' % d.tm_mon,
- 'd': '%02d' % d.tm_mday,
- }
- self.target += ''.join(map(lambda c: strings[c], list(scheme)))
-
- def split_time_even_odd(self):
- even, odd = self.even_odd()
- self.target = odd + md5_text(self.target) + even
-
- def split_time_odd_even(self):
- even, odd = self.even_odd()
- self.target = even + md5_text(self.target) + odd
-
- def split_ip_time_sum(self):
- chunks, ip = self.preprocess(32)
- self.target = compat_str(sum(ip)) + chunks[0] + self.digit_sum(self.timestamp)
-
- def split_time_ip_sum(self):
- chunks, ip = self.preprocess(32)
- self.target = self.digit_sum(self.timestamp) + chunks[0] + compat_str(sum(ip))
-
-
-class IqiyiSDKInterpreter:
- def __init__(self, sdk_code):
- self.sdk_code = sdk_code
-
- def run(self, target, ip, timestamp):
- self.sdk_code = decode_packed_codes(self.sdk_code)
-
- functions = re.findall(r'input=([a-zA-Z0-9]+)\(input', self.sdk_code)
-
- sdk = IqiyiSDK(target, ip, timestamp)
-
- other_functions = {
- 'handleSum': sdk.handleSum,
- 'handleInput8': sdk.handle_input8,
- 'handleInput16': sdk.handle_input16,
- 'splitTimeEvenOdd': sdk.split_time_even_odd,
- 'splitTimeOddEven': sdk.split_time_odd_even,
- 'splitIpTimeSum': sdk.split_ip_time_sum,
- 'splitTimeIpSum': sdk.split_time_ip_sum,
- }
- for function in functions:
- if re.match(r'mod\d+', function):
- sdk.mod(int(function[3:]))
- elif re.match(r'date[ymd]{3}', function):
- sdk.date(function[4:])
- elif re.match(r'split\d+', function):
- sdk.split(int(function[5:]))
- elif function in other_functions:
- other_functions[function]()
- else:
- raise ExtractorError('Unknown function %s' % function)
-
- return sdk.target
-
-
class IqiyiIE(InfoExtractor):
IE_NAME = 'iqiyi'
IE_DESC = '爱奇艺'
@@ -246,47 +115,8 @@ class IqiyiIE(InfoExtractor):
return ohdave_rsa_encrypt(data, e, N)
- def _perform_login(self, username, password):
-
- data = self._download_json(
- 'http://kylin.iqiyi.com/get_token', None,
- note='Get token for logging', errnote='Unable to get token for logging')
- sdk = data['sdk']
- timestamp = int(time.time())
- target = '/apis/reglogin/login.action?lang=zh_TW&area_code=null&email=%s&passwd=%s&agenttype=1&from=undefined&keeplogin=0&piccode=&fromurl=&_pos=1' % (
- username, self._rsa_fun(password.encode('utf-8')))
-
- interp = IqiyiSDKInterpreter(sdk)
- sign = interp.run(target, data['ip'], timestamp)
-
- validation_params = {
- 'target': target,
- 'server': 'BEA3AA1908656AABCCFF76582C4C6660',
- 'token': data['token'],
- 'bird_src': 'f8d91d57af224da7893dd397d52d811a',
- 'sign': sign,
- 'bird_t': timestamp,
- }
- validation_result = self._download_json(
- 'http://kylin.iqiyi.com/validate?' + compat_urllib_parse_urlencode(validation_params), None,
- note='Validate credentials', errnote='Unable to validate credentials')
-
- MSG_MAP = {
- 'P00107': 'please login via the web interface and enter the CAPTCHA code',
- 'P00117': 'bad username or password',
- }
-
- code = validation_result['code']
- if code != 'A00000':
- msg = MSG_MAP.get(code)
- if not msg:
- msg = 'error %s' % code
- if validation_result.get('msg'):
- msg += ': ' + validation_result['msg']
- self.report_warning('unable to log in: ' + msg)
- return False
-
- return True
+ def _perform_login(self):
+ raise ExtractorError("iQiyi's non-free authentication algorithm has made login impossible", expected=True)
def get_raw_data(self, tvid, video_id):
tm = int(time.time() * 1000)
diff --git a/yt_dlp/extractor/ivi.py b/yt_dlp/extractor/ivi.py
index dc6a48196..27a222a47 100644
--- a/yt_dlp/extractor/ivi.py
+++ b/yt_dlp/extractor/ivi.py
@@ -139,7 +139,7 @@ class IviIE(InfoExtractor):
elif site == 353:
continue
elif not pycryptodome_found:
- raise ExtractorError('pycryptodomex not found. Please install', expected=True)
+ raise ExtractorError('pycryptodome not found. Please install', expected=True)
elif message:
extractor_msg += ': ' + message
raise ExtractorError(extractor_msg % video_id, expected=True)
diff --git a/yt_dlp/options.py b/yt_dlp/options.py
index bee867aa9..6138167e4 100644
--- a/yt_dlp/options.py
+++ b/yt_dlp/options.py
@@ -20,7 +20,6 @@ from .postprocessor import (
SponsorBlockPP,
)
from .postprocessor.modify_chapters import DEFAULT_SPONSORBLOCK_CHAPTER_TITLE
-from .update import detect_variant, is_non_updateable
from .utils import (
OUTTMPL_TYPES,
POSTPROCESS_WHEN,
@@ -322,16 +321,6 @@ def create_parser():
action='version',
help='Print program version and exit')
general.add_option(
- '-U', '--update',
- action='store_true', dest='update_self',
- help=format_field(
- is_non_updateable(), None, 'Check if updates are available. %s',
- default='Update this program to the latest version'))
- general.add_option(
- '--no-update',
- action='store_false', dest='update_self',
- help='Do not check for updates (default)')
- general.add_option(
'-i', '--ignore-errors',
action='store_true', dest='ignoreerrors',
help='Ignore download and postprocessing errors. The download will be considered successful even if the postprocessing fails')
diff --git a/yt_dlp/update.py b/yt_dlp/update.py
deleted file mode 100644
index ac3e28057..000000000
--- a/yt_dlp/update.py
+++ /dev/null
@@ -1,347 +0,0 @@
-import atexit
-import contextlib
-import hashlib
-import json
-import os
-import platform
-import re
-import subprocess
-import sys
-from zipimport import zipimporter
-
-from .compat import functools # isort: split
-from .compat import compat_realpath, compat_shlex_quote
-from .utils import (
- Popen,
- cached_method,
- deprecation_warning,
- remove_end,
- shell_quote,
- system_identifier,
- traverse_obj,
- version_tuple,
-)
-from .version import UPDATE_HINT, VARIANT, __version__
-
-REPOSITORY = 'yt-dlp/yt-dlp'
-API_URL = f'https://api.github.com/repos/{REPOSITORY}/releases'
-
-
-@functools.cache
-def _get_variant_and_executable_path():
- """@returns (variant, executable_path)"""
- if getattr(sys, 'frozen', False):
- path = sys.executable
- if not hasattr(sys, '_MEIPASS'):
- return 'py2exe', path
- elif sys._MEIPASS == os.path.dirname(path):
- return f'{sys.platform}_dir', path
- elif sys.platform == 'darwin':
- machine = '_legacy' if version_tuple(platform.mac_ver()[0]) < (10, 15) else ''
- else:
- machine = f'_{platform.machine().lower()}'
- # Ref: https://en.wikipedia.org/wiki/Uname#Examples
- if machine[1:] in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
- machine = '_x86' if platform.architecture()[0][:2] == '32' else ''
- return f'{remove_end(sys.platform, "32")}{machine}_exe', path
-
- path = os.path.dirname(__file__)
- if isinstance(__loader__, zipimporter):
- return 'zip', os.path.join(path, '..')
- elif (os.path.basename(sys.argv[0]) in ('__main__.py', '-m')
- and os.path.exists(os.path.join(path, '../.git/HEAD'))):
- return 'source', path
- return 'unknown', path
-
-
-def detect_variant():
- return VARIANT or _get_variant_and_executable_path()[0]
-
-
-@functools.cache
-def current_git_head():
- if detect_variant() != 'source':
- return
- with contextlib.suppress(Exception):
- stdout, _, _ = Popen.run(
- ['git', 'rev-parse', '--short', 'HEAD'],
- text=True, cwd=os.path.dirname(os.path.abspath(__file__)),
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- if re.fullmatch('[0-9a-f]+', stdout.strip()):
- return stdout.strip()
-
-
-_FILE_SUFFIXES = {
- 'zip': '',
- 'py2exe': '_min.exe',
- 'win_exe': '.exe',
- 'win_x86_exe': '_x86.exe',
- 'darwin_exe': '_macos',
- 'darwin_legacy_exe': '_macos_legacy',
- 'linux_exe': '_linux',
- 'linux_aarch64_exe': '_linux_aarch64',
- 'linux_armv7l_exe': '_linux_armv7l',
-}
-
-_NON_UPDATEABLE_REASONS = {
- **{variant: None for variant in _FILE_SUFFIXES}, # Updatable
- **{variant: f'Auto-update is not supported for unpackaged {name} executable; Re-download the latest release'
- for variant, name in {'win32_dir': 'Windows', 'darwin_dir': 'MacOS', 'linux_dir': 'Linux'}.items()},
- 'source': 'You cannot update when running from source code; Use git to pull the latest changes',
- 'unknown': 'You installed yt-dlp with a package manager or setup.py; Use that to update',
- 'other': 'You are using an unofficial build of yt-dlp; Build the executable again',
-}
-
-
-def is_non_updateable():
- if UPDATE_HINT:
- return UPDATE_HINT
- return _NON_UPDATEABLE_REASONS.get(
- detect_variant(), _NON_UPDATEABLE_REASONS['unknown' if VARIANT else 'other'])
-
-
-def _sha256_file(path):
- h = hashlib.sha256()
- mv = memoryview(bytearray(128 * 1024))
- with open(os.path.realpath(path), 'rb', buffering=0) as f:
- for n in iter(lambda: f.readinto(mv), 0):
- h.update(mv[:n])
- return h.hexdigest()
-
-
-class Updater:
- def __init__(self, ydl):
- self.ydl = ydl
-
- @functools.cached_property
- def _tag(self):
- if version_tuple(__version__) >= version_tuple(self.latest_version):
- return 'latest'
-
- identifier = f'{detect_variant()} {system_identifier()}'
- for line in self._download('_update_spec', 'latest').decode().splitlines():
- if not line.startswith('lock '):
- continue
- _, tag, pattern = line.split(' ', 2)
- if re.match(pattern, identifier):
- return f'tags/{tag}'
- return 'latest'
-
- @cached_method
- def _get_version_info(self, tag):
- self.ydl.write_debug(f'Fetching release info: {API_URL}/{tag}')
- return json.loads(self.ydl.urlopen(f'{API_URL}/{tag}').read().decode())
-
- @property
- def current_version(self):
- """Current version"""
- return __version__
-
- @property
- def new_version(self):
- """Version of the latest release we can update to"""
- if self._tag.startswith('tags/'):
- return self._tag[5:]
- return self._get_version_info(self._tag)['tag_name']
-
- @property
- def latest_version(self):
- """Version of the latest release"""
- return self._get_version_info('latest')['tag_name']
-
- @property
- def has_update(self):
- """Whether there is an update available"""
- return version_tuple(__version__) < version_tuple(self.new_version)
-
- @functools.cached_property
- def filename(self):
- """Filename of the executable"""
- return compat_realpath(_get_variant_and_executable_path()[1])
-
- def _download(self, name, tag):
- url = traverse_obj(self._get_version_info(tag), (
- 'assets', lambda _, v: v['name'] == name, 'browser_download_url'), get_all=False)
- if not url:
- raise Exception('Unable to find download URL')
- self.ydl.write_debug(f'Downloading {name} from {url}')
- return self.ydl.urlopen(url).read()
-
- @functools.cached_property
- def release_name(self):
- """The release filename"""
- return f'yt-dlp{_FILE_SUFFIXES[detect_variant()]}'
-
- @functools.cached_property
- def release_hash(self):
- """Hash of the latest release"""
- hash_data = dict(ln.split()[::-1] for ln in self._download('SHA2-256SUMS', self._tag).decode().splitlines())
- return hash_data[self.release_name]
-
- def _report_error(self, msg, expected=False):
- self.ydl.report_error(msg, tb=False if expected else None)
- self.ydl._download_retcode = 100
-
- def _report_permission_error(self, file):
- self._report_error(f'Unable to write to {file}; Try running as administrator', True)
-
- def _report_network_error(self, action, delim=';'):
- self._report_error(f'Unable to {action}{delim} Visit https://github.com/{REPOSITORY}/releases/latest', True)
-
- def check_update(self):
- """Report whether there is an update available"""
- try:
- self.ydl.to_screen(
- f'Latest version: {self.latest_version}, Current version: {self.current_version}')
- if not self.has_update:
- if self._tag == 'latest':
- return self.ydl.to_screen(f'yt-dlp is up to date ({__version__})')
- return self.ydl.report_warning(
- 'yt-dlp cannot be updated any further since you are on an older Python version')
- except Exception:
- return self._report_network_error('obtain version info', delim='; Please try again later or')
-
- if not is_non_updateable():
- self.ydl.to_screen(f'Current Build Hash {_sha256_file(self.filename)}')
- return True
-
- def update(self):
- """Update yt-dlp executable to the latest version"""
- if not self.check_update():
- return
- err = is_non_updateable()
- if err:
- return self._report_error(err, True)
- self.ydl.to_screen(f'Updating to version {self.new_version} ...')
-
- directory = os.path.dirname(self.filename)
- if not os.access(self.filename, os.W_OK):
- return self._report_permission_error(self.filename)
- elif not os.access(directory, os.W_OK):
- return self._report_permission_error(directory)
-
- new_filename, old_filename = f'{self.filename}.new', f'{self.filename}.old'
- if detect_variant() == 'zip': # Can be replaced in-place
- new_filename, old_filename = self.filename, None
-
- try:
- if os.path.exists(old_filename or ''):
- os.remove(old_filename)
- except OSError:
- return self._report_error('Unable to remove the old version')
-
- try:
- newcontent = self._download(self.release_name, self._tag)
- except OSError:
- return self._report_network_error('download latest version')
- except Exception:
- return self._report_network_error('fetch updates')
-
- try:
- expected_hash = self.release_hash
- except Exception:
- self.ydl.report_warning('no hash information found for the release')
- else:
- if hashlib.sha256(newcontent).hexdigest() != expected_hash:
- return self._report_network_error('verify the new executable')
-
- try:
- with open(new_filename, 'wb') as outf:
- outf.write(newcontent)
- except OSError:
- return self._report_permission_error(new_filename)
-
- if old_filename:
- mask = os.stat(self.filename).st_mode
- try:
- os.rename(self.filename, old_filename)
- except OSError:
- return self._report_error('Unable to move current version')
-
- try:
- os.rename(new_filename, self.filename)
- except OSError:
- self._report_error('Unable to overwrite current version')
- return os.rename(old_filename, self.filename)
-
- if detect_variant() in ('win32_exe', 'py2exe'):
- atexit.register(Popen, f'ping 127.0.0.1 -n 5 -w 1000 & del /F "{old_filename}"',
- shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
- elif old_filename:
- try:
- os.remove(old_filename)
- except OSError:
- self._report_error('Unable to remove the old version')
-
- try:
- os.chmod(self.filename, mask)
- except OSError:
- return self._report_error(
- f'Unable to set permissions. Run: sudo chmod a+rx {compat_shlex_quote(self.filename)}')
-
- self.ydl.to_screen(f'Updated yt-dlp to version {self.new_version}')
- return True
-
- @functools.cached_property
- def cmd(self):
- """The command-line to run the executable, if known"""
- # There is no sys.orig_argv in py < 3.10. Also, it can be [] when frozen
- if getattr(sys, 'orig_argv', None):
- return sys.orig_argv
- elif getattr(sys, 'frozen', False):
- return sys.argv
-
- def restart(self):
- """Restart the executable"""
- assert self.cmd, 'Must be frozen or Py >= 3.10'
- self.ydl.write_debug(f'Restarting: {shell_quote(self.cmd)}')
- _, _, returncode = Popen.run(self.cmd)
- return returncode
-
-
-def run_update(ydl):
- """Update the program file with the latest version from the repository
- @returns Whether there was a successful update (No update = False)
- """
- return Updater(ydl).update()
-
-
-# Deprecated
-def update_self(to_screen, verbose, opener):
- import traceback
-
- deprecation_warning(f'"{__name__}.update_self" is deprecated and may be removed '
- f'in a future version. Use "{__name__}.run_update(ydl)" instead')
-
- printfn = to_screen
-
- class FakeYDL():
- to_screen = printfn
-
- def report_warning(self, msg, *args, **kwargs):
- return printfn(f'WARNING: {msg}', *args, **kwargs)
-
- def report_error(self, msg, tb=None):
- printfn(f'ERROR: {msg}')
- if not verbose:
- return
- if tb is None:
- # Copied from YoutubeDL.trouble
- if sys.exc_info()[0]:
- tb = ''
- if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
- tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
- tb += traceback.format_exc()
- else:
- tb_data = traceback.format_list(traceback.extract_stack())
- tb = ''.join(tb_data)
- if tb:
- printfn(tb)
-
- def write_debug(self, msg, *args, **kwargs):
- printfn(f'[debug] {msg}', *args, **kwargs)
-
- def urlopen(self, url):
- return opener.open(url)
-
- return run_update(FakeYDL())
diff --git a/yt_dlp/utils.py b/yt_dlp/utils.py
index ed1b24335..51d74363b 100644
--- a/yt_dlp/utils.py
+++ b/yt_dlp/utils.py
@@ -1046,10 +1046,9 @@ def make_HTTPS_handler(params, **kwargs):
def bug_reports_message(before=';'):
- from .update import REPOSITORY
-
- msg = (f'please report this issue on https://github.com/{REPOSITORY}/issues?q= , '
- 'filling out the appropriate issue template. Confirm you are on the latest version using yt-dlp -U')
+ msg = ('please report this issue on https://issues.hyperbola.info/ , '
+ 'filling out the appropriate issue template. '
+ 'Confirm you are on the latest version using pacman -Su')
before = before.rstrip()
if not before or before.endswith(('.', '!', '?')):