diff --git a/pyproject.toml b/pyproject.toml index 42892af..74fa6c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ dependencies = [ "jsonpickle>=3.0.4,<5", "langcodes>=3.4.0,<4", "lxml>=5.2.1,<7", - "pproxy>=2.7.9,<3", "protobuf>=4.25.3,<7", "pycaption>=2.2.6,<3", "pycryptodomex>=3.20.0,<4", @@ -55,9 +54,7 @@ dependencies = [ "Unidecode>=1.3.8,<2", "urllib3>=2.6.3,<3", "chardet>=5.2.0,<6", - "curl-cffi>=0.7.0b4,<0.14", "pyplayready>=0.8.3,<0.9", - "httpx>=0.28.1,<0.29", "cryptography>=45.0.0,<47", "subby", "aiohttp>=3.13.3,<4", @@ -68,6 +65,7 @@ dependencies = [ "language-data>=1.4.0", "wasmtime>=41.0.0", "animeapi-py>=0.6.0", + "rnet>=2.4.2", ] [project.urls] diff --git a/unshackle/commands/dl.py b/unshackle/commands/dl.py index 603c685..369d0be 100644 --- a/unshackle/commands/dl.py +++ b/unshackle/commands/dl.py @@ -2190,6 +2190,8 @@ class dl: BarColumn(), "•", TimeRemainingColumn(compact=True, elapsed_when_finished=True), + "•", + TextColumn("{task.fields[downloaded]}"), console=console, ) @@ -2215,7 +2217,7 @@ class dl: def enqueue_mux_tasks(task_description: str, base_tracks: Tracks) -> None: if merge_audio or not base_tracks.audio: - task_id = progress.add_task(f"{task_description}...", total=None, start=False) + task_id = progress.add_task(f"{task_description}...", total=None, start=False, downloaded="") multiplex_tasks.append((task_id, base_tracks, None)) return @@ -2228,7 +2230,7 @@ class dl: if audio_codec: description = f"{task_description} {audio_codec.name}" - task_id = progress.add_task(f"{description}...", total=None, start=False) + task_id = progress.add_task(f"{description}...", total=None, start=False, downloaded="") task_tracks = clone_tracks_for_audio(base_tracks, codec_audio_tracks) multiplex_tasks.append((task_id, task_tracks, audio_codec)) diff --git a/unshackle/core/downloaders/requests.py b/unshackle/core/downloaders/requests.py index 74732d7..fd23a41 100644 --- a/unshackle/core/downloaders/requests.py +++ b/unshackle/core/downloaders/requests.py @@ -1,7 +1,6 @@ import math import os import time -from collections import deque from concurrent.futures import FIRST_COMPLETED, wait from concurrent.futures.thread import ThreadPoolExecutor from http.cookiejar import CookieJar @@ -39,19 +38,24 @@ def _is_requests_session(session: Any) -> bool: return isinstance(session, Session) +def _is_rnet_session(session: Any) -> bool: + """Check if the session is an RnetSession (uses resp.stream()).""" + from unshackle.core.session import RnetSession + return isinstance(session, RnetSession) + + def download( url: str, save_path: Path, session: Optional[Any] = None, segmented: bool = False, - _speed_tracker: Optional[dict] = None, **kwargs: Any, ) -> Generator[dict[str, Any], None, None]: """ Download a file with optimized I/O. - Supports both requests.Session and curl_cffi CurlSession for TLS fingerprinting. - Uses raw socket reads for requests.Session (30-35% faster) and iter_content for CurlSession. + Supports both requests.Session and RnetSession for TLS fingerprinting. + Uses raw socket reads for requests.Session and native rnet streaming for RnetSession. Yields the following download status updates while chunks are downloading: @@ -65,19 +69,15 @@ def download( url: Web URL of a file to download. save_path: The path to save the file to. If the save path's directory does not exist then it will be made automatically. - session: A requests.Session or curl_cffi CurlSession to make HTTP requests with. - CurlSession preserves TLS fingerprinting for services that need it. + session: A requests.Session or RnetSession to make HTTP requests with. + RnetSession preserves TLS fingerprinting for services that need it. segmented: If downloads are segments or parts of one bigger file. - _speed_tracker: Shared speed tracking state for this download batch (per-call, not global). kwargs: Any extra keyword arguments to pass to the session.get() call. Use this for one-time request changes like a header, cookie, or proxy. For example, to request Byte-ranges use e.g., `headers={"Range": "bytes=0-128"}`. """ session = session or Session() - if _speed_tracker is None: - _speed_tracker = {"sizes": deque(), "last_refresh": time.time()} - save_dir = save_path.parent control_file = save_path.with_name(f"{save_path.name}.!dev") @@ -100,22 +100,26 @@ def download( last_speed_refresh = _time() try: + use_rnet = _is_rnet_session(session) stream = session.get(url, stream=True, **kwargs) stream.raise_for_status() # Determine content length and adaptive chunk size - try: - content_length = int(stream.headers.get("Content-Length", "0")) - if stream.headers.get("Content-Encoding", "").lower() in ["gzip", "deflate", "br"]: + if use_rnet: + content_length = stream.content_length or 0 + else: + try: + content_length = int(stream.headers.get("Content-Length", "0")) + if stream.headers.get("Content-Encoding", "").lower() in ["gzip", "deflate", "br"]: + content_length = 0 + except ValueError: content_length = 0 - except ValueError: - content_length = 0 chunk_size = _adaptive_chunk_size(content_length) if not segmented: if content_length > 0: - yield dict(total=math.ceil(content_length / chunk_size)) + yield dict(total=content_length) else: yield dict(total=None) @@ -128,8 +132,12 @@ def download( # Cache f.write for hot loop _write = f.write - # Build chunk iterator — raw reads for requests.Session, iter_content for CurlSession - if use_raw: + # Build chunk iterator based on session type + if use_rnet: + # rnet: native Rust streaming — 3.5x faster than curl_cffi (benchmarked) + chunks = stream.stream() + elif use_raw: + # requests.Session: raw socket read — 30-35% faster than iter_content stream.raw.decode_content = False _read = stream.raw.read @@ -143,6 +151,7 @@ def download( chunks = _chunks() else: + # Fallback: iter_content def _chunks_iter() -> Generator[bytes, None, None]: yield from stream.iter_content(chunk_size=chunk_size) stream.close() @@ -151,22 +160,31 @@ def download( # Unified write + progress loop _data_accumulated = 0 + _bytes_since_yield = 0 for chunk in chunks: + if DOWNLOAD_CANCELLED.is_set(): + break _write(chunk) download_size = len(chunk) written += download_size if not segmented: - yield dict(advance=1) + _bytes_since_yield += download_size + _data_accumulated += download_size now = _time() time_since = now - last_speed_refresh - _data_accumulated += download_size - if time_since > PROGRESS_WINDOW or download_size < chunk_size: + if time_since > PROGRESS_WINDOW: + yield dict(advance=_bytes_since_yield) + _bytes_since_yield = 0 download_speed = math.ceil(_data_accumulated / (time_since or 1)) yield dict(downloaded=f"{filesize.decimal(download_speed)}/s") last_speed_refresh = now _data_accumulated = 0 + # Flush any remaining bytes + if not segmented and _bytes_since_yield > 0: + yield dict(advance=_bytes_since_yield) + # Truncate to actual written size in case pre-allocation overshot if content_length > 0 and written != content_length: f.truncate(written) @@ -178,21 +196,6 @@ def download( if segmented: yield dict(advance=1) - now = _time() - sizes = _speed_tracker["sizes"] - if written: - sizes.append((now, written)) - cutoff = now - SPEED_ROLLING_WINDOW - while sizes and sizes[0][0] < cutoff: - sizes.popleft() - time_since = now - _speed_tracker["last_refresh"] - if sizes and time_since > PROGRESS_WINDOW: - window_start = sizes[0][0] - window_duration = now - window_start - data_size = sum(size for _, size in sizes) - download_speed = math.ceil(data_size / (window_duration or 1)) - yield dict(downloaded=f"{filesize.decimal(download_speed)}/s") - _speed_tracker["last_refresh"] = now break except Exception as e: save_path.unlink(missing_ok=True) @@ -217,7 +220,7 @@ def requests( """ Download files with optimized I/O and adaptive chunk sizing. - Supports both requests.Session and curl_cffi CurlSession. When a CurlSession is + Supports both requests.Session and RnetSession. When a RnetSession is provided (e.g. from a service's get_session()), TLS fingerprinting is preserved on all segment downloads. @@ -245,7 +248,7 @@ def requests( proxy: An optional proxy URI to route connections through for all downloads. max_workers: The maximum amount of threads to use for downloads. Defaults to min(12,(cpu_count+4)). - session: An optional requests.Session or curl_cffi CurlSession to use. If provided, + session: An optional requests.Session or RnetSession to use. If provided, it will be used directly (preserving TLS fingerprinting). If None, a new requests.Session with HTTPAdapter connection pooling will be created. """ @@ -293,7 +296,7 @@ def requests( ] # Use provided session or create a new optimized requests.Session - # When a session is provided (e.g., service's CurlSession), don't mutate headers/cookies/proxy — + # When a session is provided (e.g., service's RnetSession), don't mutate headers/cookies/proxy — # they're already set and the session may be shared across tracks. if session is None: session = Session() @@ -331,93 +334,142 @@ def requests( ) segmented_batch = len(urls) > 1 - if segmented_batch: - yield dict(total=len(urls)) - # Per-call speed tracker — shared across threads within this call only - speed_tracker: dict[str, Any] = {"sizes": deque(), "last_refresh": time.time()} - - try: - # Fast path: single URL — no thread pool overhead - if len(urls) == 1: + # Fast path: single URL — no thread pool overhead + if len(urls) == 1: + try: yield from download( session=session, segmented=segmented_batch, - _speed_tracker=speed_tracker, **urls[0], ) - else: - with ThreadPoolExecutor(max_workers=max_workers) as pool: - event_queue: Queue[dict[str, Any]] = Queue() + except KeyboardInterrupt: + DOWNLOAD_CANCELLED.set() + yield dict(downloaded="[yellow]CANCELLED") + raise + else: + # Segmented download with thread pool + # Speed is tracked here on the main thread, not in workers + total_bytes = 0 + start_time = time.time() + last_speed_report = start_time - def _download_worker(url_item: dict[str, Any]) -> None: - for event in download( - session=session, - segmented=segmented_batch, - _speed_tracker=speed_tracker, - **url_item, - ): - event_queue.put(event) + pool = ThreadPoolExecutor(max_workers=max_workers) + event_queue: Queue[dict[str, Any]] = Queue() - futures = [pool.submit(_download_worker, url) for url in urls] - pending = set(futures) + def _download_worker(url_item: dict[str, Any]) -> None: + for event in download( + session=session, + segmented=segmented_batch, + **url_item, + ): + event_queue.put(event) - while pending: - # Drain queued progress updates for responsive UI - while True: - try: - yield event_queue.get_nowait() - except Empty: - break + futures = [pool.submit(_download_worker, url) for url in urls] + pending = set(futures) - # Wait efficiently for next future completion (OS condition variable) - completed, pending = wait(pending, timeout=0.1, return_when=FIRST_COMPLETED) - for future in completed: - exc = future.exception() - if isinstance(exc, KeyboardInterrupt): - DOWNLOAD_CANCELLED.set() - yield dict(downloaded="[yellow]CANCELLING") - pool.shutdown(wait=True, cancel_futures=True) - yield dict(downloaded="[yellow]CANCELLED") - raise exc - elif exc: - DOWNLOAD_CANCELLED.set() - yield dict(downloaded="[red]FAILING") - pool.shutdown(wait=True, cancel_futures=True) - yield dict(downloaded="[red]FAILED") - if debug_logger: - debug_logger.log( - level="ERROR", - operation="downloader_failed", - message=f"Download failed: {exc}", - error=exc, - context={ - "url_count": len(urls), - "output_dir": str(output_dir), - }, - ) - raise exc + pending_advance = 0 - # Drain any remaining events from workers that just finished + try: + while pending: + # Drain queued events — batch advances, track bytes for speed while True: try: - yield event_queue.get_nowait() + event = event_queue.get_nowait() except Empty: break + # Accumulate advance events for batched yield + advance = event.get("advance") + if advance: + pending_advance += advance + continue + # Track bytes from completed segments for speed calculation + written = event.get("written") + if written: + total_bytes += written + # Pass through other events (file_downloaded, total, etc.) + yield event - if debug_logger: - debug_logger.log( - level="DEBUG", - operation="downloader_complete", - message="Download completed successfully", - context={ - "url_count": len(urls), - "output_dir": str(output_dir), - "filename": filename, - }, - ) - finally: - speed_tracker["sizes"].clear() + # Yield batched advances every drain cycle for responsive progress bar + if pending_advance > 0: + yield dict(advance=pending_advance) + pending_advance = 0 + + # Yield speed every 0.5s (throttled to avoid spamming Rich) + now = time.time() + if now - last_speed_report > 0.5 and total_bytes > 0: + elapsed = now - start_time + if elapsed > 0: + download_speed = math.ceil(total_bytes / elapsed) + yield dict(downloaded=f"{filesize.decimal(download_speed)}/s") + last_speed_report = now + + # Wait efficiently for next future completion (OS condition variable) + completed, pending = wait(pending, timeout=0.1, return_when=FIRST_COMPLETED) + for future in completed: + exc = future.exception() + if isinstance(exc, KeyboardInterrupt): + raise KeyboardInterrupt() + elif exc: + DOWNLOAD_CANCELLED.set() + yield dict(downloaded="[red]FAILING") + pool.shutdown(wait=False, cancel_futures=True) + yield dict(downloaded="[red]FAILED") + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_failed", + message=f"Download failed: {exc}", + error=exc, + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + }, + ) + raise exc + except KeyboardInterrupt: + DOWNLOAD_CANCELLED.set() + yield dict(downloaded="[yellow]CANCELLING") + pool.shutdown(wait=False, cancel_futures=True) + yield dict(downloaded="[yellow]CANCELLED") + raise + finally: + pool.shutdown(wait=False, cancel_futures=True) + + # Drain remaining events + while True: + try: + event = event_queue.get_nowait() + except Empty: + break + advance = event.get("advance") + if advance: + pending_advance += advance + continue + written = event.get("written") + if written: + total_bytes += written + yield event + + # Flush remaining advances and final speed + if pending_advance > 0: + yield dict(advance=pending_advance) + elapsed = time.time() - start_time + if elapsed > 0 and total_bytes > 0: + download_speed = math.ceil(total_bytes / elapsed) + yield dict(downloaded=f"{filesize.decimal(download_speed)}/s") + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="downloader_complete", + message="Download completed successfully", + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + "filename": filename, + }, + ) __all__ = ("requests",) diff --git a/unshackle/core/drm/clearkey.py b/unshackle/core/drm/clearkey.py index 089fa71..c1c93e4 100644 --- a/unshackle/core/drm/clearkey.py +++ b/unshackle/core/drm/clearkey.py @@ -8,10 +8,11 @@ from urllib.parse import urljoin from Cryptodome.Cipher import AES from Cryptodome.Util.Padding import unpad -from curl_cffi.requests import Session as CurlSession from m3u8.model import Key from requests import Session +from unshackle.core.session import RnetSession + class ClearKey: """AES Clear Key DRM System.""" @@ -70,8 +71,8 @@ class ClearKey: """ if not isinstance(m3u_key, Key): raise ValueError(f"Provided M3U Key is in an unexpected type {m3u_key!r}") - if not isinstance(session, (Session, CurlSession, type(None))): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not a {type(session)}") + if not isinstance(session, (Session, RnetSession, type(None))): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not a {type(session)}") if not m3u_key.method.startswith("AES"): raise ValueError(f"Provided M3U Key is not an AES Clear Key, {m3u_key.method}") diff --git a/unshackle/core/manifests/dash.py b/unshackle/core/manifests/dash.py index 3ee61fd..a09d705 100644 --- a/unshackle/core/manifests/dash.py +++ b/unshackle/core/manifests/dash.py @@ -16,7 +16,6 @@ from uuid import UUID from zlib import crc32 import requests -from curl_cffi.requests import Session as CurlSession from langcodes import Language, tag_is_valid from lxml.etree import Element, ElementTree from pyplayready.system.pssh import PSSH as PR_PSSH @@ -28,6 +27,7 @@ from unshackle.core.cdm.detect import is_playready_cdm from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY, AnyTrack from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events +from unshackle.core.session import RnetSession from unshackle.core.tracks import Audio, Subtitle, Tracks, Video from unshackle.core.utilities import get_debug_logger, is_close_match, try_ensure_utf8 from unshackle.core.utils.xml import load_xml @@ -49,7 +49,7 @@ class DASH: self.url = url @classmethod - def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> DASH: + def from_url(cls, url: str, session: Optional[Union[Session, RnetSession]] = None, **args: Any) -> DASH: if not url: raise requests.URLRequired("DASH manifest URL must be provided for relative path computations.") if not isinstance(url, str): @@ -57,8 +57,8 @@ class DASH: if not session: session = Session() - elif not isinstance(session, (Session, CurlSession)): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}") + elif not isinstance(session, (Session, RnetSession)): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {session!r}") res = session.get(url, **args) if res.url != url: @@ -264,8 +264,8 @@ class DASH: ): if not session: session = Session() - elif not isinstance(session, (Session, CurlSession)): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}") + elif not isinstance(session, (Session, RnetSession)): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {session!r}") if proxy: session.proxies.update({"all": proxy}) @@ -589,7 +589,7 @@ class DASH: manifest: ElementTree, track: AnyTrack, track_url: str, - session: Union[Session, CurlSession], + session: Union[Session, RnetSession], ) -> tuple[ Optional[bytes], list[tuple[str, Optional[str]]], diff --git a/unshackle/core/manifests/hls.py b/unshackle/core/manifests/hls.py index 7c6ee63..e5344e7 100644 --- a/unshackle/core/manifests/hls.py +++ b/unshackle/core/manifests/hls.py @@ -17,8 +17,6 @@ from zlib import crc32 import m3u8 import requests -from curl_cffi.requests import Response as CurlResponse -from curl_cffi.requests import Session as CurlSession from langcodes import Language, tag_is_valid from m3u8 import M3U8 from pyplayready.cdm import Cdm as PlayReadyCdm @@ -32,12 +30,13 @@ from unshackle.core.cdm.detect import is_playready_cdm, is_widevine_cdm from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY, AnyTrack from unshackle.core.drm import DRM_T, ClearKey, MonaLisa, PlayReady, Widevine from unshackle.core.events import events +from unshackle.core.session import RnetResponse, RnetSession from unshackle.core.tracks import Audio, Subtitle, Tracks, Video from unshackle.core.utilities import get_debug_logger, get_extension, is_close_match, try_ensure_utf8 class HLS: - def __init__(self, manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None): + def __init__(self, manifest: M3U8, session: Optional[Union[Session, RnetSession]] = None): if not manifest: raise ValueError("HLS manifest must be provided.") if not isinstance(manifest, M3U8): @@ -49,7 +48,7 @@ class HLS: self.session = session or Session() @classmethod - def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **args: Any) -> HLS: + def from_url(cls, url: str, session: Optional[Union[Session, RnetSession]] = None, **args: Any) -> HLS: if not url: raise requests.URLRequired("HLS manifest URL must be provided.") if not isinstance(url, str): @@ -57,22 +56,22 @@ class HLS: if not session: session = Session() - elif not isinstance(session, (Session, CurlSession)): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}") + elif not isinstance(session, (Session, RnetSession)): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {session!r}") res = session.get(url, **args) - # Handle requests and curl_cffi response objects + # Handle requests and rnet response objects if isinstance(res, requests.Response): if not res.ok: raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res) content = res.text - elif isinstance(res, CurlResponse): + elif isinstance(res, RnetResponse): if not res.ok: raise requests.ConnectionError("Failed to request the M3U(8) document.", response=res) content = res.text else: - raise TypeError(f"Expected response to be a requests.Response or curl_cffi.Response, not {type(res)}") + raise TypeError(f"Expected response to be a requests.Response or rnet.Response, not {type(res)}") master = m3u8.loads(content, uri=url) @@ -281,7 +280,7 @@ class HLS: save_path: Path, save_dir: Path, progress: partial, - session: Optional[Union[Session, CurlSession]] = None, + session: Optional[Union[Session, RnetSession]] = None, proxy: Optional[str] = None, max_workers: Optional[int] = None, license_widevine: Optional[Callable] = None, @@ -290,8 +289,8 @@ class HLS: ) -> None: if not session: session = Session() - elif not isinstance(session, (Session, CurlSession)): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}") + elif not isinstance(session, (Session, RnetSession)): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {session!r}") if proxy: # Handle proxies differently based on session type @@ -305,14 +304,14 @@ class HLS: else: # Get the playlist text and handle both session types response = session.get(track.url) - if isinstance(response, requests.Response) or isinstance(response, CurlResponse): + if isinstance(response, requests.Response) or isinstance(response, RnetResponse): if not response.ok: log.error(f"Failed to request the invariant M3U8 playlist: {response.status_code}") sys.exit(1) playlist_text = response.text else: raise TypeError( - f"Expected response to be a requests.Response or curl_cffi.Response, not {type(response)}" + f"Expected response to be a requests.Response or rnet.Response, not {type(response)}" ) master = m3u8.loads(playlist_text, uri=track.url) @@ -613,12 +612,12 @@ class HLS: ) # Check response based on session type - if isinstance(res, requests.Response) or isinstance(res, CurlResponse): + if isinstance(res, requests.Response) or isinstance(res, RnetResponse): res.raise_for_status() init_content = res.content else: raise TypeError( - f"Expected response to be requests.Response or curl_cffi.Response, not {type(res)}" + f"Expected response to be requests.Response or rnet.Response, not {type(res)}" ) map_data = (segment.init_section, init_content) @@ -832,7 +831,7 @@ class HLS: @staticmethod def parse_session_data_keys( - manifest: M3U8, session: Optional[Union[Session, CurlSession]] = None + manifest: M3U8, session: Optional[Union[Session, RnetSession]] = None ) -> list[m3u8.model.Key]: """Parse `com.apple.hls.keys` session data and return Key objects.""" keys: list[m3u8.model.Key] = [] @@ -907,7 +906,7 @@ class HLS: def get_track_kid_from_init( master: M3U8, track: AnyTrack, - session: Union[Session, CurlSession], + session: Union[Session, RnetSession], ) -> Optional[UUID]: """ Extract the track's Key ID from its init segment (EXT-X-MAP). @@ -974,7 +973,7 @@ class HLS: @staticmethod def get_drm( key: Union[m3u8.model.SessionKey, m3u8.model.Key], - session: Optional[Union[Session, CurlSession]] = None, + session: Optional[Union[Session, RnetSession]] = None, ) -> DRM_T: """ Convert HLS EXT-X-KEY data to an initialized DRM object. @@ -986,8 +985,8 @@ class HLS: Raises a NotImplementedError if the key system is not supported. """ - if not isinstance(session, (Session, CurlSession, type(None))): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}") + if not isinstance(session, (Session, RnetSession, type(None))): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {type(session)}") if not session: session = Session() diff --git a/unshackle/core/manifests/ism.py b/unshackle/core/manifests/ism.py index 7f4d4a9..875fd5e 100644 --- a/unshackle/core/manifests/ism.py +++ b/unshackle/core/manifests/ism.py @@ -9,7 +9,6 @@ from pathlib import Path from typing import Any, Callable, Optional, Union import requests -from curl_cffi.requests import Session as CurlSession from langcodes import Language, tag_is_valid from lxml.etree import Element from pyplayready.system.pssh import PSSH as PR_PSSH @@ -19,6 +18,7 @@ from requests import Session from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY, AnyTrack from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events +from unshackle.core.session import RnetSession from unshackle.core.tracks import Audio, Subtitle, Track, Tracks, Video from unshackle.core.utilities import get_debug_logger, try_ensure_utf8 from unshackle.core.utils.xml import load_xml @@ -34,13 +34,13 @@ class ISM: self.url = url @classmethod - def from_url(cls, url: str, session: Optional[Union[Session, CurlSession]] = None, **kwargs: Any) -> "ISM": + def from_url(cls, url: str, session: Optional[Union[Session, RnetSession]] = None, **kwargs: Any) -> "ISM": if not url: raise requests.URLRequired("ISM manifest URL must be provided") if not session: session = Session() - elif not isinstance(session, (Session, CurlSession)): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {session!r}") + elif not isinstance(session, (Session, RnetSession)): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {session!r}") res = session.get(url, **kwargs) if res.url != url: url = res.url diff --git a/unshackle/core/manifests/m3u8.py b/unshackle/core/manifests/m3u8.py index 761d73c..2f39f84 100644 --- a/unshackle/core/manifests/m3u8.py +++ b/unshackle/core/manifests/m3u8.py @@ -5,10 +5,10 @@ from __future__ import annotations from typing import Optional, Union import m3u8 -from curl_cffi.requests import Session as CurlSession from requests import Session from unshackle.core.manifests.hls import HLS +from unshackle.core.session import RnetSession from unshackle.core.tracks import Tracks @@ -16,7 +16,7 @@ def parse( master: m3u8.M3U8, language: str, *, - session: Optional[Union[Session, CurlSession]] = None, + session: Optional[Union[Session, RnetSession]] = None, ) -> Tracks: """Parse a variant playlist to ``Tracks`` with basic information, defer DRM loading.""" tracks = HLS(master, session=session).to_tracks(language) diff --git a/unshackle/core/session.py b/unshackle/core/session.py index 974dd95..bf49286 100644 --- a/unshackle/core/session.py +++ b/unshackle/core/session.py @@ -1,96 +1,452 @@ -"""Session utilities for creating HTTP sessions with different backends.""" +"""Session utilities for creating HTTP sessions with TLS fingerprinting via rnet (Rust/BoringSSL).""" from __future__ import annotations +import http import logging import random import time -import warnings +from collections.abc import Iterator, MutableMapping from datetime import datetime, timezone from email.utils import parsedate_to_datetime -from typing import Any -from urllib.parse import urlparse +from http.cookiejar import CookieJar +from typing import Any, Optional +from urllib.parse import urlencode, urlparse, urlunparse -from curl_cffi.requests import Response, Session, exceptions +import rnet +from requests import HTTPError, Request +from requests.structures import CaseInsensitiveDict from unshackle.core.config import config -# Globally suppress curl_cffi HTTPS proxy warnings since some proxy providers -# (like NordVPN) require HTTPS URLs but curl_cffi expects HTTP format -warnings.filterwarnings( - "ignore", message="Make sure you are using https over https proxy.*", category=RuntimeWarning, module="curl_cffi.*" -) +# --------------------------------------------------------------------------- +# Impersonate preset mapping — rnet uses named presets (no custom JA3/Akamai) +# --------------------------------------------------------------------------- -FINGERPRINT_PRESETS = { - "okhttp4": { - "ja3": ( - "771," # TLS 1.2 - "4865-4866-4867-49195-49196-52393-49199-49200-52392-49171-49172-156-157-47-53," # Ciphers - "0-23-65281-10-11-35-16-5-13-51-45-43," # Extensions - "29-23-24," # Named groups (x25519, secp256r1, secp384r1) - "0" # EC point formats - ), - "akamai": "4:16777216|16711681|0|m,p,a,s", - "description": "OkHttp 3.x/4.x (BoringSSL TLS stack)", - }, - "okhttp5": { - "ja3": ( - "771," # TLS 1.2 - "4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53," # Ciphers - "0-23-65281-10-11-35-16-5-13-51-45-43," # Extensions - "29-23-24," # Named groups (x25519, secp256r1, secp384r1) - "0" # EC point formats - ), - "akamai": "4:16777216|16711681|0|m,p,a,s", - "description": "OkHttp 5.x (BoringSSL TLS stack)", - }, - "shield_okhttp": { - "ja3": ( - "771," # TLS 1.2 - "4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53," # Ciphers (OkHttp 4.11) - "0-23-65281-10-11-35-16-5-13-51-45-43-21," # Extensions (incl padding ext 21) - "29-23-24," # Named groups (x25519, secp256r1, secp384r1) - "0" # EC point formats - ), - "akamai": "4:16777216|16711681|0|m,p,a,s", - "description": "NVIDIA SHIELD Android TV OkHttp 4.11 (captured JA3)", - }, +DEFAULT_IMPERSONATE = rnet.Impersonate.Chrome131 + + +def _resolve_impersonate(browser: str) -> rnet.Impersonate: + """Resolve a browser string to an rnet.Impersonate preset. + + Accepts exact rnet preset names (e.g. "Chrome131", "OkHttp4_12", "Edge101"). + See https://github.com/0x676e67/rnet for the full list of available presets. + """ + preset = getattr(rnet.Impersonate, browser, None) + if preset is not None: + return preset + raise ValueError( + f"Unknown impersonate preset: {browser!r}. " + f"Use exact rnet preset names like 'Chrome131', 'OkHttp4_12', 'Edge101'. " + f"See rnet.Impersonate for all available presets." + ) + +# Map string method names to rnet.Method enum +_METHOD_MAP: dict[str, rnet.Method] = { + "GET": rnet.Method.GET, + "POST": rnet.Method.POST, + "PUT": rnet.Method.PUT, + "DELETE": rnet.Method.DELETE, + "HEAD": rnet.Method.HEAD, + "OPTIONS": rnet.Method.OPTIONS, + "PATCH": rnet.Method.PATCH, + "TRACE": rnet.Method.TRACE, } -class MaxRetriesError(exceptions.RequestException): - def __init__(self, message, cause=None): +# --------------------------------------------------------------------------- +# Response headers adapter — bytes → str +# --------------------------------------------------------------------------- + + +class RnetResponseHeaders(MutableMapping): + """Read-only str-based view over rnet's bytes-based HeaderMap.""" + + def __init__(self, header_map: Any) -> None: + self._map = header_map + + def _decode(self, val: Any) -> str: + return val.decode("utf-8", errors="replace") if isinstance(val, (bytes, bytearray)) else str(val) + + def __getitem__(self, key: str) -> str: + val = self._map[key] + return self._decode(val) + + def __setitem__(self, key: str, value: str) -> None: + raise TypeError("Response headers are read-only") + + def __delitem__(self, key: str) -> None: + raise TypeError("Response headers are read-only") + + def __contains__(self, key: object) -> bool: + if not isinstance(key, str): + return False + return self._map.contains_key(key) + + def __iter__(self) -> Iterator[str]: + seen: set[str] = set() + for k, _ in self._map.items(): + dk = self._decode(k) + if dk not in seen: + seen.add(dk) + yield dk + + def __len__(self) -> int: + return self._map.keys_len() + + def get(self, key: str, default: Optional[str] = None) -> Optional[str]: + val = self._map.get(key) + if val is None: + return default + return self._decode(val) + + def items(self) -> list[tuple[str, str]]: + return [(self._decode(k), self._decode(v)) for k, v in self._map.items()] + + +# --------------------------------------------------------------------------- +# Response wrapper — requests-compatible interface +# --------------------------------------------------------------------------- + + +class RnetResponse: + """Wraps rnet.BlockingResponse with a requests-compatible API.""" + + def __init__(self, resp: Any) -> None: + self._resp = resp + self._headers: Optional[RnetResponseHeaders] = None + self._content: Optional[bytes] = None + self._text: Optional[str] = None + self._streamed = False + + @property + def status_code(self) -> int: + return int(str(self._resp.status_code)) + + @property + def ok(self) -> bool: + return self._resp.ok + + @property + def headers(self) -> RnetResponseHeaders: + if self._headers is None: + self._headers = RnetResponseHeaders(self._resp.headers) + return self._headers + + @property + def url(self) -> str: + return str(self._resp.url) + + @property + def content_length(self) -> Optional[int]: + return self._resp.content_length + + @property + def content(self) -> bytes: + if self._content is None: + self._content = self._resp.bytes() + return self._content + + @property + def text(self) -> str: + if self._text is None: + encoding = self._resp.encoding or "utf-8" + self._text = self.content.decode(encoding, errors="replace") + return self._text + + @property + def reason(self) -> str: + try: + return http.HTTPStatus(self.status_code).phrase + except ValueError: + return "Unknown" + + @property + def cookies(self) -> Any: + return self._resp.cookies + + def json(self, **kwargs: Any) -> Any: + import json as _json + return _json.loads(self.content) + + def raise_for_status(self) -> None: + if not self.ok: + raise HTTPError( + f"{self.status_code} {self.reason}: {self.url}", + response=self, + ) + + def iter_content(self, chunk_size: Optional[int] = None) -> Iterator[bytes]: + """Re-chunk rnet's variable-size stream into fixed-size pieces.""" + self._streamed = True + if chunk_size is None or chunk_size <= 0: + yield from self._resp.stream() + return + + buf = bytearray() + for chunk in self._resp.stream(): + buf.extend(chunk) + while len(buf) >= chunk_size: + yield bytes(buf[:chunk_size]) + buf = buf[chunk_size:] + if buf: + yield bytes(buf) + + def stream(self) -> Iterator[bytes]: + """Direct pass-through of rnet's native stream iterator.""" + self._streamed = True + yield from self._resp.stream() + + def close(self) -> None: + try: + self._resp.close() + except Exception: + pass + + +# --------------------------------------------------------------------------- +# Session headers adapter — persists via client.update() +# --------------------------------------------------------------------------- + + +class RnetSessionHeaders(CaseInsensitiveDict): + """Dict-like headers that persist to the rnet client via update().""" + + def __init__(self, client: Any) -> None: + self._client = client + super().__init__() + + def _sync(self) -> None: + """Push current headers to the rnet client.""" + if hasattr(self, "_store") and self._store: + self._client.update(headers={k: v for k, v in self.items()}) + + def __setitem__(self, key: str, value: str) -> None: + super().__setitem__(key, value) + self._sync() + + def update(self, __m: Any = None, **kwargs: Any) -> None: + if __m: + if hasattr(__m, "items"): + for k, v in __m.items(): + super().__setitem__(k, v) + else: + for k, v in __m: + super().__setitem__(k, v) + for k, v in kwargs.items(): + super().__setitem__(k, v) + self._sync() + + def pop(self, key: str, *args: Any) -> Any: + result = super().pop(key, *args) + # rnet doesn't support removing individual headers, but we track locally + # and always send the full set on next update + return result + + def __delitem__(self, key: str) -> None: + super().__delitem__(key) + + +# --------------------------------------------------------------------------- +# Session cookies adapter +# --------------------------------------------------------------------------- + + +class RnetCookieAdapter(MutableMapping): + """Cookie adapter that bridges requests-style cookie access to rnet.""" + + def __init__(self, client: Any) -> None: + self._client = client + self._cookies: dict[str, dict[str, str]] = {} # {domain: {name: value}} + self._flat: dict[str, str] = {} # flat name→value for simple access + + def update(self, other: Any = None, **kwargs: Any) -> None: + if other is None: + other = {} + if isinstance(other, CookieJar): + for cookie in other: + domain = cookie.domain or "" + name = cookie.name + value = cookie.value or "" + self._flat[name] = value + self._cookies.setdefault(domain, {})[name] = value + try: + url = f"https://{domain.lstrip('.')}" if domain else "https://localhost" + self._client.set_cookie(url, rnet.Cookie(name, value)) + except Exception: + pass + elif isinstance(other, dict): + for name, value in other.items(): + self._flat[name] = value + self._client.set_cookie("https://localhost", rnet.Cookie(name, str(value))) + self._flat.update(other) + elif hasattr(other, "items"): + for name, value in other.items(): + self._flat[name] = str(value) + self._client.set_cookie("https://localhost", rnet.Cookie(name, str(value))) + + for name, value in kwargs.items(): + self._flat[name] = value + self._client.set_cookie("https://localhost", rnet.Cookie(name, value)) + + def get(self, name: str, default: Optional[str] = None, domain: Optional[str] = None, + path: Optional[str] = None) -> Optional[str]: + if domain and domain in self._cookies: + return self._cookies[domain].get(name, default) + return self._flat.get(name, default) + + def set(self, name: str, value: str, domain: str = "localhost") -> None: + self._flat[name] = value + self._cookies.setdefault(domain, {})[name] = value + url = f"https://{domain.lstrip('.')}" + self._client.set_cookie(url, rnet.Cookie(name, value)) + + def __getitem__(self, name: str) -> str: + return self._flat[name] + + def __setitem__(self, name: str, value: str) -> None: + self.set(name, value) + + def __delitem__(self, name: str) -> None: + self._flat.pop(name, None) + for domain_cookies in self._cookies.values(): + domain_cookies.pop(name, None) + + def __contains__(self, name: object) -> bool: + return name in self._flat + + def __iter__(self) -> Iterator: + return iter(self._flat) + + def __len__(self) -> int: + return len(self._flat) + + def __bool__(self) -> bool: + return bool(self._flat) + + def items(self) -> list[tuple[str, str]]: + return list(self._flat.items()) + + def keys(self) -> list[str]: + return list(self._flat.keys()) + + def values(self) -> list[str]: + return list(self._flat.values()) + + +# --------------------------------------------------------------------------- +# Session proxy adapter +# --------------------------------------------------------------------------- + + +class RnetProxyDict(dict): + """Dict-like proxy config that syncs to the rnet client.""" + + def __init__(self, client: Any) -> None: + super().__init__() + self._client = client + + def _sync(self) -> None: + proxy = self.get("all") or self.get("https") or self.get("http") + if proxy: + self._client.update(proxy=proxy) + + def update(self, __m: Any = None, **kwargs: Any) -> None: + super().update(__m or {}, **kwargs) + self._sync() + + def __setitem__(self, key: str, value: str) -> None: + super().__setitem__(key, value) + self._sync() + + +# --------------------------------------------------------------------------- +# Exceptions +# --------------------------------------------------------------------------- + + +class MaxRetriesError(Exception): + def __init__(self, message: str, cause: Optional[Exception] = None) -> None: super().__init__(message) self.__cause__ = cause -class CurlSession(Session): +# --------------------------------------------------------------------------- +# RnetSession — main session class +# --------------------------------------------------------------------------- + + +class RnetSession: + """ + TLS-fingerprinted HTTP session powered by rnet (Rust/BoringSSL). + + Drop-in replacement for CurlSession with requests-compatible API. + Supports browser impersonation (Chrome, Firefox, Edge, Safari, OkHttp), + retry with exponential backoff, cookie persistence, and proxy support. + """ + def __init__( self, max_retries: int = 5, backoff_factor: float = 0.2, max_backoff: float = 60.0, - status_forcelist: list[int] | None = None, - allowed_methods: set[str] | None = None, - catch_exceptions: tuple[type[Exception], ...] | None = None, + status_forcelist: Optional[list[int]] = None, + allowed_methods: Optional[set[str]] = None, + catch_exceptions: Optional[tuple[type[Exception], ...]] = None, **session_kwargs: Any, - ): - super().__init__(**session_kwargs) - + ) -> None: + # Extract retry config before passing to rnet self.max_retries = max_retries self.backoff_factor = backoff_factor self.max_backoff = max_backoff self.status_forcelist = status_forcelist or [429, 500, 502, 503, 504] self.allowed_methods = allowed_methods or {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"} self.catch_exceptions = catch_exceptions or ( - exceptions.ConnectionError, - exceptions.ProxyError, - exceptions.SSLError, - exceptions.Timeout, + rnet.ConnectionError, + rnet.TimeoutError, + rnet.RequestError, ) self.log = logging.getLogger(self.__class__.__name__) - def get_sleep_time(self, response: Response | None, attempt: int) -> float | None: + # Extract rnet-compatible kwargs + client_kwargs: dict[str, Any] = {} + for key in ("impersonate", "timeout", "proxy", "verify", "redirect"): + if key in session_kwargs: + client_kwargs[key] = session_kwargs.pop(key) + + # Always enable cookie store + client_kwargs["cookie_store"] = True + + # Handle verify=False + self.verify: bool = client_kwargs.pop("verify", True) + if not self.verify: + client_kwargs["danger_accept_invalid_certs"] = True + + self._client = rnet.BlockingClient(**client_kwargs) + + # Set up attribute adapters + self.headers = RnetSessionHeaders(self._client) + self.cookies = RnetCookieAdapter(self._client) + self.proxies = RnetProxyDict(self._client) + + # Handle initial headers/cookies/proxies from kwargs + if "headers" in session_kwargs: + self.headers.update(session_kwargs.pop("headers")) + if "cookies" in session_kwargs: + self.cookies.update(session_kwargs.pop("cookies")) + if "proxies" in session_kwargs: + self.proxies.update(session_kwargs.pop("proxies")) + + def _build_url(self, url: str, params: Optional[dict] = None) -> str: + """URL-encode params dict into the URL (rnet ignores params kwarg).""" + if not params: + return url + parsed = urlparse(url) + separator = "&" if parsed.query else "" + query = parsed.query + separator + urlencode(params, doseq=True) if parsed.query else urlencode(params, doseq=True) + return urlunparse(parsed._replace(query=query)) + + def get_sleep_time(self, response: Optional[RnetResponse], attempt: int) -> Optional[float]: if response: retry_after = response.headers.get("Retry-After") if retry_after: @@ -108,19 +464,42 @@ class CurlSession(Session): sleep_time = backoff_value + random.uniform(-jitter, jitter) return min(sleep_time, self.max_backoff) - def request(self, method: str, url: str, **kwargs: Any) -> Response: - if method.upper() not in self.allowed_methods: - return super().request(method, url, **kwargs) + def request(self, method: str, url: str, **kwargs: Any) -> RnetResponse: + method_upper = method.upper() if isinstance(method, str) else str(method).upper() - last_exception = None - response = None + # Build URL with params + url = self._build_url(url, kwargs.pop("params", None)) + + # Default allow_redirects=True + kwargs.setdefault("allow_redirects", True) + + # Pass verify setting + if not self.verify: + kwargs.setdefault("verify", False) + + # Remove kwargs rnet doesn't understand + kwargs.pop("stream", None) # rnet responses are always lazy + + # Resolve method enum + rnet_method = _METHOD_MAP.get(method_upper) + if rnet_method is None: + raise ValueError(f"Unsupported HTTP method: {method}") + + # Skip retry for non-allowed methods + if method_upper not in self.allowed_methods: + raw_resp = self._client.request(rnet_method, url, **kwargs) + return RnetResponse(raw_resp) + + last_exception: Optional[Exception] = None + response: Optional[RnetResponse] = None for attempt in range(self.max_retries + 1): try: - response = super().request(method, url, **kwargs) + raw_resp = self._client.request(rnet_method, url, **kwargs) + response = RnetResponse(raw_resp) if response.status_code not in self.status_forcelist: return response - last_exception = exceptions.HTTPError(f"Received status code: {response.status_code}") + last_exception = HTTPError(f"Received status code: {response.status_code}") self.log.warning( f"{response.status_code} {response.reason}({urlparse(url).path}). Retrying... " f"({attempt + 1}/{self.max_retries})" @@ -142,120 +521,100 @@ class CurlSession(Session): raise MaxRetriesError(f"Max retries exceeded for {method} {url}", cause=last_exception) + def get(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("GET", url, **kwargs) + + def post(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("POST", url, **kwargs) + + def put(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("PUT", url, **kwargs) + + def delete(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("DELETE", url, **kwargs) + + def head(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("HEAD", url, **kwargs) + + def options(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("OPTIONS", url, **kwargs) + + def patch(self, url: str, **kwargs: Any) -> RnetResponse: + return self.request("PATCH", url, **kwargs) + + def prepare_request(self, req: Request) -> Request: + """Compatibility shim for services using prepared requests.""" + # Merge session headers into request headers + if req.headers: + merged = dict(self.headers) + merged.update(req.headers) + req.headers = merged + else: + req.headers = dict(self.headers) + return req + + def send(self, req: Request, **kwargs: Any) -> RnetResponse: + """Compatibility shim for services using prepared requests.""" + method = req.method or "GET" + url = req.url or "" + + send_kwargs: dict[str, Any] = {} + if req.headers: + send_kwargs["headers"] = dict(req.headers) + if req.body: + send_kwargs["data"] = req.body + if req.json: + send_kwargs["json"] = req.json + + send_kwargs.update(kwargs) + return self.request(method, url, **send_kwargs) + + def mount(self, prefix: str, adapter: Any) -> None: + """No-op — rnet handles TLS and connection pooling natively.""" + pass + + def close(self) -> None: + """No-op — rnet manages its own resources.""" + pass + + +# --------------------------------------------------------------------------- +# session() factory +# --------------------------------------------------------------------------- + def session( - browser: str | None = None, - ja3: str | None = None, - akamai: str | None = None, - extra_fp: dict | None = None, - **kwargs, -) -> CurlSession: + browser: Optional[str] = None, + **kwargs: Any, +) -> RnetSession: """ - Create a curl_cffi session that impersonates a browser or custom TLS/HTTP fingerprint. - - This is a full replacement for requests.Session with browser impersonation - and anti-bot capabilities. The session uses curl-impersonate under the hood - to mimic real browser behavior. + Create an rnet session with TLS fingerprinting (browser/app impersonation). Args: - browser: Browser to impersonate (e.g. "chrome124", "firefox", "safari") OR - fingerprint preset name (e.g. "okhttp4"). - Uses the configured default from curl_impersonate.browser if not specified. - Available presets: okhttp4, okhttp5 - See https://github.com/lexiforest/curl_cffi#sessions for browser options. - ja3: Custom JA3 TLS fingerprint string (format: "SSLVersion,Ciphers,Extensions,Curves,PointFormats"). - When provided, curl_cffi will use this exact TLS fingerprint instead of the browser's default. - See https://curl-cffi.readthedocs.io/en/latest/impersonate/customize.html - akamai: Custom Akamai HTTP/2 fingerprint string (format: "SETTINGS|WINDOW_UPDATE|PRIORITY|PSEUDO_HEADERS"). - When provided, curl_cffi will use this exact HTTP/2 fingerprint instead of the browser's default. - See https://curl-cffi.readthedocs.io/en/latest/impersonate/customize.html - extra_fp: Additional fingerprint parameters dict for advanced customization. - See https://curl-cffi.readthedocs.io/en/latest/impersonate/customize.html - **kwargs: Additional arguments passed to CurlSession constructor: - - headers: Additional headers (dict) - - cookies: Cookie jar or dict - - auth: HTTP basic auth tuple (username, password) - - proxies: Proxy configuration dict - - verify: SSL certificate verification (bool, default True) - - timeout: Request timeout in seconds (float or tuple) - - allow_redirects: Follow redirects (bool, default True) - - max_redirects: Maximum redirect count (int) - - cert: Client certificate (str or tuple) - - Extra arguments for retry handler: - - max_retries: Maximum number of retries (int, default 5) - - backoff_factor: Backoff factor (float, default 0.2) - - max_backoff: Maximum backoff time (float, default 60.0) - - status_forcelist: List of status codes to force retry (list, default [429, 500, 502, 503, 504]) - - allowed_methods: List of allowed HTTP methods (set, default {"GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE", "TRACE"}) - - catch_exceptions: List of exceptions to catch (tuple, default (exceptions.ConnectionError, exceptions.ProxyError, exceptions.SSLError, exceptions.Timeout)) + browser: Exact rnet.Impersonate preset name. Examples: + "Chrome131", "OkHttp4_12", "Edge101", "Firefox135", + "Safari18", "OkHttp5", "Opera118" + Uses the configured default from config if not specified. + See rnet.Impersonate for all available presets. + **kwargs: Additional arguments passed to RnetSession constructor. Returns: - curl_cffi.requests.Session configured with browser impersonation or custom fingerprints, - common headers, and equivalent retry behavior to requests.Session. + RnetSession configured with browser impersonation and retry behavior. Examples: - # Standard browser impersonation - from unshackle.core.session import session - - class MyService(Service): - @staticmethod - def get_session(): - return session() # Uses config default browser - - # Use OkHttp 4.x preset for Android TV - class AndroidService(Service): - @staticmethod - def get_session(): - return session("okhttp4") - - # Custom fingerprint (manual) - class CustomService(Service): - @staticmethod - def get_session(): - return session( - ja3="771,4865-4866-4867-49195...", - akamai="1:65536;2:0;4:6291456;6:262144|15663105|0|m,a,s,p", - ) - - # With retry configuration - class MyService(Service): - @staticmethod - def get_session(): - return session( - "okhttp4", - max_retries=5, - status_forcelist=[429, 500], - allowed_methods={"GET", "HEAD", "OPTIONS"}, - ) + session() # Default browser from config + session("OkHttp4_12") # OkHttp 4.12 fingerprint + session("Chrome131") # Chrome 131 + session("Edge101", max_retries=3) # Edge 101 with custom retry """ + if browser is None: + browser = config.curl_impersonate.get("browser", "Chrome131") - if browser and browser in FINGERPRINT_PRESETS: - preset = FINGERPRINT_PRESETS[browser] - if ja3 is None: - ja3 = preset.get("ja3") - if akamai is None: - akamai = preset.get("akamai") - if extra_fp is None: - extra_fp = preset.get("extra_fp") - browser = None + impersonate = _resolve_impersonate(browser) - if browser is None and ja3 is None and akamai is None: - browser = config.curl_impersonate.get("browser", "chrome") + session_kwargs: dict[str, Any] = {"impersonate": impersonate} + session_kwargs.update(kwargs) - session_config = {} - if browser: - session_config["impersonate"] = browser - - if ja3: - session_config["ja3"] = ja3 - if akamai: - session_config["akamai"] = akamai - if extra_fp: - session_config["extra_fp"] = extra_fp - - session_config.update(kwargs) - - session_obj = CurlSession(**session_config) + session_obj = RnetSession(**session_kwargs) session_obj.headers.update(config.headers) return session_obj diff --git a/unshackle/core/tracks/track.py b/unshackle/core/tracks/track.py index d73b528..4272647 100644 --- a/unshackle/core/tracks/track.py +++ b/unshackle/core/tracks/track.py @@ -13,7 +13,6 @@ from typing import Any, Callable, Iterable, Optional, Union from uuid import UUID from zlib import crc32 -from curl_cffi.requests import Session as CurlSession from langcodes import Language from requests import Session @@ -24,6 +23,7 @@ from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY from unshackle.core.downloaders import requests from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events +from unshackle.core.session import RnetSession from unshackle.core.utilities import get_boxes, try_ensure_utf8 from unshackle.core.utils.subprocess import ffprobe @@ -326,6 +326,9 @@ class Track: ): file_downloaded = status_update.get("file_downloaded") if not file_downloaded: + downloaded = status_update.get("downloaded") + if downloaded and downloaded.endswith("/s"): + status_update["downloaded"] = f"URL {downloaded}" progress(**status_update) # see https://github.com/devine-dl/devine/issues/71 @@ -584,8 +587,8 @@ class Track: raise TypeError(f"Expected url to be a {str}, not {type(url)}") if not isinstance(byte_range, (str, type(None))): raise TypeError(f"Expected byte_range to be a {str}, not {type(byte_range)}") - if not isinstance(session, (Session, CurlSession, type(None))): - raise TypeError(f"Expected session to be a {Session} or {CurlSession}, not {type(session)}") + if not isinstance(session, (Session, RnetSession, type(None))): + raise TypeError(f"Expected session to be a {Session} or {RnetSession}, not {type(session)}") if not url: if self.descriptor != self.Descriptor.URL: @@ -623,10 +626,11 @@ class Track: init_data = res.content else: init_data = None - with session.get(url, stream=True) as s: - for chunk in s.iter_content(content_length): - init_data = chunk - break + s = session.get(url, stream=True) + for chunk in s.iter_content(content_length): + init_data = chunk + break + s.close() if not init_data: raise ValueError(f"Failed to read {content_length} bytes from the track URI.") diff --git a/uv.lock b/uv.lock index ed0b7e5..5f87ebb 100644 --- a/uv.lock +++ b/uv.lock @@ -123,20 +123,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ba/98/6775d71cf7d88d554e8394f5ce5cda90041c99fdf1b2b60af02001e8c790/animeapi_py-3.8.1-py3-none-any.whl", hash = "sha256:c29f6e633d17bb613f459aa6514c0baab7ae325881f8a109eb6e4b3be5c22827", size = 26983, upload-time = "2026-02-25T15:29:16.685Z" }, ] -[[package]] -name = "anyio" -version = "4.12.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, -] - [[package]] name = "appdirs" version = "1.4.4" @@ -442,27 +428,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/ec/bb273b7208c606890dc36540fe667d06ce840a6f62f9fae7e658fcdc90fb/cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1", size = 385747, upload-time = "2024-06-04T15:51:37.499Z" }, ] -[[package]] -name = "curl-cffi" -version = "0.13.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "cffi" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4e/3d/f39ca1f8fdf14408888e7c25e15eed63eac5f47926e206fb93300d28378c/curl_cffi-0.13.0.tar.gz", hash = "sha256:62ecd90a382bd5023750e3606e0aa7cb1a3a8ba41c14270b8e5e149ebf72c5ca", size = 151303, upload-time = "2025-08-06T13:05:42.988Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/19/d1/acabfd460f1de26cad882e5ef344d9adde1507034528cb6f5698a2e6a2f1/curl_cffi-0.13.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:434cadbe8df2f08b2fc2c16dff2779fb40b984af99c06aa700af898e185bb9db", size = 5686337, upload-time = "2025-08-06T13:05:28.985Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1c/cdb4fb2d16a0e9de068e0e5bc02094e105ce58a687ff30b4c6f88e25a057/curl_cffi-0.13.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59afa877a9ae09efa04646a7d068eeea48915a95d9add0a29854e7781679fcd7", size = 2994613, upload-time = "2025-08-06T13:05:31.027Z" }, - { url = "https://files.pythonhosted.org/packages/04/3e/fdf617c1ec18c3038b77065d484d7517bb30f8fb8847224eb1f601a4e8bc/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06ed389e45a7ca97b17c275dbedd3d6524560270e675c720e93a2018a766076", size = 7931353, upload-time = "2025-08-06T13:05:32.273Z" }, - { url = "https://files.pythonhosted.org/packages/3d/10/6f30c05d251cf03ddc2b9fd19880f3cab8c193255e733444a2df03b18944/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4e0de45ab3b7a835c72bd53640c2347415111b43421b5c7a1a0b18deae2e541", size = 7486378, upload-time = "2025-08-06T13:05:33.672Z" }, - { url = "https://files.pythonhosted.org/packages/77/81/5bdb7dd0d669a817397b2e92193559bf66c3807f5848a48ad10cf02bf6c7/curl_cffi-0.13.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb4083371bbb94e9470d782de235fb5268bf43520de020c9e5e6be8f395443f", size = 8328585, upload-time = "2025-08-06T13:05:35.28Z" }, - { url = "https://files.pythonhosted.org/packages/ce/c1/df5c6b4cfad41c08442e0f727e449f4fb5a05f8aa564d1acac29062e9e8e/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:28911b526e8cd4aa0e5e38401bfe6887e8093907272f1f67ca22e6beb2933a51", size = 8739831, upload-time = "2025-08-06T13:05:37.078Z" }, - { url = "https://files.pythonhosted.org/packages/1a/91/6dd1910a212f2e8eafe57877bcf97748eb24849e1511a266687546066b8a/curl_cffi-0.13.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d433ffcb455ab01dd0d7bde47109083aa38b59863aa183d29c668ae4c96bf8e", size = 8711908, upload-time = "2025-08-06T13:05:38.741Z" }, - { url = "https://files.pythonhosted.org/packages/6d/e4/15a253f9b4bf8d008c31e176c162d2704a7e0c5e24d35942f759df107b68/curl_cffi-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:66a6b75ce971de9af64f1b6812e275f60b88880577bac47ef1fa19694fa21cd3", size = 1614510, upload-time = "2025-08-06T13:05:40.451Z" }, - { url = "https://files.pythonhosted.org/packages/f9/0f/9c5275f17ad6ff5be70edb8e0120fdc184a658c9577ca426d4230f654beb/curl_cffi-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:d438a3b45244e874794bc4081dc1e356d2bb926dcc7021e5a8fef2e2105ef1d8", size = 1365753, upload-time = "2025-08-06T13:05:41.879Z" }, -] - [[package]] name = "dacite" version = "1.9.2" @@ -490,18 +455,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/35/4a113189f7138035a21bd255d30dc7bffc77c942c93b7948d2eac2e22429/ECPy-1.2.5-py3-none-any.whl", hash = "sha256:559c92e42406d9d1a6b2b8fc26e6ad7bc985f33903b72f426a56cb1073a25ce3", size = 43075, upload-time = "2020-10-26T11:56:13.613Z" }, ] -[[package]] -name = "exceptiongroup" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, -] - [[package]] name = "fastjsonschema" version = "2.19.1" @@ -622,43 +575,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/6e/81d47999aebc1b155f81eca4477a616a70f238a2549848c38983f3c22a82/ftfy-6.3.1-py3-none-any.whl", hash = "sha256:7c70eb532015cd2f9adb53f101fb6c7945988d023a085d127d1573dc49dd0083", size = 44821, upload-time = "2024-10-26T00:50:33.425Z" }, ] -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - [[package]] name = "identify" version = "2.6.16" @@ -1046,14 +962,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] -[[package]] -name = "pproxy" -version = "2.7.9" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/c6/673a10a729061d2594b85aedd7dd2e470db4d54b12d4f95a306353bb2967/pproxy-2.7.9-py3-none-any.whl", hash = "sha256:a073d02616a47c43e1d20a547918c307dbda598c6d53869b165025f3cfe58e80", size = 42842, upload-time = "2024-01-16T11:33:35.286Z" }, -] - [[package]] name = "pre-commit" version = "4.5.1" @@ -1434,6 +1342,53 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/52/d2/d2ffaecbfff0c057b5824a82b57b709b1c5b2966c970e4c5d6e1d8109b21/rlaphoenix.m3u8-3.4.0-py3-none-any.whl", hash = "sha256:cd2c22195c747d52c63189d4bd5f664e1fc5ea202f5a7396b7336581f26a2838", size = 24767, upload-time = "2023-03-09T21:37:38.326Z" }, ] +[[package]] +name = "rnet" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/10/bc/e5e4395e67803405900b98d503a23c1125432a5a73d2c311dd2ebe11b7fc/rnet-2.4.2.tar.gz", hash = "sha256:9fc9ea17a7afea799e10670f0c1da939f500c440760aeefe42209644ffef5bf5", size = 515573, upload-time = "2025-08-02T23:26:27.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/5e/09b4fcb92611b6c51db2b7abb0a126aa87a76350e1da783ea35e3c9711af/rnet-2.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e5c8e485396dc86cdd39bf036747866f9ccf1c462ed660c65df4fea57b7d8b7", size = 3703136, upload-time = "2025-08-02T23:25:24.945Z" }, + { url = "https://files.pythonhosted.org/packages/60/0e/40b06dec2a172e2136d0c731880f5932b4383da470dc0ccf17f3fdd196da/rnet-2.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b092c70d4943d914272c58bc17e2382054c3180828564f378411cdfebc752f7a", size = 3429794, upload-time = "2025-08-02T23:25:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/68/31/4e51497c8722379c79b054bb6d98e0273f42248de948f7dbc3c4dcde88cb/rnet-2.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f881c1334d8f65b8c3c54eacccc487b21ea778762dc40e20d94ee8f841a2bb9", size = 3661754, upload-time = "2025-08-02T23:24:59.127Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c3/9b43dde7c6b505eae0d0c23133b612b07d9221f0423fac55abbda78d5bdb/rnet-2.4.2-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:ad5d2af6097493a84f9ef006f709fa4a3d42957f38aa84dd6283f8856e94e773", size = 3609141, upload-time = "2025-08-02T23:24:20.516Z" }, + { url = "https://files.pythonhosted.org/packages/19/37/37e5a0b9eb1c4a782c399443c5d498b24a2d40baa86842afd1588f4b4508/rnet-2.4.2-cp310-cp310-manylinux_2_34_armv7l.whl", hash = "sha256:5cdaf7a141a045cae13961b206406ccc34d8b9f3bac9d5e44bd26f14c33ca657", size = 3424711, upload-time = "2025-08-02T23:24:34.339Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/aef21e909707d0bbfd347a843fefbed2fd50255c7a99ff4251fce82e2362/rnet-2.4.2-cp310-cp310-manylinux_2_34_i686.whl", hash = "sha256:df33b9f4e5e2bdc21aba4189628a6827d950718f863904c5ee3f43a40c60089a", size = 3686201, upload-time = "2025-08-02T23:24:46.31Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c5/5ed5ee58cba531681e73099e619c2d36e8453e28764c71682a32c373b30c/rnet-2.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a76a4976e065ff2af0fbfa13ea14e2b2f449ba6ea708125029d54738e3c638cf", size = 3957076, upload-time = "2025-08-02T23:25:37.751Z" }, + { url = "https://files.pythonhosted.org/packages/97/88/2ac698c25fe8c7a108d0bf7b76afa0049d9f4c1ae7162542434970936a00/rnet-2.4.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1030ca77af54bfee5739d4bb34f403329b154cdbab4bcd2feeb20fab22955359", size = 3919451, upload-time = "2025-08-02T23:25:49.469Z" }, + { url = "https://files.pythonhosted.org/packages/3e/1b/129029ba55eeb1daa58ab7e88a06f2a95b8246b207fbe8bbc04f9f23d2cd/rnet-2.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fc5dd872523a4b5f21ea7092fd9440a0677f609e3b971c60673b4dbd984745a9", size = 4005497, upload-time = "2025-08-02T23:26:02.816Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a2/4df4e00e1f3b04c902ab494147140fea308d139c5f7697aedcf949d8f225/rnet-2.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:080d04ceaf7be30505d11360b33d5d43668b557a7b86de3c548882d1de19bc4f", size = 4166618, upload-time = "2025-08-02T23:26:15.245Z" }, + { url = "https://files.pythonhosted.org/packages/09/93/cbf1d634d17b220bb7ba52fd38afd98101a010bf5c873af5815eba6e601d/rnet-2.4.2-cp310-cp310-win32.whl", hash = "sha256:9e8f79f055630780e1334255b1167b30b99989e31a87e10295e143240eb519d5", size = 3207306, upload-time = "2025-08-02T23:26:53.616Z" }, + { url = "https://files.pythonhosted.org/packages/d2/36/dd76e90d1fea4688f64cb6263244500fea6b1c8f979bb1651f132515a617/rnet-2.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f4e891d603c6fe4f28857b161d6ee10975633a5ee1867050962aef3954cf3e1a", size = 3561188, upload-time = "2025-08-02T23:26:41.141Z" }, + { url = "https://files.pythonhosted.org/packages/d9/08/beb3c97573688b23f081d35f6280db9438c3a32ec7dc6ba8479107f8d913/rnet-2.4.2-cp310-cp310-win_arm64.whl", hash = "sha256:372e9a7764f6947a8484774827829e291a8f299b80f93cf9318483c60b1c1921", size = 3202587, upload-time = "2025-08-02T23:26:29.299Z" }, + { url = "https://files.pythonhosted.org/packages/5c/b3/7cbd1daf6cf3a5eb56615128e5a9fb5f3fda6457d511791766c39cc71203/rnet-2.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0106d7b43ea92a02458eea5e5c76ac67ff978f5715293c836164c4a05a7eb890", size = 3703182, upload-time = "2025-08-02T23:25:26.218Z" }, + { url = "https://files.pythonhosted.org/packages/d2/2f/4bd07edd1785445b95e717ad93c5845b18e8d4df578e1c62c11c77a9aea4/rnet-2.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14074800998098403540b9b624e78f7dd811605ac0f1a6081a12ad5e6e1fd1ac", size = 3429858, upload-time = "2025-08-02T23:25:13.59Z" }, + { url = "https://files.pythonhosted.org/packages/43/4e/d71e2c30526c54ace931f95c5134cb474aaa9f3142e4e11f651bb1ec7b27/rnet-2.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:573c637accaf7f3c7aa6d2241224ed577444c00e8af4e631b243b3cae765c502", size = 3661678, upload-time = "2025-08-02T23:25:00.677Z" }, + { url = "https://files.pythonhosted.org/packages/f1/27/a33ac1b61d29015e832ff960b274929288b6901cca3cf415e1f6a0aec1ed/rnet-2.4.2-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:48fca3430dc4d90c920c08474a0db0ec3e6465226a08345b10b6cc58c8b0c23e", size = 3609069, upload-time = "2025-08-02T23:24:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f5/628153c9228e7430650e11c1f40cdb53a1a23592d98c39aafb534217278e/rnet-2.4.2-cp311-cp311-manylinux_2_34_armv7l.whl", hash = "sha256:7bf06f481297304d426cd7c6b36babc3859ae242cde276f038f6f51cff7fd4de", size = 3424456, upload-time = "2025-08-02T23:24:35.592Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c9/c6444cfa9c935ef2b9a273470812b8555cec262dbab7f20325fd67a27c1d/rnet-2.4.2-cp311-cp311-manylinux_2_34_i686.whl", hash = "sha256:ca351af5ccb531d308eeb7ae3dcbfba038a14d4897e22139d76f8cd88eed649f", size = 3686160, upload-time = "2025-08-02T23:24:47.5Z" }, + { url = "https://files.pythonhosted.org/packages/97/6a/d7c48b8400b30c1931a800c79b429692758ef349b1a210bb9f499f199687/rnet-2.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129eab0183ca50fd5f57b24b3b4387a5edca727e4004c1debcb5c23ecba6c128", size = 3957128, upload-time = "2025-08-02T23:25:39Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cf/ddabfa4299dbeefae488a54e95684c0c68c00b5d3cff3b8212d1adf2b206/rnet-2.4.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a0449fae24b95b29f8a3f433f7866fc6c94d9ee37d2a5d94b7154eb436ee448", size = 3919406, upload-time = "2025-08-02T23:25:50.78Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fe/e92e5dacfc97041cbf335c10e0a45b7ac71e0d30c51e0a0dc51d35d1ce0b/rnet-2.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b8a17765351c3f75ce725ee4d946a255c1b3920462252edffd737e81ce996fc7", size = 4005641, upload-time = "2025-08-02T23:26:04.192Z" }, + { url = "https://files.pythonhosted.org/packages/92/a6/156f5801328adc4296f6686e27f69ea22cc0c17d1f108759caa53bcedeb5/rnet-2.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e73fb0e89965ed31b22644e221bdd928ebcb6b3f8ce75da4f083cb92baef844f", size = 4166702, upload-time = "2025-08-02T23:26:16.539Z" }, + { url = "https://files.pythonhosted.org/packages/00/e6/42a36a76238e10b157e1265be38f2fc66eeb4eaa5b9b3dfdcd4b581e2e6f/rnet-2.4.2-cp311-cp311-win32.whl", hash = "sha256:f3296e85f3f8da7165d8b7df5633f8443b1f2597215646e8e090d1affaa3d1b0", size = 3207638, upload-time = "2025-08-02T23:26:54.904Z" }, + { url = "https://files.pythonhosted.org/packages/ba/6e/92d99f03522cddffb4d00dbac4b63daafbd7966a915ec689bb713da45d3e/rnet-2.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:847529308ab9cf59f0d4ac5a9d1fe051894a26aadf3b8f8b20a862302587725f", size = 3561173, upload-time = "2025-08-02T23:26:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bc/4a4d19425adf6a62459da608988b4de0f43c71d252cf0b15517cdb46649e/rnet-2.4.2-cp311-cp311-win_arm64.whl", hash = "sha256:b2eb935265a0771f9b323f2980455b5478550919d18572d523ac2cb5f328e7f7", size = 3202633, upload-time = "2025-08-02T23:26:30.611Z" }, + { url = "https://files.pythonhosted.org/packages/c9/22/434a9aa0228a4fa2abe48b04d36214f5cbe08af45afdb833ac7cc02cd913/rnet-2.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b853a9809588a569011142b9bae142ad982387640edcfd38fba4337b044900ae", size = 3694856, upload-time = "2025-08-02T23:25:27.818Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/b49c2dce89381b7697ccb771a6850eea13934ef1eb37a8ef2ba27d925643/rnet-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da547d7be92261ead4bc0ce23e30823c760d638055fd301da18c6521ec245fc8", size = 3420543, upload-time = "2025-08-02T23:25:14.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/cb/7c5979932069c9f40651d4aca487bfe639a94098eb123d7ec466f7f7730d/rnet-2.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc5bf17b3ed46455fe70a784dff0ccc7beaf54984d554536e644b6d1dacea63e", size = 3658152, upload-time = "2025-08-02T23:25:01.876Z" }, + { url = "https://files.pythonhosted.org/packages/62/2f/83b754b1383a8cf6e696cb547e0ec4d47ba58dc838b16341be6f1af0ede6/rnet-2.4.2-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:3fe4531b0bffc26d10e3baec2f3d0deb59fb8ff157c56b985d9bd2d6060b2715", size = 3602597, upload-time = "2025-08-02T23:24:24.421Z" }, + { url = "https://files.pythonhosted.org/packages/91/4a/3012990ec2f309baf41f70929bac0f166db3a7ce5a6bca1143ba6e9b4610/rnet-2.4.2-cp312-cp312-manylinux_2_34_armv7l.whl", hash = "sha256:c51cc5648efdc97bb17d88aab30f0596924766dc137109865bff72539141a81e", size = 3418020, upload-time = "2025-08-02T23:24:36.835Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/6780b490a7d9dfc76c17c68f84b9d5cfb602bdc5db4ca5774930e7b7933e/rnet-2.4.2-cp312-cp312-manylinux_2_34_i686.whl", hash = "sha256:b13ee78075389050ae537d9c6957d8de820d0c7f3c7053dfc3e103e0538890a7", size = 3679644, upload-time = "2025-08-02T23:24:49.07Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d4/a092274c9513d67f802cc6f3472068f6cbf30652d00d4b5c29617c20479d/rnet-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d2e21ed40fcaead89a9f711354f92d5d2690e621a0a6f37edf6d655d1994d58", size = 3953384, upload-time = "2025-08-02T23:25:40.244Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5f/e4660f38921f41ab2199228d173d6f5d881f391c7b686695dd383fd41693/rnet-2.4.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8c5672ff8cdb9042a275187badd28279f979e20dcf175da22fce666af7b1b273", size = 3913721, upload-time = "2025-08-02T23:25:52.214Z" }, + { url = "https://files.pythonhosted.org/packages/44/89/1e81dd97c9ab45bfed871b5cb7fec50893f1a6be6bfd2c237cf3b902cf63/rnet-2.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:82037372e94fd7bc999ccac1b971da1a0f15a469979777c11dd225fddb249de1", size = 4001858, upload-time = "2025-08-02T23:26:05.506Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5c/475c7c9bff6e94a7e5d457e8de2b5786a1f1a7488ad48b29cafddbb530bf/rnet-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:582f71311cb42db9a396bb95f39577fc4ac5e94d6de17696d2900a05814e5ac6", size = 4162005, upload-time = "2025-08-02T23:26:17.797Z" }, + { url = "https://files.pythonhosted.org/packages/5a/36/c4bdcdcdd9682fcb1fe01a371e8c25bff949bd719fd78021112538951bd3/rnet-2.4.2-cp312-cp312-win32.whl", hash = "sha256:355b849b67b131fbeffb7b5ee9a4057d3b4f576c1c63a59698a49f86c3a0bc80", size = 3200189, upload-time = "2025-08-02T23:26:56.208Z" }, + { url = "https://files.pythonhosted.org/packages/ef/cd/cb6f11f33e0a7d567b980c2b7e19f5f0e827a9ea33c53c2de350ef23f121/rnet-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:d47a3fb6339e62b06cabeed8dc4aab28050cadc02a8dcbf56b688fb1ca2c7171", size = 3560606, upload-time = "2025-08-02T23:26:43.797Z" }, + { url = "https://files.pythonhosted.org/packages/9c/70/5ded6c684343fd1a59e5b9ed4ffc7ec783d080bac32ba98c503d363914c0/rnet-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:71d3f845b0f44d2353133ac8ee3bea39d00a6766356aa0d1f545b739380d0bea", size = 3199395, upload-time = "2025-08-02T23:26:31.975Z" }, +] + [[package]] name = "ruamel-yaml" version = "0.18.17" @@ -1663,15 +1618,12 @@ dependencies = [ { name = "construct" }, { name = "crccheck" }, { name = "cryptography" }, - { name = "curl-cffi" }, { name = "filelock" }, { name = "fonttools" }, - { name = "httpx" }, { name = "jsonpickle" }, { name = "langcodes" }, { name = "language-data" }, { name = "lxml" }, - { name = "pproxy" }, { name = "protobuf" }, { name = "pycaption" }, { name = "pycountry" }, @@ -1688,6 +1640,7 @@ dependencies = [ { name = "requests", extra = ["socks"] }, { name = "rich" }, { name = "rlaphoenix-m3u8" }, + { name = "rnet" }, { name = "ruamel-yaml" }, { name = "sortedcontainers" }, { name = "subby" }, @@ -1722,15 +1675,12 @@ requires-dist = [ { name = "construct", specifier = ">=2.8.8,<3" }, { name = "crccheck", specifier = ">=1.3.0,<2" }, { name = "cryptography", specifier = ">=45.0.0,<47" }, - { name = "curl-cffi", specifier = ">=0.7.0b4,<0.14" }, { name = "filelock", specifier = ">=3.20.3,<4" }, { name = "fonttools", specifier = ">=4.60.2,<5" }, - { name = "httpx", specifier = ">=0.28.1,<0.29" }, { name = "jsonpickle", specifier = ">=3.0.4,<5" }, { name = "langcodes", specifier = ">=3.4.0,<4" }, { name = "language-data", specifier = ">=1.4.0" }, { name = "lxml", specifier = ">=5.2.1,<7" }, - { name = "pproxy", specifier = ">=2.7.9,<3" }, { name = "protobuf", specifier = ">=4.25.3,<7" }, { name = "pycaption", specifier = ">=2.2.6,<3" }, { name = "pycountry", specifier = ">=24.6.1" }, @@ -1747,6 +1697,7 @@ requires-dist = [ { name = "requests", extras = ["socks"], specifier = ">=2.32.5,<3" }, { name = "rich", specifier = ">=13.7.1,<15" }, { name = "rlaphoenix-m3u8", specifier = ">=3.4.0,<4" }, + { name = "rnet", specifier = ">=2.4.2" }, { name = "ruamel-yaml", specifier = ">=0.18.6,<0.19" }, { name = "sortedcontainers", specifier = ">=2.4.0,<3" }, { name = "subby", git = "https://github.com/vevv/subby.git?rev=1ea6a52028c5bea8177c8abc91716d74e4d097e1" },