8 Commits
2.1.0 ... 2.2.0

Author SHA1 Message Date
Andy
d1d3daf750 chore(release): bump version to 2.2.0 2026-01-15 03:07:18 +00:00
Andy
a7b6e9e680 feat(drm): add CDM-aware PlayReady fallback detection
Add PlayReady PSSH/KID extraction from track and init data with CDM-aware ordering. When PlayReady CDM is selected, tries PlayReady first then falls back to Widevine. When Widevine CDM is selected (default), tries Widevine first then falls back to PlayReady.
2026-01-15 02:49:56 +00:00
Andy
415544775b fix(vaults): adaptive batch sizing for bulk key operations 2026-01-14 23:04:54 +00:00
Andy
6740dd3dfa Revert "fix(vaults): batch bulk key operations to avoid query limits"
This reverts commit 7e7bc7aecf.
2026-01-14 23:00:43 +00:00
Andy
fcd70e5b0f fix(titles): detect HDR10 in hybrid DV filenames correctly
Hybrid DV+HDR10 files were named "DV.H.265" instead of "DV.HDR.H.265" because the HDR10 detection only checked hdr_format_full, which contains "Dolby Vision / SMPTE ST 2094". The "HDR10" indicator is in hdr_format_commercial, not hdr_format_full.

Now checks both fields for HDR10 compatibility indicators.
2026-01-14 22:25:58 +00:00
Andy
7e7bc7aecf fix(vaults): batch bulk key operations to avoid query limits 2026-01-11 08:21:02 +00:00
Andy
ede38648db fix(util): improve test command error detection and add natural sorting 2026-01-07 16:22:45 +00:00
Andy
17a91ee4bb feat(debug): add comprehensive debug logging for downloaders and muxing 2026-01-05 09:50:33 +00:00
18 changed files with 546 additions and 61 deletions

View File

@@ -5,6 +5,31 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2.2.0] - 2026-01-15
### Added
- **CDM-Aware PlayReady Fallback Detection**: Intelligent DRM fallback based on selected CDM
- Adds PlayReady PSSH/KID extraction from track and init data with CDM-aware ordering
- When PlayReady CDM is selected, tries PlayReady first then falls back to Widevine
- When Widevine CDM is selected (default), tries Widevine first then falls back to PlayReady
- **Comprehensive Debug Logging**: Enhanced debug logging for downloaders and muxing
- Added detailed debug logging to aria2c, curl_impersonate, n_m3u8dl_re, and requests downloaders
- Enhanced manifest parsers (DASH, HLS, ISM) with debug logging
- Added debug logging to track muxing operations
### Fixed
- **Hybrid DV+HDR10 Filename Detection**: Fixed HDR10 detection in hybrid Dolby Vision filenames
- Hybrid DV+HDR10 files were incorrectly named "DV.H.265" instead of "DV.HDR.H.265"
- Now checks both `hdr_format_full` and `hdr_format_commercial` fields for HDR10 indicators
- **Vault Adaptive Batch Sizing**: Improved bulk key operations with adaptive batch sizing
- Prevents query limit issues when retrieving large numbers of keys from vaults
- Dynamically adjusts batch sizes based on vault response characteristics
- **Test Command Improvements**: Enhanced test command error detection and sorting
- Improved error detection in test command output
- Added natural sorting for test results
## [2.1.0] - 2025-11-27 ## [2.1.0] - 2025-11-27
### Added ### Added

View File

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project] [project]
name = "unshackle" name = "unshackle"
version = "2.1.0" version = "2.2.0"
description = "Modular Movie, TV, and Music Archival Software." description = "Modular Movie, TV, and Music Archival Software."
authors = [{ name = "unshackle team" }] authors = [{ name = "unshackle team" }]
requires-python = ">=3.10,<3.13" requires-python = ">=3.10,<3.13"

View File

@@ -1,3 +1,4 @@
import re
import subprocess import subprocess
from pathlib import Path from pathlib import Path
@@ -8,6 +9,11 @@ from unshackle.core import binaries
from unshackle.core.constants import context_settings from unshackle.core.constants import context_settings
def _natural_sort_key(path: Path) -> list:
"""Sort key for natural sorting (S01E01 before S01E10)."""
return [int(part) if part.isdigit() else part.lower() for part in re.split(r"(\d+)", path.name)]
@click.group(short_help="Various helper scripts and programs.", context_settings=context_settings) @click.group(short_help="Various helper scripts and programs.", context_settings=context_settings)
def util() -> None: def util() -> None:
"""Various helper scripts and programs.""" """Various helper scripts and programs."""
@@ -49,7 +55,7 @@ def crop(path: Path, aspect: str, letter: bool, offset: int, preview: bool) -> N
raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.') raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.')
if path.is_dir(): if path.is_dir():
paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4")) paths = sorted(list(path.glob("*.mkv")) + list(path.glob("*.mp4")), key=_natural_sort_key)
else: else:
paths = [path] paths = [path]
for video_path in paths: for video_path in paths:
@@ -140,7 +146,7 @@ def range_(path: Path, full: bool, preview: bool) -> None:
raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.') raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.')
if path.is_dir(): if path.is_dir():
paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4")) paths = sorted(list(path.glob("*.mkv")) + list(path.glob("*.mp4")), key=_natural_sort_key)
else: else:
paths = [path] paths = [path]
for video_path in paths: for video_path in paths:
@@ -225,16 +231,18 @@ def test(path: Path, map_: str) -> None:
raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.') raise click.ClickException('FFmpeg executable "ffmpeg" not found but is required.')
if path.is_dir(): if path.is_dir():
paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4")) paths = sorted(list(path.glob("*.mkv")) + list(path.glob("*.mp4")), key=_natural_sort_key)
else: else:
paths = [path] paths = [path]
for video_path in paths: for video_path in paths:
print("Starting...") print(f"Testing: {video_path.name}")
p = subprocess.Popen( p = subprocess.Popen(
[ [
binaries.FFMPEG, binaries.FFMPEG,
"-hide_banner", "-hide_banner",
"-benchmark", "-benchmark",
"-err_detect",
"+crccheck+bitstream+buffer+careful+compliant+aggressive",
"-i", "-i",
str(video_path), str(video_path),
"-map", "-map",
@@ -255,13 +263,13 @@ def test(path: Path, map_: str) -> None:
reached_output = True reached_output = True
if not reached_output: if not reached_output:
continue continue
if line.startswith("["): # error of some kind if line.startswith("[") and not line.startswith("[out#"):
errors += 1 errors += 1
stream, error = line.split("] ", maxsplit=1) stream, error = line.split("] ", maxsplit=1)
stream = stream.split(" @ ")[0] stream = stream.split(" @ ")[0]
line = f"{stream} ERROR: {error}" line = f"{stream} ERROR: {error}"
print(line) print(line)
p.stderr.close() p.stderr.close()
print(f"Finished with {errors} Errors, Cleaning up...") print(f"Finished with {errors} error(s)")
p.terminate() p.terminate()
p.wait() p.wait()

View File

@@ -1 +1 @@
__version__ = "2.1.0" __version__ = "2.2.0"

View File

@@ -19,7 +19,7 @@ from unshackle.core import binaries
from unshackle.core.config import config from unshackle.core.config import config
from unshackle.core.console import console from unshackle.core.console import console
from unshackle.core.constants import DOWNLOAD_CANCELLED from unshackle.core.constants import DOWNLOAD_CANCELLED
from unshackle.core.utilities import get_extension, get_free_port from unshackle.core.utilities import get_debug_logger, get_extension, get_free_port
def rpc(caller: Callable, secret: str, method: str, params: Optional[list[Any]] = None) -> Any: def rpc(caller: Callable, secret: str, method: str, params: Optional[list[Any]] = None) -> Any:
@@ -58,6 +58,8 @@ def download(
proxy: Optional[str] = None, proxy: Optional[str] = None,
max_workers: Optional[int] = None, max_workers: Optional[int] = None,
) -> Generator[dict[str, Any], None, None]: ) -> Generator[dict[str, Any], None, None]:
debug_logger = get_debug_logger()
if not urls: if not urls:
raise ValueError("urls must be provided and not empty") raise ValueError("urls must be provided and not empty")
elif not isinstance(urls, (str, dict, list)): elif not isinstance(urls, (str, dict, list)):
@@ -91,6 +93,13 @@ def download(
urls = [urls] urls = [urls]
if not binaries.Aria2: if not binaries.Aria2:
if debug_logger:
debug_logger.log(
level="ERROR",
operation="downloader_aria2c_binary_missing",
message="Aria2c executable not found in PATH or local binaries directory",
context={"searched_names": ["aria2c", "aria2"]},
)
raise EnvironmentError("Aria2c executable not found...") raise EnvironmentError("Aria2c executable not found...")
if proxy and not proxy.lower().startswith("http://"): if proxy and not proxy.lower().startswith("http://"):
@@ -180,6 +189,28 @@ def download(
continue continue
arguments.extend(["--header", f"{header}: {value}"]) arguments.extend(["--header", f"{header}: {value}"])
if debug_logger:
first_url = urls[0] if isinstance(urls[0], str) else urls[0].get("url", "")
url_display = first_url[:200] + "..." if len(first_url) > 200 else first_url
debug_logger.log(
level="DEBUG",
operation="downloader_aria2c_start",
message="Starting Aria2c download",
context={
"binary_path": str(binaries.Aria2),
"url_count": len(urls),
"first_url": url_display,
"output_dir": str(output_dir),
"filename": filename,
"max_concurrent_downloads": max_concurrent_downloads,
"max_connection_per_server": max_connection_per_server,
"split": split,
"file_allocation": file_allocation,
"has_proxy": bool(proxy),
"rpc_port": rpc_port,
},
)
yield dict(total=len(urls)) yield dict(total=len(urls))
try: try:
@@ -226,6 +257,20 @@ def download(
textwrap.wrap(error, width=console.width - 20, initial_indent="") textwrap.wrap(error, width=console.width - 20, initial_indent="")
) )
console.log(Text.from_ansi("\n[Aria2c]: " + error_pretty)) console.log(Text.from_ansi("\n[Aria2c]: " + error_pretty))
if debug_logger:
debug_logger.log(
level="ERROR",
operation="downloader_aria2c_download_error",
message=f"Aria2c download failed: {dl['errorMessage']}",
context={
"gid": dl["gid"],
"error_code": dl["errorCode"],
"error_message": dl["errorMessage"],
"used_uri": used_uri[:200] + "..." if len(used_uri) > 200 else used_uri,
"completed_length": dl.get("completedLength"),
"total_length": dl.get("totalLength"),
},
)
raise ValueError(error) raise ValueError(error)
if number_stopped == len(urls): if number_stopped == len(urls):
@@ -237,7 +282,31 @@ def download(
p.wait() p.wait()
if p.returncode != 0: if p.returncode != 0:
if debug_logger:
debug_logger.log(
level="ERROR",
operation="downloader_aria2c_failed",
message=f"Aria2c exited with code {p.returncode}",
context={
"returncode": p.returncode,
"url_count": len(urls),
"output_dir": str(output_dir),
},
)
raise subprocess.CalledProcessError(p.returncode, arguments) raise subprocess.CalledProcessError(p.returncode, arguments)
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="downloader_aria2c_complete",
message="Aria2c download completed successfully",
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"filename": filename,
},
)
except ConnectionResetError: except ConnectionResetError:
# interrupted while passing URI to download # interrupted while passing URI to download
raise KeyboardInterrupt() raise KeyboardInterrupt()
@@ -251,9 +320,20 @@ def download(
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield dict(downloaded="[yellow]CANCELLED") yield dict(downloaded="[yellow]CANCELLED")
raise raise
except Exception: except Exception as e:
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield dict(downloaded="[red]FAILED") yield dict(downloaded="[red]FAILED")
if debug_logger and not isinstance(e, (subprocess.CalledProcessError, ValueError)):
debug_logger.log(
level="ERROR",
operation="downloader_aria2c_exception",
message=f"Unexpected error during Aria2c download: {e}",
error=e,
context={
"url_count": len(urls),
"output_dir": str(output_dir),
},
)
raise raise
finally: finally:
rpc(caller=partial(rpc_session.post, url=rpc_uri), secret=rpc_secret, method="aria2.shutdown") rpc(caller=partial(rpc_session.post, url=rpc_uri), secret=rpc_secret, method="aria2.shutdown")

View File

@@ -11,7 +11,7 @@ from rich import filesize
from unshackle.core.config import config from unshackle.core.config import config
from unshackle.core.constants import DOWNLOAD_CANCELLED from unshackle.core.constants import DOWNLOAD_CANCELLED
from unshackle.core.utilities import get_extension from unshackle.core.utilities import get_debug_logger, get_extension
MAX_ATTEMPTS = 5 MAX_ATTEMPTS = 5
RETRY_WAIT = 2 RETRY_WAIT = 2
@@ -189,6 +189,8 @@ def curl_impersonate(
if not isinstance(max_workers, (int, type(None))): if not isinstance(max_workers, (int, type(None))):
raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}") raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}")
debug_logger = get_debug_logger()
if not isinstance(urls, list): if not isinstance(urls, list):
urls = [urls] urls = [urls]
@@ -209,6 +211,24 @@ def curl_impersonate(
if proxy: if proxy:
session.proxies.update({"all": proxy}) session.proxies.update({"all": proxy})
if debug_logger:
first_url = urls[0].get("url", "") if urls else ""
url_display = first_url[:200] + "..." if len(first_url) > 200 else first_url
debug_logger.log(
level="DEBUG",
operation="downloader_curl_impersonate_start",
message="Starting curl_impersonate download",
context={
"url_count": len(urls),
"first_url": url_display,
"output_dir": str(output_dir),
"filename": filename,
"max_workers": max_workers,
"browser": BROWSER,
"has_proxy": bool(proxy),
},
)
yield dict(total=len(urls)) yield dict(total=len(urls))
download_sizes = [] download_sizes = []
@@ -235,11 +255,23 @@ def curl_impersonate(
# tell dl that it was cancelled # tell dl that it was cancelled
# the pool is already shut down, so exiting loop is fine # the pool is already shut down, so exiting loop is fine
raise raise
except Exception: except Exception as e:
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield dict(downloaded="[red]FAILING") yield dict(downloaded="[red]FAILING")
pool.shutdown(wait=True, cancel_futures=True) pool.shutdown(wait=True, cancel_futures=True)
yield dict(downloaded="[red]FAILED") yield dict(downloaded="[red]FAILED")
if debug_logger:
debug_logger.log(
level="ERROR",
operation="downloader_curl_impersonate_failed",
message=f"curl_impersonate download failed: {e}",
error=e,
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"browser": BROWSER,
},
)
# tell dl that it failed # tell dl that it failed
# the pool is already shut down, so exiting loop is fine # the pool is already shut down, so exiting loop is fine
raise raise
@@ -260,5 +292,17 @@ def curl_impersonate(
last_speed_refresh = now last_speed_refresh = now
download_sizes.clear() download_sizes.clear()
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="downloader_curl_impersonate_complete",
message="curl_impersonate download completed successfully",
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"filename": filename,
},
)
__all__ = ("curl_impersonate",) __all__ = ("curl_impersonate",)

View File

@@ -13,6 +13,7 @@ from unshackle.core import binaries
from unshackle.core.config import config from unshackle.core.config import config
from unshackle.core.console import console from unshackle.core.console import console
from unshackle.core.constants import DOWNLOAD_CANCELLED from unshackle.core.constants import DOWNLOAD_CANCELLED
from unshackle.core.utilities import get_debug_logger
PERCENT_RE = re.compile(r"(\d+\.\d+%)") PERCENT_RE = re.compile(r"(\d+\.\d+%)")
SPEED_RE = re.compile(r"(\d+\.\d+(?:MB|KB)ps)") SPEED_RE = re.compile(r"(\d+\.\d+(?:MB|KB)ps)")
@@ -176,7 +177,6 @@ def build_download_args(
"--thread-count": thread_count, "--thread-count": thread_count,
"--download-retry-count": retry_count, "--download-retry-count": retry_count,
"--write-meta-json": False, "--write-meta-json": False,
"--no-log": True,
} }
if proxy: if proxy:
args["--custom-proxy"] = proxy args["--custom-proxy"] = proxy
@@ -224,6 +224,8 @@ def download(
content_keys: dict[str, Any] | None, content_keys: dict[str, Any] | None,
skip_merge: bool | None = False, skip_merge: bool | None = False,
) -> Generator[dict[str, Any], None, None]: ) -> Generator[dict[str, Any], None, None]:
debug_logger = get_debug_logger()
if not urls: if not urls:
raise ValueError("urls must be provided and not empty") raise ValueError("urls must be provided and not empty")
if not isinstance(urls, (str, dict, list)): if not isinstance(urls, (str, dict, list)):
@@ -275,7 +277,39 @@ def download(
skip_merge=skip_merge, skip_merge=skip_merge,
ad_keyword=ad_keyword, ad_keyword=ad_keyword,
) )
arguments.extend(get_track_selection_args(track)) selection_args = get_track_selection_args(track)
arguments.extend(selection_args)
log_file_path: Path | None = None
if debug_logger:
log_file_path = output_dir / f".n_m3u8dl_re_{filename}.log"
arguments.extend(["--log-file-path", str(log_file_path)])
track_url_display = track.url[:200] + "..." if len(track.url) > 200 else track.url
debug_logger.log(
level="DEBUG",
operation="downloader_n_m3u8dl_re_start",
message="Starting N_m3u8DL-RE download",
context={
"binary_path": str(binaries.N_m3u8DL_RE),
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"track_url": track_url_display,
"output_dir": str(output_dir),
"filename": filename,
"thread_count": thread_count,
"retry_count": retry_count,
"has_content_keys": bool(content_keys),
"content_key_count": len(content_keys) if content_keys else 0,
"has_proxy": bool(proxy),
"skip_merge": skip_merge,
"has_custom_args": bool(track.downloader_args),
"selection_args": selection_args,
"descriptor": track.descriptor.name if hasattr(track, "descriptor") else None,
},
)
else:
arguments.extend(["--no-log", "true"])
yield {"total": 100} yield {"total": 100}
yield {"downloaded": "Parsing streams..."} yield {"downloaded": "Parsing streams..."}
@@ -310,11 +344,45 @@ def download(
yield {"completed": progress} if progress < 100 else {"downloaded": "Merging"} yield {"completed": progress} if progress < 100 else {"downloaded": "Merging"}
process.wait() process.wait()
if process.returncode != 0: if process.returncode != 0:
if debug_logger and log_file_path:
log_contents = ""
if log_file_path.exists():
try:
log_contents = log_file_path.read_text(encoding="utf-8", errors="replace")
except Exception:
log_contents = "<failed to read log file>"
debug_logger.log(
level="ERROR",
operation="downloader_n_m3u8dl_re_failed",
message=f"N_m3u8DL-RE exited with code {process.returncode}",
context={
"returncode": process.returncode,
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"last_line": last_line,
"log_file_contents": log_contents,
},
)
if error_match := ERROR_RE.search(last_line): if error_match := ERROR_RE.search(last_line):
raise ValueError(f"[N_m3u8DL-RE]: {error_match.group(1)}") raise ValueError(f"[N_m3u8DL-RE]: {error_match.group(1)}")
raise subprocess.CalledProcessError(process.returncode, arguments) raise subprocess.CalledProcessError(process.returncode, arguments)
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="downloader_n_m3u8dl_re_complete",
message="N_m3u8DL-RE download completed successfully",
context={
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"output_dir": str(output_dir),
"filename": filename,
},
)
except ConnectionResetError: except ConnectionResetError:
# interrupted while passing URI to download # interrupted while passing URI to download
raise KeyboardInterrupt() raise KeyboardInterrupt()
@@ -322,10 +390,35 @@ def download(
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield {"downloaded": "[yellow]CANCELLED"} yield {"downloaded": "[yellow]CANCELLED"}
raise raise
except Exception: except Exception as e:
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield {"downloaded": "[red]FAILED"} yield {"downloaded": "[red]FAILED"}
if debug_logger and log_file_path and not isinstance(e, (subprocess.CalledProcessError, ValueError)):
log_contents = ""
if log_file_path.exists():
try:
log_contents = log_file_path.read_text(encoding="utf-8", errors="replace")
except Exception:
log_contents = "<failed to read log file>"
debug_logger.log(
level="ERROR",
operation="downloader_n_m3u8dl_re_exception",
message=f"Unexpected error during N_m3u8DL-RE download: {e}",
error=e,
context={
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"log_file_contents": log_contents,
},
)
raise raise
finally:
if log_file_path and log_file_path.exists():
try:
log_file_path.unlink()
except Exception:
pass
def n_m3u8dl_re( def n_m3u8dl_re(

View File

@@ -12,7 +12,7 @@ from requests.adapters import HTTPAdapter
from rich import filesize from rich import filesize
from unshackle.core.constants import DOWNLOAD_CANCELLED from unshackle.core.constants import DOWNLOAD_CANCELLED
from unshackle.core.utilities import get_extension from unshackle.core.utilities import get_debug_logger, get_extension
MAX_ATTEMPTS = 5 MAX_ATTEMPTS = 5
RETRY_WAIT = 2 RETRY_WAIT = 2
@@ -215,6 +215,8 @@ def requests(
if not isinstance(max_workers, (int, type(None))): if not isinstance(max_workers, (int, type(None))):
raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}") raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}")
debug_logger = get_debug_logger()
if not isinstance(urls, list): if not isinstance(urls, list):
urls = [urls] urls = [urls]
@@ -241,6 +243,23 @@ def requests(
if proxy: if proxy:
session.proxies.update({"all": proxy}) session.proxies.update({"all": proxy})
if debug_logger:
first_url = urls[0].get("url", "") if urls else ""
url_display = first_url[:200] + "..." if len(first_url) > 200 else first_url
debug_logger.log(
level="DEBUG",
operation="downloader_requests_start",
message="Starting requests download",
context={
"url_count": len(urls),
"first_url": url_display,
"output_dir": str(output_dir),
"filename": filename,
"max_workers": max_workers,
"has_proxy": bool(proxy),
},
)
yield dict(total=len(urls)) yield dict(total=len(urls))
try: try:
@@ -256,14 +275,37 @@ def requests(
# tell dl that it was cancelled # tell dl that it was cancelled
# the pool is already shut down, so exiting loop is fine # the pool is already shut down, so exiting loop is fine
raise raise
except Exception: except Exception as e:
DOWNLOAD_CANCELLED.set() # skip pending track downloads DOWNLOAD_CANCELLED.set() # skip pending track downloads
yield dict(downloaded="[red]FAILING") yield dict(downloaded="[red]FAILING")
pool.shutdown(wait=True, cancel_futures=True) pool.shutdown(wait=True, cancel_futures=True)
yield dict(downloaded="[red]FAILED") yield dict(downloaded="[red]FAILED")
if debug_logger:
debug_logger.log(
level="ERROR",
operation="downloader_requests_failed",
message=f"Requests download failed: {e}",
error=e,
context={
"url_count": len(urls),
"output_dir": str(output_dir),
},
)
# tell dl that it failed # tell dl that it failed
# the pool is already shut down, so exiting loop is fine # the pool is already shut down, so exiting loop is fine
raise raise
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="downloader_requests_complete",
message="Requests download completed successfully",
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"filename": filename,
},
)
finally: finally:
DOWNLOAD_SIZES.clear() DOWNLOAD_SIZES.clear()

View File

@@ -18,6 +18,7 @@ import requests
from curl_cffi.requests import Session as CurlSession from curl_cffi.requests import Session as CurlSession
from langcodes import Language, tag_is_valid from langcodes import Language, tag_is_valid
from lxml.etree import Element, ElementTree from lxml.etree import Element, ElementTree
from pyplayready.cdm import Cdm as PlayReadyCdm
from pyplayready.system.pssh import PSSH as PR_PSSH from pyplayready.system.pssh import PSSH as PR_PSSH
from pywidevine.cdm import Cdm as WidevineCdm from pywidevine.cdm import Cdm as WidevineCdm
from pywidevine.pssh import PSSH from pywidevine.pssh import PSSH
@@ -28,7 +29,7 @@ from unshackle.core.downloaders import requests as requests_downloader
from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.drm import DRM_T, PlayReady, Widevine
from unshackle.core.events import events from unshackle.core.events import events
from unshackle.core.tracks import Audio, Subtitle, Tracks, Video from unshackle.core.tracks import Audio, Subtitle, Tracks, Video
from unshackle.core.utilities import is_close_match, try_ensure_utf8 from unshackle.core.utilities import get_debug_logger, is_close_match, try_ensure_utf8
from unshackle.core.utils.xml import load_xml from unshackle.core.utils.xml import load_xml
@@ -466,11 +467,22 @@ class DASH:
track.data["dash"]["segment_durations"] = segment_durations track.data["dash"]["segment_durations"] = segment_durations
if not track.drm and isinstance(track, (Video, Audio)): if not track.drm and isinstance(track, (Video, Audio)):
try: if isinstance(cdm, PlayReadyCdm):
track.drm = [Widevine.from_init_data(init_data)] try:
except Widevine.Exceptions.PSSHNotFound: track.drm = [PlayReady.from_init_data(init_data)]
# it might not have Widevine DRM, or might not have found the PSSH except PlayReady.Exceptions.PSSHNotFound:
log.warning("No Widevine PSSH was found for this track, is it DRM free?") try:
track.drm = [Widevine.from_init_data(init_data)]
except Widevine.Exceptions.PSSHNotFound:
log.warning("No PlayReady or Widevine PSSH was found for this track, is it DRM free?")
else:
try:
track.drm = [Widevine.from_init_data(init_data)]
except Widevine.Exceptions.PSSHNotFound:
try:
track.drm = [PlayReady.from_init_data(init_data)]
except PlayReady.Exceptions.PSSHNotFound:
log.warning("No Widevine or PlayReady PSSH was found for this track, is it DRM free?")
if track.drm: if track.drm:
track_kid = track_kid or track.get_key_id(url=segments[0][0], session=session) track_kid = track_kid or track.get_key_id(url=segments[0][0], session=session)
@@ -518,6 +530,24 @@ class DASH:
if downloader.__name__ == "n_m3u8dl_re": if downloader.__name__ == "n_m3u8dl_re":
downloader_args.update({"filename": track.id, "track": track}) downloader_args.update({"filename": track.id, "track": track})
debug_logger = get_debug_logger()
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="manifest_dash_download_start",
message="Starting DASH manifest download",
context={
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"total_segments": len(segments),
"downloader": downloader.__name__,
"has_drm": bool(track.drm),
"drm_types": [drm.__class__.__name__ for drm in (track.drm or [])],
"save_path": str(save_path),
"has_init_data": bool(init_data),
},
)
for status_update in downloader(**downloader_args): for status_update in downloader(**downloader_args):
file_downloaded = status_update.get("file_downloaded") file_downloaded = status_update.get("file_downloaded")
if file_downloaded: if file_downloaded:

View File

@@ -32,7 +32,7 @@ from unshackle.core.downloaders import requests as requests_downloader
from unshackle.core.drm import DRM_T, ClearKey, PlayReady, Widevine from unshackle.core.drm import DRM_T, ClearKey, PlayReady, Widevine
from unshackle.core.events import events from unshackle.core.events import events
from unshackle.core.tracks import Audio, Subtitle, Tracks, Video from unshackle.core.tracks import Audio, Subtitle, Tracks, Video
from unshackle.core.utilities import get_extension, is_close_match, try_ensure_utf8 from unshackle.core.utilities import get_debug_logger, get_extension, is_close_match, try_ensure_utf8
class HLS: class HLS:
@@ -350,6 +350,24 @@ class HLS:
} }
) )
debug_logger = get_debug_logger()
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="manifest_hls_download_start",
message="Starting HLS manifest download",
context={
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"total_segments": total_segments,
"downloader": downloader.__name__,
"has_drm": bool(session_drm),
"drm_type": session_drm.__class__.__name__ if session_drm else None,
"skip_merge": skip_merge,
"save_path": str(save_path),
},
)
for status_update in downloader(**downloader_args): for status_update in downloader(**downloader_args):
file_downloaded = status_update.get("file_downloaded") file_downloaded = status_update.get("file_downloaded")
if file_downloaded: if file_downloaded:

View File

@@ -21,7 +21,7 @@ from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY,
from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.drm import DRM_T, PlayReady, Widevine
from unshackle.core.events import events from unshackle.core.events import events
from unshackle.core.tracks import Audio, Subtitle, Track, Tracks, Video from unshackle.core.tracks import Audio, Subtitle, Track, Tracks, Video
from unshackle.core.utilities import try_ensure_utf8 from unshackle.core.utilities import get_debug_logger, try_ensure_utf8
from unshackle.core.utils.xml import load_xml from unshackle.core.utils.xml import load_xml
@@ -283,6 +283,24 @@ class ISM:
} }
) )
debug_logger = get_debug_logger()
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="manifest_ism_download_start",
message="Starting ISM manifest download",
context={
"track_id": getattr(track, "id", None),
"track_type": track.__class__.__name__,
"total_segments": len(segments),
"downloader": downloader.__name__,
"has_drm": bool(session_drm),
"drm_type": session_drm.__class__.__name__ if session_drm else None,
"skip_merge": skip_merge,
"save_path": str(save_path),
},
)
for status_update in downloader(**downloader_args): for status_update in downloader(**downloader_args):
file_downloaded = status_update.get("file_downloaded") file_downloaded = status_update.get("file_downloaded")
if file_downloaded: if file_downloaded:

View File

@@ -185,7 +185,10 @@ class Episode(Title):
if hdr_format: if hdr_format:
if hdr_format_full.startswith("Dolby Vision"): if hdr_format_full.startswith("Dolby Vision"):
name += " DV" name += " DV"
if any(indicator in hdr_format_full for indicator in ["HDR10", "SMPTE ST 2086"]): if any(
indicator in (hdr_format_full + " " + hdr_format)
for indicator in ["HDR10", "SMPTE ST 2086"]
):
name += " HDR" name += " HDR"
else: else:
name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} " name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "

View File

@@ -136,7 +136,10 @@ class Movie(Title):
if hdr_format: if hdr_format:
if hdr_format_full.startswith("Dolby Vision"): if hdr_format_full.startswith("Dolby Vision"):
name += " DV" name += " DV"
if any(indicator in hdr_format_full for indicator in ["HDR10", "SMPTE ST 2086"]): if any(
indicator in (hdr_format_full + " " + hdr_format)
for indicator in ["HDR10", "SMPTE ST 2086"]
):
name += " HDR" name += " HDR"
else: else:
name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} " name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "

View File

@@ -295,12 +295,23 @@ class Track:
try: try:
if not self.drm and track_type in ("Video", "Audio"): if not self.drm and track_type in ("Video", "Audio"):
# the service might not have explicitly defined the `drm` property # the service might not have explicitly defined the `drm` property
# try find widevine DRM information from the init data of URL # try find DRM information from the init data of URL based on CDM type
try: if isinstance(cdm, PlayReadyCdm):
self.drm = [Widevine.from_track(self, session)] try:
except Widevine.Exceptions.PSSHNotFound: self.drm = [PlayReady.from_track(self, session)]
# it might not have Widevine DRM, or might not have found the PSSH except PlayReady.Exceptions.PSSHNotFound:
log.warning("No Widevine PSSH was found for this track, is it DRM free?") try:
self.drm = [Widevine.from_track(self, session)]
except Widevine.Exceptions.PSSHNotFound:
log.warning("No PlayReady or Widevine PSSH was found for this track, is it DRM free?")
else:
try:
self.drm = [Widevine.from_track(self, session)]
except Widevine.Exceptions.PSSHNotFound:
try:
self.drm = [PlayReady.from_track(self, session)]
except PlayReady.Exceptions.PSSHNotFound:
log.warning("No Widevine or PlayReady PSSH was found for this track, is it DRM free?")
if self.drm: if self.drm:
track_kid = self.get_key_id(session=session) track_kid = self.get_key_id(session=session)

View File

@@ -22,7 +22,7 @@ from unshackle.core.tracks.chapters import Chapter, Chapters
from unshackle.core.tracks.subtitle import Subtitle from unshackle.core.tracks.subtitle import Subtitle
from unshackle.core.tracks.track import Track from unshackle.core.tracks.track import Track
from unshackle.core.tracks.video import Video from unshackle.core.tracks.video import Video
from unshackle.core.utilities import is_close_match, sanitize_filename from unshackle.core.utilities import get_debug_logger, is_close_match, sanitize_filename
from unshackle.core.utils.collections import as_list, flatten from unshackle.core.utils.collections import as_list, flatten
@@ -507,6 +507,35 @@ class Tracks:
if not output_path: if not output_path:
raise ValueError("No tracks provided, at least one track must be provided.") raise ValueError("No tracks provided, at least one track must be provided.")
debug_logger = get_debug_logger()
if debug_logger:
debug_logger.log(
level="DEBUG",
operation="mux_start",
message="Starting mkvmerge muxing",
context={
"title": title,
"output_path": str(output_path),
"video_count": len(self.videos),
"audio_count": len(self.audio),
"subtitle_count": len(self.subtitles),
"attachment_count": len(self.attachments),
"has_chapters": bool(self.chapters),
"video_tracks": [
{"id": v.id, "codec": getattr(v, "codec", None), "language": str(v.language)}
for v in self.videos
],
"audio_tracks": [
{"id": a.id, "codec": getattr(a, "codec", None), "language": str(a.language)}
for a in self.audio
],
"subtitle_tracks": [
{"id": s.id, "codec": getattr(s, "codec", None), "language": str(s.language)}
for s in self.subtitles
],
},
)
# let potential failures go to caller, caller should handle # let potential failures go to caller, caller should handle
try: try:
errors = [] errors = []
@@ -516,7 +545,33 @@ class Tracks:
errors.append(line) errors.append(line)
if "progress" in line: if "progress" in line:
progress(total=100, completed=int(line.strip()[14:-1])) progress(total=100, completed=int(line.strip()[14:-1]))
return output_path, p.wait(), errors
returncode = p.wait()
if debug_logger:
if returncode != 0 or errors:
debug_logger.log(
level="ERROR",
operation="mux_failed",
message=f"mkvmerge exited with code {returncode}",
context={
"returncode": returncode,
"output_path": str(output_path),
"errors": errors,
},
)
else:
debug_logger.log(
level="DEBUG",
operation="mux_complete",
message="mkvmerge muxing completed successfully",
context={
"output_path": str(output_path),
"output_exists": output_path.exists() if output_path else False,
},
)
return output_path, returncode, errors
finally: finally:
if chapters_path: if chapters_path:
chapters_path.unlink() chapters_path.unlink()

View File

@@ -114,32 +114,71 @@ class API(Vault):
return added or updated return added or updated
def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int: def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int:
data = self.session.post( # Normalize keys
url=f"{self.uri}/{service.lower()}", normalized_keys = {str(kid).replace("-", ""): key for kid, key in kid_keys.items()}
json={"content_keys": {str(kid).replace("-", ""): key for kid, key in kid_keys.items()}}, kid_list = list(normalized_keys.keys())
headers={"Accept": "application/json"},
).json()
code = int(data.get("code", 0)) if not kid_list:
message = data.get("message") return 0
error = {
0: None,
1: Exceptions.AuthRejected,
2: Exceptions.TooManyRequests,
3: Exceptions.ServiceTagInvalid,
4: Exceptions.KeyIdInvalid,
5: Exceptions.ContentKeyInvalid,
}.get(code, ValueError)
if error: # Try batches starting at 500, stepping down by 100 on failure, fallback to 1
raise error(f"{message} ({code})") batch_size = 500
total_added = 0
i = 0
# each kid:key that was new to the vault (optional) while i < len(kid_list):
added = int(data.get("added")) batch_kids = kid_list[i : i + batch_size]
# each key for a kid that was changed/updated (optional) batch_keys = {kid: normalized_keys[kid] for kid in batch_kids}
updated = int(data.get("updated"))
return added + updated try:
response = self.session.post(
url=f"{self.uri}/{service.lower()}",
json={"content_keys": batch_keys},
headers={"Accept": "application/json"},
)
# Check for HTTP errors that suggest batch is too large
if response.status_code in (413, 414, 400) and batch_size > 1:
if batch_size > 100:
batch_size -= 100
else:
batch_size = 1
continue
data = response.json()
except Exception:
# JSON decode error or connection issue - try smaller batch
if batch_size > 1:
if batch_size > 100:
batch_size -= 100
else:
batch_size = 1
continue
raise
code = int(data.get("code", 0))
message = data.get("message")
error = {
0: None,
1: Exceptions.AuthRejected,
2: Exceptions.TooManyRequests,
3: Exceptions.ServiceTagInvalid,
4: Exceptions.KeyIdInvalid,
5: Exceptions.ContentKeyInvalid,
}.get(code, ValueError)
if error:
raise error(f"{message} ({code})")
# each kid:key that was new to the vault (optional)
added = int(data.get("added", 0))
# each key for a kid that was changed/updated (optional)
updated = int(data.get("updated", 0))
total_added += added + updated
i += batch_size
return total_added
def get_services(self) -> Iterator[str]: def get_services(self) -> Iterator[str]:
data = self.session.post(url=self.uri, headers={"Accept": "application/json"}).json() data = self.session.post(url=self.uri, headers={"Accept": "application/json"}).json()

View File

@@ -119,9 +119,25 @@ class SQLite(Vault):
cursor = conn.cursor() cursor = conn.cursor()
try: try:
placeholders = ",".join(["?"] * len(kid_keys)) # Query existing KIDs in batches to avoid SQLite variable limit
cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", list(kid_keys.keys())) # Try larger batch first (newer SQLite supports 32766), fall back to 500 if needed
existing_kids = {row[0] for row in cursor.fetchall()} existing_kids: set[str] = set()
kid_list = list(kid_keys.keys())
batch_size = 32000
i = 0
while i < len(kid_list):
batch = kid_list[i : i + batch_size]
placeholders = ",".join(["?"] * len(batch))
try:
cursor.execute(f"SELECT kid FROM `{service}` WHERE kid IN ({placeholders})", batch)
existing_kids.update(row[0] for row in cursor.fetchall())
i += batch_size
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e) and batch_size > 500:
batch_size = 500
continue
raise
new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids} new_keys = {kid: key for kid, key in kid_keys.items() if kid not in existing_kids}

2
uv.lock generated
View File

@@ -1565,7 +1565,7 @@ wheels = [
[[package]] [[package]]
name = "unshackle" name = "unshackle"
version = "2.1.0" version = "2.2.0"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "aiohttp-swagger3" }, { name = "aiohttp-swagger3" },