mirror of
https://github.com/unshackle-dl/unshackle.git
synced 2026-03-10 16:39:01 +00:00
feat(debug): add JSONL debug logging to decryption, muxing, and all downloaders
Expand debug logging coverage for better diagnostics when investigating download/decryption issues such as the QUICKTIME/cbcs problem.
This commit is contained in:
@@ -441,6 +441,37 @@ def download(
|
||||
yield dict(downloaded=f"{filesize.decimal(dl_speed)}/s")
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
if debug_logger:
|
||||
output_files = []
|
||||
output_total_size = 0
|
||||
if output_dir.exists():
|
||||
try:
|
||||
for f in sorted(output_dir.iterdir()):
|
||||
if f.is_file():
|
||||
fsize = f.stat().st_size
|
||||
output_files.append(f.name)
|
||||
output_total_size += fsize
|
||||
except OSError:
|
||||
output_files = ["<error listing files>"]
|
||||
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="downloader_aria2c_complete",
|
||||
message="Aria2c download completed successfully",
|
||||
context={
|
||||
"url_count": len(urls),
|
||||
"gid_count": len(gids),
|
||||
"completed_count": len(completed),
|
||||
"output_dir": str(output_dir),
|
||||
"output_dir_exists": output_dir.exists(),
|
||||
"output_files_count": len(output_files),
|
||||
"output_files": output_files[:10],
|
||||
"output_total_size": output_total_size,
|
||||
"filename": filename,
|
||||
},
|
||||
)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
DOWNLOAD_CANCELLED.set()
|
||||
raise
|
||||
|
||||
@@ -293,6 +293,18 @@ def curl_impersonate(
|
||||
download_sizes.clear()
|
||||
|
||||
if debug_logger:
|
||||
output_files = []
|
||||
output_total_size = 0
|
||||
if output_dir.exists():
|
||||
try:
|
||||
for f in sorted(output_dir.iterdir()):
|
||||
if f.is_file():
|
||||
fsize = f.stat().st_size
|
||||
output_files.append(f.name)
|
||||
output_total_size += fsize
|
||||
except OSError:
|
||||
output_files = ["<error listing files>"]
|
||||
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="downloader_curl_impersonate_complete",
|
||||
@@ -300,6 +312,10 @@ def curl_impersonate(
|
||||
context={
|
||||
"url_count": len(urls),
|
||||
"output_dir": str(output_dir),
|
||||
"output_dir_exists": output_dir.exists(),
|
||||
"output_files_count": len(output_files),
|
||||
"output_files": output_files[:10],
|
||||
"output_total_size": output_total_size,
|
||||
"filename": filename,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -316,6 +316,17 @@ def download(
|
||||
])
|
||||
|
||||
track_url_display = track.url[:200] + "..." if len(track.url) > 200 else track.url
|
||||
# Determine decryption engine info for logging
|
||||
decryption_engine = None
|
||||
decryption_binary = None
|
||||
if content_keys:
|
||||
decryption_config = config.decryption.lower()
|
||||
decryption_engine = DECRYPTION_ENGINE.get(decryption_config) or "SHAKA_PACKAGER"
|
||||
if decryption_engine == "SHAKA_PACKAGER" and binaries.ShakaPackager:
|
||||
decryption_binary = str(binaries.ShakaPackager)
|
||||
elif decryption_engine == "MP4DECRYPT" and binaries.Mp4decrypt:
|
||||
decryption_binary = str(binaries.Mp4decrypt)
|
||||
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="downloader_n_m3u8dl_re_start",
|
||||
@@ -331,6 +342,9 @@ def download(
|
||||
"retry_count": retry_count,
|
||||
"has_content_keys": bool(content_keys),
|
||||
"content_key_count": len(content_keys) if content_keys else 0,
|
||||
"decryption_engine": decryption_engine,
|
||||
"decryption_binary": decryption_binary,
|
||||
"decryption_config": config.decryption if content_keys else None,
|
||||
"has_proxy": bool(proxy),
|
||||
"skip_merge": skip_merge,
|
||||
"has_custom_args": bool(track.downloader_args),
|
||||
|
||||
@@ -303,6 +303,18 @@ def requests(
|
||||
raise
|
||||
|
||||
if debug_logger:
|
||||
output_files = []
|
||||
output_total_size = 0
|
||||
if output_dir.exists():
|
||||
try:
|
||||
for f in sorted(output_dir.iterdir()):
|
||||
if f.is_file():
|
||||
fsize = f.stat().st_size
|
||||
output_files.append(f.name)
|
||||
output_total_size += fsize
|
||||
except OSError:
|
||||
output_files = ["<error listing files>"]
|
||||
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="downloader_requests_complete",
|
||||
@@ -310,6 +322,10 @@ def requests(
|
||||
context={
|
||||
"url_count": len(urls),
|
||||
"output_dir": str(output_dir),
|
||||
"output_dir_exists": output_dir.exists(),
|
||||
"output_files_count": len(output_files),
|
||||
"output_files": output_files[:10],
|
||||
"output_total_size": output_total_size,
|
||||
"filename": filename,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -20,7 +20,7 @@ from unshackle.core import binaries
|
||||
from unshackle.core.config import config
|
||||
from unshackle.core.console import console
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.utilities import get_boxes
|
||||
from unshackle.core.utilities import get_boxes, get_debug_logger
|
||||
from unshackle.core.utils.subprocess import ffprobe
|
||||
|
||||
|
||||
@@ -257,6 +257,21 @@ class Widevine:
|
||||
|
||||
decrypter = str(getattr(config, "decryption", "")).lower()
|
||||
|
||||
debug_logger = get_debug_logger()
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="drm_decrypt_start",
|
||||
message=f"Starting Widevine decryption with {decrypter or 'shaka_packager (default)'}",
|
||||
context={
|
||||
"decrypter": decrypter or "shaka_packager",
|
||||
"path": str(path),
|
||||
"file_size": path.stat().st_size if path.exists() else 0,
|
||||
"content_key_count": len(self.content_keys),
|
||||
"kids": [kid.hex for kid in self.content_keys.keys()],
|
||||
},
|
||||
)
|
||||
|
||||
if decrypter == "mp4decrypt":
|
||||
return self._decrypt_with_mp4decrypt(path)
|
||||
else:
|
||||
@@ -264,6 +279,8 @@ class Widevine:
|
||||
|
||||
def _decrypt_with_mp4decrypt(self, path: Path) -> None:
|
||||
"""Decrypt using mp4decrypt"""
|
||||
debug_logger = get_debug_logger()
|
||||
|
||||
if not binaries.Mp4decrypt:
|
||||
raise EnvironmentError("mp4decrypt executable not found but is required.")
|
||||
|
||||
@@ -284,10 +301,48 @@ class Widevine:
|
||||
str(output_path),
|
||||
]
|
||||
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="drm_mp4decrypt_start",
|
||||
message="Running mp4decrypt",
|
||||
context={
|
||||
"binary": str(binaries.Mp4decrypt),
|
||||
"input_path": str(path),
|
||||
"input_size": path.stat().st_size if path.exists() else 0,
|
||||
"output_path": str(output_path),
|
||||
"key_count": len(self.content_keys),
|
||||
"command": " ".join(str(c) for c in cmd[:3]) + " --key <REDACTED> ...",
|
||||
},
|
||||
)
|
||||
|
||||
try:
|
||||
subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, encoding='utf-8')
|
||||
result = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, encoding='utf-8')
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="drm_mp4decrypt_complete",
|
||||
message="mp4decrypt completed successfully",
|
||||
context={
|
||||
"output_path": str(output_path),
|
||||
"output_exists": output_path.exists(),
|
||||
"output_size": output_path.stat().st_size if output_path.exists() else 0,
|
||||
"stderr": (result.stderr or "")[:500],
|
||||
},
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
error_msg = e.stderr if e.stderr else f"mp4decrypt failed with exit code {e.returncode}"
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="ERROR",
|
||||
operation="drm_mp4decrypt_failed",
|
||||
message=f"mp4decrypt failed with exit code {e.returncode}",
|
||||
context={
|
||||
"returncode": e.returncode,
|
||||
"stderr": (e.stderr or "")[:1000],
|
||||
"stdout": (e.stdout or "")[:500],
|
||||
},
|
||||
)
|
||||
raise subprocess.CalledProcessError(e.returncode, cmd, output=e.stdout, stderr=error_msg)
|
||||
|
||||
if not output_path.exists():
|
||||
@@ -300,6 +355,8 @@ class Widevine:
|
||||
|
||||
def _decrypt_with_shaka_packager(self, path: Path) -> None:
|
||||
"""Decrypt using Shaka Packager (original method)"""
|
||||
debug_logger = get_debug_logger()
|
||||
|
||||
if not binaries.ShakaPackager:
|
||||
raise EnvironmentError("Shaka Packager executable not found but is required.")
|
||||
|
||||
@@ -328,6 +385,21 @@ class Widevine:
|
||||
config.directories.temp,
|
||||
]
|
||||
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="drm_shaka_start",
|
||||
message="Running Shaka Packager decryption",
|
||||
context={
|
||||
"binary": str(binaries.ShakaPackager),
|
||||
"input_path": str(path),
|
||||
"input_size": path.stat().st_size if path.exists() else 0,
|
||||
"output_path": str(output_path),
|
||||
"key_count": len(self.content_keys),
|
||||
"kids": [kid.hex for kid in self.content_keys.keys()],
|
||||
},
|
||||
)
|
||||
|
||||
p = subprocess.Popen(
|
||||
[binaries.ShakaPackager, *arguments],
|
||||
stdout=subprocess.DEVNULL,
|
||||
@@ -366,6 +438,21 @@ class Widevine:
|
||||
|
||||
p.wait()
|
||||
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG" if p.returncode == 0 and not had_error else "ERROR",
|
||||
operation="drm_shaka_complete" if p.returncode == 0 and not had_error else "drm_shaka_failed",
|
||||
message="Shaka Packager completed" if p.returncode == 0 and not had_error else "Shaka Packager failed",
|
||||
context={
|
||||
"returncode": p.returncode,
|
||||
"had_error": had_error,
|
||||
"stream_skipped": stream_skipped,
|
||||
"output_exists": output_path.exists(),
|
||||
"output_size": output_path.stat().st_size if output_path.exists() else 0,
|
||||
"shaka_log": shaka_log_buffer.strip()[:1000] if shaka_log_buffer else None,
|
||||
},
|
||||
)
|
||||
|
||||
if p.returncode != 0 or had_error:
|
||||
raise subprocess.CalledProcessError(p.returncode, [binaries.ShakaPackager, *arguments])
|
||||
|
||||
|
||||
@@ -642,7 +642,24 @@ class DASH:
|
||||
|
||||
if skip_merge:
|
||||
# N_m3u8DL-RE handles merging and decryption internally
|
||||
shutil.move(segments_to_merge[0], save_path)
|
||||
source_file = segments_to_merge[0]
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="manifest_dash_skip_merge_move",
|
||||
message="Moving n_m3u8dl_re output (decryption handled by downloader)",
|
||||
context={
|
||||
"track_id": getattr(track, "id", None),
|
||||
"track_type": track.__class__.__name__,
|
||||
"source_file": str(source_file),
|
||||
"source_size": source_file.stat().st_size if source_file.exists() else 0,
|
||||
"dest_path": str(save_path),
|
||||
"has_drm": bool(drm),
|
||||
"drm_type": drm.__class__.__name__ if drm else None,
|
||||
"content_key_count": len(drm.content_keys) if drm and hasattr(drm, "content_keys") else 0,
|
||||
},
|
||||
)
|
||||
shutil.move(source_file, save_path)
|
||||
if drm:
|
||||
track.drm = None
|
||||
events.emit(events.Types.TRACK_DECRYPTED, track=track, drm=drm, segment=None)
|
||||
@@ -676,8 +693,34 @@ class DASH:
|
||||
events.emit(events.Types.TRACK_DOWNLOADED, track=track)
|
||||
|
||||
if not skip_merge and drm:
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="manifest_dash_decrypt_start",
|
||||
message="Starting post-download decryption",
|
||||
context={
|
||||
"track_id": getattr(track, "id", None),
|
||||
"track_type": track.__class__.__name__,
|
||||
"save_path": str(save_path),
|
||||
"file_size": save_path.stat().st_size if save_path.exists() else 0,
|
||||
"drm_type": drm.__class__.__name__,
|
||||
"content_key_count": len(drm.content_keys) if hasattr(drm, "content_keys") else 0,
|
||||
},
|
||||
)
|
||||
progress(downloaded="Decrypting", completed=0, total=100)
|
||||
drm.decrypt(save_path)
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
operation="manifest_dash_decrypt_complete",
|
||||
message="Post-download decryption completed",
|
||||
context={
|
||||
"track_id": getattr(track, "id", None),
|
||||
"track_type": track.__class__.__name__,
|
||||
"save_path": str(save_path),
|
||||
"file_size": save_path.stat().st_size if save_path.exists() else 0,
|
||||
},
|
||||
)
|
||||
track.drm = None
|
||||
events.emit(events.Types.TRACK_DECRYPTED, track=track, drm=drm, segment=None)
|
||||
progress(downloaded="Decrypting", advance=100)
|
||||
|
||||
@@ -535,6 +535,10 @@ class Tracks:
|
||||
raise ValueError("No tracks provided, at least one track must be provided.")
|
||||
|
||||
debug_logger = get_debug_logger()
|
||||
|
||||
# Build the full command for both execution and logging
|
||||
full_cmd = [*cl, "--output", str(output_path), "--gui-mode"]
|
||||
|
||||
if debug_logger:
|
||||
debug_logger.log(
|
||||
level="DEBUG",
|
||||
@@ -548,16 +552,38 @@ class Tracks:
|
||||
"subtitle_count": len(self.subtitles),
|
||||
"attachment_count": len(self.attachments),
|
||||
"has_chapters": bool(self.chapters),
|
||||
"command": [str(c) for c in full_cmd],
|
||||
"video_tracks": [
|
||||
{"id": v.id, "codec": getattr(v, "codec", None), "language": str(v.language)}
|
||||
{
|
||||
"id": v.id,
|
||||
"codec": str(getattr(v, "codec", None)),
|
||||
"language": str(v.language),
|
||||
"path": str(v.path) if v.path else None,
|
||||
"path_exists": v.path.exists() if v.path else False,
|
||||
"file_size": v.path.stat().st_size if v.path and v.path.exists() else 0,
|
||||
"drm": str(v.drm) if v.drm else None,
|
||||
}
|
||||
for v in self.videos
|
||||
],
|
||||
"audio_tracks": [
|
||||
{"id": a.id, "codec": getattr(a, "codec", None), "language": str(a.language)}
|
||||
{
|
||||
"id": a.id,
|
||||
"codec": str(getattr(a, "codec", None)),
|
||||
"language": str(a.language),
|
||||
"path": str(a.path) if a.path else None,
|
||||
"path_exists": a.path.exists() if a.path else False,
|
||||
"file_size": a.path.stat().st_size if a.path and a.path.exists() else 0,
|
||||
"drm": str(a.drm) if a.drm else None,
|
||||
}
|
||||
for a in self.audio
|
||||
],
|
||||
"subtitle_tracks": [
|
||||
{"id": s.id, "codec": getattr(s, "codec", None), "language": str(s.language)}
|
||||
{
|
||||
"id": s.id,
|
||||
"codec": str(getattr(s, "codec", None)),
|
||||
"language": str(s.language),
|
||||
"path": str(s.path) if s.path else None,
|
||||
}
|
||||
for s in self.subtitles
|
||||
],
|
||||
},
|
||||
@@ -566,7 +592,7 @@ class Tracks:
|
||||
# let potential failures go to caller, caller should handle
|
||||
try:
|
||||
errors = []
|
||||
p = subprocess.Popen([*cl, "--output", str(output_path), "--gui-mode"], text=True, stdout=subprocess.PIPE)
|
||||
p = subprocess.Popen(full_cmd, text=True, stdout=subprocess.PIPE)
|
||||
for line in iter(p.stdout.readline, ""):
|
||||
if line.startswith("#GUI#error") or line.startswith("#GUI#warning"):
|
||||
errors.append(line)
|
||||
@@ -595,6 +621,7 @@ class Tracks:
|
||||
context={
|
||||
"output_path": str(output_path),
|
||||
"output_exists": output_path.exists() if output_path else False,
|
||||
"output_size": output_path.stat().st_size if output_path and output_path.exists() else 0,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user