diff --git a/unshackle/core/downloaders/aria2c.py b/unshackle/core/downloaders/aria2c.py index bc43460..6f5b5d0 100644 --- a/unshackle/core/downloaders/aria2c.py +++ b/unshackle/core/downloaders/aria2c.py @@ -19,7 +19,7 @@ from unshackle.core import binaries from unshackle.core.config import config from unshackle.core.console import console from unshackle.core.constants import DOWNLOAD_CANCELLED -from unshackle.core.utilities import get_extension, get_free_port +from unshackle.core.utilities import get_debug_logger, get_extension, get_free_port def rpc(caller: Callable, secret: str, method: str, params: Optional[list[Any]] = None) -> Any: @@ -58,6 +58,8 @@ def download( proxy: Optional[str] = None, max_workers: Optional[int] = None, ) -> Generator[dict[str, Any], None, None]: + debug_logger = get_debug_logger() + if not urls: raise ValueError("urls must be provided and not empty") elif not isinstance(urls, (str, dict, list)): @@ -91,6 +93,13 @@ def download( urls = [urls] if not binaries.Aria2: + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_aria2c_binary_missing", + message="Aria2c executable not found in PATH or local binaries directory", + context={"searched_names": ["aria2c", "aria2"]}, + ) raise EnvironmentError("Aria2c executable not found...") if proxy and not proxy.lower().startswith("http://"): @@ -180,6 +189,28 @@ def download( continue arguments.extend(["--header", f"{header}: {value}"]) + if debug_logger: + first_url = urls[0] if isinstance(urls[0], str) else urls[0].get("url", "") + url_display = first_url[:200] + "..." 
if len(first_url) > 200 else first_url + debug_logger.log( + level="DEBUG", + operation="downloader_aria2c_start", + message="Starting Aria2c download", + context={ + "binary_path": str(binaries.Aria2), + "url_count": len(urls), + "first_url": url_display, + "output_dir": str(output_dir), + "filename": filename, + "max_concurrent_downloads": max_concurrent_downloads, + "max_connection_per_server": max_connection_per_server, + "split": split, + "file_allocation": file_allocation, + "has_proxy": bool(proxy), + "rpc_port": rpc_port, + }, + ) + yield dict(total=len(urls)) try: @@ -226,6 +257,20 @@ def download( textwrap.wrap(error, width=console.width - 20, initial_indent="") ) console.log(Text.from_ansi("\n[Aria2c]: " + error_pretty)) + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_aria2c_download_error", + message=f"Aria2c download failed: {dl['errorMessage']}", + context={ + "gid": dl["gid"], + "error_code": dl["errorCode"], + "error_message": dl["errorMessage"], + "used_uri": used_uri[:200] + "..." 
if len(used_uri) > 200 else used_uri, + "completed_length": dl.get("completedLength"), + "total_length": dl.get("totalLength"), + }, + ) raise ValueError(error) if number_stopped == len(urls): @@ -237,7 +282,31 @@ def download( p.wait() if p.returncode != 0: + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_aria2c_failed", + message=f"Aria2c exited with code {p.returncode}", + context={ + "returncode": p.returncode, + "url_count": len(urls), + "output_dir": str(output_dir), + }, + ) raise subprocess.CalledProcessError(p.returncode, arguments) + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="downloader_aria2c_complete", + message="Aria2c download completed successfully", + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + "filename": filename, + }, + ) + except ConnectionResetError: # interrupted while passing URI to download raise KeyboardInterrupt() @@ -251,9 +320,20 @@ def download( DOWNLOAD_CANCELLED.set() # skip pending track downloads yield dict(downloaded="[yellow]CANCELLED") raise - except Exception: + except Exception as e: DOWNLOAD_CANCELLED.set() # skip pending track downloads yield dict(downloaded="[red]FAILED") + if debug_logger and not isinstance(e, (subprocess.CalledProcessError, ValueError)): + debug_logger.log( + level="ERROR", + operation="downloader_aria2c_exception", + message=f"Unexpected error during Aria2c download: {e}", + error=e, + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + }, + ) raise finally: rpc(caller=partial(rpc_session.post, url=rpc_uri), secret=rpc_secret, method="aria2.shutdown") diff --git a/unshackle/core/downloaders/curl_impersonate.py b/unshackle/core/downloaders/curl_impersonate.py index 52dab7a..d278e91 100644 --- a/unshackle/core/downloaders/curl_impersonate.py +++ b/unshackle/core/downloaders/curl_impersonate.py @@ -11,7 +11,7 @@ from rich import filesize from unshackle.core.config import config from unshackle.core.constants 
import DOWNLOAD_CANCELLED -from unshackle.core.utilities import get_extension +from unshackle.core.utilities import get_debug_logger, get_extension MAX_ATTEMPTS = 5 RETRY_WAIT = 2 @@ -189,6 +189,8 @@ def curl_impersonate( if not isinstance(max_workers, (int, type(None))): raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}") + debug_logger = get_debug_logger() + if not isinstance(urls, list): urls = [urls] @@ -209,6 +211,24 @@ def curl_impersonate( if proxy: session.proxies.update({"all": proxy}) + if debug_logger: + first_url = urls[0].get("url", "") if urls else "" + url_display = first_url[:200] + "..." if len(first_url) > 200 else first_url + debug_logger.log( + level="DEBUG", + operation="downloader_curl_impersonate_start", + message="Starting curl_impersonate download", + context={ + "url_count": len(urls), + "first_url": url_display, + "output_dir": str(output_dir), + "filename": filename, + "max_workers": max_workers, + "browser": BROWSER, + "has_proxy": bool(proxy), + }, + ) + yield dict(total=len(urls)) download_sizes = [] @@ -235,11 +255,23 @@ def curl_impersonate( # tell dl that it was cancelled # the pool is already shut down, so exiting loop is fine raise - except Exception: + except Exception as e: DOWNLOAD_CANCELLED.set() # skip pending track downloads yield dict(downloaded="[red]FAILING") pool.shutdown(wait=True, cancel_futures=True) yield dict(downloaded="[red]FAILED") + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_curl_impersonate_failed", + message=f"curl_impersonate download failed: {e}", + error=e, + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + "browser": BROWSER, + }, + ) # tell dl that it failed # the pool is already shut down, so exiting loop is fine raise @@ -260,5 +292,17 @@ def curl_impersonate( last_speed_refresh = now download_sizes.clear() + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="downloader_curl_impersonate_complete", + 
message="curl_impersonate download completed successfully", + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + "filename": filename, + }, + ) + __all__ = ("curl_impersonate",) diff --git a/unshackle/core/downloaders/n_m3u8dl_re.py b/unshackle/core/downloaders/n_m3u8dl_re.py index 7472c59..815c697 100644 --- a/unshackle/core/downloaders/n_m3u8dl_re.py +++ b/unshackle/core/downloaders/n_m3u8dl_re.py @@ -13,6 +13,7 @@ from unshackle.core import binaries from unshackle.core.config import config from unshackle.core.console import console from unshackle.core.constants import DOWNLOAD_CANCELLED +from unshackle.core.utilities import get_debug_logger PERCENT_RE = re.compile(r"(\d+\.\d+%)") SPEED_RE = re.compile(r"(\d+\.\d+(?:MB|KB)ps)") @@ -176,7 +177,6 @@ def build_download_args( "--thread-count": thread_count, "--download-retry-count": retry_count, "--write-meta-json": False, - "--no-log": True, } if proxy: args["--custom-proxy"] = proxy @@ -224,6 +224,8 @@ def download( content_keys: dict[str, Any] | None, skip_merge: bool | None = False, ) -> Generator[dict[str, Any], None, None]: + debug_logger = get_debug_logger() + if not urls: raise ValueError("urls must be provided and not empty") if not isinstance(urls, (str, dict, list)): @@ -275,7 +277,39 @@ def download( skip_merge=skip_merge, ad_keyword=ad_keyword, ) - arguments.extend(get_track_selection_args(track)) + selection_args = get_track_selection_args(track) + arguments.extend(selection_args) + + log_file_path: Path | None = None + if debug_logger: + log_file_path = output_dir / f".n_m3u8dl_re_{filename}.log" + arguments.extend(["--log-file-path", str(log_file_path)]) + + track_url_display = track.url[:200] + "..." 
if len(track.url) > 200 else track.url + debug_logger.log( + level="DEBUG", + operation="downloader_n_m3u8dl_re_start", + message="Starting N_m3u8DL-RE download", + context={ + "binary_path": str(binaries.N_m3u8DL_RE), + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "track_url": track_url_display, + "output_dir": str(output_dir), + "filename": filename, + "thread_count": thread_count, + "retry_count": retry_count, + "has_content_keys": bool(content_keys), + "content_key_count": len(content_keys) if content_keys else 0, + "has_proxy": bool(proxy), + "skip_merge": skip_merge, + "has_custom_args": bool(track.downloader_args), + "selection_args": selection_args, + "descriptor": track.descriptor.name if hasattr(track, "descriptor") else None, + }, + ) + else: + arguments.extend(["--no-log", "true"]) yield {"total": 100} yield {"downloaded": "Parsing streams..."} @@ -310,11 +344,45 @@ def download( yield {"completed": progress} if progress < 100 else {"downloaded": "Merging"} process.wait() + if process.returncode != 0: + if debug_logger and log_file_path: + log_contents = "" + if log_file_path.exists(): + try: + log_contents = log_file_path.read_text(encoding="utf-8", errors="replace") + except Exception: + log_contents = "" + + debug_logger.log( + level="ERROR", + operation="downloader_n_m3u8dl_re_failed", + message=f"N_m3u8DL-RE exited with code {process.returncode}", + context={ + "returncode": process.returncode, + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "last_line": last_line, + "log_file_contents": log_contents, + }, + ) if error_match := ERROR_RE.search(last_line): raise ValueError(f"[N_m3u8DL-RE]: {error_match.group(1)}") raise subprocess.CalledProcessError(process.returncode, arguments) + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="downloader_n_m3u8dl_re_complete", + message="N_m3u8DL-RE download completed successfully", + context={ + "track_id": 
getattr(track, "id", None), + "track_type": track.__class__.__name__, + "output_dir": str(output_dir), + "filename": filename, + }, + ) + except ConnectionResetError: # interrupted while passing URI to download raise KeyboardInterrupt() @@ -322,10 +390,35 @@ def download( DOWNLOAD_CANCELLED.set() # skip pending track downloads yield {"downloaded": "[yellow]CANCELLED"} raise - except Exception: + except Exception as e: DOWNLOAD_CANCELLED.set() # skip pending track downloads yield {"downloaded": "[red]FAILED"} + if debug_logger and log_file_path and not isinstance(e, (subprocess.CalledProcessError, ValueError)): + log_contents = "" + if log_file_path.exists(): + try: + log_contents = log_file_path.read_text(encoding="utf-8", errors="replace") + except Exception: + log_contents = "" + + debug_logger.log( + level="ERROR", + operation="downloader_n_m3u8dl_re_exception", + message=f"Unexpected error during N_m3u8DL-RE download: {e}", + error=e, + context={ + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "log_file_contents": log_contents, + }, + ) raise + finally: + if log_file_path and log_file_path.exists(): + try: + log_file_path.unlink() + except Exception: + pass def n_m3u8dl_re( diff --git a/unshackle/core/downloaders/requests.py b/unshackle/core/downloaders/requests.py index 49c1759..06cab3d 100644 --- a/unshackle/core/downloaders/requests.py +++ b/unshackle/core/downloaders/requests.py @@ -12,7 +12,7 @@ from requests.adapters import HTTPAdapter from rich import filesize from unshackle.core.constants import DOWNLOAD_CANCELLED -from unshackle.core.utilities import get_extension +from unshackle.core.utilities import get_debug_logger, get_extension MAX_ATTEMPTS = 5 RETRY_WAIT = 2 @@ -215,6 +215,8 @@ def requests( if not isinstance(max_workers, (int, type(None))): raise TypeError(f"Expected max_workers to be {int}, not {type(max_workers)}") + debug_logger = get_debug_logger() + if not isinstance(urls, list): urls = [urls] @@ -241,6 
+243,23 @@ def requests( if proxy: session.proxies.update({"all": proxy}) + if debug_logger: + first_url = urls[0].get("url", "") if urls else "" + url_display = first_url[:200] + "..." if len(first_url) > 200 else first_url + debug_logger.log( + level="DEBUG", + operation="downloader_requests_start", + message="Starting requests download", + context={ + "url_count": len(urls), + "first_url": url_display, + "output_dir": str(output_dir), + "filename": filename, + "max_workers": max_workers, + "has_proxy": bool(proxy), + }, + ) + yield dict(total=len(urls)) try: @@ -256,14 +275,37 @@ def requests( # tell dl that it was cancelled # the pool is already shut down, so exiting loop is fine raise - except Exception: + except Exception as e: DOWNLOAD_CANCELLED.set() # skip pending track downloads yield dict(downloaded="[red]FAILING") pool.shutdown(wait=True, cancel_futures=True) yield dict(downloaded="[red]FAILED") + if debug_logger: + debug_logger.log( + level="ERROR", + operation="downloader_requests_failed", + message=f"Requests download failed: {e}", + error=e, + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + }, + ) # tell dl that it failed # the pool is already shut down, so exiting loop is fine raise + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="downloader_requests_complete", + message="Requests download completed successfully", + context={ + "url_count": len(urls), + "output_dir": str(output_dir), + "filename": filename, + }, + ) finally: DOWNLOAD_SIZES.clear() diff --git a/unshackle/core/manifests/dash.py b/unshackle/core/manifests/dash.py index 442ac96..68fdded 100644 --- a/unshackle/core/manifests/dash.py +++ b/unshackle/core/manifests/dash.py @@ -28,7 +28,7 @@ from unshackle.core.downloaders import requests as requests_downloader from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events from unshackle.core.tracks import Audio, Subtitle, Tracks, Video -from 
unshackle.core.utilities import is_close_match, try_ensure_utf8 +from unshackle.core.utilities import get_debug_logger, is_close_match, try_ensure_utf8 from unshackle.core.utils.xml import load_xml @@ -518,6 +518,24 @@ class DASH: if downloader.__name__ == "n_m3u8dl_re": downloader_args.update({"filename": track.id, "track": track}) + debug_logger = get_debug_logger() + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="manifest_dash_download_start", + message="Starting DASH manifest download", + context={ + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "total_segments": len(segments), + "downloader": downloader.__name__, + "has_drm": bool(track.drm), + "drm_types": [drm.__class__.__name__ for drm in (track.drm or [])], + "save_path": str(save_path), + "has_init_data": bool(init_data), + }, + ) + for status_update in downloader(**downloader_args): file_downloaded = status_update.get("file_downloaded") if file_downloaded: diff --git a/unshackle/core/manifests/hls.py b/unshackle/core/manifests/hls.py index 6f49c6a..fb0320b 100644 --- a/unshackle/core/manifests/hls.py +++ b/unshackle/core/manifests/hls.py @@ -32,7 +32,7 @@ from unshackle.core.downloaders import requests as requests_downloader from unshackle.core.drm import DRM_T, ClearKey, PlayReady, Widevine from unshackle.core.events import events from unshackle.core.tracks import Audio, Subtitle, Tracks, Video -from unshackle.core.utilities import get_extension, is_close_match, try_ensure_utf8 +from unshackle.core.utilities import get_debug_logger, get_extension, is_close_match, try_ensure_utf8 class HLS: @@ -350,6 +350,24 @@ class HLS: } ) + debug_logger = get_debug_logger() + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="manifest_hls_download_start", + message="Starting HLS manifest download", + context={ + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "total_segments": total_segments, + "downloader": 
downloader.__name__, + "has_drm": bool(session_drm), + "drm_type": session_drm.__class__.__name__ if session_drm else None, + "skip_merge": skip_merge, + "save_path": str(save_path), + }, + ) + for status_update in downloader(**downloader_args): file_downloaded = status_update.get("file_downloaded") if file_downloaded: diff --git a/unshackle/core/manifests/ism.py b/unshackle/core/manifests/ism.py index 346c9e6..8cb6a3b 100644 --- a/unshackle/core/manifests/ism.py +++ b/unshackle/core/manifests/ism.py @@ -21,7 +21,7 @@ from unshackle.core.constants import DOWNLOAD_CANCELLED, DOWNLOAD_LICENCE_ONLY, from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events from unshackle.core.tracks import Audio, Subtitle, Track, Tracks, Video -from unshackle.core.utilities import try_ensure_utf8 +from unshackle.core.utilities import get_debug_logger, try_ensure_utf8 from unshackle.core.utils.xml import load_xml @@ -283,6 +283,24 @@ class ISM: } ) + debug_logger = get_debug_logger() + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="manifest_ism_download_start", + message="Starting ISM manifest download", + context={ + "track_id": getattr(track, "id", None), + "track_type": track.__class__.__name__, + "total_segments": len(segments), + "downloader": downloader.__name__, + "has_drm": bool(session_drm), + "drm_type": session_drm.__class__.__name__ if session_drm else None, + "skip_merge": skip_merge, + "save_path": str(save_path), + }, + ) + for status_update in downloader(**downloader_args): file_downloaded = status_update.get("file_downloaded") if file_downloaded: diff --git a/unshackle/core/tracks/tracks.py b/unshackle/core/tracks/tracks.py index eeacd47..fd9d78d 100644 --- a/unshackle/core/tracks/tracks.py +++ b/unshackle/core/tracks/tracks.py @@ -22,7 +22,7 @@ from unshackle.core.tracks.chapters import Chapter, Chapters from unshackle.core.tracks.subtitle import Subtitle from unshackle.core.tracks.track import Track from 
unshackle.core.tracks.video import Video -from unshackle.core.utilities import is_close_match, sanitize_filename +from unshackle.core.utilities import get_debug_logger, is_close_match, sanitize_filename from unshackle.core.utils.collections import as_list, flatten @@ -507,6 +507,35 @@ class Tracks: if not output_path: raise ValueError("No tracks provided, at least one track must be provided.") + debug_logger = get_debug_logger() + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="mux_start", + message="Starting mkvmerge muxing", + context={ + "title": title, + "output_path": str(output_path), + "video_count": len(self.videos), + "audio_count": len(self.audio), + "subtitle_count": len(self.subtitles), + "attachment_count": len(self.attachments), + "has_chapters": bool(self.chapters), + "video_tracks": [ + {"id": v.id, "codec": getattr(v, "codec", None), "language": str(v.language)} + for v in self.videos + ], + "audio_tracks": [ + {"id": a.id, "codec": getattr(a, "codec", None), "language": str(a.language)} + for a in self.audio + ], + "subtitle_tracks": [ + {"id": s.id, "codec": getattr(s, "codec", None), "language": str(s.language)} + for s in self.subtitles + ], + }, + ) + # let potential failures go to caller, caller should handle try: errors = [] @@ -516,7 +545,33 @@ class Tracks: errors.append(line) if "progress" in line: progress(total=100, completed=int(line.strip()[14:-1])) - return output_path, p.wait(), errors + + returncode = p.wait() + + if debug_logger: + if returncode != 0 or errors: + debug_logger.log( + level="ERROR", + operation="mux_failed", + message=f"mkvmerge exited with code {returncode}", + context={ + "returncode": returncode, + "output_path": str(output_path), + "errors": errors, + }, + ) + else: + debug_logger.log( + level="DEBUG", + operation="mux_complete", + message="mkvmerge muxing completed successfully", + context={ + "output_path": str(output_path), + "output_exists": output_path.exists() if output_path else False, 
+ }, + ) + + return output_path, returncode, errors finally: if chapters_path: chapters_path.unlink()