mirror of
https://github.com/unshackle-dl/unshackle.git
synced 2026-03-13 01:49:00 +00:00
Compare commits
6 Commits
1.4.8
...
062e060fca
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
062e060fca | ||
|
|
e4bb7e9135 | ||
|
|
0f4a68ca62 | ||
|
|
97f7eb0674 | ||
|
|
4459ec4db6 | ||
|
|
2afc59624d |
@@ -547,9 +547,12 @@ Configuration data for pywidevine's serve functionality run through unshackle.
|
||||
This effectively allows you to run `unshackle serve` to start serving pywidevine Serve-compliant CDMs right from your
|
||||
local widevine device files.
|
||||
|
||||
- `api_secret` - Secret key for REST API authentication. When set, enables the REST API server alongside the CDM serve functionality. This key is required for authenticating API requests.
|
||||
|
||||
For example,
|
||||
|
||||
```yaml
|
||||
api_secret: "your-secret-key-here"
|
||||
users:
|
||||
secret_key_for_jane: # 32-bit hex recommended, case-sensitive
|
||||
devices: # list of allowed devices for this user
|
||||
|
||||
@@ -58,6 +58,7 @@ dependencies = [
|
||||
"httpx>=0.28.1,<0.29",
|
||||
"cryptography>=45.0.0",
|
||||
"subby",
|
||||
"aiohttp-swagger3>=0.9.0,<1",
|
||||
"pysubs2>=1.7.0,<2",
|
||||
]
|
||||
|
||||
|
||||
@@ -1,19 +1,26 @@
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
import click
|
||||
from aiohttp import web
|
||||
|
||||
from unshackle.core import binaries
|
||||
from unshackle.core.api import cors_middleware, setup_routes, setup_swagger
|
||||
from unshackle.core.config import config
|
||||
from unshackle.core.constants import context_settings
|
||||
|
||||
|
||||
@click.command(short_help="Serve your Local Widevine Devices for Remote Access.", context_settings=context_settings)
|
||||
@click.command(
|
||||
short_help="Serve your Local Widevine Devices and REST API for Remote Access.", context_settings=context_settings
|
||||
)
|
||||
@click.option("-h", "--host", type=str, default="0.0.0.0", help="Host to serve from.")
|
||||
@click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
|
||||
@click.option("--caddy", is_flag=True, default=False, help="Also serve with Caddy.")
|
||||
def serve(host: str, port: int, caddy: bool) -> None:
|
||||
@click.option("--api-only", is_flag=True, default=False, help="Serve only the REST API, not pywidevine CDM.")
|
||||
@click.option("--no-key", is_flag=True, default=False, help="Disable API key authentication (allows all requests).")
|
||||
def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool) -> None:
|
||||
"""
|
||||
Serve your Local Widevine Devices for Remote Access.
|
||||
Serve your Local Widevine Devices and REST API for Remote Access.
|
||||
|
||||
\b
|
||||
Host as 127.0.0.1 may block remote access even if port-forwarded.
|
||||
@@ -23,8 +30,25 @@ def serve(host: str, port: int, caddy: bool) -> None:
|
||||
You may serve with Caddy at the same time with --caddy. You can use Caddy
|
||||
as a reverse-proxy to serve with HTTPS. The config used will be the Caddyfile
|
||||
next to the unshackle config.
|
||||
|
||||
\b
|
||||
The REST API provides programmatic access to unshackle functionality.
|
||||
Configure authentication in your config under serve.users and serve.api_secret.
|
||||
"""
|
||||
from pywidevine import serve
|
||||
from pywidevine import serve as pywidevine_serve
|
||||
|
||||
log = logging.getLogger("serve")
|
||||
|
||||
# Validate API secret for REST API routes (unless --no-key is used)
|
||||
if not no_key:
|
||||
api_secret = config.serve.get("api_secret")
|
||||
if not api_secret:
|
||||
raise click.ClickException(
|
||||
"API secret key is not configured. Please add 'api_secret' to the 'serve' section in your config."
|
||||
)
|
||||
else:
|
||||
api_secret = None
|
||||
log.warning("Running with --no-key: Authentication is DISABLED for all API endpoints!")
|
||||
|
||||
if caddy:
|
||||
if not binaries.Caddy:
|
||||
@@ -39,7 +63,51 @@ def serve(host: str, port: int, caddy: bool) -> None:
|
||||
if not config.serve.get("devices"):
|
||||
config.serve["devices"] = []
|
||||
config.serve["devices"].extend(list(config.directories.wvds.glob("*.wvd")))
|
||||
serve.run(config.serve, host, port)
|
||||
|
||||
if api_only:
|
||||
# API-only mode: serve just the REST API
|
||||
log.info("Starting REST API server (pywidevine CDM disabled)")
|
||||
if no_key:
|
||||
app = web.Application(middlewares=[cors_middleware])
|
||||
app["config"] = {"users": []}
|
||||
else:
|
||||
app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
|
||||
app["config"] = {"users": [api_secret]}
|
||||
setup_routes(app)
|
||||
setup_swagger(app)
|
||||
log.info(f"REST API endpoints available at http://{host}:{port}/api/")
|
||||
log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
|
||||
log.info("(Press CTRL+C to quit)")
|
||||
web.run_app(app, host=host, port=port, print=None)
|
||||
else:
|
||||
# Integrated mode: serve both pywidevine + REST API
|
||||
log.info("Starting integrated server (pywidevine CDM + REST API)")
|
||||
|
||||
# Create integrated app with both pywidevine and API routes
|
||||
if no_key:
|
||||
app = web.Application(middlewares=[cors_middleware])
|
||||
app["config"] = dict(config.serve)
|
||||
app["config"]["users"] = []
|
||||
else:
|
||||
app = web.Application(middlewares=[cors_middleware, pywidevine_serve.authentication])
|
||||
# Setup config - add API secret to users for authentication
|
||||
serve_config = dict(config.serve)
|
||||
if not serve_config.get("users"):
|
||||
serve_config["users"] = []
|
||||
if api_secret not in serve_config["users"]:
|
||||
serve_config["users"].append(api_secret)
|
||||
app["config"] = serve_config
|
||||
|
||||
app.on_startup.append(pywidevine_serve._startup)
|
||||
app.on_cleanup.append(pywidevine_serve._cleanup)
|
||||
app.add_routes(pywidevine_serve.routes)
|
||||
setup_routes(app)
|
||||
setup_swagger(app)
|
||||
|
||||
log.info(f"REST API endpoints available at http://{host}:{port}/api/")
|
||||
log.info(f"Swagger UI available at http://{host}:{port}/api/docs/")
|
||||
log.info("(Press CTRL+C to quit)")
|
||||
web.run_app(app, host=host, port=port, print=None)
|
||||
finally:
|
||||
if caddy_p:
|
||||
caddy_p.kill()
|
||||
|
||||
@@ -1 +1 @@
|
||||
__version__ = "1.4.8"
|
||||
__version__ = "2.0.0"
|
||||
|
||||
3
unshackle/core/api/__init__.py
Normal file
3
unshackle/core/api/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from unshackle.core.api.routes import cors_middleware, setup_routes, setup_swagger
|
||||
|
||||
__all__ = ["setup_routes", "setup_swagger", "cors_middleware"]
|
||||
630
unshackle/core/api/download_manager.py
Normal file
630
unshackle/core/api/download_manager.py
Normal file
@@ -0,0 +1,630 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any, Callable, Dict, List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
from contextlib import suppress
|
||||
|
||||
log = logging.getLogger("download_manager")
|
||||
|
||||
|
||||
class JobStatus(Enum):
    """Lifecycle states of a download job.

    The string values are what the REST API serialises in job responses
    (see DownloadJob.to_dict), so they must stay stable.
    """

    QUEUED = "queued"          # created, waiting for a worker to pick it up
    DOWNLOADING = "downloading"  # a worker subprocess is actively running it
    COMPLETED = "completed"    # worker reported success
    FAILED = "failed"          # worker errored or exited non-zero
    CANCELLED = "cancelled"    # cancelled by the user before or during download
|
||||
|
||||
|
||||
@dataclass
class DownloadJob:
    """Represents a download job with all its parameters and status.

    Instances are created by DownloadQueueManager.create_job and mutated
    in-place by the worker loop as the job progresses.
    """

    # Identity and request data
    job_id: str                 # UUID4 string assigned at creation
    status: JobStatus           # current lifecycle state
    created_time: datetime      # when the job was enqueued (naive local time)
    service: str                # service tag the download targets
    title_id: str               # title identifier passed to the service
    parameters: Dict[str, Any]  # raw request parameters forwarded to the worker

    # Progress tracking
    started_time: Optional[datetime] = None    # set when a worker picks the job up
    completed_time: Optional[datetime] = None  # set when the job finishes (any outcome)
    progress: float = 0.0                      # percentage 0-100, updated from worker progress file

    # Results and error info
    output_files: List[str] = field(default_factory=list)  # paths reported by the worker
    error_message: Optional[str] = None   # short failure summary
    error_details: Optional[str] = None   # longer failure detail (currently same as message)

    # Cancellation support: set() to ask the download thread/process to stop.
    cancel_event: threading.Event = field(default_factory=threading.Event)

    def to_dict(self, include_full_details: bool = False) -> Dict[str, Any]:
        """Convert job to dictionary for JSON response.

        Args:
            include_full_details: when True, also include parameters,
                timestamps, output files and error information; otherwise
                only the compact summary fields are returned.

        Returns:
            A JSON-serialisable dict (datetimes rendered via isoformat).
        """
        result = {
            "job_id": self.job_id,
            "status": self.status.value,
            "created_time": self.created_time.isoformat(),
            "service": self.service,
            "title_id": self.title_id,
            "progress": self.progress,
        }

        if include_full_details:
            result.update(
                {
                    "parameters": self.parameters,
                    "started_time": self.started_time.isoformat() if self.started_time else None,
                    "completed_time": self.completed_time.isoformat() if self.completed_time else None,
                    "output_files": self.output_files,
                    "error_message": self.error_message,
                    "error_details": self.error_details,
                }
            )

        return result
|
||||
|
||||
|
||||
def _perform_download(
    job_id: str,
    service: str,
    title_id: str,
    params: Dict[str, Any],
    cancel_event: Optional[threading.Event] = None,
    progress_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
) -> List[str]:
    """Execute the synchronous download logic for a job.

    Runs inside the worker subprocess (see download_worker.main). Builds a
    click context mimicking the `unshackle dl` CLI, instantiates the target
    service, and invokes the dl result pipeline with parameters taken from
    *params* (defaults mirror the dl command's own defaults).

    Args:
        job_id: identifier used only for logging.
        service: service tag to load via Services.load.
        title_id: title identifier passed to the service constructor.
        params: request parameters; unknown keys are ignored, known keys are
            forwarded to the service constructor and the dl result call.
        cancel_event: optional event checked at coarse checkpoints; when set,
            a plain Exception is raised to abort the job.
        progress_callback: optional callable receiving progress dicts
            ({"progress": float, "status": str, ...}).

    Returns:
        Currently always an empty list; output files are left in the
        configured download directory rather than collected here.

    Raises:
        Exception: on cancellation, non-zero SystemExit from the dl pipeline,
            or any failure during service instantiation / download.
    """

    def _check_cancel(stage: str):
        # Cancellation is cooperative: only observed at these checkpoints.
        if cancel_event and cancel_event.is_set():
            raise Exception(f"Job was cancelled {stage}")

    from io import StringIO
    from contextlib import redirect_stdout, redirect_stderr

    _check_cancel("before execution started")

    # Import dl.py components lazily to avoid circular deps during module import
    import click
    import yaml
    from unshackle.commands.dl import dl
    from unshackle.core.config import config
    from unshackle.core.services import Services
    from unshackle.core.utils.click_types import ContextData
    from unshackle.core.utils.collections import merge_dict

    log.info(f"Starting sync download for job {job_id}")

    # Load service configuration from the service's own config file, then
    # overlay the global per-service config section on top of it.
    service_config_path = Services.get_path(service) / config.filenames.config
    if service_config_path.exists():
        service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
    else:
        service_config = {}
    merge_dict(config.services.get(service), service_config)

    # NOTE(review): `dl` is imported twice (above and here as dl_command);
    # presumably the same object — confirm and deduplicate upstream.
    from unshackle.commands.dl import dl as dl_command

    # Build a click context that looks like a real `unshackle dl` invocation
    # so downstream code can read ctx.obj / ctx.params as usual.
    ctx = click.Context(dl_command.cli)
    ctx.invoked_subcommand = service
    ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=params.get("profile"))
    ctx.params = {
        "proxy": params.get("proxy"),
        "no_proxy": params.get("no_proxy", False),
        "profile": params.get("profile"),
        "tag": params.get("tag"),
        "tmdb_id": params.get("tmdb_id"),
        "tmdb_name": params.get("tmdb_name", False),
        "tmdb_year": params.get("tmdb_year", False),
    }

    # Instantiate the dl command object with the same top-level options.
    dl_instance = dl(
        ctx=ctx,
        no_proxy=params.get("no_proxy", False),
        profile=params.get("profile"),
        proxy=params.get("proxy"),
        tag=params.get("tag"),
        tmdb_id=params.get("tmdb_id"),
        tmdb_name=params.get("tmdb_name", False),
        tmdb_year=params.get("tmdb_year", False),
    )

    service_module = Services.load(service)

    _check_cancel("before service instantiation")

    try:
        import inspect

        # Inspect the service constructor so we only pass parameters it
        # actually declares, and synthesise required ones we can guess.
        service_init_params = inspect.signature(service_module.__init__).parameters

        service_ctx = click.Context(click.Command(service))
        service_ctx.parent = ctx
        service_ctx.obj = ctx.obj

        service_kwargs = {}

        if "title" in service_init_params:
            service_kwargs["title"] = title_id

        # Forward any request parameter that matches a constructor parameter.
        for key, value in params.items():
            if key in service_init_params and key not in ["service", "title_id"]:
                service_kwargs[key] = value

        # Fill remaining *required* (no-default) constructor parameters with
        # best-effort values; unknown ones get None with a warning.
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "movie":
                        # Heuristic: URL path containing /movies/ implies a movie title.
                        service_kwargs[param_name] = "/movies/" in title_id
                    elif param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    else:
                        log.warning(f"Unknown required parameter '{param_name}' for service {service}, using None")
                        service_kwargs[param_name] = None

        service_instance = service_module(service_ctx, **service_kwargs)

    except Exception as exc:  # noqa: BLE001 - propagate meaningful failure
        log.error(f"Failed to create service instance: {exc}")
        raise

    # Captured only for the final log line; downloads land here.
    original_download_dir = config.directories.downloads

    _check_cancel("before download execution")

    # Capture console output from the dl pipeline so it can be logged on failure.
    stdout_capture = StringIO()
    stderr_capture = StringIO()

    # Simple progress tracking if callback provided
    if progress_callback:
        # Report initial progress
        progress_callback({"progress": 0.0, "status": "starting"})

        # Wrap dl_instance.result so coarse progress milestones (start /
        # done / failed) are reported around the actual download call.
        original_result = dl_instance.result

        def result_with_progress(*args, **kwargs):
            try:
                # Report that download started
                progress_callback({"progress": 5.0, "status": "downloading"})

                # Call original method
                result = original_result(*args, **kwargs)

                # Report completion
                progress_callback({"progress": 100.0, "status": "completed"})
                return result
            except Exception as e:
                progress_callback({"progress": 0.0, "status": "failed", "error": str(e)})
                raise

        dl_instance.result = result_with_progress

    try:
        with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
            # Defaults below mirror the `unshackle dl` CLI option defaults.
            dl_instance.result(
                service=service_instance,
                quality=params.get("quality", []),
                vcodec=params.get("vcodec"),
                acodec=params.get("acodec"),
                vbitrate=params.get("vbitrate"),
                abitrate=params.get("abitrate"),
                range_=params.get("range", []),
                channels=params.get("channels"),
                no_atmos=params.get("no_atmos", False),
                wanted=params.get("wanted", []),
                lang=params.get("lang", ["orig"]),
                v_lang=params.get("v_lang", []),
                a_lang=params.get("a_lang", []),
                s_lang=params.get("s_lang", ["all"]),
                require_subs=params.get("require_subs", []),
                forced_subs=params.get("forced_subs", False),
                sub_format=params.get("sub_format"),
                video_only=params.get("video_only", False),
                audio_only=params.get("audio_only", False),
                subs_only=params.get("subs_only", False),
                chapters_only=params.get("chapters_only", False),
                no_subs=params.get("no_subs", False),
                no_audio=params.get("no_audio", False),
                no_chapters=params.get("no_chapters", False),
                slow=params.get("slow", False),
                list_=False,
                list_titles=False,
                skip_dl=params.get("skip_dl", False),
                export=params.get("export"),
                cdm_only=params.get("cdm_only"),
                no_proxy=params.get("no_proxy", False),
                no_folder=params.get("no_folder", False),
                no_source=params.get("no_source", False),
                workers=params.get("workers"),
                downloads=params.get("downloads", 1),
                best_available=params.get("best_available", False),
            )

    except SystemExit as exc:
        # click / dl may exit via SystemExit; only non-zero codes are failures.
        if exc.code != 0:
            stdout_str = stdout_capture.getvalue()
            stderr_str = stderr_capture.getvalue()
            log.error(f"Download exited with code {exc.code}")
            log.error(f"Stdout: {stdout_str}")
            log.error(f"Stderr: {stderr_str}")
            raise Exception(f"Download failed with exit code {exc.code}")

    except Exception as exc:  # noqa: BLE001 - propagate to caller
        stdout_str = stdout_capture.getvalue()
        stderr_str = stderr_capture.getvalue()
        log.error(f"Download execution failed: {exc}")
        log.error(f"Stdout: {stdout_str}")
        log.error(f"Stderr: {stderr_str}")
        raise

    log.info(f"Download completed for job {job_id}, files in {original_download_dir}")

    # Output files are not enumerated here; callers rely on the download dir.
    return []
|
||||
|
||||
|
||||
class DownloadQueueManager:
    """Manages download job queue with configurable concurrency limits.

    Jobs are executed in separate worker subprocesses (see
    unshackle.core.api.download_worker); communication happens through
    temporary JSON files (payload / result / progress). Must be used from
    within a running asyncio event loop.
    """

    def __init__(self, max_concurrent_downloads: int = 2, job_retention_hours: int = 24):
        """Create a manager.

        Args:
            max_concurrent_downloads: number of concurrent worker tasks.
            job_retention_hours: how long finished jobs are kept in memory.
        """
        self.max_concurrent_downloads = max_concurrent_downloads
        self.job_retention_hours = job_retention_hours

        # job_id -> DownloadJob, for all known (including finished) jobs
        self._jobs: Dict[str, DownloadJob] = {}
        self._job_queue: asyncio.Queue = asyncio.Queue()
        # job_id -> asyncio.Task currently executing that job
        self._active_downloads: Dict[str, asyncio.Task] = {}
        # job_id -> worker subprocess handle, for termination on cancel/shutdown
        self._download_processes: Dict[str, asyncio.subprocess.Process] = {}
        # job_id -> {"payload": path, "result": path, "progress": path}
        self._job_temp_files: Dict[str, Dict[str, str]] = {}
        self._workers_started = False
        self._shutdown_event = asyncio.Event()

        log.info(
            f"Initialized download queue manager: max_concurrent={max_concurrent_downloads}, retention_hours={job_retention_hours}"
        )

    def create_job(self, service: str, title_id: str, **parameters) -> DownloadJob:
        """Create a new download job and add it to the queue."""
        job_id = str(uuid.uuid4())
        job = DownloadJob(
            job_id=job_id,
            status=JobStatus.QUEUED,
            created_time=datetime.now(),
            service=service,
            title_id=title_id,
            parameters=parameters,
        )

        self._jobs[job_id] = job
        self._job_queue.put_nowait(job)

        log.info(f"Created download job {job_id} for {service}:{title_id}")
        return job

    def get_job(self, job_id: str) -> Optional[DownloadJob]:
        """Get job by ID."""
        return self._jobs.get(job_id)

    def list_jobs(self) -> List[DownloadJob]:
        """List all jobs."""
        return list(self._jobs.values())

    def cancel_job(self, job_id: str) -> bool:
        """Cancel a job if it's queued or downloading.

        Returns True when a cancellation was performed, False when the job
        does not exist or is already in a terminal state.
        """
        job = self._jobs.get(job_id)
        if not job:
            return False

        if job.status == JobStatus.QUEUED:
            # Queued jobs are simply marked; the worker loop skips CANCELLED
            # jobs when it dequeues them.
            job.status = JobStatus.CANCELLED
            job.cancel_event.set()  # Signal cancellation
            log.info(f"Cancelled queued job {job_id}")
            return True
        elif job.status == JobStatus.DOWNLOADING:
            # Set the cancellation event first - this will be checked by the download thread
            job.cancel_event.set()
            job.status = JobStatus.CANCELLED
            log.info(f"Signaled cancellation for downloading job {job_id}")

            # Cancel the active download task
            task = self._active_downloads.get(job_id)
            if task:
                task.cancel()
                log.info(f"Cancelled download task for job {job_id}")

            # Also terminate the worker subprocess, if one is running.
            process = self._download_processes.get(job_id)
            if process:
                try:
                    process.terminate()
                    log.info(f"Terminated worker process for job {job_id}")
                except ProcessLookupError:
                    log.debug(f"Worker process for job {job_id} already exited")

            return True

        return False

    def cleanup_old_jobs(self) -> int:
        """Remove jobs older than retention period.

        Only jobs in a terminal state are eligible. Returns the number of
        jobs removed.
        """
        cutoff_time = datetime.now() - timedelta(hours=self.job_retention_hours)
        jobs_to_remove = []

        for job_id, job in self._jobs.items():
            if job.status in [JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELLED]:
                # Prefer completion time; fall back to creation time for jobs
                # that never recorded one (e.g. cancelled while queued).
                if job.completed_time and job.completed_time < cutoff_time:
                    jobs_to_remove.append(job_id)
                elif not job.completed_time and job.created_time < cutoff_time:
                    jobs_to_remove.append(job_id)

        for job_id in jobs_to_remove:
            del self._jobs[job_id]

        if jobs_to_remove:
            log.info(f"Cleaned up {len(jobs_to_remove)} old jobs")

        return len(jobs_to_remove)

    async def start_workers(self):
        """Start worker tasks to process the download queue.

        Idempotent: subsequent calls are no-ops.
        """
        if self._workers_started:
            return

        self._workers_started = True

        # Start worker tasks
        for i in range(self.max_concurrent_downloads):
            asyncio.create_task(self._download_worker(f"worker-{i}"))

        # Start cleanup task
        asyncio.create_task(self._cleanup_worker())

        log.info(f"Started {self.max_concurrent_downloads} download workers")

    async def shutdown(self):
        """Shutdown the queue manager and cancel all active downloads."""
        log.info("Shutting down download queue manager")
        self._shutdown_event.set()

        # Cancel all active downloads
        for task in self._active_downloads.values():
            task.cancel()

        # Terminate worker processes
        for job_id, process in list(self._download_processes.items()):
            try:
                process.terminate()
            except ProcessLookupError:
                log.debug(f"Worker process for job {job_id} already exited during shutdown")

        # Wait for each terminated process; escalate to kill after 5s.
        for job_id, process in list(self._download_processes.items()):
            try:
                await asyncio.wait_for(process.wait(), timeout=5)
            except asyncio.TimeoutError:
                log.warning(f"Worker process for job {job_id} did not exit, killing")
                process.kill()
                await process.wait()
            finally:
                self._download_processes.pop(job_id, None)

        # Clean up any remaining temp files
        for paths in self._job_temp_files.values():
            for path in paths.values():
                try:
                    os.remove(path)
                except OSError:
                    pass
        self._job_temp_files.clear()

        # Wait for workers to finish
        if self._active_downloads:
            await asyncio.gather(*self._active_downloads.values(), return_exceptions=True)

    async def _download_worker(self, worker_name: str):
        """Worker task that processes jobs from the queue.

        Loops until shutdown, pulling one job at a time and running it to
        completion before taking the next.
        """
        log.debug(f"Download worker {worker_name} started")

        while not self._shutdown_event.is_set():
            try:
                # Wait for a job or shutdown signal; the 1s timeout lets the
                # loop re-check the shutdown event periodically.
                job = await asyncio.wait_for(self._job_queue.get(), timeout=1.0)

                # Skip jobs cancelled while still queued.
                if job.status == JobStatus.CANCELLED:
                    continue

                # Start processing the job
                job.status = JobStatus.DOWNLOADING
                job.started_time = datetime.now()

                log.info(f"Worker {worker_name} starting job {job.job_id}")

                # Create download task
                download_task = asyncio.create_task(self._execute_download(job))
                self._active_downloads[job.job_id] = download_task

                try:
                    await download_task
                except asyncio.CancelledError:
                    job.status = JobStatus.CANCELLED
                    log.info(f"Job {job.job_id} was cancelled")
                except Exception as e:
                    job.status = JobStatus.FAILED
                    job.error_message = str(e)
                    log.error(f"Job {job.job_id} failed: {e}")
                finally:
                    job.completed_time = datetime.now()
                    if job.job_id in self._active_downloads:
                        del self._active_downloads[job.job_id]

            except asyncio.TimeoutError:
                # No job arrived within the poll window; loop again.
                continue
            except Exception as e:
                log.error(f"Worker {worker_name} error: {e}")

    async def _execute_download(self, job: DownloadJob):
        """Execute the actual download for a job.

        Updates the job's status/progress fields and re-raises on failure so
        the worker loop records the terminal state.
        """
        log.info(f"Executing download for job {job.job_id}")

        try:
            output_files = await self._run_download_async(job)
            job.status = JobStatus.COMPLETED
            job.output_files = output_files
            job.progress = 100.0
            log.info(f"Download completed for job {job.job_id}: {len(output_files)} files")
        except Exception as e:
            job.status = JobStatus.FAILED
            job.error_message = str(e)
            job.error_details = str(e)
            log.error(f"Download failed for job {job.job_id}: {e}")
            raise

    async def _run_download_async(self, job: DownloadJob) -> List[str]:
        """Invoke a worker subprocess to execute the download.

        Writes the job payload to a temp file, spawns
        `python -m unshackle.core.api.download_worker`, polls a progress file
        while the process runs, and finally reads the result file. All temp
        files are removed in the `finally` block.

        Raises:
            asyncio.CancelledError: when the job was cancelled mid-run.
            Exception: when the worker exits non-zero or reports failure.
        """

        payload = {
            "job_id": job.job_id,
            "service": job.service,
            "title_id": job.title_id,
            "parameters": job.parameters,
        }

        # mkstemp returns open fds; we only need the paths, so close them.
        payload_fd, payload_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_payload.json")
        os.close(payload_fd)
        result_fd, result_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_result.json")
        os.close(result_fd)
        progress_fd, progress_path = tempfile.mkstemp(prefix=f"unshackle_job_{job.job_id}_", suffix="_progress.json")
        os.close(progress_fd)

        with open(payload_path, "w", encoding="utf-8") as handle:
            json.dump(payload, handle)

        process = await asyncio.create_subprocess_exec(
            sys.executable,
            "-m",
            "unshackle.core.api.download_worker",
            payload_path,
            result_path,
            progress_path,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        self._download_processes[job.job_id] = process
        self._job_temp_files[job.job_id] = {"payload": payload_path, "result": result_path, "progress": progress_path}

        communicate_task = asyncio.create_task(process.communicate())

        stdout_bytes = b""
        stderr_bytes = b""

        try:
            # Poll every 0.5s: finish when the process exits, otherwise read
            # progress updates and watch for cancellation.
            while True:
                done, _ = await asyncio.wait({communicate_task}, timeout=0.5)
                if communicate_task in done:
                    stdout_bytes, stderr_bytes = communicate_task.result()
                    break

                # Check for progress updates
                try:
                    if os.path.exists(progress_path):
                        with open(progress_path, "r", encoding="utf-8") as handle:
                            progress_data = json.load(handle)
                        if "progress" in progress_data:
                            new_progress = float(progress_data["progress"])
                            if new_progress != job.progress:
                                job.progress = new_progress
                                log.info(f"Job {job.job_id} progress updated: {job.progress}%")
                except (FileNotFoundError, json.JSONDecodeError, ValueError) as e:
                    # Progress file may be mid-write; ignore and retry next poll.
                    log.debug(f"Could not read progress for job {job.job_id}: {e}")

                if job.cancel_event.is_set() or job.status == JobStatus.CANCELLED:
                    log.info(f"Cancellation detected for job {job.job_id}, terminating worker process")
                    process.terminate()
                    try:
                        await asyncio.wait_for(communicate_task, timeout=5)
                    except asyncio.TimeoutError:
                        log.warning(f"Worker process for job {job.job_id} did not terminate, killing")
                        process.kill()
                        await asyncio.wait_for(communicate_task, timeout=5)
                    raise asyncio.CancelledError("Job was cancelled")

            returncode = process.returncode
            stdout = stdout_bytes.decode("utf-8", errors="ignore")
            stderr = stderr_bytes.decode("utf-8", errors="ignore")

            if stdout.strip():
                log.debug(f"Worker stdout for job {job.job_id}: {stdout.strip()}")
            if stderr.strip():
                log.warning(f"Worker stderr for job {job.job_id}: {stderr.strip()}")

            # Read the worker's result file; tolerate a missing/corrupt file
            # so the returncode check below still produces a useful error.
            result_data: Optional[Dict[str, Any]] = None
            try:
                with open(result_path, "r", encoding="utf-8") as handle:
                    result_data = json.load(handle)
            except FileNotFoundError:
                log.error(f"Result file missing for job {job.job_id}")
            except json.JSONDecodeError as exc:
                log.error(f"Failed to parse worker result for job {job.job_id}: {exc}")

            if returncode != 0:
                message = result_data.get("message") if result_data else "unknown error"
                raise Exception(f"Worker exited with code {returncode}: {message}")

            if not result_data or result_data.get("status") != "success":
                message = result_data.get("message") if result_data else "worker did not report success"
                raise Exception(f"Worker failure: {message}")

            return result_data.get("output_files", [])

        finally:
            # Always reap the communicate task and clean up bookkeeping/temp files.
            if not communicate_task.done():
                communicate_task.cancel()
                with suppress(asyncio.CancelledError):
                    await communicate_task

            self._download_processes.pop(job.job_id, None)

            temp_paths = self._job_temp_files.pop(job.job_id, {})
            for path in temp_paths.values():
                try:
                    os.remove(path)
                except OSError:
                    pass

    def _execute_download_sync(self, job: DownloadJob) -> List[str]:
        """Execute download synchronously using existing dl.py logic.

        In-process alternative to the subprocess path; not used by the async
        worker loop.
        """
        return _perform_download(job.job_id, job.service, job.title_id, job.parameters.copy(), job.cancel_event)

    async def _cleanup_worker(self):
        """Worker that periodically cleans up old jobs."""
        while not self._shutdown_event.is_set():
            try:
                await asyncio.sleep(3600)  # Run every hour
                self.cleanup_old_jobs()
            except Exception as e:
                log.error(f"Cleanup worker error: {e}")
|
||||
|
||||
|
||||
# Global instance, created lazily on first access.
download_manager: Optional[DownloadQueueManager] = None


def get_download_manager() -> DownloadQueueManager:
    """Return the process-wide download manager, creating it on first use."""
    global download_manager

    if download_manager is not None:
        return download_manager

    # First access: pull tuning knobs from the unshackle config,
    # falling back to the manager's documented defaults.
    from unshackle.core.config import config

    download_manager = DownloadQueueManager(
        getattr(config, "max_concurrent_downloads", 2),
        getattr(config, "download_job_retention_hours", 24),
    )
    return download_manager
|
||||
84
unshackle/core/api/download_worker.py
Normal file
84
unshackle/core/api/download_worker.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Standalone worker process entry point for executing download jobs."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
from .download_manager import _perform_download
|
||||
|
||||
log = logging.getLogger("download_worker")
|
||||
|
||||
|
||||
def _read_payload(path: Path) -> Dict[str, Any]:
    """Load and return the JSON job payload stored at *path*."""
    raw = path.read_text(encoding="utf-8")
    return json.loads(raw)
|
||||
|
||||
|
||||
def _write_result(path: Path, payload: Dict[str, Any]) -> None:
    """Serialise *payload* as JSON to *path*, creating parent dirs as needed."""
    path.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(payload)
    path.write_text(text, encoding="utf-8")
|
||||
|
||||
|
||||
def main(argv: list[str]) -> int:
    """Worker entry point: run one download job described by a payload file.

    Args:
        argv: raw process argv — argv[1] payload path, argv[2] result path,
            optional argv[3] progress path.

    Returns:
        Process exit code: 0 on success, 1 on download failure, 2 on bad usage.
        The result file is written in all cases (success or error payload).
    """
    if len(argv) not in [3, 4]:
        print(
            "Usage: python -m unshackle.core.api.download_worker <payload_path> <result_path> [progress_path]",
            file=sys.stderr,
        )
        return 2

    payload_path = Path(argv[1])
    result_path = Path(argv[2])
    progress_path = Path(argv[3]) if len(argv) > 3 else None

    result: Dict[str, Any] = {}
    exit_code = 0

    try:
        payload = _read_payload(payload_path)
        job_id = payload["job_id"]
        service = payload["service"]
        title_id = payload["title_id"]
        params = payload.get("parameters", {})

        log.info(f"Worker starting job {job_id} ({service}:{title_id})")

        def progress_callback(progress_data: Dict[str, Any]) -> None:
            """Write progress updates to file for main process to read."""
            # The parent process polls this file; failures here must not
            # abort the download itself.
            if progress_path:
                try:
                    log.info(f"Writing progress update: {progress_data}")
                    _write_result(progress_path, progress_data)
                    log.info(f"Progress update written to {progress_path}")
                except Exception as e:
                    log.error(f"Failed to write progress update: {e}")

        # cancel_event is None: cancellation is handled by the parent
        # terminating this whole process.
        output_files = _perform_download(
            job_id, service, title_id, params, cancel_event=None, progress_callback=progress_callback
        )

        result = {"status": "success", "output_files": output_files}

    except Exception as exc:  # noqa: BLE001 - capture for parent process
        exit_code = 1
        tb = traceback.format_exc()
        log.error(f"Worker failed with error: {exc}")
        result = {"status": "error", "message": str(exc), "traceback": tb}

    finally:
        # The parent relies on this file to interpret the exit code.
        try:
            _write_result(result_path, result)
        except Exception as exc:  # noqa: BLE001 - last resort logging
            log.error(f"Failed to write worker result file: {exc}")

    return exit_code
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv))
|
||||
653
unshackle/core/api/handlers.py
Normal file
653
unshackle/core/api/handlers.py
Normal file
@@ -0,0 +1,653 @@
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from unshackle.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP
|
||||
from unshackle.core.proxies.basic import Basic
|
||||
from unshackle.core.proxies.hola import Hola
|
||||
from unshackle.core.proxies.nordvpn import NordVPN
|
||||
from unshackle.core.proxies.surfsharkvpn import SurfsharkVPN
|
||||
from unshackle.core.services import Services
|
||||
from unshackle.core.titles import Episode, Movie, Title_T
|
||||
from unshackle.core.tracks import Audio, Subtitle, Video
|
||||
|
||||
log = logging.getLogger("api")
|
||||
|
||||
|
||||
def initialize_proxy_providers() -> List[Any]:
    """Build and return the proxy providers enabled in the main unshackle config.

    Reads ``proxy_providers`` from the global config (not the per-service
    config) and instantiates each configured provider; Hola is enabled purely
    by the presence of its binary. Failures are logged, never raised.
    """
    providers: List[Any] = []
    try:
        from unshackle.core import binaries

        # Load the main unshackle config to get proxy provider settings
        from unshackle.core.config import config as main_config

        log.debug(f"Main config proxy providers: {getattr(main_config, 'proxy_providers', {})}")
        log.debug(f"Available proxy provider configs: {list(getattr(main_config, 'proxy_providers', {}).keys())}")

        # Use main_config instead of the service-specific config for proxy providers
        proxy_config = getattr(main_config, "proxy_providers", {})

        # Each provider is constructed only when its config section is present and truthy.
        for key, provider_cls in (("basic", Basic), ("nordvpn", NordVPN), ("surfsharkvpn", SurfsharkVPN)):
            if proxy_config.get(key):
                log.debug(f"Loading {provider_cls.__name__} proxy provider")
                providers.append(provider_cls(**proxy_config[key]))

        if hasattr(binaries, "HolaProxy") and binaries.HolaProxy:
            log.debug("Loading Hola proxy provider")
            providers.append(Hola())

        for provider in providers:
            log.info(f"Loaded {provider.__class__.__name__}: {provider}")

        if not providers:
            log.warning("No proxy providers were loaded. Check your proxy provider configuration in unshackle.yaml")

    except Exception as e:
        log.warning(f"Failed to initialize some proxy providers: {e}")

    return providers
|
||||
|
||||
|
||||
def resolve_proxy(proxy: str, proxy_providers: List[Any]) -> str:
    """Resolve a user-supplied proxy spec to a usable proxy URI.

    Accepted forms:
      * explicit URI (``http://...`` / ``https://...``) - returned unchanged
      * ``provider:query`` (e.g. ``nordvpn:us``) - resolved via that provider
      * bare country code (e.g. ``us``, ``uk2``) - first provider with a match wins
      * anything else - passed through verbatim

    Raises:
        ValueError: when the named provider is unknown, or no provider has a
            proxy for the requested query.
    """
    import re

    if not proxy:
        return proxy

    # Explicit proxy URI - use as-is.
    if re.match(r"^https?://", proxy):
        return proxy

    # Handle provider:query format (e.g. "nordvpn:us")
    requested_provider = None
    if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE):
        requested_provider, proxy = proxy.split(":", maxsplit=1)

    # Normalize bare country codes (e.g. "us", "UK2")
    if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE):
        proxy = proxy.lower()

    if requested_provider:
        # BUGFIX: previously an explicitly named provider was only consulted when
        # the query looked like a 2-letter country code; any other query (e.g.
        # "nordvpn:germany") fell through and the bare query string was returned
        # as if it were a proxy URI. The named provider is now always consulted.
        proxy_provider = next(
            (x for x in proxy_providers if x.__class__.__name__.lower() == requested_provider.lower()),
            None,
        )
        if not proxy_provider:
            available_providers = [x.__class__.__name__ for x in proxy_providers]
            raise ValueError(
                f"The proxy provider '{requested_provider}' was not recognized. Available providers: {available_providers}"
            )

        proxy_uri = proxy_provider.get_proxy(proxy)
        if not proxy_uri:
            raise ValueError(f"The proxy provider {requested_provider} had no proxy for {proxy}")

        log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
        return proxy_uri

    if re.match(r"^[a-z]{2}(?:\d+)?$", proxy):
        # Bare country code: try every provider in order until one matches.
        for proxy_provider in proxy_providers:
            proxy_uri = proxy_provider.get_proxy(proxy)
            if proxy_uri:
                log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy_uri}")
                return proxy_uri

        raise ValueError(f"No proxy provider had a proxy for {proxy}")

    # Return as-is if not recognized format
    log.info(f"Using explicit Proxy: {proxy}")
    return proxy
|
||||
|
||||
|
||||
def validate_service(service_tag: str) -> Optional[str]:
    """Return the canonical tag for *service_tag*, or None when it is unknown
    or its service directory does not exist on disk."""
    try:
        normalized = Services.get_tag(service_tag)
        if Services.get_path(normalized).exists():
            return normalized
    except Exception:
        pass
    return None
|
||||
|
||||
|
||||
def serialize_title(title: Title_T) -> Dict[str, Any]:
    """Convert a title object (episode, movie, or other) to a JSON-serializable dict."""
    title_id = str(title.id) if hasattr(title, "id") else None

    if isinstance(title, Episode):
        # Fall back to a zero-padded synthetic name when the episode is unnamed.
        return {
            "type": "episode",
            "name": title.name if title.name else f"Episode {title.number:02d}",
            "series_title": str(title.title),
            "season": title.season,
            "number": title.number,
            "year": title.year,
            "id": title_id,
        }

    if isinstance(title, Movie):
        return {
            "type": "movie",
            "name": str(title.name) if hasattr(title, "name") else str(title),
            "year": title.year,
            "id": title_id,
        }

    return {
        "type": "other",
        "name": str(title.name) if hasattr(title, "name") else str(title),
        "id": title_id,
    }
|
||||
|
||||
|
||||
def serialize_video_track(track: Video) -> Dict[str, Any]:
    """Convert a video track to a JSON-serializable dict (bitrate reported in kbps)."""
    codec_name = getattr(track.codec, "name", str(track.codec))
    range_name = getattr(track.range, "name", str(track.range))

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": VIDEO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "width": track.width,
        "height": track.height,
        "resolution": f"{track.width}x{track.height}" if track.width and track.height else None,
        "fps": track.fps or None,
        "range": range_name,
        "range_display": DYNAMIC_RANGE_MAP.get(range_name, range_name),
        "language": str(track.language) if track.language else None,
        "drm": str(track.drm) if getattr(track, "drm", None) else None,
    }
|
||||
|
||||
|
||||
def serialize_audio_track(track: Audio) -> Dict[str, Any]:
    """Convert an audio track to a JSON-serializable dict (bitrate reported in kbps)."""
    codec_name = getattr(track.codec, "name", str(track.codec))

    return {
        "id": str(track.id),
        "codec": codec_name,
        "codec_display": AUDIO_CODEC_MAP.get(codec_name, codec_name),
        "bitrate": int(track.bitrate / 1000) if track.bitrate else None,
        "channels": track.channels or None,
        "language": str(track.language) if track.language else None,
        "atmos": getattr(track, "atmos", False),
        "descriptive": getattr(track, "descriptive", False),
        "drm": str(track.drm) if getattr(track, "drm", None) else None,
    }
|
||||
|
||||
|
||||
def serialize_subtitle_track(track: Subtitle) -> Dict[str, Any]:
    """Convert a subtitle track to a JSON-serializable dict."""
    return {
        "id": str(track.id),
        "codec": getattr(track.codec, "name", str(track.codec)),
        "language": str(track.language) if track.language else None,
        "forced": getattr(track, "forced", False),
        "sdh": getattr(track, "sdh", False),
        "cc": getattr(track, "cc", False),
    }
|
||||
|
||||
|
||||
async def list_titles_handler(data: Dict[str, Any]) -> web.Response:
    """Handle a list-titles request.

    Requires ``service`` and ``title_id`` in *data*; optional keys are
    ``profile``, ``proxy``, ``no_proxy``, plus arbitrary service-specific
    parameters that are forwarded to the service constructor when its
    signature accepts them. On success returns ``{"titles": [...]}``,
    otherwise an error payload with a 4xx/5xx status.
    """
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        # Merge the per-service YAML config (if present) over the global service config.
        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        # Minimal stand-in click command so the service sees the click.Context
        # shape it expects when normally invoked through the `dl` CLI.
        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data (routing/selection keys excluded).
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with stored cookies/credentials for this service+profile.
        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        # get_titles may return a single title or an iterable of titles.
        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            title_list = [serialize_title(t) for t in titles]
        else:
            title_list = [serialize_title(titles)]

        return web.json_response({"titles": title_list})

    except Exception as e:
        log.exception("Error listing titles")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def list_tracks_handler(data: Dict[str, Any]) -> web.Response:
    """Handle a list-tracks request.

    Same request contract as ``list_titles_handler`` plus optional episode
    selection via ``wanted`` (a season/episode range string) or explicit
    ``season`` and ``episode`` values. Responds with the track listing for
    a single title, or an ``episodes`` array when multiple episodes match.
    """
    service_tag = data.get("service")
    title_id = data.get("title_id")
    profile = data.get("profile")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        import inspect

        import click
        import yaml

        from unshackle.commands.dl import dl
        from unshackle.core.config import config
        from unshackle.core.utils.click_types import ContextData
        from unshackle.core.utils.collections import merge_dict

        # NOTE(review): the service bootstrap below duplicates list_titles_handler
        # almost verbatim - a shared helper would keep the two in sync.
        service_config_path = Services.get_path(normalized_service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
        else:
            service_config = {}
        merge_dict(config.services.get(normalized_service), service_config)

        # Minimal stand-in click command so the service sees the click.Context
        # shape it expects when normally invoked through the `dl` CLI.
        @click.command()
        @click.pass_context
        def dummy_service(ctx: click.Context) -> None:
            pass

        # Handle proxy configuration
        proxy_param = data.get("proxy")
        no_proxy = data.get("no_proxy", False)
        proxy_providers = []

        if not no_proxy:
            proxy_providers = initialize_proxy_providers()

        if proxy_param and not no_proxy:
            try:
                resolved_proxy = resolve_proxy(proxy_param, proxy_providers)
                proxy_param = resolved_proxy
            except ValueError as e:
                return web.json_response({"status": "error", "message": f"Proxy error: {e}"}, status=400)

        ctx = click.Context(dummy_service)
        ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=proxy_providers, profile=profile)
        ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

        service_module = Services.load(normalized_service)

        dummy_service.name = normalized_service
        dummy_service.params = [click.Argument([title_id], type=str)]
        ctx.invoked_subcommand = normalized_service

        service_ctx = click.Context(dummy_service, parent=ctx)
        service_ctx.obj = ctx.obj

        service_kwargs = {"title": title_id}

        # Add additional parameters from request data (routing/selection keys excluded).
        for key, value in data.items():
            if key not in ["service", "title_id", "profile", "season", "episode", "wanted", "proxy", "no_proxy"]:
                service_kwargs[key] = value

        # Get service parameter info and click command defaults
        service_init_params = inspect.signature(service_module.__init__).parameters

        # Extract default values from the click command
        if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
            for param in service_module.cli.params:
                if hasattr(param, "name") and param.name not in service_kwargs:
                    # Add default value if parameter is not already provided
                    if hasattr(param, "default") and param.default is not None:
                        service_kwargs[param.name] = param.default

        # Handle required parameters that don't have click defaults
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ["self", "ctx"]:
                # Check if parameter is required (no default value in signature)
                if param_info.default is inspect.Parameter.empty:
                    # Provide sensible defaults for common required parameters
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False
                    else:
                        # Log warning for unknown required parameters
                        log.warning(f"Unknown required parameter '{param_name}' for service {normalized_service}")

        # Filter out any parameters that the service doesn't accept
        filtered_kwargs = {}
        for key, value in service_kwargs.items():
            if key in service_init_params:
                filtered_kwargs[key] = value

        service_instance = service_module(service_ctx, **filtered_kwargs)

        # Authenticate with stored cookies/credentials for this service+profile.
        cookies = dl.get_cookie_jar(normalized_service, profile)
        credential = dl.get_credentials(normalized_service, profile)
        service_instance.authenticate(cookies, credential)

        titles = service_instance.get_titles()

        wanted_param = data.get("wanted")
        season = data.get("season")
        episode = data.get("episode")

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)

            # Build the wanted episode-key list ("SxE" strings) from either the
            # range syntax or the explicit season/episode pair.
            wanted = None
            if wanted_param:
                from unshackle.core.utils.click_types import SeasonRange

                try:
                    season_range = SeasonRange()
                    # assumes parse_tokens yields keys shaped like the
                    # f"{season}x{number}" comparison below - TODO confirm
                    wanted = season_range.parse_tokens(wanted_param)
                    log.debug(f"Parsed wanted '{wanted_param}' into {len(wanted)} episodes: {wanted[:10]}...")
                except Exception as e:
                    return web.json_response(
                        {"status": "error", "message": f"Invalid wanted parameter: {e}"}, status=400
                    )
            elif season is not None and episode is not None:
                wanted = [f"{season}x{episode}"]

            if wanted:
                # Filter titles based on wanted episodes, similar to how dl.py does it
                matching_titles = []
                log.debug(f"Filtering {len(titles_list)} titles with {len(wanted)} wanted episodes")
                for title in titles_list:
                    if isinstance(title, Episode):
                        episode_key = f"{title.season}x{title.number}"
                        if episode_key in wanted:
                            log.debug(f"Episode {episode_key} matches wanted list")
                            matching_titles.append(title)
                        else:
                            log.debug(f"Episode {episode_key} not in wanted list")
                    else:
                        # Non-episode titles (e.g. movies) always pass the filter.
                        matching_titles.append(title)

                log.debug(f"Found {len(matching_titles)} matching titles")

                if not matching_titles:
                    return web.json_response(
                        {"status": "error", "message": "No episodes found matching wanted criteria"}, status=404
                    )

                # If multiple episodes match, return tracks for all episodes
                if len(matching_titles) > 1 and all(isinstance(t, Episode) for t in matching_titles):
                    episodes_data = []
                    failed_episodes = []

                    # Sort matching titles by season and episode number for consistent ordering
                    sorted_titles = sorted(matching_titles, key=lambda t: (t.season, t.number))

                    for title in sorted_titles:
                        try:
                            tracks = service_instance.get_tracks(title)
                            # Highest-bitrate first; missing bitrates sort last.
                            video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
                            audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

                            episode_data = {
                                "title": serialize_title(title),
                                "video": [serialize_video_track(t) for t in video_tracks],
                                "audio": [serialize_audio_track(t) for t in audio_tracks],
                                "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
                            }
                            episodes_data.append(episode_data)
                            log.debug(f"Successfully got tracks for {title.season}x{title.number}")
                        except SystemExit:
                            # Service calls sys.exit() for unavailable episodes - catch and skip
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Episode {title.season}x{title.number} not available, skipping")
                            continue
                        except Exception as e:
                            # Handle other errors gracefully
                            failed_episodes.append(f"S{title.season}E{title.number:02d}")
                            log.debug(f"Error getting tracks for {title.season}x{title.number}: {e}")
                            continue

                    if episodes_data:
                        response = {"episodes": episodes_data}
                        if failed_episodes:
                            response["unavailable_episodes"] = failed_episodes
                        return web.json_response(response)
                    else:
                        return web.json_response(
                            {
                                "status": "error",
                                "message": f"No available episodes found. Unavailable: {', '.join(failed_episodes)}",
                            },
                            status=404,
                        )
                else:
                    # Single episode or movie
                    first_title = matching_titles[0]
            else:
                first_title = titles_list[0]
        else:
            first_title = titles

        # Single-title path: fetch and serialize tracks for one title.
        tracks = service_instance.get_tracks(first_title)

        video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
        audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

        response = {
            "title": serialize_title(first_title),
            "video": [serialize_video_track(t) for t in video_tracks],
            "audio": [serialize_audio_track(t) for t in audio_tracks],
            "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
        }

        return web.json_response(response)

    except Exception as e:
        log.exception("Error listing tracks")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def download_handler(data: Dict[str, Any]) -> web.Response:
    """Create and queue a download job; responds 202 with the new job's id."""
    from unshackle.core.api.download_manager import get_download_manager

    service_tag = data.get("service")
    title_id = data.get("title_id")

    if not service_tag:
        return web.json_response({"status": "error", "message": "Missing required parameter: service"}, status=400)

    if not title_id:
        return web.json_response({"status": "error", "message": "Missing required parameter: title_id"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        # Lazily start the queue workers before enqueueing the first job.
        manager = get_download_manager()
        await manager.start_workers()

        # service/title_id travel positionally; every other key rides along as a job parameter.
        job_params = {key: value for key, value in data.items() if key not in ("service", "title_id")}
        job = manager.create_job(normalized_service, title_id, **job_params)

        return web.json_response(
            {"job_id": job.job_id, "status": job.status.value, "created_time": job.created_time.isoformat()}, status=202
        )

    except Exception as e:
        log.exception("Error creating download job")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def list_download_jobs_handler(data: Dict[str, Any]) -> web.Response:
    """Return a summary listing of all known download jobs."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()
        summaries = [job.to_dict(include_full_details=False) for job in manager.list_jobs()]
        return web.json_response({"jobs": summaries})

    except Exception as e:
        log.exception("Error listing download jobs")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def get_download_job_handler(job_id: str) -> web.Response:
    """Return full details for a single download job, or 404 when unknown."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        job = get_download_manager().get_job(job_id)
        if not job:
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        return web.json_response(job.to_dict(include_full_details=True))

    except Exception as e:
        log.exception(f"Error getting download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def cancel_download_job_handler(job_id: str) -> web.Response:
    """Attempt to cancel a download job: 404 when unknown, 400 when not cancellable."""
    from unshackle.core.api.download_manager import get_download_manager

    try:
        manager = get_download_manager()

        if not manager.get_job(job_id):
            return web.json_response({"status": "error", "message": "Job not found"}, status=404)

        if manager.cancel_job(job_id):
            return web.json_response({"status": "success", "message": "Job cancelled"})
        return web.json_response({"status": "error", "message": "Job cannot be cancelled"}, status=400)

    except Exception as e:
        log.exception(f"Error cancelling download job {job_id}")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
388
unshackle/core/api/routes.py
Normal file
388
unshackle/core/api/routes.py
Normal file
@@ -0,0 +1,388 @@
|
||||
import logging
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp_swagger3 import SwaggerDocs, SwaggerInfo, SwaggerUiSettings
|
||||
|
||||
from unshackle.core import __version__
|
||||
from unshackle.core.api.handlers import (cancel_download_job_handler, download_handler, get_download_job_handler,
|
||||
list_download_jobs_handler, list_titles_handler, list_tracks_handler)
|
||||
from unshackle.core.services import Services
|
||||
from unshackle.core.update_checker import UpdateChecker
|
||||
|
||||
|
||||
@web.middleware
async def cors_middleware(request: web.Request, handler):
    """aiohttp middleware that attaches permissive CORS headers to every response.

    OPTIONS preflight requests are answered directly with an empty 200
    response instead of being dispatched to a route handler.
    """
    if request.method == "OPTIONS":
        response = web.Response()
    else:
        response = await handler(request)

    for header, value in (
        ("Access-Control-Allow-Origin", "*"),
        ("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS"),
        ("Access-Control-Allow-Headers", "Content-Type, X-API-Key, Authorization"),
        ("Access-Control-Max-Age", "3600"),
    ):
        response.headers[header] = value

    return response
|
||||
|
||||
|
||||
log = logging.getLogger("api")
|
||||
|
||||
|
||||
async def health(request: web.Request) -> web.Response:
    """
    Health check endpoint.
    ---
    summary: Health check
    description: Get server health status, version info, and update availability
    responses:
      '200':
        description: Health status
        content:
          application/json:
            schema:
              type: object
              properties:
                status:
                  type: string
                  example: ok
                version:
                  type: string
                  example: "2.0.0"
                update_check:
                  type: object
                  properties:
                    update_available:
                      type: boolean
                      nullable: true
                    current_version:
                      type: string
                    latest_version:
                      type: string
                      nullable: true
    """
    # Best-effort update check: a network/lookup failure must never fail the
    # health probe itself.
    try:
        latest_version = await UpdateChecker.check_for_updates(__version__)
        update_info = {
            "update_available": latest_version is not None,
            "current_version": __version__,
            "latest_version": latest_version,
        }
    except Exception as e:
        log.warning(f"Failed to check for updates: {e}")
        # "update_available": None signals "unknown" rather than guessing.
        update_info = {"update_available": None, "current_version": __version__, "latest_version": None}

    return web.json_response({"status": "ok", "version": __version__, "update_check": update_info})
|
||||
|
||||
|
||||
async def services(request: web.Request) -> web.Response:
    """
    List available services.
    ---
    summary: List services
    description: Get all available streaming services with their details
    responses:
      '200':
        description: List of services
        content:
          application/json:
            schema:
              type: object
              properties:
                services:
                  type: array
                  items:
                    type: object
                    properties:
                      tag:
                        type: string
                      aliases:
                        type: array
                        items:
                          type: string
                      geofence:
                        type: array
                        items:
                          type: string
                      title_regex:
                        type: string
                        nullable: true
                      help:
                        type: string
                        nullable: true
      '500':
        description: Server error
    """
    try:
        service_tags = Services.get_tags()
        services_info = []

        for tag in service_tags:
            # Defaults keep the response schema stable even when a service
            # module fails to load below.
            service_data = {"tag": tag, "aliases": [], "geofence": [], "title_regex": None, "help": None}

            try:
                service_module = Services.load(tag)

                # Optional module-level attributes; absent ones keep their defaults.
                if hasattr(service_module, "ALIASES"):
                    service_data["aliases"] = list(service_module.ALIASES)

                if hasattr(service_module, "GEOFENCE"):
                    service_data["geofence"] = list(service_module.GEOFENCE)

                if hasattr(service_module, "TITLE_RE"):
                    service_data["title_regex"] = service_module.TITLE_RE

                if service_module.__doc__:
                    service_data["help"] = service_module.__doc__.strip()

            except Exception as e:
                # One broken service must not hide the rest of the catalog.
                log.warning(f"Could not load details for service {tag}: {e}")

            services_info.append(service_data)

        return web.json_response({"services": services_info})
    except Exception as e:
        log.exception("Error listing services")
        return web.json_response({"status": "error", "message": str(e)}, status=500)
|
||||
|
||||
|
||||
async def list_titles(request: web.Request) -> web.Response:
    """
    List titles for a service and title ID.
    ---
    summary: List titles
    description: Get available titles for a service and title ID
    requestBody:
        required: true
        content:
            application/json:
                schema:
                    type: object
                    required:
                        - service
                        - title_id
                    properties:
                        service:
                            type: string
                            description: Service tag
                        title_id:
                            type: string
                            description: Title identifier
    responses:
        '200':
            description: List of titles
        '400':
            description: Invalid request
    """
    # Reject anything that is not parseable JSON before delegating.
    try:
        payload = await request.json()
    except Exception:
        error_body = {"status": "error", "message": "Invalid JSON request body"}
        return web.json_response(error_body, status=400)

    return await list_titles_handler(payload)
|
||||
|
||||
|
||||
async def list_tracks(request: web.Request) -> web.Response:
    """
    List tracks for a title, separated by type.
    ---
    summary: List tracks
    description: Get available video, audio, and subtitle tracks for a title
    requestBody:
        required: true
        content:
            application/json:
                schema:
                    type: object
                    required:
                        - service
                        - title_id
                    properties:
                        service:
                            type: string
                            description: Service tag
                        title_id:
                            type: string
                            description: Title identifier
                        wanted:
                            type: string
                            description: Specific episode/season (optional)
                        proxy:
                            type: string
                            description: Proxy configuration (optional)
    responses:
        '200':
            description: Track information
        '400':
            description: Invalid request
    """
    # Reject anything that is not parseable JSON before delegating.
    try:
        payload = await request.json()
    except Exception:
        error_body = {"status": "error", "message": "Invalid JSON request body"}
        return web.json_response(error_body, status=400)

    return await list_tracks_handler(payload)
|
||||
|
||||
|
||||
async def download(request: web.Request) -> web.Response:
    """
    Download content based on provided parameters.
    ---
    summary: Download content
    description: Download video content based on specified parameters
    requestBody:
        required: true
        content:
            application/json:
                schema:
                    type: object
                    required:
                        - service
                        - title_id
                    properties:
                        service:
                            type: string
                            description: Service tag
                        title_id:
                            type: string
                            description: Title identifier
    responses:
        '200':
            description: Download started
        '400':
            description: Invalid request
    """
    # Reject anything that is not parseable JSON before delegating.
    try:
        payload = await request.json()
    except Exception:
        error_body = {"status": "error", "message": "Invalid JSON request body"}
        return web.json_response(error_body, status=400)

    return await download_handler(payload)
|
||||
|
||||
|
||||
async def download_jobs(request: web.Request) -> web.Response:
    """
    List all download jobs.
    ---
    summary: List download jobs
    description: Get list of all download jobs with their status
    responses:
        '200':
            description: List of download jobs
            content:
                application/json:
                    schema:
                        type: object
                        properties:
                            jobs:
                                type: array
                                items:
                                    type: object
                                    properties:
                                        job_id:
                                            type: string
                                        status:
                                            type: string
                                        created_time:
                                            type: string
                                        service:
                                            type: string
                                        title_id:
                                            type: string
                                        progress:
                                            type: number
        '500':
            description: Server error
    """
    # No filters supported yet — delegate with an empty parameter dict.
    return await list_download_jobs_handler({})
|
||||
|
||||
|
||||
async def download_job_detail(request: web.Request) -> web.Response:
    """
    Get download job details.
    ---
    summary: Get download job
    description: Get detailed information about a specific download job
    parameters:
        - name: job_id
          in: path
          required: true
          schema:
              type: string
    responses:
        '200':
            description: Download job details
        '404':
            description: Job not found
        '500':
            description: Server error
    """
    # job_id comes from the "/api/download/jobs/{job_id}" route pattern.
    job_id = request.match_info["job_id"]
    return await get_download_job_handler(job_id)
|
||||
|
||||
|
||||
async def cancel_download_job(request: web.Request) -> web.Response:
    """
    Cancel download job.
    ---
    summary: Cancel download job
    description: Cancel a queued or running download job
    parameters:
        - name: job_id
          in: path
          required: true
          schema:
              type: string
    responses:
        '200':
            description: Job cancelled successfully
        '400':
            description: Job cannot be cancelled
        '404':
            description: Job not found
        '500':
            description: Server error
    """
    # job_id comes from the "/api/download/jobs/{job_id}" route pattern.
    job_id = request.match_info["job_id"]
    return await cancel_download_job_handler(job_id)
|
||||
|
||||
|
||||
def setup_routes(app: web.Application) -> None:
    """Register every REST API endpoint on the given application.

    Registration stays table-driven; the bound router methods are kept
    (add_get/add_post/add_delete) so their default semantics — e.g.
    add_get also serving HEAD — are unchanged.
    """
    registrations = (
        (app.router.add_get, "/api/health", health),
        (app.router.add_get, "/api/services", services),
        (app.router.add_post, "/api/list-titles", list_titles),
        (app.router.add_post, "/api/list-tracks", list_tracks),
        (app.router.add_post, "/api/download", download),
        (app.router.add_get, "/api/download/jobs", download_jobs),
        (app.router.add_get, "/api/download/jobs/{job_id}", download_job_detail),
        (app.router.add_delete, "/api/download/jobs/{job_id}", cancel_download_job),
    )
    for add_route, path, handler in registrations:
        add_route(path, handler)
|
||||
|
||||
|
||||
def setup_swagger(app: web.Application) -> None:
    """Attach Swagger UI and OpenAPI-documented routes to the application."""
    api_info = SwaggerInfo(
        title="Unshackle REST API",
        version=__version__,
        description="REST API for Unshackle - Modular Movie, TV, and Music Archival Software",
    )
    swagger = SwaggerDocs(
        app,
        swagger_ui_settings=SwaggerUiSettings(path="/api/docs/"),
        info=api_info,
    )

    # Routes must go through SwaggerDocs so their docstring OpenAPI specs
    # are parsed into the served documentation.
    documented_routes = [
        web.get("/api/health", health),
        web.get("/api/services", services),
        web.post("/api/list-titles", list_titles),
        web.post("/api/list-tracks", list_tracks),
        web.post("/api/download", download),
        web.get("/api/download/jobs", download_jobs),
        web.get("/api/download/jobs/{job_id}", download_job_detail),
        web.delete("/api/download/jobs/{job_id}", cancel_download_job),
    ]
    swagger.add_routes(documented_routes)
|
||||
@@ -260,6 +260,7 @@ subtitle:
|
||||
|
||||
# Configuration for pywidevine's serve functionality
|
||||
serve:
|
||||
api_secret: "your-secret-key-here"
|
||||
users:
|
||||
secret_key_for_user:
|
||||
devices:
|
||||
|
||||
199
uv.lock
generated
199
uv.lock
generated
@@ -80,6 +80,22 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp-swagger3"
|
||||
version = "0.10.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "attrs" },
|
||||
{ name = "fastjsonschema" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "rfc3339-validator" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a1/06/00ccb2c8afdde4ca7c3cac424d54715c7d90cdd4e13e1ca71d68f5b2e665/aiohttp_swagger3-0.10.0.tar.gz", hash = "sha256:a333c59328f64dd64587e5f276ee84dc256f587d09f2da6ddaae3812fa4d4f33", size = 1839028, upload-time = "2025-02-11T10:51:26.974Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/8f/db4cb843999a3088846d170f38eda2182b50b5733387be8102fed171c53f/aiohttp_swagger3-0.10.0-py3-none-any.whl", hash = "sha256:0ae2d2ba7dbd8ea8fe1cffe8f0197db5d0aa979eb9679bd699ecd87923912509", size = 1826491, upload-time = "2025-02-11T10:51:25.174Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiosignal"
|
||||
version = "1.4.0"
|
||||
@@ -468,6 +484,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastjsonschema"
|
||||
version = "2.19.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ba/7f/cedf77ace50aa60c566deaca9066750f06e1fcf6ad24f254d255bb976dd6/fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d", size = 372732, upload-time = "2023-12-28T14:02:06.823Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/b9/79691036d4a8f9857e74d1728b23f34f583b81350a27492edda58d5604e1/fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0", size = 23388, upload-time = "2023-12-28T14:02:04.512Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.19.1"
|
||||
@@ -645,72 +670,67 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "lxml"
|
||||
version = "6.0.2"
|
||||
version = "5.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/1f/a3b6b74a451ceb84b471caa75c934d2430a4d84395d38ef201d539f38cd1/lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c", size = 8076838, upload-time = "2025-04-23T01:44:29.325Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/af/a567a55b3e47135b4d1f05a1118c24529104c003f95851374b3748139dc1/lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7", size = 4381827, upload-time = "2025-04-23T01:44:33.345Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/ba/4ee47d24c675932b3eb5b6de77d0f623c2db6dc466e7a1f199792c5e3e3a/lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf", size = 5204098, upload-time = "2025-04-23T01:44:35.809Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/0f/b4db6dfebfefe3abafe360f42a3d471881687fd449a0b86b70f1f2683438/lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28", size = 4930261, upload-time = "2025-04-23T01:44:38.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/1f/0bb1bae1ce056910f8db81c6aba80fec0e46c98d77c0f59298c70cd362a3/lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609", size = 5529621, upload-time = "2025-04-23T01:44:40.921Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/f5/e7b66a533fc4a1e7fa63dd22a1ab2ec4d10319b909211181e1ab3e539295/lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4", size = 4983231, upload-time = "2025-04-23T01:44:43.871Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/39/a38244b669c2d95a6a101a84d3c85ba921fea827e9e5483e93168bf1ccb2/lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7", size = 5084279, upload-time = "2025-04-23T01:44:46.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/64/48cac242347a09a07740d6cee7b7fd4663d5c1abd65f2e3c60420e231b27/lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f", size = 4927405, upload-time = "2025-04-23T01:44:49.843Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/89/97442835fbb01d80b72374f9594fe44f01817d203fa056e9906128a5d896/lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997", size = 5550169, upload-time = "2025-04-23T01:44:52.791Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/97/164ca398ee654eb21f29c6b582685c6c6b9d62d5213abc9b8380278e9c0a/lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c", size = 5062691, upload-time = "2025-04-23T01:44:56.108Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/bc/712b96823d7feb53482d2e4f59c090fb18ec7b0d0b476f353b3085893cda/lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b", size = 5133503, upload-time = "2025-04-23T01:44:59.222Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/55/a62a39e8f9da2a8b6002603475e3c57c870cd9c95fd4b94d4d9ac9036055/lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b", size = 4999346, upload-time = "2025-04-23T01:45:02.088Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/47/a393728ae001b92bb1a9e095e570bf71ec7f7fbae7688a4792222e56e5b9/lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563", size = 5627139, upload-time = "2025-04-23T01:45:04.582Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/5f/9dcaaad037c3e642a7ea64b479aa082968de46dd67a8293c541742b6c9db/lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5", size = 5465609, upload-time = "2025-04-23T01:45:07.649Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/0a/ebcae89edf27e61c45023005171d0ba95cb414ee41c045ae4caf1b8487fd/lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776", size = 5192285, upload-time = "2025-04-23T01:45:10.456Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/ad/cc8140ca99add7d85c92db8b2354638ed6d5cc0e917b21d36039cb15a238/lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7", size = 3477507, upload-time = "2025-04-23T01:45:12.474Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/39/597ce090da1097d2aabd2f9ef42187a6c9c8546d67c419ce61b88b336c85/lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250", size = 3805104, upload-time = "2025-04-23T01:45:15.104Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9", size = 8083240, upload-time = "2025-04-23T01:45:18.566Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7", size = 4387685, upload-time = "2025-04-23T01:45:21.387Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa", size = 4991164, upload-time = "2025-04-23T01:45:23.849Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df", size = 4746206, upload-time = "2025-04-23T01:45:26.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e", size = 5342144, upload-time = "2025-04-23T01:45:28.939Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44", size = 4825124, upload-time = "2025-04-23T01:45:31.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba", size = 4876520, upload-time = "2025-04-23T01:45:34.191Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba", size = 4765016, upload-time = "2025-04-23T01:45:36.7Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c", size = 5362884, upload-time = "2025-04-23T01:45:39.291Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8", size = 4902690, upload-time = "2025-04-23T01:45:42.386Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86", size = 4944418, upload-time = "2025-04-23T01:45:46.051Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056", size = 4827092, upload-time = "2025-04-23T01:45:48.943Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7", size = 5418231, upload-time = "2025-04-23T01:45:51.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd", size = 5261798, upload-time = "2025-04-23T01:45:54.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751", size = 4988195, upload-time = "2025-04-23T01:45:56.685Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4", size = 3474243, upload-time = "2025-04-23T01:45:58.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539", size = 3815197, upload-time = "2025-04-23T01:46:01.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/b0/e4d1cbb8c078bc4ae44de9c6a79fec4e2b4151b1b4d50af71d799e76b177/lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55", size = 3892319, upload-time = "2025-04-23T01:49:22.069Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/aa/e2bdefba40d815059bcb60b371a36fbfcce970a935370e1b367ba1cc8f74/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740", size = 4211614, upload-time = "2025-04-23T01:49:24.599Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5f/91ff89d1e092e7cfdd8453a939436ac116db0a665e7f4be0cd8e65c7dc5a/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5", size = 4306273, upload-time = "2025-04-23T01:49:27.355Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/7c/8c3f15df2ca534589717bfd19d1e3482167801caedfa4d90a575facf68a6/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37", size = 4208552, upload-time = "2025-04-23T01:49:29.949Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/d8/9567afb1665f64d73fc54eb904e418d1138d7f011ed00647121b4dd60b38/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571", size = 4331091, upload-time = "2025-04-23T01:49:32.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/ab/fdbbd91d8d82bf1a723ba88ec3e3d76c022b53c391b0c13cad441cdb8f9e/lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4", size = 3487862, upload-time = "2025-04-23T01:49:36.296Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1153,22 +1173,21 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pyplayready"
|
||||
version = "0.6.3"
|
||||
version = "0.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "click" },
|
||||
{ name = "construct" },
|
||||
{ name = "cryptography" },
|
||||
{ name = "ecpy" },
|
||||
{ name = "lxml" },
|
||||
{ name = "pycryptodome" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "requests" },
|
||||
{ name = "xmltodict" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/53/f2/6d75b6d10a8361b53a2acbe959d51aa586418e9af497381a9f5c436ca488/pyplayready-0.6.3.tar.gz", hash = "sha256:b9b82a32c2cced9c43f910eb1fb891545f1491dc063c1eb9c20634e2417eda76", size = 58019, upload-time = "2025-08-20T19:32:43.642Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/39/5f/aba36faf0f7feafa4b82bb9e38a0d8c70048e068416a931ee54a565ee3db/pyplayready-0.6.0.tar.gz", hash = "sha256:2b874596a8532efa5d7f2380e8de2cdb611a96cd69b0da5182ab1902083566e9", size = 99157, upload-time = "2025-02-06T13:16:02.763Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/7f/64d5ff5d765f9f2138ee7cc196fd9401f9eae0fb514c66660ad4e56584fa/pyplayready-0.6.3-py3-none-any.whl", hash = "sha256:82f35434e790a7da21df57ec053a2924ceb63622c5a6c5ff9f0fa03db0531c57", size = 66162, upload-time = "2025-08-20T19:32:42.62Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/85/a5b7dba7d5420c8f5d133123376a135fda69973f3e8d7c05c58a516a54e5/pyplayready-0.6.0-py3-none-any.whl", hash = "sha256:7f85ba94f2ae0d0c964d2c84e3a4f99bfa947fb120069c70af6c17f83ed6a7f3", size = 114232, upload-time = "2025-02-06T13:16:01.448Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1180,15 +1199,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pysubs2"
|
||||
version = "1.8.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/31/4a/becf78d9d3df56e6c4a9c50b83794e5436b6c5ab6dd8a3f934e94c89338c/pysubs2-1.8.0.tar.gz", hash = "sha256:3397bb58a4a15b1325ba2ae3fd4d7c214e2c0ddb9f33190d6280d783bb433b20", size = 1130048, upload-time = "2024-12-24T12:39:47.769Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/99/09/0fc0719162e5ad723f71d41cf336f18b6b5054d70dc0fe42ace6b4d2bdc9/pysubs2-1.8.0-py3-none-any.whl", hash = "sha256:05716f5039a9ebe32cd4d7673f923cf36204f3a3e99987f823ab83610b7035a0", size = 43516, upload-time = "2024-12-24T12:39:44.469Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pywidevine"
|
||||
version = "1.8.0"
|
||||
@@ -1267,6 +1277,18 @@ socks = [
|
||||
{ name = "pysocks" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rfc3339-validator"
|
||||
version = "0.1.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "six" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "13.9.4"
|
||||
@@ -1373,6 +1395,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.17.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
@@ -1408,8 +1439,8 @@ sdist = { url = "https://files.pythonhosted.org/packages/66/b7/4a1bc231e0681ebf3
|
||||
|
||||
[[package]]
|
||||
name = "subby"
|
||||
version = "0.3.23"
|
||||
source = { git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d#5a925c367ffb3f5e53fd114ae222d3be1fdff35d" }
|
||||
version = "0.3.21"
|
||||
source = { git = "https://github.com/vevv/subby.git#390cb2f4a55e98057cdd65314d8cbffd5d0a11f1" }
|
||||
dependencies = [
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "click" },
|
||||
@@ -1517,6 +1548,7 @@ name = "unshackle"
|
||||
version = "1.4.8"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "aiohttp-swagger3" },
|
||||
{ name = "appdirs" },
|
||||
{ name = "brotli" },
|
||||
{ name = "chardet" },
|
||||
@@ -1538,7 +1570,6 @@ dependencies = [
|
||||
{ name = "pymp4" },
|
||||
{ name = "pymysql" },
|
||||
{ name = "pyplayready" },
|
||||
{ name = "pysubs2" },
|
||||
{ name = "pywidevine", extra = ["serve"] },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "requests", extra = ["socks"] },
|
||||
@@ -1567,6 +1598,7 @@ dev = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aiohttp-swagger3", specifier = ">=0.9.0,<1" },
|
||||
{ name = "appdirs", specifier = ">=1.4.4,<2" },
|
||||
{ name = "brotli", specifier = ">=1.1.0,<2" },
|
||||
{ name = "chardet", specifier = ">=5.2.0,<6" },
|
||||
@@ -1587,8 +1619,7 @@ requires-dist = [
|
||||
{ name = "pymediainfo", specifier = ">=6.1.0,<7" },
|
||||
{ name = "pymp4", specifier = ">=1.4.0,<2" },
|
||||
{ name = "pymysql", specifier = ">=1.1.0,<2" },
|
||||
{ name = "pyplayready", specifier = ">=0.6.3,<0.7" },
|
||||
{ name = "pysubs2", specifier = ">=1.7.0,<2" },
|
||||
{ name = "pyplayready", specifier = ">=0.6.0,<0.7" },
|
||||
{ name = "pywidevine", extras = ["serve"], specifier = ">=1.8.0,<2" },
|
||||
{ name = "pyyaml", specifier = ">=6.0.1,<7" },
|
||||
{ name = "requests", extras = ["socks"], specifier = ">=2.31.0,<3" },
|
||||
@@ -1596,7 +1627,7 @@ requires-dist = [
|
||||
{ name = "rlaphoenix-m3u8", specifier = ">=3.4.0,<4" },
|
||||
{ name = "ruamel-yaml", specifier = ">=0.18.6,<0.19" },
|
||||
{ name = "sortedcontainers", specifier = ">=2.4.0,<3" },
|
||||
{ name = "subby", git = "https://github.com/vevv/subby.git?rev=5a925c367ffb3f5e53fd114ae222d3be1fdff35d" },
|
||||
{ name = "subby", git = "https://github.com/vevv/subby.git" },
|
||||
{ name = "subtitle-filter", specifier = ">=1.4.9,<2" },
|
||||
{ name = "unidecode", specifier = ">=1.3.8,<2" },
|
||||
{ name = "urllib3", specifier = ">=2.2.1,<3" },
|
||||
|
||||
Reference in New Issue
Block a user