From 965482a1e410fa4418275318800d774e3c0def70 Mon Sep 17 00:00:00 2001 From: Andy Date: Tue, 25 Nov 2025 20:14:48 +0000 Subject: [PATCH] feat: merge upstream dev branch - Add Gluetun dynamic VPN-to-HTTP proxy provider - Add remote services and authentication system - Add country code utilities - Add Docker binary detection - Update proxy providers --- CONFIG.md | 956 ++++++++++- docs/GLUETUN.md | 159 ++ pyproject.toml | 1 + unshackle/commands/dl.py | 26 +- unshackle/commands/env.py | 1 + unshackle/commands/remote_auth.py | 225 +++ unshackle/commands/search.py | 9 +- unshackle/commands/serve.py | 58 +- unshackle/core/api/api_keys.py | 137 ++ unshackle/core/api/remote_handlers.py | 1879 ++++++++++++++++++++++ unshackle/core/api/routes.py | 20 + unshackle/core/api/session_serializer.py | 236 +++ unshackle/core/binaries.py | 2 + unshackle/core/config.py | 2 + unshackle/core/local_session_cache.py | 274 ++++ unshackle/core/proxies/__init__.py | 3 +- unshackle/core/proxies/gluetun.py | 1261 +++++++++++++++ unshackle/core/proxies/nordvpn.py | 70 +- unshackle/core/proxies/surfsharkvpn.py | 64 +- unshackle/core/remote_auth.py | 279 ++++ unshackle/core/remote_service.py | 593 +++++++ unshackle/core/remote_services.py | 245 +++ unshackle/core/service.py | 47 + unshackle/core/services.py | 54 +- unshackle/core/utilities.py | 75 + unshackle/unshackle-example.yaml | 89 + uv.lock | 11 + 27 files changed, 6678 insertions(+), 98 deletions(-) create mode 100644 docs/GLUETUN.md create mode 100644 unshackle/commands/remote_auth.py create mode 100644 unshackle/core/api/api_keys.py create mode 100644 unshackle/core/api/remote_handlers.py create mode 100644 unshackle/core/api/session_serializer.py create mode 100644 unshackle/core/local_session_cache.py create mode 100644 unshackle/core/proxies/gluetun.py create mode 100644 unshackle/core/remote_auth.py create mode 100644 unshackle/core/remote_service.py create mode 100644 unshackle/core/remote_services.py diff --git a/CONFIG.md 
b/CONFIG.md index 15eef05..f777f2d 100644 --- a/CONFIG.md +++ b/CONFIG.md @@ -31,10 +31,11 @@ which does not keep comments. ## cdm (dict) -Pre-define which Widevine or PlayReady device to use for each Service by Service Tag as Key (case-sensitive). -The value should be a WVD or PRD filename without the file extension. When -loading the device, unshackle will look in both the `WVDs` and `PRDs` directories -for a matching file. +Pre-define which Widevine or PlayReady device to use for each Service by Service Tag as Key (case-sensitive). +The value should be a WVD or PRD filename without the file extension, or a remote CDM name defined in `remote_cdm`. +When loading a local device, unshackle will look in both the `WVDs` and `PRDs` directories for a matching file. + +### Basic CDM Selection For example, @@ -43,6 +44,8 @@ AMZN: chromecdm_903_l3 NF: nexus_6_l1 ``` +### Profile-Based CDM Selection + You may also specify this device based on the profile used. For example, @@ -55,7 +58,9 @@ DSNP: jane_uhd: nexus_5_l1 ``` -You can also specify a fallback value to predefine if a match was not made. +### Default Fallback + +You can also specify a fallback value to predefine if a match was not made. This can be done using `default` key. This can help reduce redundancy in your specifications. For example, the following has the same result as the previous example, as well as all other @@ -68,6 +73,256 @@ DSNP: default: chromecdm_903_l3 ``` +### Quality-Based CDM Selection + +**NEW:** You can now select different CDMs based on video resolution (quality). This allows you to use local CDMs +for lower qualities and automatically switch to remote CDMs for higher qualities that require L1/L2 security levels. + +unshackle automatically detects the highest quality video track and selects the appropriate CDM before downloading. 
+ +#### Supported Quality Operators + +- **Exact match**: `"480"`, `"720"`, `"1080"`, `"2160"` - Matches exact resolution +- **Greater than or equal**: `">=1080"` - Matches 1080p and above (1440p, 2160p, etc.) +- **Greater than**: `">720"` - Matches above 720p (1080p, 1440p, 2160p, etc.) +- **Less than or equal**: `"<=1080"` - Matches 1080p and below +- **Less than**: `"<1080"` - Matches below 1080p + +**Note**: Quality keys must be quoted strings to preserve operators in YAML. + +#### Example: Local for SD/HD, Remote for 4K + +```yaml +cdm: + NETFLIX: + "<=1080": local_l3 # Use local CDM for 1080p and below + ">=1440": remote_l1 # Use remote L1 CDM for 1440p and above + default: local_l3 # Fallback if no quality match + + DISNEY: + "480": local_l3_mobile # Use mobile L3 for 480p + "720": local_l3 # Use local L3 for 720p + "1080": local_l3_hd # Use local L3 for 1080p + ">1080": remote_l1 # Use remote L1 for above 1080p +``` + +#### Example: Mixed Profile and Quality Selection + +```yaml +cdm: + AMAZON: + # Profile-based selection + john_account: johns_local_l3 + jane_account: janes_remote_l1 + + # Quality-based selection (for default profile) + "<=720": local_l3 + ">=1080": remote_l1 + + default: local_l3 +``` + +#### Example: Switching Between Widevine and PlayReady Based on Quality + +Some services may use different DRM systems for different quality levels. 
For example, AMAZON might use +Widevine (ChromeCDM) for SD/HD content but require PlayReady (SL3) for UHD content: + +```yaml +cdm: + AMAZON: + # Use local/remote Widevine ChromeCDM for 1080p and below + "<=1080": local_chromecdm + + # Use remote PlayReady SL3 for above 1080p (1440p, 2160p) + ">1080": remote_sl3 + + default: local_chromecdm + +# If using local CDMs, place chromecdm.wvd in your WVDs directory +# If using remote CDMs, configure them below: +remote_cdm: + - name: remote_chromecdm + type: decrypt_labs # Or custom_api + device_name: ChromeCDM + host: https://your-cdm-api.com + secret: YOUR_API_KEY + + - name: remote_sl3 + type: decrypt_labs # Or custom_api + device_name: SL3 + device_type: PLAYREADY + host: https://your-cdm-api.com + secret: YOUR_API_KEY +``` + +**How it works:** +- When downloading 720p or 1080p content → Uses `local_chromecdm` (local Widevine L3) +- When downloading 1440p or 2160p content → Uses `remote_sl3` (remote PlayReady SL3) +- unshackle automatically detects the video quality and selects the appropriate CDM +- The DRM type is verified against the content's actual DRM system + +**Note:** This configuration assumes the service uses different DRM systems for different qualities. +Most services use a single DRM system across all qualities, but some (like AMAZON) may vary by region or quality tier. + +### DRM-Specific CDM Selection (Widevine/PlayReady) + +For services that support multiple DRM systems, you can specify different CDMs based on the DRM type. +unshackle automatically detects the DRM system used by content and switches to the appropriate CDM. 
+ +#### Example: Separate Widevine and PlayReady CDMs + +```yaml +cdm: + DISNEY: + widevine: + default: local_wv # Local Widevine CDM + ">=2160": remote_l1 # Remote L1 for 4K Widevine + + playready: + default: local_pr # Local PlayReady CDM + ">=1080": remote_sl2 # Remote SL2 for HD+ PlayReady +``` + +#### Example: AMAZON - Quality-Based with DRM Type Override + +For AMAZON, you might want to use ChromeCDM (Widevine) for SD/HD content and PlayReady SL3 for UHD content. +Here's a more explicit configuration using DRM-specific overrides: + +```yaml +cdm: + AMAZON: + # DRM-specific configuration with quality-based selection + widevine: + "<=1080": local_chromecdm # Local ChromeCDM for 1080p and below + default: local_chromecdm + + playready: + ">1080": remote_sl3 # Remote PlayReady SL3 for above 1080p + "<=1080": local_pr # Optional: Local PlayReady for lower quality + default: remote_sl3 + + # Fallback for unknown DRM types + default: local_chromecdm + +# Define remote CDMs (if using remote for high quality) +remote_cdm: + - name: remote_sl3 + type: decrypt_labs # Or custom_api + device_name: SL3 + device_type: PLAYREADY + host: https://your-cdm-api.com + secret: YOUR_API_KEY + + - name: remote_chromecdm + type: decrypt_labs # Or custom_api + device_name: ChromeCDM + host: https://your-cdm-api.com + secret: YOUR_API_KEY +``` + +**How it works:** +- If content uses **Widevine** → Uses `local_chromecdm` for all qualities up to 1080p +- If content uses **PlayReady** and quality > 1080p → Uses `remote_sl3` (remote SL3) +- If content uses **PlayReady** and quality ≤ 1080p → Uses `local_pr` (local, optional) +- Fallback for unknown DRM → Uses `local_chromecdm` + +**Alternative: Simple quality-based approach** (when DRM type varies by quality): + +```yaml +cdm: + AMAZON: + "<=1080": local_chromecdm # Local Widevine for SD/HD + ">1080": remote_sl3 # Remote PlayReady for UHD + default: local_chromecdm +``` + +This simpler approach works when the service consistently uses 
Widevine for SD/HD and PlayReady for UHD. + +### How Automatic DRM Switching Works + +When downloading content, unshackle: + +1. **Detects video quality** - Analyzes all video tracks and determines the highest resolution +2. **Applies quality rules** - Matches resolution against your quality-based CDM configuration +3. **Detects DRM type** - Identifies whether content uses Widevine or PlayReady +4. **Switches CDM automatically** - Loads the appropriate CDM based on DRM type and quality +5. **Falls back if needed** - Uses local CDM if remote CDM is unavailable + +For example, if you download 4K content that uses Widevine: +- System detects 2160p resolution +- Matches `">=2160": remote_l1` rule +- Detects Widevine DRM +- Automatically loads `remote_l1` remote CDM +- If remote CDM fails, falls back to local CDM (if available) + +### Local to Remote CDM Fallback + +When you configure both local and remote CDMs, unshackle follows this priority order: + +1. **Remote CDM** (if defined in `remote_cdm` and matched by quality/DRM rules) +2. **Local PlayReady** (.prd files in `PRDs` directory) +3. **Local Widevine** (.wvd files in `WVDs` directory) + +This ensures that if a remote CDM API is unavailable, unshackle can still use local devices as fallback. 
+ +#### Example: Complete Configuration with Fallback + +```yaml +cdm: + NETFLIX: + # Use local for low quality, remote for high quality + "<=720": local_l3_sd # Local WVD file + "1080": local_l3_hd # Local WVD file + ">=1440": remote_l1 # Remote L1 API + default: local_l3_sd + +# Define remote CDMs +remote_cdm: + - name: remote_l1 + type: decrypt_labs # Or custom_api + device_name: L1 + host: https://your-cdm-api.com + secret: YOUR_API_KEY + + - name: remote_sl2 + type: decrypt_labs # Or custom_api + device_name: SL2 # PlayReady SL2000 + device_type: PLAYREADY + host: https://your-cdm-api.com + secret: YOUR_API_KEY +``` + +**Result:** +- **480p/720p content** → Uses `local_l3_sd` (local .wvd file) +- **1080p content** → Uses `local_l3_hd` (local .wvd file) +- **1440p/2160p content** → Uses `remote_l1` (remote API) +- **If remote API fails** → Falls back to local .wvd files if available + +### Advanced: Service Certificate Configuration + +Some services require L1/L2 security levels for high-quality content. When using remote L1/L2 CDMs, +you may need to configure the service certificate in the `services` section. See the [services](#services-dict) +section for certificate configuration details. + +### Configuration Priority Order + +When multiple configuration types are defined, unshackle follows this selection hierarchy: + +1. **Profile-specific** (if `-p/--profile` specified on command line) +2. **DRM-specific** (widevine/playready keys) +3. **Quality-based** (resolution with operators: >=, >, <=, <, exact) +4. **Service-level default** (default key under service) +5. 
**Global default** (top-level default key) + +### Summary + +- **Basic**: Simple service → CDM mapping +- **Profile**: Different CDMs per user profile +- **Quality**: Automatic CDM selection based on video resolution +- **DRM Type**: Separate CDMs for Widevine vs PlayReady +- **Fallback**: Local CDM fallback if remote CDM unavailable +- **Automatic**: Zero manual intervention - unshackle handles all switching + ## chapter_fallback_name (str) The Chapter Name to use when exporting a Chapter without a Name. @@ -111,18 +366,49 @@ Please be aware that this information is sensitive and to keep it safe. Do not s ## curl_impersonate (dict) -- `browser` - The Browser to impersonate as. A list of available Browsers and Versions are listed here: - +Configuration for curl_cffi browser impersonation and custom fingerprinting. + +- `browser` - The Browser to impersonate as OR a fingerprint preset name. A list of available Browsers and Versions + are listed here: Default: `"chrome124"` -For example, +### Available Fingerprint Presets + +- `okhttp4` - Android TV OkHttp 4.x fingerprint preset (for better Android TV compatibility) +- `okhttp5` - Android TV OkHttp 5.x fingerprint preset (for better Android TV compatibility) + +### Custom Fingerprinting + +For advanced users, you can specify custom TLS and HTTP/2 fingerprints: + +- `ja3` (str): Custom JA3 TLS fingerprint string (format: "SSLVersion,Ciphers,Extensions,Curves,PointFormats") +- `akamai` (str): Custom Akamai HTTP/2 fingerprint string (format: "SETTINGS|WINDOW_UPDATE|PRIORITY|PSEUDO_HEADERS") +- `extra_fp` (dict): Additional fingerprint parameters for advanced customization + +For example, using a browser preset: ```yaml curl_impersonate: browser: "chrome120" ``` +Using an Android TV preset: + +```yaml +curl_impersonate: + browser: "okhttp4" +``` + +Using custom fingerprints: + +```yaml +curl_impersonate: + browser: "chrome120" + ja3: "custom_ja3_fingerprint_string" + akamai: "custom_akamai_fingerprint_string" +``` + ## 
directories (dict) Override the default directories used across unshackle. @@ -158,12 +444,14 @@ There are directories not listed that cannot be modified as they are crucial to ## dl (dict) -Pre-define default options and switches of the `dl` command. +Pre-define default options and switches of the `dl` command. The values will be ignored if explicitly set in the CLI call. -The Key must be the same value Python click would resolve it to as an argument. +The Key must be the same value Python click would resolve it to as an argument. E.g., `@click.option("-r", "--range", "range_", type=...` actually resolves as `range_` variable. +### Common Options + For example to set the default primary language to download to German, ```yaml @@ -199,6 +487,29 @@ or to change the output subtitle format from the default (original format) to We sub_format: vtt ``` +### Additional Available Options + +The following additional flags can be pre-configured as defaults: + +- `latest_episode` (bool): Download only the most recent episode (corresponds to `--latest-episode` / `-le` flag) +- `no_video` (bool): Skip downloading video tracks (corresponds to `--no-video` / `-nv` flag) +- `audio_description` (bool): Download audio description tracks (corresponds to `--audio-description` / `-ad` flag) +- `forced_subs` (bool): Include forced subtitle tracks (corresponds to `--forced-subs` / `-fs` flag) +- `no_cache` (bool): Bypass title cache (corresponds to `--no-cache` flag) +- `reset_cache` (bool): Clear title cache before fetching (corresponds to `--reset-cache` flag) +- `best_available` (bool): Continue with best quality if requested unavailable (corresponds to `--best-available` flag) + +For example, + +```yaml +dl: + latest_episode: true # Always download only the latest episode + audio_description: true # Include audio description tracks by default + best_available: true # Use best available quality as fallback +``` + +**Note**: These options can also be set per-service by nesting them 
under a service tag. + ## downloader (str | dict) Choose what software to use to download data throughout unshackle where needed. @@ -226,6 +537,51 @@ downloader: The `default` entry is optional. If omitted, `requests` will be used for services not listed. +## debug (bool) + +Enable comprehensive JSON-based debug logging for troubleshooting and service development. +When enabled, creates JSON Lines (`.jsonl`) log files with complete debugging context. + +Default: `false` + +When enabled (via `--debug` flag or `debug: true` in config): + +- Creates structured JSON Lines log files: `logs/unshackle_debug_{service}_{timestamp}.jsonl` +- Logs session info, CLI parameters, service configuration, CDM details, authentication status +- Logs title/track metadata, DRM operations, vault queries +- Logs errors with full stack traces +- Also creates text log: `logs/unshackle_root_{timestamp}.log` + +For example, + +```yaml +debug: true +``` + +**Security Note**: Passwords, tokens, cookies, and session tokens are ALWAYS redacted regardless of this setting. + +## debug_keys (bool) + +Control whether actual decryption keys (CEKs) are logged in debug logs. + +Default: `false` + +When set to `true`, includes actual content encryption keys in debug logs. This is useful for debugging +key retrieval and decryption issues. + +For example, + +```yaml +debug_keys: true +``` + +**Security Notes**: + +- Only affects content_key and key fields (the actual CEKs) +- Key metadata (kid, keys_count, key_id) is always logged regardless of this setting +- Passwords, tokens, cookies, and session tokens remain redacted even when this is enabled +- Use with caution and ensure debug logs are stored securely + ## decryption (str | dict) Choose what software to use to decrypt DRM-protected content throughout unshackle where needed. 
@@ -257,14 +613,31 @@ Simple configuration (single method for all services): decryption: mp4decrypt ``` +## decrypt_labs_api_key (str) + +API key for DecryptLabs CDM service integration. + +When set, enables the use of DecryptLabs remote CDM services in your `remote_cdm` configuration. +This is used specifically for `type: "decrypt_labs"` entries in the remote CDM list. + +For example, + +```yaml +decrypt_labs_api_key: "your_api_key_here" +``` + +**Note**: This is different from the per-CDM `secret` field in `remote_cdm` entries. This provides a global +API key that can be referenced across multiple DecryptLabs CDM configurations. + ## filenames (dict) -Override the default filenames used across unshackle. +Override the default filenames used across unshackle. The filenames use various variables that are replaced during runtime. The following filenames are available and may be overridden: - `log` - Log filenames. Uses `{name}` and `{time}` variables. +- `debug_log` - Debug log filenames in JSON Lines format. Uses `{service}` and `{time}` variables. - `config` - Service configuration filenames. - `root_config` - Root configuration filename. - `chapters` - Chapter export filenames. Uses `{title}` and `{random}` variables. @@ -275,6 +648,7 @@ For example, ```yaml filenames: log: "unshackle_{name}_{time}.log" + debug_log: "unshackle_debug_{service}_{time}.jsonl" config: "config.yaml" root_config: "unshackle.yaml" chapters: "Chapters_{title}_{random}.txt" @@ -408,31 +782,6 @@ n_m3u8dl_re: use_proxy: true ``` -## nordvpn (dict) - -**Legacy configuration. Use `proxy_providers.nordvpn` instead.** - -Set your NordVPN Service credentials with `username` and `password` keys to automate the use of NordVPN as a Proxy -system where required. - -You can also specify specific servers to use per-region with the `server_map` key. -Sometimes a specific server works best for a service than others, so hard-coding one for a day or two helps. 
- -For example, - -```yaml -nordvpn: - username: zxqsR7C5CyGwmGb6KSvk8qsZ # example of the login format - password: wXVHmht22hhRKUEQ32PQVjCZ - server_map: - us: 12 # force US server #12 for US proxies -``` - -The username and password should NOT be your normal NordVPN Account Credentials. -They should be the `Service credentials` which can be found on your Nord Account Dashboard. - -Note that `gb` is used instead of `uk` to be more consistent across regional systems. - ## proxy_providers (dict) Enable external proxy provider services. These proxies will be used automatically where needed as defined by the @@ -486,7 +835,7 @@ Note that `gb` is used instead of `uk` to be more consistent across regional sys ### surfsharkvpn (dict) -Enable Surfshark VPN proxy service using Surfshark Service credentials (not your login password). +Enable Surfshark VPN proxy service using Surfshark Service credentials (not your login password). You may pin specific server IDs per region using `server_map`. ```yaml @@ -498,9 +847,32 @@ server_map: au: 4621 # force AU server #4621 ``` +### windscribevpn (dict) + +Enable Windscribe VPN proxy service using Windscribe Service credentials (not your login password). +You may pin specific server hostnames per region using `server_map`. + +```yaml +username: your_windscribe_username # From https://windscribe.com/getconfig/openvpn +password: your_windscribe_password # Service credentials (not your login password) +server_map: + us: "us-central-096.totallyacdn.com" # Force specific US server + gb: "uk-london-055.totallyacdn.com" # Force specific GB server + de: "de-frankfurt-001.totallyacdn.com" # Force specific DE server +``` + +**Note**: The username and password should be your Windscribe OpenVPN credentials, which can be obtained from +the Windscribe configuration generator. The `server_map` uses full server hostnames (not just numbers like NordVPN). + +You can use Windscribe proxies by specifying `--proxy=windscribevpn:us` or such. 
Server selection works similar +to other providers - use `--proxy=windscribevpn:us` for automatic server or specify the full hostname if needed. + ### hola (dict) -Enable Hola VPN proxy service. This is a simple provider that doesn't require configuration. +Enable Hola VPN proxy service for datacenter and residential proxies. + +This provider uses the open-source `hola-proxy` tool and requires no account credentials. +Simply include an empty configuration to enable it. For example, @@ -509,28 +881,332 @@ proxy_providers: hola: {} ``` -Note: Hola VPN is automatically enabled when proxy_providers is configured, no additional setup is required. +**Requirements**: The `hola-proxy` binary must be installed and available in your system PATH or in the +unshackle binaries directory. + +**Note**: Hola uses a peer-to-peer VPN network. Consider the privacy implications before using this provider. + +### gluetun (dict) + +Enable Gluetun VPN proxy service, which creates Docker containers running Gluetun to bridge VPN connections +to HTTP proxies. This supports 50+ VPN providers through a single, unified interface. + +**Requirements**: Docker must be installed and running. Check with `unshackle env check`. 
+ +```yaml +gluetun: + base_port: 8888 # Starting port for HTTP proxies + auto_cleanup: true # Remove containers when done + container_prefix: "unshackle-gluetun" # Docker container name prefix + verify_ip: true # Verify VPN IP matches expected region + + providers: + windscribe: + vpn_type: wireguard + credentials: + private_key: "YOUR_WIREGUARD_PRIVATE_KEY" + addresses: "YOUR_WIREGUARD_ADDRESS" # e.g., "10.x.x.x/32" + server_countries: + us: US + uk: GB + ca: CA +``` + +**Usage**: Use the format `--proxy gluetun::`, e.g.: +- `--proxy gluetun:windscribe:us` - Connect via Windscribe to US +- `--proxy gluetun:nordvpn:de` - Connect via NordVPN to Germany + +**Supported VPN Types**: +- `wireguard` - For providers like Windscribe, NordVPN, Surfshark (recommended) +- `openvpn` - For providers like ExpressVPN, PIA + +See the example config file for more provider configurations. ## remote_cdm (list\[dict]) -Use [pywidevine] Serve-compliant Remote CDMs in unshackle as if it was a local widevine device file. -The name of each defined device maps as if it was a local device and should be used like a local device. +Configure remote CDM (Content Decryption Module) APIs to use for decrypting DRM-protected content. +Remote CDMs allow you to use high-security CDMs (L1/L2 for Widevine, SL2000/SL3000 for PlayReady) without +having the physical device files locally. + +unshackle supports multiple types of remote CDM providers: + +1. **DecryptLabs CDM** - Official DecryptLabs KeyXtractor API with intelligent caching +2. **Custom API CDM** - Highly configurable adapter for any third-party CDM API +3. **Legacy PyWidevine Serve** - Standard pywidevine serve-compliant APIs + +The name of each defined remote CDM can be referenced in the `cdm` configuration as if it was a local device file. + +### DecryptLabs Remote CDM + +DecryptLabs provides a professional CDM API service with support for multiple device types and intelligent key caching. 
+ +**Supported Devices:** +- **Widevine**: `ChromeCDM` (L3), `L1` (Security Level 1), `L2` (Security Level 2) +- **PlayReady**: `SL2` (SL2000), `SL3` (SL3000) + +**Configuration:** + +```yaml +remote_cdm: + # Widevine L1 Device + - name: decrypt_labs_l1 + type: decrypt_labs # Required: identifies as DecryptLabs CDM + device_name: L1 # Required: must match exactly (L1, L2, ChromeCDM, SL2, SL3) + host: https://keyxtractor.decryptlabs.com + secret: YOUR_API_KEY # Your DecryptLabs API key + + # Widevine L2 Device + - name: decrypt_labs_l2 + type: decrypt_labs + device_name: L2 + host: https://keyxtractor.decryptlabs.com + secret: YOUR_API_KEY + + # Chrome CDM (L3) + - name: decrypt_labs_chrome + type: decrypt_labs + device_name: ChromeCDM + host: https://keyxtractor.decryptlabs.com + secret: YOUR_API_KEY + + # PlayReady SL2000 + - name: decrypt_labs_playready_sl2 + type: decrypt_labs + device_name: SL2 + device_type: PLAYREADY # Required for PlayReady + host: https://keyxtractor.decryptlabs.com + secret: YOUR_API_KEY + + # PlayReady SL3000 + - name: decrypt_labs_playready_sl3 + type: decrypt_labs + device_name: SL3 + device_type: PLAYREADY + host: https://keyxtractor.decryptlabs.com + secret: YOUR_API_KEY +``` + +**Features:** +- Intelligent key caching system (reduces API calls) +- Automatic integration with unshackle's vault system +- Support for both Widevine and PlayReady +- Multiple security levels (L1, L2, L3, SL2000, SL3000) + +**Note:** The `device_type` and `security_level` fields are optional metadata. They don't affect API communication +but are used for internal device identification. + +### Custom API Remote CDM + +A highly configurable CDM adapter that can work with virtually any third-party CDM API through YAML configuration. +This allows you to integrate custom CDM services without writing code. 
+ +**Configuration Philosophy:** +- **90%** of new CDM providers: Only YAML config needed +- **9%** of cases: Add new transform type +- **1%** of cases: Add new auth strategy + +**Basic Example:** + +```yaml +remote_cdm: + - name: custom_chrome_cdm + type: custom_api # Required: identifies as Custom API CDM + host: https://your-cdm-api.com + timeout: 30 # Optional: request timeout in seconds + + device: + name: ChromeCDM + type: CHROME # CHROME, ANDROID, PLAYREADY + system_id: 27175 + security_level: 3 + + auth: + type: bearer # bearer, header, basic, body + key: YOUR_API_TOKEN + + endpoints: + get_request: + path: /get-challenge + method: POST + decrypt_response: + path: /get-keys + method: POST + + caching: + enabled: true # Enable key caching + use_vaults: true # Integrate with vault system +``` + +**Advanced Example with Field Mapping:** + +```yaml +remote_cdm: + - name: advanced_custom_api + type: custom_api + host: https://api.example.com + device: + name: L1 + type: ANDROID + security_level: 1 + + # Authentication configuration + auth: + type: header + header_name: X-API-Key + key: YOUR_SECRET_KEY + custom_headers: + User-Agent: Unshackle/2.0.0 + X-Client-Version: "1.0" + + # Endpoint configuration + endpoints: + get_request: + path: /v2/challenge + method: POST + timeout: 30 + decrypt_response: + path: /v2/decrypt + method: POST + timeout: 30 + + # Request parameter mapping + request_mapping: + get_request: + param_names: + init_data: pssh # Rename 'init_data' to 'pssh' + scheme: device_type # Rename 'scheme' to 'device_type' + static_params: + api_version: "2.0" # Add static parameter + decrypt_response: + param_names: + license_request: challenge + license_response: license + + # Response field mapping + response_mapping: + get_request: + fields: + challenge: data.challenge # Deep field access + session_id: session.id + success_conditions: + - status == 'ok' # Validate response + decrypt_response: + fields: + keys: data.keys + key_fields: + kid: key_id # 
Map 'kid' field + key: content_key # Map 'key' field + + caching: + enabled: true + use_vaults: true + check_cached_first: true # Check cache before API calls +``` + +**Supported Authentication Types:** +- `bearer` - Bearer token authentication +- `header` - Custom header authentication +- `basic` - HTTP Basic authentication +- `body` - Credentials in request body + +### Legacy PyWidevine Serve Format + +Standard pywidevine serve-compliant remote CDM configuration (backwards compatibility). + +```yaml +remote_cdm: + - name: legacy_chrome_cdm + device_name: chrome + device_type: CHROME + system_id: 27175 + security_level: 3 + host: https://domain.com/api + secret: secret_key +``` + +**Note:** If `type` is not specified, unshackle assumes legacy format. For DecryptLabs or Custom API, +always specify `type: decrypt_labs` or `type: custom_api`. + +### Integration with Quality-Based CDM Selection + +Remote CDMs can be used in quality-based and DRM-specific CDM configurations: + +```yaml +cdm: + NETFLIX: + "<=1080": local_l3 # Local for SD/HD + ">=1440": remote_l1 # Remote for 4K+ + + widevine: + ">=2160": remote_l1 # Remote L1 for 4K Widevine + default: local_wv + + playready: + ">=1080": remote_sl2 # Remote SL2 for HD+ PlayReady + default: local_pr + +remote_cdm: + - name: remote_l1 + type: decrypt_labs # Or custom_api + device_name: L1 + host: https://your-cdm-api.com + secret: YOUR_API_KEY + + - name: remote_sl2 + type: decrypt_labs # Or custom_api + device_name: SL2 + device_type: PLAYREADY + host: https://your-cdm-api.com + secret: YOUR_API_KEY +``` + +### Key Features + +**Intelligent Caching:** +- Remote CDMs integrate with unshackle's vault system +- Keys are cached locally to reduce API calls +- Cached keys are checked before making license requests +- Multiple vault sources supported (SQLite, MySQL, API) + +**Automatic Fallback:** +- If remote CDM fails, unshackle falls back to local devices (if available) +- Priority: Remote CDM → Local PRD → Local WVD + 
+**DRM Type Detection:** +- Automatically switches between Widevine and PlayReady remote CDMs +- Based on content DRM system detection + +**Quality-Based Selection:** +- Use different remote CDMs based on video resolution +- Combine with local CDMs for cost-effective downloads + +[pywidevine]: https://github.com/rlaphoenix/pywidevine + +## remote_services (list\[dict]) + +Configure connections to remote unshackle REST API servers to access services running on other instances. +This allows you to use services from remote unshackle installations as if they were local. + +Each entry requires: + +- `url` (str): The base URL of the remote unshackle REST API server +- `api_key` (str): API key for authenticating with the remote server +- `name` (str, optional): Friendly name for the remote service (for logging/display purposes) For example, ```yaml -- name: chromecdm_903_l3 # name must be unique for each remote CDM - # the device type, system id and security level must match the values of the device on the API - # if any of the information is wrong, it will raise an error, if you do not know it ask the API owner - device_type: CHROME - system_id: 1234 - security_level: 3 - host: "http://xxxxxxxxxxxxxxxx/the_cdm_endpoint" - secret: "secret/api key" - device_name: "remote device to use" # the device name from the API, usually a wvd filename +remote_services: + - url: "https://remote-unshackle.example.com" + api_key: "your_api_key_here" + name: "Remote US Server" + - url: "https://remote-unshackle-eu.example.com" + api_key: "another_api_key" + name: "Remote EU Server" ``` -[pywidevine]: https://github.com/rlaphoenix/pywidevine +**Note**: The remote unshackle instances must have the REST API enabled and running. Services from all +configured remote servers will be available alongside your local services. ## scene_naming (bool) @@ -577,22 +1253,120 @@ users: Configuration data for each Service. 
The Service will have the data within this section merged into the `config.yaml` before provided to the Service class. -Think of this config to be used for more sensitive configuration data, like user or device-specific API keys, IDs, -device attributes, and so on. A `config.yaml` file is typically shared and not meant to be modified, so use this for -any sensitive configuration data. +This configuration serves two purposes: -The Key is the Service Tag, but can take any arbitrary form for its value. It's expected to begin as either a list or -a dictionary. +1. **Service-specific data**: Sensitive configuration like user or device-specific API keys, IDs, device attributes, and + so on. A `config.yaml` file is typically shared and not meant to be modified, so use this for any sensitive data. + +2. **Per-service configuration overrides**: Override any global configuration option on a per-service basis for fine-tuned + control. This allows you to customize behavior for services with special requirements. + +The Key is the Service Tag, and the value can take any form (typically a dictionary or list). + +### Basic Service Configuration For example, ```yaml -NOW: - client: - auth_scheme: MESSO - # ... more sensitive data +services: + NOW: + client: + auth_scheme: MESSO + # ... more sensitive data ``` +### Service-Specific Configuration Overrides + +**New in v2.0.0**: You can override ANY global configuration option on a per-service basis. Supported overrides include: + +- `dl` - Download command defaults +- `aria2c` - aria2c downloader settings +- `n_m3u8dl_re` - N_m3u8DL-RE downloader settings +- `curl_impersonate` - Browser impersonation settings +- `subtitle` - Subtitle processing options +- `muxing` - Muxing behavior +- `headers` - HTTP headers +- And more... 
+ +### Comprehensive Example + +```yaml +services: + EXAMPLE: + # Standard service configuration + api_key: "service_api_key" + + # Service certificate for Widevine L1/L2 (base64 encoded) + certificate: | + CAUSwwUKvQIIAxIQ5US6QAvBDzfTtjb4tU/7QxiH8c+TBSKOAjCCAQoCggEBAObzvlu2hZRs... + # (full base64 certificate) + + # Profile-specific configurations + profiles: + john_sd: + device: + app_name: "AIV" + device_model: "SHIELD Android TV" + jane_uhd: + device: + app_name: "AIV" + device_model: "Fire TV Stick 4K" + + # Override dl command defaults for this service + dl: + downloads: 4 # Limit concurrent track downloads + workers: 8 # Reduce workers per track + lang: ["en", "es-419"] # Different language priority + sub_format: srt # Force SRT subtitle format + + # Override n_m3u8dl_re downloader settings + n_m3u8dl_re: + thread_count: 8 # Lower thread count for rate-limited service + use_proxy: true # Force proxy usage + retry_count: 10 # More retries for unstable connections + + # Override aria2c downloader settings + aria2c: + max_concurrent_downloads: 2 + max_connection_per_server: 1 + split: 3 + + # Override subtitle processing + subtitle: + conversion_method: pycaption + sdh_method: auto + + # Service-specific headers + headers: + User-Agent: "Service-specific user agent string" + Accept-Language: "en-US,en;q=0.9" + + # Override muxing options + muxing: + set_title: true + + # Example: Rate-limited service requiring conservative settings + RATE_LIMITED_SERVICE: + dl: + downloads: 2 + workers: 4 + n_m3u8dl_re: + thread_count: 4 + retry_count: 20 + aria2c: + max_concurrent_downloads: 1 + max_connection_per_server: 1 +``` + +### Important Notes + +- Overrides are merged with global config, not replaced +- Only specified keys are overridden; others use global defaults +- Reserved keys (`profiles`, `api_key`, `certificate`, etc.) 
are NOT treated as overrides
+- Any dict-type config option can be overridden
+- CLI arguments always take priority over service-specific config
+- This feature enables fine-tuned control without modifying global settings
+
## set_terminal_bg (bool)
Controls whether unshackle should set the terminal background color. Default: `false`
@@ -603,6 +1377,26 @@ For example,
set_terminal_bg: true
```
+## simkl_client_id (str)
+
+Client ID for SIMKL API integration. SIMKL is used as a metadata source for improved title matching and tagging,
+especially when a TMDB API key is not configured.
+
+To obtain a SIMKL Client ID:
+
+1. Create an account at <https://simkl.com>
+2. Go to <https://simkl.com/settings/developer/>
+3. Register a new application to receive your Client ID
+
+For example,
+
+```yaml
+simkl_client_id: "your_client_id_here"
+```
+
+**Note**: While optional, having a SIMKL Client ID improves metadata lookup reliability and reduces the chance
+of rate limiting. SIMKL serves as an alternative or fallback metadata source to TMDB.
+
## tag (str)
Group or Username to postfix to the end of all download filenames following a dash.
@@ -651,28 +1445,58 @@ tmdb_api_key: cf66bf18956kca5311ada3bebb84eb9a # Not a real key
## subtitle (dict)
-Control subtitle conversion and SDH (hearing-impaired) stripping behavior.
+Control subtitle conversion, SDH (hearing-impaired) stripping behavior, and formatting preservation.
+
+### Conversion and Processing Options
- `conversion_method`: How to convert subtitles between formats. Default: `auto`.
-  - `auto`: Use subby for WebVTT/SAMI, standard for others.
-  - `subby`: Always use subby with CommonIssuesFixer.
+  - `auto`: Smart routing - use subby for WebVTT/SAMI, pycaption for others.
+  - `subby`: Always use subby with CommonIssuesFixer for advanced processing.
  - `subtitleedit`: Prefer SubtitleEdit when available; otherwise fallback to standard conversion.
  - `pycaption`: Use only the pycaption library (no SubtitleEdit, no subby).
- `pysubs2`: Use pysubs2 library (supports SRT, SSA, ASS, WebVTT, TTML, SAMI, MicroDVD, MPL2, TMP formats).
- `sdh_method`: How to strip SDH cues. Default: `auto`.
-  - `auto`: Try subby for SRT first, then SubtitleEdit, then filter-subs.
+
+  - `auto`: Try subby for SRT first, then SubtitleEdit, then subtitle-filter.
  - `subby`: Use subby's SDHStripper (SRT only).
  - `subtitleedit`: Use SubtitleEdit's RemoveTextForHI when available.
-  - `filter-subs`: Use the subtitle-filter library.
+  - `filter-subs`: Use the subtitle-filter library directly.
-Example:
+- `strip_sdh`: Automatically create stripped (non-SDH) versions of SDH subtitles. Default: `true`.
+
+  Set to `false` to disable automatic SDH stripping entirely. When `true`, unshackle will automatically
+  detect SDH subtitles and create clean versions alongside the originals.
+
+- `convert_before_strip`: Auto-convert VTT/other formats to SRT before using subtitle-filter. Default: `true`.
+
+  This ensures compatibility when subtitle-filter is used as the fallback SDH stripping method, as
+  subtitle-filter works best with SRT format.
+
+- `preserve_formatting`: Preserve original subtitle formatting (tags, positioning, styling). Default: `true`.
+
+  When `true`, skips pycaption processing for WebVTT files to keep tags like `<i>`, `<b>`, positioning,
+  and other formatting intact. Combined with no `sub_format` setting, ensures subtitles remain in their
+  original format.
+ +### Example Configuration ```yaml subtitle: conversion_method: auto sdh_method: auto + strip_sdh: true + convert_before_strip: true + preserve_formatting: true +``` + +### Minimal Configuration (Disable Processing) + +```yaml +subtitle: + strip_sdh: false # Don't strip SDH + preserve_formatting: true # Keep all formatting intact ``` ## update_checks (bool) diff --git a/docs/GLUETUN.md b/docs/GLUETUN.md new file mode 100644 index 0000000..1787c30 --- /dev/null +++ b/docs/GLUETUN.md @@ -0,0 +1,159 @@ +# Gluetun VPN Proxy + +Gluetun provides Docker-managed VPN proxies supporting 50+ VPN providers. + +## Prerequisites + +**Docker must be installed and running.** + +```bash +# Linux +curl -fsSL https://get.docker.com | sh +sudo usermod -aG docker $USER # Then log out/in + +# Windows/Mac +# Install Docker Desktop: https://www.docker.com/products/docker-desktop/ +``` + +## Quick Start + +### 1. Configuration + +Add to `~/.config/unshackle/unshackle.yaml`: + +```yaml +proxy_providers: + gluetun: + providers: + nordvpn: + vpn_type: wireguard + credentials: + private_key: YOUR_PRIVATE_KEY +``` + +### 2. Usage + +Use 2-letter country codes directly: + +```bash +uv run unshackle dl SERVICE CONTENT --proxy gluetun:nordvpn:us +uv run unshackle dl SERVICE CONTENT --proxy gluetun:nordvpn:uk +``` + +Format: `gluetun:provider:region` + +## Provider Credential Requirements + +Each provider has different credential requirements. See the [Gluetun Wiki](https://github.com/qdm12/gluetun-wiki/tree/main/setup/providers) for complete details. 
+ +| Provider | VPN Type | Required Credentials | +|----------|----------|---------------------| +| NordVPN | WireGuard | `private_key` only | +| ProtonVPN | WireGuard | `private_key` only | +| Windscribe | WireGuard | `private_key`, `addresses`, `preshared_key` (all required) | +| Surfshark | WireGuard | `private_key`, `addresses` | +| Mullvad | WireGuard | `private_key`, `addresses` | +| IVPN | WireGuard | `private_key`, `addresses` | +| ExpressVPN | OpenVPN | `username`, `password` (no WireGuard support) | +| Any | OpenVPN | `username`, `password` | + +### Configuration Examples + +**NordVPN/ProtonVPN** (only private_key needed): +```yaml +providers: + nordvpn: + vpn_type: wireguard + credentials: + private_key: YOUR_PRIVATE_KEY +``` + +**Windscribe** (all three credentials required): +```yaml +providers: + windscribe: + vpn_type: wireguard + credentials: + private_key: YOUR_PRIVATE_KEY + addresses: 10.x.x.x/32 + preshared_key: YOUR_PRESHARED_KEY # Required, can be empty string +``` + +**OpenVPN** (any provider): +```yaml +providers: + expressvpn: + vpn_type: openvpn + credentials: + username: YOUR_USERNAME + password: YOUR_PASSWORD +``` + +## Server Selection + +Most providers use `SERVER_COUNTRIES`, but some use `SERVER_REGIONS`: + +| Variable | Providers | +|----------|-----------| +| `SERVER_COUNTRIES` | NordVPN, ProtonVPN, Surfshark, Mullvad, ExpressVPN, and most others | +| `SERVER_REGIONS` | Windscribe, VyprVPN, VPN Secure | + +Unshackle handles this automatically - just use 2-letter country codes. 
+ +## Global Settings + +```yaml +proxy_providers: + gluetun: + providers: {...} + base_port: 8888 # Starting port (default: 8888) + auto_cleanup: true # Remove containers on exit (default: true) + verify_ip: true # Verify IP matches region (default: true) + container_prefix: "unshackle-gluetun" + auth_user: username # Proxy auth (optional) + auth_password: password +``` + +## Features + +- **Container Reuse**: First request takes 10-30s; subsequent requests are instant +- **IP Verification**: Automatically verifies VPN exit IP matches requested region +- **Concurrent Sessions**: Multiple downloads share the same container +- **Specific Servers**: Use `--proxy gluetun:nordvpn:us1239` for specific server selection + +## Container Management + +```bash +# View containers +docker ps | grep unshackle-gluetun + +# Check logs +docker logs unshackle-gluetun-nordvpn-us + +# Remove all containers +docker ps -a | grep unshackle-gluetun | awk '{print $1}' | xargs docker rm -f +``` + +## Troubleshooting + +### Docker Permission Denied (Linux) +```bash +sudo usermod -aG docker $USER +# Then log out and log back in +``` + +### VPN Connection Failed +Check container logs for specific errors: +```bash +docker logs unshackle-gluetun-nordvpn-us +``` + +Common issues: +- Invalid/missing credentials +- Windscribe requires `preshared_key` (can be empty string) +- VPN provider server issues + +## Resources + +- [Gluetun Wiki](https://github.com/qdm12/gluetun-wiki) - Official provider documentation +- [Gluetun GitHub](https://github.com/qdm12/gluetun) diff --git a/pyproject.toml b/pyproject.toml index 96f9cdc..43eafd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ dependencies = [ "aiohttp-swagger3>=0.9.0,<1", "pysubs2>=1.7.0,<2", "PyExecJS>=1.5.1,<2", + "pycountry>=24.6.1", ] [project.urls] diff --git a/unshackle/commands/dl.py b/unshackle/commands/dl.py index 5a38b20..535b1f0 100644 --- a/unshackle/commands/dl.py +++ b/unshackle/commands/dl.py @@ -48,7 +48,7 @@ from 
unshackle.core.constants import DOWNLOAD_LICENCE_ONLY, AnyTrack, context_se from unshackle.core.credential import Credential from unshackle.core.drm import DRM_T, PlayReady, Widevine from unshackle.core.events import events -from unshackle.core.proxies import Basic, Hola, NordVPN, SurfsharkVPN, WindscribeVPN +from unshackle.core.proxies import Basic, Gluetun, Hola, NordVPN, SurfsharkVPN, WindscribeVPN from unshackle.core.service import Service from unshackle.core.services import Services from unshackle.core.title_cacher import get_account_hash @@ -261,13 +261,6 @@ class dl: default=None, help="Wanted episodes, e.g. `S01-S05,S07`, `S01E01-S02E03`, `S02-S02E03`, e.t.c, defaults to all.", ) - @click.option( - "-le", - "--latest-episode", - is_flag=True, - default=False, - help="Download only the single most recent episode available.", - ) @click.option( "-l", "--lang", @@ -275,6 +268,12 @@ class dl: default="orig", help="Language wanted for Video and Audio. Use 'orig' to select the original language, e.g. 
'orig,en' for both original and English.", ) + @click.option( + "--latest-episode", + is_flag=True, + default=False, + help="Download only the single most recent episode available.", + ) @click.option( "-vl", "--v-lang", @@ -665,6 +664,8 @@ class dl: self.proxy_providers.append(SurfsharkVPN(**config.proxy_providers["surfsharkvpn"])) if config.proxy_providers.get("windscribevpn"): self.proxy_providers.append(WindscribeVPN(**config.proxy_providers["windscribevpn"])) + if config.proxy_providers.get("gluetun"): + self.proxy_providers.append(Gluetun(**config.proxy_providers["gluetun"])) if binaries.HolaProxy: self.proxy_providers.append(Hola()) for proxy_provider in self.proxy_providers: @@ -675,7 +676,8 @@ class dl: if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE): # requesting proxy from a specific proxy provider requested_provider, proxy = proxy.split(":", maxsplit=1) - if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE): + # Match simple region codes (us, ca, uk1) or provider:region format (nordvpn:ca, windscribe:us) + if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE) or re.match(r"^[a-z]+:[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE): proxy = proxy.lower() with console.status(f"Getting a Proxy to {proxy}...", spinner="dots"): if requested_provider: @@ -699,8 +701,14 @@ class dl: proxy = ctx.params["proxy"] = proxy_uri self.log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy}") break + # Store proxy query info for service-specific overrides + ctx.params["proxy_query"] = proxy + ctx.params["proxy_provider"] = requested_provider else: self.log.info(f"Using explicit Proxy: {proxy}") + # For explicit proxies, store None for query/provider + ctx.params["proxy_query"] = None + ctx.params["proxy_provider"] = None ctx.obj = ContextData( config=self.service_config, cdm=self.cdm, proxy_providers=self.proxy_providers, profile=self.profile diff --git a/unshackle/commands/env.py b/unshackle/commands/env.py index 504cbf6..94d9a0b 100644 --- 
a/unshackle/commands/env.py +++ b/unshackle/commands/env.py @@ -97,6 +97,7 @@ def check() -> None: "cat": "Network", }, {"name": "Caddy", "binary": binaries.Caddy, "required": False, "desc": "Web server", "cat": "Network"}, + {"name": "Docker", "binary": binaries.Docker, "required": False, "desc": "Gluetun VPN", "cat": "Network"}, ] # Track overall status diff --git a/unshackle/commands/remote_auth.py b/unshackle/commands/remote_auth.py new file mode 100644 index 0000000..23b6de2 --- /dev/null +++ b/unshackle/commands/remote_auth.py @@ -0,0 +1,225 @@ +"""CLI command for authenticating remote services.""" + +from typing import Optional + +import click +from rich.table import Table + +from unshackle.core.config import config +from unshackle.core.console import console +from unshackle.core.constants import context_settings +from unshackle.core.remote_auth import RemoteAuthenticator + + +@click.group(short_help="Manage remote service authentication.", context_settings=context_settings) +def remote_auth() -> None: + """Authenticate and manage sessions for remote services.""" + pass + + +@remote_auth.command(name="authenticate") +@click.argument("service", type=str) +@click.option( + "-r", "--remote", type=str, help="Remote server name or URL (from config)", required=False +) +@click.option("-p", "--profile", type=str, help="Profile to use for authentication") +def authenticate_command(service: str, remote: Optional[str], profile: Optional[str]) -> None: + """ + Authenticate a service locally and upload session to remote server. + + This command: + 1. Authenticates the service locally (shows browser, handles 2FA, etc.) + 2. Extracts the authenticated session + 3. Uploads the session to the remote server + + The server will use this pre-authenticated session for all requests. 
+ + Examples: + unshackle remote-auth authenticate DSNP + unshackle remote-auth authenticate NF --profile john + unshackle remote-auth auth AMZN --remote my-server + """ + # Get remote server config + remote_config = _get_remote_config(remote) + if not remote_config: + return + + remote_url = remote_config["url"] + api_key = remote_config["api_key"] + server_name = remote_config.get("name", remote_url) + + console.print(f"\n[bold cyan]Authenticating {service} for remote server:[/bold cyan] {server_name}") + console.print(f"[dim]Server: {remote_url}[/dim]\n") + + # Create authenticator + authenticator = RemoteAuthenticator(remote_url, api_key) + + # Authenticate and save locally + success = authenticator.authenticate_and_save(service, profile) + + if success: + console.print(f"\n[bold green]✓ Success![/bold green] Session saved locally. You can now use remote_{service} service.") + else: + console.print(f"\n[bold red]✗ Failed to authenticate {service}[/bold red]") + raise click.Abort() + + +@remote_auth.command(name="status") +@click.option( + "-r", "--remote", type=str, help="Remote server name or URL (from config)", required=False +) +def status_command(remote: Optional[str]) -> None: + """ + Show status of all authenticated sessions in local cache. 
+ + Examples: + unshackle remote-auth status + unshackle remote-auth status --remote my-server + """ + import datetime + + from unshackle.core.local_session_cache import get_local_session_cache + + # Get local session cache + cache = get_local_session_cache() + + # Get remote server config (optional filter) + remote_url = None + if remote: + remote_config = _get_remote_config(remote) + if remote_config: + remote_url = remote_config["url"] + server_name = remote_config.get("name", remote_url) + else: + server_name = "All Remotes" + + # Get sessions (filtered by remote if specified) + sessions = cache.list_sessions(remote_url) + + if not sessions: + if remote_url: + console.print(f"\n[yellow]No authenticated sessions for {server_name}[/yellow]") + else: + console.print("\n[yellow]No authenticated sessions in local cache[/yellow]") + console.print("\nUse [cyan]unshackle remote-auth authenticate [/cyan] to add sessions") + return + + # Display sessions in table + table = Table(title=f"Local Authenticated Sessions - {server_name}") + table.add_column("Remote", style="magenta") + table.add_column("Service", style="cyan") + table.add_column("Profile", style="green") + table.add_column("Cached", style="dim") + table.add_column("Age", style="yellow") + table.add_column("Status", style="bold") + + for session in sessions: + cached_time = datetime.datetime.fromtimestamp(session["cached_at"]).strftime("%Y-%m-%d %H:%M") + + # Format age + age_seconds = session["age_seconds"] + if age_seconds < 3600: + age_str = f"{age_seconds // 60}m" + elif age_seconds < 86400: + age_str = f"{age_seconds // 3600}h" + else: + age_str = f"{age_seconds // 86400}d" + + # Status + status = "[red]Expired" if session["expired"] else "[green]Valid" + + # Short remote URL for display + remote_display = session["remote_url"].replace("https://", "").replace("http://", "") + if len(remote_display) > 30: + remote_display = remote_display[:27] + "..." 
+ + table.add_row( + remote_display, + session["service_tag"], + session["profile"], + cached_time, + age_str, + status + ) + + console.print() + console.print(table) + console.print("\n[dim]Sessions are stored locally and expire after 24 hours[/dim]") + console.print() + + +@remote_auth.command(name="delete") +@click.argument("service", type=str) +@click.option( + "-r", "--remote", type=str, help="Remote server name or URL (from config)", required=False +) +@click.option("-p", "--profile", type=str, default="default", help="Profile name") +def delete_command(service: str, remote: Optional[str], profile: str) -> None: + """ + Delete an authenticated session from local cache. + + Examples: + unshackle remote-auth delete DSNP + unshackle remote-auth delete NF --profile john + """ + from unshackle.core.local_session_cache import get_local_session_cache + + # Get remote server config + remote_config = _get_remote_config(remote) + if not remote_config: + return + + remote_url = remote_config["url"] + + cache = get_local_session_cache() + + console.print(f"\n[yellow]Deleting local session for {service} (profile: {profile})...[/yellow]") + + deleted = cache.delete_session(remote_url, service, profile) + + if deleted: + console.print("[green]✓ Session deleted from local cache[/green]") + else: + console.print(f"[red]✗ No session found for {service} (profile: {profile})[/red]") + + +def _get_remote_config(remote: Optional[str]) -> Optional[dict]: + """ + Get remote server configuration. 
+ + Args: + remote: Remote server name or URL, or None for first configured remote + + Returns: + Remote config dict or None + """ + if not config.remote_services: + console.print("[red]No remote services configured in unshackle.yaml[/red]") + console.print("\nAdd a remote service to your config:") + console.print("[dim]remote_services:") + console.print(" - url: https://your-server.com") + console.print(" api_key: your-api-key") + console.print(" name: my-server[/dim]") + return None + + # If no remote specified, use the first one + if not remote: + return config.remote_services[0] + + # Check if remote is a name + for remote_config in config.remote_services: + if remote_config.get("name") == remote: + return remote_config + + # Check if remote is a URL + for remote_config in config.remote_services: + if remote_config.get("url") == remote: + return remote_config + + console.print(f"[red]Remote server '{remote}' not found in config[/red]") + console.print("\nAvailable remotes:") + for remote_config in config.remote_services: + name = remote_config.get("name", remote_config.get("url")) + console.print(f" - {name}") + + return None diff --git a/unshackle/commands/search.py b/unshackle/commands/search.py index a6d63bb..450f263 100644 --- a/unshackle/commands/search.py +++ b/unshackle/commands/search.py @@ -16,7 +16,7 @@ from unshackle.core import binaries from unshackle.core.config import config from unshackle.core.console import console from unshackle.core.constants import context_settings -from unshackle.core.proxies import Basic, Hola, NordVPN, SurfsharkVPN +from unshackle.core.proxies import Basic, Gluetun, Hola, NordVPN, SurfsharkVPN, WindscribeVPN from unshackle.core.service import Service from unshackle.core.services import Services from unshackle.core.utils.click_types import ContextData @@ -71,6 +71,10 @@ def search(ctx: click.Context, no_proxy: bool, profile: Optional[str] = None, pr proxy_providers.append(NordVPN(**config.proxy_providers["nordvpn"])) if 
config.proxy_providers.get("surfsharkvpn"): proxy_providers.append(SurfsharkVPN(**config.proxy_providers["surfsharkvpn"])) + if config.proxy_providers.get("windscribevpn"): + proxy_providers.append(WindscribeVPN(**config.proxy_providers["windscribevpn"])) + if config.proxy_providers.get("gluetun"): + proxy_providers.append(Gluetun(**config.proxy_providers["gluetun"])) if binaries.HolaProxy: proxy_providers.append(Hola()) for proxy_provider in proxy_providers: @@ -81,7 +85,8 @@ def search(ctx: click.Context, no_proxy: bool, profile: Optional[str] = None, pr if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE): # requesting proxy from a specific proxy provider requested_provider, proxy = proxy.split(":", maxsplit=1) - if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE): + # Match simple region codes (us, ca, uk1) or provider:region format (nordvpn:ca, windscribe:us) + if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE) or re.match(r"^[a-z]+:[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE): proxy = proxy.lower() with console.status(f"Getting a Proxy to {proxy}...", spinner="dots"): if requested_provider: diff --git a/unshackle/commands/serve.py b/unshackle/commands/serve.py index a28d633..692a82b 100644 --- a/unshackle/commands/serve.py +++ b/unshackle/commands/serve.py @@ -24,7 +24,13 @@ from unshackle.core.constants import context_settings default=False, help="Include technical debug information (tracebacks, stderr) in API error responses.", ) -def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool, debug_api: bool) -> None: +@click.option( + "--debug", + is_flag=True, + default=False, + help="Enable debug logging for API operations.", +) +def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool, debug_api: bool, debug: bool) -> None: """ Serve your Local Widevine Devices and REST API for Remote Access. 
@@ -39,12 +45,60 @@ def serve(host: str, port: int, caddy: bool, api_only: bool, no_key: bool, debug \b The REST API provides programmatic access to unshackle functionality. - Configure authentication in your config under serve.users and serve.api_secret. + Configure authentication in your config under serve.api_secret and serve.api_keys. + + \b + API KEY TIERS: + Premium API keys can use server-side CDM for decryption. Configure in unshackle.yaml: + + \b + serve: + api_secret: "your-api-secret" + api_keys: + - key: "basic-user-key" + tier: "basic" + allowed_cdms: [] + - key: "premium-user-key" + tier: "premium" + default_cdm: "chromecdm_2101" + allowed_cdms: ["*"] # or list specific CDMs: ["chromecdm_2101", "chromecdm_2202"] + + \b + REMOTE SERVICES: + The server exposes endpoints that allow remote unshackle clients to use + your configured services without needing the service implementations. + Remote clients can authenticate, get titles/tracks, and receive session data + for downloading. 
Configure remote clients in unshackle.yaml: + + \b + remote_services: + - url: "http://your-server:8786" + api_key: "your-api-key" + name: "my-server" + + \b + Available remote endpoints: + - GET /api/remote/services - List available services + - POST /api/remote/{service}/search - Search for content + - POST /api/remote/{service}/titles - Get titles + - POST /api/remote/{service}/tracks - Get tracks + - POST /api/remote/{service}/chapters - Get chapters + - POST /api/remote/{service}/license - Get DRM license (uses client CDM) + - POST /api/remote/{service}/decrypt - Decrypt using server CDM (premium only) """ from pywidevine import serve as pywidevine_serve log = logging.getLogger("serve") + # Configure logging level based on --debug flag + if debug: + logging.basicConfig(level=logging.DEBUG, format="%(name)s - %(levelname)s - %(message)s") + log.info("Debug logging enabled for API operations") + else: + # Set API loggers to WARNING to reduce noise unless --debug is used + logging.getLogger("api").setLevel(logging.WARNING) + logging.getLogger("api.remote").setLevel(logging.WARNING) + # Validate API secret for REST API routes (unless --no-key is used) if not no_key: api_secret = config.serve.get("api_secret") diff --git a/unshackle/core/api/api_keys.py b/unshackle/core/api/api_keys.py new file mode 100644 index 0000000..255a45c --- /dev/null +++ b/unshackle/core/api/api_keys.py @@ -0,0 +1,137 @@ +"""API key tier management for remote services.""" + +import logging +from typing import Any, Dict, List, Optional + +from aiohttp import web + +log = logging.getLogger("api.keys") + + +def get_api_key_from_request(request: web.Request) -> Optional[str]: + """ + Extract API key from request headers. 
+ + Args: + request: aiohttp request object + + Returns: + API key string or None + """ + return request.headers.get("X-API-Key") or request.headers.get("Authorization", "").replace("Bearer ", "") + + +def get_api_key_config(app: web.Application, api_key: str) -> Optional[Dict[str, Any]]: + """ + Get configuration for a specific API key. + + Args: + app: aiohttp application + api_key: API key to look up + + Returns: + API key configuration dict or None if not found + """ + config = app.get("config", {}) + + # Check new-style tiered API keys + api_keys = config.get("api_keys", []) + for key_config in api_keys: + if isinstance(key_config, dict) and key_config.get("key") == api_key: + return key_config + + # Check legacy users list (backward compatibility) + users = config.get("users", []) + if api_key in users: + return { + "key": api_key, + "tier": "basic", + "allowed_cdms": [] + } + + return None + + +def is_premium_user(app: web.Application, api_key: str) -> bool: + """ + Check if an API key belongs to a premium user. + + Premium users can use server-side CDM for decryption. + + Args: + app: aiohttp application + api_key: API key to check + + Returns: + True if premium user, False otherwise + """ + key_config = get_api_key_config(app, api_key) + if not key_config: + return False + + tier = key_config.get("tier", "basic") + return tier == "premium" + + +def get_allowed_cdms(app: web.Application, api_key: str) -> List[str]: + """ + Get list of CDMs that an API key is allowed to use. 
def get_allowed_cdms(app: web.Application, api_key: str) -> list:
    """
    Get the list of CDM names an API key is allowed to use.

    NOTE(review): signature reconstructed from the visible call site in
    can_use_cdm() and the surviving docstring tail — confirm against the
    original def line above this chunk.

    Args:
        app: aiohttp application
        api_key: API key to check

    Returns:
        List of allowed CDM names, or empty list if not premium
    """
    key_config = get_api_key_config(app, api_key)
    if not key_config:
        return []

    allowed_cdms = key_config.get("allowed_cdms", [])

    # Handle wildcard: both the bare string "*" and the single-element
    # list ["*"] are normalized to ["*"] so callers only test one form.
    if allowed_cdms == "*" or allowed_cdms == ["*"]:
        return ["*"]

    # Any non-list value (e.g. a stray string) is treated as "no access".
    return allowed_cdms if isinstance(allowed_cdms, list) else []


def get_default_cdm(app: web.Application, api_key: str) -> Optional[str]:
    """
    Get the default CDM configured for an API key.

    Args:
        app: aiohttp application
        api_key: API key to check

    Returns:
        Default CDM name or None
    """
    key_config = get_api_key_config(app, api_key)
    if not key_config:
        return None

    return key_config.get("default_cdm")


def can_use_cdm(app: web.Application, api_key: str, cdm_name: str) -> bool:
    """
    Check whether an API key may use a specific CDM.

    Args:
        app: aiohttp application
        api_key: API key to check
        cdm_name: CDM name to check access for

    Returns:
        True if allowed, False otherwise
    """
    allowed_cdms = get_allowed_cdms(app, api_key)

    # Wildcard grants access to every CDM.
    if "*" in allowed_cdms:
        return True

    return cdm_name in allowed_cdms


# ======================================================================
# new file: unshackle/core/api/remote_handlers.py
# ======================================================================

"""API handlers for remote service functionality."""

import http.cookiejar
import inspect
import logging
import re
import tempfile
import time
from pathlib import Path
from typing import Any, Dict, Optional

import click
import yaml
from aiohttp import web

from unshackle.commands.dl import dl
from unshackle.core.api.api_keys import can_use_cdm, get_api_key_from_request, get_default_cdm, is_premium_user
from unshackle.core.api.handlers import (serialize_audio_track, serialize_subtitle_track, serialize_title,
                                         serialize_video_track, validate_service)
from unshackle.core.api.session_serializer import deserialize_session, serialize_session
from unshackle.core.config import config
from unshackle.core.credential import Credential
from unshackle.core.search_result import SearchResult
from unshackle.core.services import Services
from unshackle.core.titles import Episode
from unshackle.core.utils.click_types import ContextData, SeasonRange
from unshackle.core.utils.collections import merge_dict

log = logging.getLogger("api.remote")

# Session expiry time in seconds (24 hours)
SESSION_EXPIRY_TIME = 86400


def load_cookies_from_content(cookies_content: Optional[str]) -> Optional[http.cookiejar.MozillaCookieJar]:
    """
    Load cookies from raw cookie file content.

    MozillaCookieJar can only load from a file path, so the content is
    round-tripped through a temp file which is always removed afterwards.

    Args:
        cookies_content: Raw content of a Netscape/Mozilla format cookie file

    Returns:
        MozillaCookieJar object or None
    """
    if not cookies_content:
        return None

    with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
        f.write(cookies_content)
        temp_path = f.name

    try:
        cookie_jar = http.cookiejar.MozillaCookieJar(temp_path)
        # Keep session cookies and expired entries: the client decides validity.
        cookie_jar.load(ignore_discard=True, ignore_expires=True)
        return cookie_jar
    finally:
        Path(temp_path).unlink(missing_ok=True)


def create_credential_from_dict(cred_data: Optional[Dict[str, str]]) -> Optional[Credential]:
    """
    Create a Credential object from a dictionary.

    Args:
        cred_data: Dictionary with 'username' and 'password' keys

    Returns:
        Credential object, or None if either key is missing
    """
    if not cred_data or "username" not in cred_data or "password" not in cred_data:
        return None

    return Credential(username=cred_data["username"], password=cred_data["password"])


def validate_session_expiry(session_data: Dict[str, Any]) -> Optional[str]:
    """
    Validate whether a client-supplied session has expired.

    Args:
        session_data: Session data with a 'cached_at' epoch timestamp

    Returns:
        Error code string if the session is expired, None if still valid
    """
    if not session_data:
        return None

    cached_at = session_data.get("cached_at")
    if not cached_at:
        # No timestamp - assume valid (backward compatibility)
        return None

    age = time.time() - cached_at
    if age > SESSION_EXPIRY_TIME:
        log.warning(f"Session expired (age: {age:.0f}s, limit: {SESSION_EXPIRY_TIME}s)")
        return "SESSION_EXPIRED"

    # Warn if session is close to expiry (within 1 hour)
    if age > (SESSION_EXPIRY_TIME - 3600):
        remaining = SESSION_EXPIRY_TIME - age
        log.info(f"Session expires soon (remaining: {remaining:.0f}s)")

    return None


def get_auth_from_request(data: Dict[str, Any], service_tag: str, profile: Optional[str] = None):
    """
    Get authentication from request data or fall back to server config.

    Server is STATELESS - it never stores sessions.
    Client sends a pre-authenticated session with each request.

    Priority order:
      1. Pre-authenticated session from client (sent with request)
      2. Client-provided credentials/cookies in request
      3. Server-side credentials/cookies from config (fallback)

    Args:
        data: Request data
        service_tag: Service tag
        profile: Profile name

    Returns:
        Tuple of (cookies, credential, pre_authenticated_session, session_error)
        where session_error is an error code if the session is expired
    """
    # First priority: pre-authenticated session sent by the client.
    pre_authenticated_session = data.get("pre_authenticated_session")

    if pre_authenticated_session:
        log.info(f"Using client's pre-authenticated session for {service_tag}")

        session_error = validate_session_expiry(pre_authenticated_session)
        if session_error:
            log.warning(f"Session validation failed: {session_error}")
            return None, None, None, session_error

        # cookies/credential stay None: the serialized session carries auth.
        return None, None, pre_authenticated_session, None

    # Second priority: explicit auth material in the request body.
    cookies_content = data.get("cookies")
    credential_data = data.get("credential")

    if cookies_content:
        cookies = load_cookies_from_content(cookies_content)
    else:
        # Fallback to server-side cookies if not provided by client
        cookies = dl.get_cookie_jar(service_tag, profile)

    if credential_data:
        credential = create_credential_from_dict(credential_data)
    else:
        # Fallback to server-side credentials if not provided by client
        credential = dl.get_credentials(service_tag, profile)

    return cookies, credential, None, None


# ----------------------------------------------------------------------
# Shared request plumbing (extracted from the previously copy-pasted
# bodies of remote_search / remote_get_titles / remote_get_tracks /
# remote_get_chapters — behavior per handler is unchanged).
# ----------------------------------------------------------------------

async def _read_json(request: web.Request):
    """Parse the JSON body; returns (data, error_response) — one is None."""
    try:
        return await request.json(), None
    except Exception:
        return None, web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400)


def _extract_title(data: Dict[str, Any]) -> Optional[str]:
    """Accept 'title', 'title_id' or 'url' interchangeably for flexibility."""
    return data.get("title") or data.get("title_id") or data.get("url")


def _load_service_config(normalized_service: str) -> dict:
    """Load the service's own YAML config and overlay the user's per-service config."""
    service_config_path = Services.get_path(normalized_service) / config.filenames.config
    if service_config_path.exists():
        service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
    else:
        service_config = {}
    merge_dict(config.services.get(normalized_service), service_config)
    return service_config


def _validate_proxy_param(data: Dict[str, Any]):
    """
    Extract and validate the client-supplied proxy configuration.

    The server never resolves proxy-provider shortcuts; the client MUST send
    a fully resolved URL with credentials (e.g. http://user:pass@host:port).
    Anything that is not an http(s) URL is rejected with a 400.

    Returns:
        Tuple of (proxy_param, no_proxy, error_response) where error_response
        is a ready-to-return 400 web.Response, or None if the proxy is valid.
    """
    proxy_param = data.get("proxy")
    no_proxy = data.get("no_proxy", False)

    if proxy_param and not no_proxy:
        if re.match(r"^https?://", proxy_param):
            log.info("Using client-resolved proxy with credentials")
        else:
            log.error(f"[SECURITY] Client sent unresolved proxy parameter: {proxy_param}")
            error = web.json_response({
                "status": "error",
                "error_code": "INVALID_PROXY",
                "message": f"Proxy must be a fully resolved URL (http://... or https://...). "
                f"Cannot use proxy provider shortcuts like '{proxy_param}'. "
                f"Please resolve the proxy on the client side before sending to server."
            }, status=400)
            return proxy_param, no_proxy, error

    return proxy_param, no_proxy, None


def _build_service_context(normalized_service, service_config, profile, proxy_param, no_proxy, title=None):
    """
    Build the click context pair a Service subclass expects at __init__ time.

    Services are normally constructed by the CLI, so a dummy click command
    and context are fabricated carrying the same ContextData. When *title*
    is given it is attached as a positional argument, mirroring `unshackle dl`.

    Returns:
        Tuple of (service_module, service_ctx).
    """
    @click.command()
    @click.pass_context
    def dummy_service(ctx: click.Context) -> None:
        pass

    ctx = click.Context(dummy_service)
    ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=profile)
    ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy}

    service_module = Services.load(normalized_service)

    dummy_service.name = normalized_service
    if title is not None:
        dummy_service.params = [click.Argument([title], type=str)]
    ctx.invoked_subcommand = normalized_service

    service_ctx = click.Context(dummy_service, parent=ctx)
    service_ctx.obj = ctx.obj
    return service_module, service_ctx


def _collect_service_kwargs(service_module, seed, data=None, excluded=(), fill_required=False):
    """
    Assemble constructor kwargs for a service class.

    Merge order (matching the previous inline code): *seed* values, then any
    non-excluded keys from the request *data*, then defaults declared on the
    service's click command (only for still-unset names), then — when
    *fill_required* — best-effort values for well-known required parameters.
    Finally the dict is filtered to names the constructor actually accepts.

    Returns:
        Dict of kwargs safe to splat into service_module(service_ctx, **kwargs).
    """
    service_init_params = inspect.signature(service_module.__init__).parameters

    service_kwargs = dict(seed)

    if data:
        for key, value in data.items():
            if key not in excluded:
                service_kwargs[key] = value

    # Fall back to the click command's declared defaults for anything unset.
    if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"):
        for param in service_module.cli.params:
            if hasattr(param, "name") and param.name not in service_kwargs:
                if hasattr(param, "default") and param.default is not None:
                    service_kwargs[param.name] = param.default

    if fill_required:
        # Required params this API cannot know about get neutral values.
        for param_name, param_info in service_init_params.items():
            if param_name not in service_kwargs and param_name not in ("self", "ctx"):
                if param_info.default is inspect.Parameter.empty:
                    if param_name == "meta_lang":
                        service_kwargs[param_name] = None
                    elif param_name == "movie":
                        service_kwargs[param_name] = False

    return {k: v for k, v in service_kwargs.items() if k in service_init_params}


def _authenticate_or_error(service_instance, data, normalized_service, profile):
    """
    Authenticate a service instance from the request's auth data.

    Uses the client's pre-authenticated session when present (server stays
    stateless), otherwise cookies/credentials from the request or server
    config via get_auth_from_request().

    Returns:
        A 401 web.Response describing the failure, or None on success.
    """
    cookies, credential, pre_authenticated_session, session_error = get_auth_from_request(
        data, normalized_service, profile
    )

    if session_error == "SESSION_EXPIRED":
        return web.json_response({
            "status": "error",
            "error_code": "SESSION_EXPIRED",
            "message": f"Session expired for {normalized_service}. Please re-authenticate."
        }, status=401)

    try:
        if pre_authenticated_session:
            # Use pre-authenticated session sent by client (server is stateless)
            deserialize_session(pre_authenticated_session, service_instance.session)
        else:
            if not cookies and not credential:
                # No auth data available - tell client to authenticate
                return web.json_response({
                    "status": "error",
                    "error_code": "AUTH_REQUIRED",
                    "message": f"Authentication required for {normalized_service}. No credentials or session available."
                }, status=401)
            service_instance.authenticate(cookies, credential)
    except Exception as auth_error:
        # Authentication failed - tell client to re-authenticate
        log.warning(f"Authentication failed for {normalized_service}: {auth_error}")
        return web.json_response({
            "status": "error",
            "error_code": "AUTH_REQUIRED",
            "message": f"Authentication failed for {normalized_service}. Please authenticate locally."
        }, status=401)

    return None


def _service_geofence(service_module) -> list:
    """Geofence country codes declared by the service (so clients can activate a VPN)."""
    return list(service_module.GEOFENCE) if hasattr(service_module, "GEOFENCE") else []


async def remote_list_services(request: web.Request) -> web.Response:
    """
    List all available services on this remote server.
    ---
    summary: List remote services
    description: Get all available services that can be accessed remotely
    responses:
      '200':
        description: List of available services
        content:
          application/json:
            schema:
              type: object
              properties:
                status:
                  type: string
                  example: success
                services:
                  type: array
                  items:
                    type: object
                    properties:
                      tag:
                        type: string
                      aliases:
                        type: array
                        items:
                          type: string
                      geofence:
                        type: array
                        items:
                          type: string
                      help:
                        type: string
      '500':
        description: Server error
    """
    try:
        services_info = []

        for tag in Services.get_tags():
            service_data = {
                "tag": tag,
                "aliases": [],
                "geofence": [],
                "help": None,
            }

            # A service that fails to load is still listed with bare metadata.
            try:
                service_module = Services.load(tag)

                if hasattr(service_module, "ALIASES"):
                    service_data["aliases"] = list(service_module.ALIASES)

                if hasattr(service_module, "GEOFENCE"):
                    service_data["geofence"] = list(service_module.GEOFENCE)

                if service_module.__doc__:
                    service_data["help"] = service_module.__doc__.strip()

            except Exception as e:
                log.warning(f"Could not load details for service {tag}: {e}")

            services_info.append(service_data)

        return web.json_response({"status": "success", "services": services_info})

    except Exception:
        log.exception("Error listing remote services")
        return web.json_response({"status": "error", "message": "Internal server error while listing services"}, status=500)


async def remote_search(request: web.Request) -> web.Response:
    """
    Search for content on a remote service.
    ---
    summary: Search remote service
    description: Search for content using a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - query
            properties:
              query:
                type: string
                description: Search query
              profile:
                type: string
                description: Profile to use for credentials
    responses:
      '200':
        description: Search results
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    data, body_error = await _read_json(request)
    if body_error:
        return body_error

    query = data.get("query")
    if not query:
        return web.json_response({"status": "error", "message": "Missing required parameter: query"}, status=400)

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")
        service_config = _load_service_config(normalized_service)

        proxy_param, no_proxy, proxy_error = _validate_proxy_param(data)
        if proxy_error:
            return proxy_error

        service_module, service_ctx = _build_service_context(
            normalized_service, service_config, profile, proxy_param, no_proxy
        )

        # Search services take the query (plus CLI defaults) at construction.
        filtered_kwargs = _collect_service_kwargs(service_module, {"query": query})
        service_instance = service_module(service_ctx, **filtered_kwargs)

        auth_error = _authenticate_or_error(service_instance, data, normalized_service, profile)
        if auth_error:
            return auth_error

        search_results = []
        if hasattr(service_instance, "search"):
            for result in service_instance.search():
                if isinstance(result, SearchResult):
                    search_results.append(
                        {
                            "id": str(result.id_),
                            "title": result.title,
                            "description": result.description,
                            "label": result.label,
                            "url": result.url,
                        }
                    )

        # Return the (possibly refreshed) session so the client can cache it.
        session_data = serialize_session(service_instance.session)

        return web.json_response({"status": "success", "results": search_results, "session": session_data})

    except Exception:
        log.exception("Error performing remote search")
        return web.json_response({"status": "error", "message": "Internal server error while performing search"}, status=500)


async def remote_get_titles(request: web.Request) -> web.Response:
    """
    Get titles from a remote service.
    ---
    summary: Get titles from remote service
    description: Get available titles for content from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Titles and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    data, body_error = await _read_json(request)
    if body_error:
        return body_error

    title = _extract_title(data)
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")
        service_config = _load_service_config(normalized_service)

        proxy_param, no_proxy, proxy_error = _validate_proxy_param(data)
        if proxy_error:
            return proxy_error

        service_module, service_ctx = _build_service_context(
            normalized_service, service_config, profile, proxy_param, no_proxy, title=title
        )

        filtered_kwargs = _collect_service_kwargs(
            service_module,
            {"title": title},
            data=data,
            excluded=("title", "title_id", "url", "profile", "proxy", "no_proxy"),
            fill_required=True,
        )
        service_instance = service_module(service_ctx, **filtered_kwargs)

        auth_error = _authenticate_or_error(service_instance, data, normalized_service, profile)
        if auth_error:
            return auth_error

        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            title_list = [serialize_title(t) for t in titles]
        else:
            title_list = [serialize_title(titles)]

        session_data = serialize_session(service_instance.session)

        return web.json_response({
            "status": "success",
            "titles": title_list,
            "session": session_data,
            "geofence": _service_geofence(service_module)
        })

    except Exception:
        log.exception("Error getting remote titles")
        return web.json_response({"status": "error", "message": "Internal server error while getting titles"}, status=500)


async def remote_get_tracks(request: web.Request) -> web.Response:
    """
    Get tracks from a remote service.
    ---
    summary: Get tracks from remote service
    description: Get available tracks for a title from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              wanted:
                type: string
                description: Specific episodes/seasons
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Tracks and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    data, body_error = await _read_json(request)
    if body_error:
        return body_error

    title = _extract_title(data)
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")
        service_config = _load_service_config(normalized_service)

        proxy_param, no_proxy, proxy_error = _validate_proxy_param(data)
        if proxy_error:
            return proxy_error

        service_module, service_ctx = _build_service_context(
            normalized_service, service_config, profile, proxy_param, no_proxy, title=title
        )

        filtered_kwargs = _collect_service_kwargs(
            service_module,
            {"title": title},
            data=data,
            excluded=("title", "title_id", "url", "profile", "wanted", "season", "episode", "proxy", "no_proxy"),
            fill_required=True,
        )
        service_instance = service_module(service_ctx, **filtered_kwargs)

        auth_error = _authenticate_or_error(service_instance, data, normalized_service, profile)
        if auth_error:
            return auth_error

        titles = service_instance.get_titles()

        wanted_param = data.get("wanted")
        season = data.get("season")
        episode = data.get("episode")

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)

            # Resolve the episode filter: either a "wanted" range expression
            # or an explicit season+episode pair.
            wanted = None
            if wanted_param:
                try:
                    wanted = SeasonRange().parse_tokens(wanted_param)
                except Exception as e:
                    return web.json_response(
                        {"status": "error", "message": f"Invalid wanted parameter: {e}"}, status=400
                    )
            elif season is not None and episode is not None:
                wanted = [f"{season}x{episode}"]

            if wanted:
                # Non-Episode titles (e.g. movies) always pass the filter.
                matching_titles = [
                    t for t in titles_list
                    if not isinstance(t, Episode) or f"{t.season}x{t.number}" in wanted
                ]

                if not matching_titles:
                    return web.json_response(
                        {"status": "error", "message": "No episodes found matching wanted criteria"}, status=404
                    )

                # Multiple matched episodes: return per-episode track listings.
                if len(matching_titles) > 1 and all(isinstance(t, Episode) for t in matching_titles):
                    episodes_data = []
                    failed_episodes = []

                    for ep in sorted(matching_titles, key=lambda t: (t.season, t.number)):
                        # SystemExit is caught deliberately: some services
                        # sys.exit() on unavailable/region-locked episodes.
                        try:
                            tracks = service_instance.get_tracks(ep)
                            episodes_data.append({
                                "title": serialize_title(ep),
                                "video": [
                                    serialize_video_track(t)
                                    for t in sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
                                ],
                                "audio": [
                                    serialize_audio_track(t)
                                    for t in sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)
                                ],
                                "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
                            })
                        except (SystemExit, Exception):
                            failed_episodes.append(f"S{ep.season}E{ep.number:02d}")
                            continue

                    if not episodes_data:
                        return web.json_response(
                            {
                                "status": "error",
                                "message": f"No available episodes. Unavailable: {', '.join(failed_episodes)}",
                            },
                            status=404,
                        )

                    session_data = serialize_session(service_instance.session)
                    response = {
                        "status": "success",
                        "episodes": episodes_data,
                        "session": session_data,
                        "geofence": _service_geofence(service_module)
                    }
                    if failed_episodes:
                        response["unavailable_episodes"] = failed_episodes
                    return web.json_response(response)

                first_title = matching_titles[0]
            else:
                # Guard: an empty title set previously raised IndexError (500).
                if not titles_list:
                    return web.json_response({"status": "error", "message": "No titles found"}, status=404)
                first_title = titles_list[0]
        else:
            first_title = titles

        # Single-title path.
        tracks = service_instance.get_tracks(first_title)

        video_tracks = sorted(tracks.videos, key=lambda t: t.bitrate or 0, reverse=True)
        audio_tracks = sorted(tracks.audio, key=lambda t: t.bitrate or 0, reverse=True)

        session_data = serialize_session(service_instance.session)

        response_data = {
            "status": "success",
            "title": serialize_title(first_title),
            "video": [serialize_video_track(t) for t in video_tracks],
            "audio": [serialize_audio_track(t) for t in audio_tracks],
            "subtitles": [serialize_subtitle_track(t) for t in tracks.subtitles],
            "session": session_data,
            "geofence": _service_geofence(service_module)
        }

        return web.json_response(response_data)

    except Exception:
        log.exception("Error getting remote tracks")
        return web.json_response({"status": "error", "message": "Internal server error while getting tracks"}, status=500)


async def remote_get_chapters(request: web.Request) -> web.Response:
    """
    Get chapters from a remote service.
    ---
    summary: Get chapters from remote service
    description: Get available chapters for a title from a remote service
    parameters:
      - name: service
        in: path
        required: true
        schema:
          type: string
    requestBody:
      required: true
      content:
        application/json:
          schema:
            type: object
            required:
              - title
            properties:
              title:
                type: string
                description: Title identifier, URL, or any format accepted by the service
              profile:
                type: string
                description: Profile to use for credentials
              proxy:
                type: string
                description: Proxy region code (e.g., "ca", "us") or full proxy URL - uses server's proxy configuration
              no_proxy:
                type: boolean
                description: Disable proxy usage
              cookies:
                type: string
                description: Raw Netscape/Mozilla format cookie file content (optional - uses server cookies if not provided)
              credential:
                type: object
                description: Credentials object with username and password (optional - uses server credentials if not provided)
                properties:
                  username:
                    type: string
                  password:
                    type: string
    responses:
      '200':
        description: Chapters and session data
      '400':
        description: Invalid request
      '500':
        description: Server error
    """
    service_tag = request.match_info.get("service")

    data, body_error = await _read_json(request)
    if body_error:
        return body_error

    title = _extract_title(data)
    if not title:
        return web.json_response(
            {
                "status": "error",
                "message": "Missing required parameter: title (can be URL, ID, or any format accepted by the service)",
            },
            status=400,
        )

    normalized_service = validate_service(service_tag)
    if not normalized_service:
        return web.json_response(
            {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, status=400
        )

    try:
        profile = data.get("profile")
        service_config = _load_service_config(normalized_service)

        proxy_param, no_proxy, proxy_error = _validate_proxy_param(data)
        if proxy_error:
            return proxy_error

        service_module, service_ctx = _build_service_context(
            normalized_service, service_config, profile, proxy_param, no_proxy, title=title
        )

        filtered_kwargs = _collect_service_kwargs(
            service_module,
            {"title": title},
            data=data,
            excluded=("title", "title_id", "url", "profile", "proxy", "no_proxy"),
            fill_required=True,
        )
        service_instance = service_module(service_ctx, **filtered_kwargs)

        auth_error = _authenticate_or_error(service_instance, data, normalized_service, profile)
        if auth_error:
            return auth_error

        titles = service_instance.get_titles()

        if hasattr(titles, "__iter__") and not isinstance(titles, str):
            titles_list = list(titles)
            # Guard: an empty title set previously raised IndexError (500).
            if not titles_list:
                return web.json_response({"status": "error", "message": "No titles found"}, status=404)
            first_title = titles_list[0]
        else:
            first_title = titles

        # Chapters are optional: services without get_chapters return [].
        chapters_data = []
        if hasattr(service_instance, "get_chapters"):
            chapters = service_instance.get_chapters(first_title)
            if chapters:
                for chapter in chapters:
                    chapters_data.append(
                        {
                            "timestamp": chapter.timestamp,
                            "name": chapter.name if hasattr(chapter, "name") else None,
                        }
                    )

        session_data = serialize_session(service_instance.session)

        return web.json_response({"status": "success", "chapters": chapters_data, "session": session_data})

    except Exception:
        log.exception("Error getting remote chapters")
        return web.json_response({"status": "error", "message": "Internal server error while getting chapters"}, status=500)


async def remote_get_license(request: web.Request) -> web.Response:
    """
    Get DRM license from a remote service using client's CDM.

    The server does NOT need a CDM - it just facilitates the license request
    using the client's pre-authenticated session. The client decrypts using
    their own CDM.
    """
+ --- + summary: Get DRM license from remote service + description: Request license acquisition using client session (server does not need CDM) + parameters: + - name: service + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - title + - track_id + - challenge + properties: + title: + type: string + description: Title identifier + track_id: + type: string + description: Track ID for license + challenge: + type: string + description: Base64-encoded license challenge from client's CDM + session: + type: integer + description: CDM session ID + profile: + type: string + description: Profile to use + pre_authenticated_session: + type: object + description: Client's pre-authenticated session + responses: + '200': + description: License response + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: success + license: + type: string + description: Base64-encoded license response + session: + type: object + description: Updated session data + '400': + description: Invalid request + '401': + description: Authentication required + '500': + description: Server error + """ + service_tag = request.match_info.get("service") + + try: + data = await request.json() + except Exception: + return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400) + + # Validate required parameters + title = data.get("title") + track_id = data.get("track_id") + challenge = data.get("challenge") + + if not all([title, track_id, challenge]): + return web.json_response( + { + "status": "error", + "message": "Missing required parameters: title, track_id, challenge" + }, + status=400 + ) + + normalized_service = validate_service(service_tag) + if not normalized_service: + return web.json_response( + {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, + status=400 + ) + + try: + profile 
= data.get("profile") + + service_config_path = Services.get_path(normalized_service) / config.filenames.config + if service_config_path.exists(): + service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8")) + else: + service_config = {} + merge_dict(config.services.get(normalized_service), service_config) + + @click.command() + @click.pass_context + def dummy_service(ctx: click.Context) -> None: + pass + + # Handle proxy configuration + # Client MUST send resolved proxy with credentials (e.g., http://user:pass@host:port) + # Server does NOT resolve proxy providers - client must do that + proxy_param = data.get("proxy") + no_proxy = data.get("no_proxy", False) + + if proxy_param and not no_proxy: + import re + + # Validate that client sent a fully resolved proxy URL + if re.match(r"^https?://", proxy_param): + log.info("Using client-resolved proxy with credentials") + else: + # Reject unresolved proxy parameters + log.error(f"[SECURITY] Client sent unresolved proxy parameter: {proxy_param}") + return web.json_response({ + "status": "error", + "error_code": "INVALID_PROXY", + "message": f"Proxy must be a fully resolved URL (http://... or https://...). " + f"Cannot use proxy provider shortcuts like '{proxy_param}'. " + f"Please resolve the proxy on the client side before sending to server." 
+ }, status=400) + + ctx = click.Context(dummy_service) + ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=profile) + ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy} + + service_module = Services.load(normalized_service) + + dummy_service.name = normalized_service + ctx.invoked_subcommand = normalized_service + + service_ctx = click.Context(dummy_service, parent=ctx) + service_ctx.obj = ctx.obj + + service_kwargs = {"title": title} + + # Add additional parameters + for key, value in data.items(): + if key not in ["title", "track_id", "challenge", "session", "profile", "proxy", "no_proxy", "pre_authenticated_session", "credential", "cookies"]: + service_kwargs[key] = value + + # Get service parameters + service_init_params = inspect.signature(service_module.__init__).parameters + + # Extract defaults + if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"): + for param in service_module.cli.params: + if hasattr(param, "name") and param.name not in service_kwargs: + if hasattr(param, "default") and param.default is not None: + service_kwargs[param.name] = param.default + + # Handle required parameters + for param_name, param_info in service_init_params.items(): + if param_name not in service_kwargs and param_name not in ["self", "ctx"]: + if param_info.default is inspect.Parameter.empty: + if param_name == "meta_lang": + service_kwargs[param_name] = None + elif param_name == "movie": + service_kwargs[param_name] = False + + # Filter to valid parameters + filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params} + + service_instance = service_module(service_ctx, **filtered_kwargs) + + # Authenticate with client-provided or server-side auth + cookies, credential, pre_authenticated_session, session_error = get_auth_from_request(data, normalized_service, profile) + + # Check for session expiry + if session_error == "SESSION_EXPIRED": + return web.json_response({ + "status": 
"error", + "error_code": "SESSION_EXPIRED", + "message": f"Session expired for {normalized_service}. Please re-authenticate." + }, status=401) + + try: + if pre_authenticated_session: + # Use pre-authenticated session sent by client (server is stateless) + deserialize_session(pre_authenticated_session, service_instance.session) + else: + # Authenticate with credentials/cookies + if not cookies and not credential: + # No auth data available - tell client to authenticate + return web.json_response({ + "status": "error", + "error_code": "AUTH_REQUIRED", + "message": f"Authentication required for {normalized_service}. No credentials or session available." + }, status=401) + + service_instance.authenticate(cookies, credential) + except Exception as auth_error: + # Authentication failed - tell client to re-authenticate + log.warning(f"Authentication failed for {normalized_service}: {auth_error}") + return web.json_response({ + "status": "error", + "error_code": "AUTH_REQUIRED", + "message": f"Authentication failed for {normalized_service}. Please authenticate locally." 
+ }, status=401) + + # Get titles to find the correct one + titles = service_instance.get_titles() + if hasattr(titles, "__iter__") and not isinstance(titles, str): + first_title = list(titles)[0] + else: + first_title = titles + + # Get tracks to find license URL + tracks = service_instance.get_tracks(first_title) + + # Find the track with the matching ID + target_track = None + for track in tracks.videos + tracks.audio: + if str(track.id) == str(track_id) or track.id == track_id: + target_track = track + break + + if not target_track: + return web.json_response({ + "status": "error", + "message": f"Track {track_id} not found" + }, status=404) + + # Get license URL and headers from track + if not hasattr(target_track, "drm") or not target_track.drm: + return web.json_response({ + "status": "error", + "message": f"Track {track_id} is not DRM-protected" + }, status=400) + + # Extract license information + license_url = None + license_headers = {} + + # Try to get license URL from DRM info + for drm_info in target_track.drm: + if hasattr(drm_info, "license_url"): + license_url = drm_info.license_url + if hasattr(drm_info, "license_headers"): + license_headers = drm_info.license_headers or {} + break + + if not license_url: + return web.json_response({ + "status": "error", + "message": "No license URL found for track" + }, status=400) + + # Make license request using service session + import base64 + challenge_data = base64.b64decode(challenge) + + license_response = service_instance.session.post( + license_url, + data=challenge_data, + headers=license_headers + ) + + if license_response.status_code != 200: + return web.json_response({ + "status": "error", + "message": f"License request failed: {license_response.status_code}" + }, status=500) + + # Return base64-encoded license + license_b64 = base64.b64encode(license_response.content).decode("utf-8") + + # Serialize session data + session_data = serialize_session(service_instance.session) + + return 
web.json_response({ + "status": "success", + "license": license_b64, + "session": session_data + }) + + except Exception: + log.exception("Error getting remote license") + return web.json_response({"status": "error", "message": "Internal server error while getting license"}, status=500) + + +async def remote_decrypt(request: web.Request) -> web.Response: + """ + Decrypt DRM content using server's CDM (premium users only). + + This endpoint is for premium API key holders who can use the server's + CDM infrastructure. Regular users must use their own CDM with the + license endpoint. + + --- + summary: Decrypt DRM content using server CDM + description: Use server's CDM to decrypt content (premium tier only) + parameters: + - name: service + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - title + - track_id + - pssh + properties: + title: + type: string + description: Title identifier + track_id: + type: string + description: Track ID for decryption + pssh: + type: string + description: Base64-encoded PSSH box + cdm: + type: string + description: Specific CDM to use (optional, uses default if not specified) + license_url: + type: string + description: License server URL (optional, extracted from track if not provided) + profile: + type: string + description: Profile to use + pre_authenticated_session: + type: object + description: Client's pre-authenticated session + responses: + '200': + description: Decryption keys + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: success + keys: + type: array + items: + type: object + properties: + kid: + type: string + key: + type: string + type: + type: string + session: + type: object + description: Updated session data + '400': + description: Invalid request + '401': + description: Authentication required + '403': + description: Not authorized for premium features + 
'500': + description: Server error + """ + service_tag = request.match_info.get("service") + + # Check if user is premium + api_key = get_api_key_from_request(request) + if not api_key: + return web.json_response({ + "status": "error", + "error_code": "NO_API_KEY", + "message": "API key required" + }, status=401) + + if not is_premium_user(request.app, api_key): + return web.json_response({ + "status": "error", + "error_code": "PREMIUM_REQUIRED", + "message": "This endpoint requires a premium API key. Use /api/remote/{service}/license with your own CDM instead." + }, status=403) + + try: + data = await request.json() + except Exception: + return web.json_response({"status": "error", "message": "Invalid JSON request body"}, status=400) + + # Validate required parameters + title = data.get("title") + track_id = data.get("track_id") + pssh = data.get("pssh") + + if not all([title, track_id, pssh]): + return web.json_response( + { + "status": "error", + "message": "Missing required parameters: title, track_id, pssh" + }, + status=400 + ) + + # Determine which CDM to use + requested_cdm = data.get("cdm") + if not requested_cdm: + # Use default CDM for this API key + requested_cdm = get_default_cdm(request.app, api_key) + + if not requested_cdm: + return web.json_response({ + "status": "error", + "message": "No CDM specified and no default CDM configured for your API key" + }, status=400) + + # Check if user can use this CDM + if not can_use_cdm(request.app, api_key, requested_cdm): + return web.json_response({ + "status": "error", + "error_code": "CDM_NOT_ALLOWED", + "message": f"Your API key is not authorized to use CDM: {requested_cdm}" + }, status=403) + + normalized_service = validate_service(service_tag) + if not normalized_service: + return web.json_response( + {"status": "error", "message": f"Invalid or unavailable service: {service_tag}"}, + status=400 + ) + + try: + from pywidevine.cdm import Cdm as WidevineCdm + from pywidevine.device import Device + + # Load 
the requested CDM + log.info(f"Premium user using server CDM: {requested_cdm}") + + # Get CDM device path + cdm_device_path = None + if requested_cdm.endswith(".wvd"): + # Direct path to WVD file + cdm_device_path = Path(requested_cdm) + else: + # Look in configured CDM directory + cdm_dir = config.directories.wvds + potential_path = cdm_dir / f"{requested_cdm}.wvd" + if potential_path.exists(): + cdm_device_path = potential_path + + if not cdm_device_path or not cdm_device_path.exists(): + return web.json_response({ + "status": "error", + "message": f"CDM device not found: {requested_cdm}" + }, status=404) + + # Initialize CDM + device = Device.load(cdm_device_path) + cdm = WidevineCdm.from_device(device) + + # Open CDM session + session_id = cdm.open() + + # Parse PSSH + import base64 + pssh_data = base64.b64decode(pssh) + + # Set service certificate if needed (some services require it) + # This would be service-specific + + # Get challenge + challenge = cdm.get_license_challenge(session_id, pssh_data) + + # Get license URL + license_url = data.get("license_url") + + # If no license URL provided, get it from track + if not license_url: + profile = data.get("profile") + + service_config_path = Services.get_path(normalized_service) / config.filenames.config + if service_config_path.exists(): + service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8")) + else: + service_config = {} + merge_dict(config.services.get(normalized_service), service_config) + + @click.command() + @click.pass_context + def dummy_service(ctx: click.Context) -> None: + pass + + # Handle proxy configuration + # Client MUST send resolved proxy with credentials + # Server does NOT resolve proxy providers - client must do that + proxy_param = data.get("proxy") + no_proxy = data.get("no_proxy", False) + + if proxy_param and not no_proxy: + import re + + # Validate that client sent a fully resolved proxy URL + if re.match(r"^https?://", proxy_param): + log.info("Using 
client-resolved proxy with credentials") + else: + # Reject unresolved proxy parameters + log.error(f"[SECURITY] Client sent unresolved proxy parameter: {proxy_param}") + cdm.close(session_id) + return web.json_response({ + "status": "error", + "error_code": "INVALID_PROXY", + "message": f"Proxy must be a fully resolved URL (http://... or https://...). " + f"Cannot use proxy provider shortcuts like '{proxy_param}'. " + f"Please resolve the proxy on the client side before sending to server." + }, status=400) + + ctx = click.Context(dummy_service) + ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=profile) + ctx.params = {"proxy": proxy_param, "no_proxy": no_proxy} + + service_module = Services.load(normalized_service) + dummy_service.name = normalized_service + ctx.invoked_subcommand = normalized_service + + service_ctx = click.Context(dummy_service, parent=ctx) + service_ctx.obj = ctx.obj + + service_kwargs = {"title": title} + + # Get service parameters + service_init_params = inspect.signature(service_module.__init__).parameters + + # Extract defaults + if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"): + for param in service_module.cli.params: + if hasattr(param, "name") and param.name not in service_kwargs: + if hasattr(param, "default") and param.default is not None: + service_kwargs[param.name] = param.default + + # Handle required parameters + for param_name, param_info in service_init_params.items(): + if param_name not in service_kwargs and param_name not in ["self", "ctx"]: + if param_info.default is inspect.Parameter.empty: + if param_name == "meta_lang": + service_kwargs[param_name] = None + elif param_name == "movie": + service_kwargs[param_name] = False + + # Filter to valid parameters + filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params} + + service_instance = service_module(service_ctx, **filtered_kwargs) + + # Authenticate + cookies, credential, 
pre_authenticated_session, session_error = get_auth_from_request(data, normalized_service, profile) + + if session_error == "SESSION_EXPIRED": + cdm.close(session_id) + return web.json_response({ + "status": "error", + "error_code": "SESSION_EXPIRED", + "message": f"Session expired for {normalized_service}. Please re-authenticate." + }, status=401) + + try: + if pre_authenticated_session: + deserialize_session(pre_authenticated_session, service_instance.session) + else: + if not cookies and not credential: + cdm.close(session_id) + return web.json_response({ + "status": "error", + "error_code": "AUTH_REQUIRED", + "message": f"Authentication required for {normalized_service}." + }, status=401) + service_instance.authenticate(cookies, credential) + except Exception as auth_error: + cdm.close(session_id) + log.warning(f"Authentication failed for {normalized_service}: {auth_error}") + return web.json_response({ + "status": "error", + "error_code": "AUTH_REQUIRED", + "message": f"Authentication failed for {normalized_service}.", + "details": str(auth_error) + }, status=401) + + # Get titles and tracks to find license URL + titles = service_instance.get_titles() + if hasattr(titles, "__iter__") and not isinstance(titles, str): + first_title = list(titles)[0] + else: + first_title = titles + + tracks = service_instance.get_tracks(first_title) + + # Find the track + target_track = None + for track in tracks.videos + tracks.audio: + if str(track.id) == str(track_id) or track.id == track_id: + target_track = track + break + + if not target_track: + cdm.close(session_id) + return web.json_response({ + "status": "error", + "message": f"Track {track_id} not found" + }, status=404) + + if not hasattr(target_track, "drm") or not target_track.drm: + cdm.close(session_id) + return web.json_response({ + "status": "error", + "message": f"Track {track_id} is not DRM-protected" + }, status=400) + + # Extract license URL + license_headers = {} + for drm_info in target_track.drm: + if 
hasattr(drm_info, "license_url"): + license_url = drm_info.license_url + if hasattr(drm_info, "license_headers"): + license_headers = drm_info.license_headers or {} + break + + if not license_url: + cdm.close(session_id) + return web.json_response({ + "status": "error", + "message": "No license URL found for track" + }, status=400) + + # Make license request + license_response = service_instance.session.post( + license_url, + data=challenge, + headers=license_headers + ) + + if license_response.status_code != 200: + cdm.close(session_id) + return web.json_response({ + "status": "error", + "message": f"License request failed: {license_response.status_code}" + }, status=500) + + # Parse license + cdm.parse_license(session_id, license_response.content) + + # Get keys + keys = [] + for key in cdm.get_keys(session_id): + if key.type == "CONTENT": + keys.append({ + "kid": key.kid.hex(), + "key": key.key.hex(), + "type": key.type + }) + + # Close CDM session + cdm.close(session_id) + + # Serialize session + session_data = serialize_session(service_instance.session) + + return web.json_response({ + "status": "success", + "keys": keys, + "session": session_data, + "cdm_used": requested_cdm + }) + + else: + # License URL provided directly + # Make license request (need to provide session for this) + cdm.close(session_id) + return web.json_response({ + "status": "error", + "message": "Direct license URL not yet supported, omit license_url to auto-detect from service" + }, status=400) + + except Exception: + log.exception("Error in server-side decryption") + return web.json_response({"status": "error", "message": "Internal server error during decryption"}, status=500) diff --git a/unshackle/core/api/routes.py b/unshackle/core/api/routes.py index a5202c5..a458dd6 100644 --- a/unshackle/core/api/routes.py +++ b/unshackle/core/api/routes.py @@ -8,6 +8,9 @@ from unshackle.core import __version__ from unshackle.core.api.errors import APIError, APIErrorCode, build_error_response, 
handle_api_exception from unshackle.core.api.handlers import (cancel_download_job_handler, download_handler, get_download_job_handler, list_download_jobs_handler, list_titles_handler, list_tracks_handler) +from unshackle.core.api.remote_handlers import (remote_decrypt, remote_get_chapters, remote_get_license, + remote_get_titles, remote_get_tracks, remote_list_services, + remote_search) from unshackle.core.services import Services from unshackle.core.update_checker import UpdateChecker @@ -730,6 +733,15 @@ def setup_routes(app: web.Application) -> None: app.router.add_get("/api/download/jobs/{job_id}", download_job_detail) app.router.add_delete("/api/download/jobs/{job_id}", cancel_download_job) + # Remote service endpoints + app.router.add_get("/api/remote/services", remote_list_services) + app.router.add_post("/api/remote/{service}/search", remote_search) + app.router.add_post("/api/remote/{service}/titles", remote_get_titles) + app.router.add_post("/api/remote/{service}/tracks", remote_get_tracks) + app.router.add_post("/api/remote/{service}/chapters", remote_get_chapters) + app.router.add_post("/api/remote/{service}/license", remote_get_license) + app.router.add_post("/api/remote/{service}/decrypt", remote_decrypt) + def setup_swagger(app: web.Application) -> None: """Setup Swagger UI documentation.""" @@ -754,5 +766,13 @@ def setup_swagger(app: web.Application) -> None: web.get("/api/download/jobs", download_jobs), web.get("/api/download/jobs/{job_id}", download_job_detail), web.delete("/api/download/jobs/{job_id}", cancel_download_job), + # Remote service routes + web.get("/api/remote/services", remote_list_services), + web.post("/api/remote/{service}/search", remote_search), + web.post("/api/remote/{service}/titles", remote_get_titles), + web.post("/api/remote/{service}/tracks", remote_get_tracks), + web.post("/api/remote/{service}/chapters", remote_get_chapters), + web.post("/api/remote/{service}/license", remote_get_license), + 
"""Session serialization helpers for remote services.

Converts ``requests`` session state (cookies, headers, credentials) to and
from JSON-serializable dictionaries so an authenticated session can be
transferred between a remote server and a local client.
"""

from http.cookiejar import Cookie, CookieJar
from typing import Any, Dict, Optional

import requests

from unshackle.core.credential import Credential


def serialize_session(session: requests.Session) -> Dict[str, Any]:
    """
    Serialize a requests.Session into a JSON-serializable dictionary.

    Extracts cookies, headers, and proxies that can be transferred to a
    remote client for downloading.

    Args:
        session: The requests.Session to serialize

    Returns:
        Dictionary with "cookies", "headers" and "proxies" keys
    """
    # NOTE(review): proxy URLs may embed credentials (http://user:pass@host).
    # deserialize_session deliberately ignores them, but they are still sent
    # over the wire -- consider redacting if the payload schema permits.
    session_data: Dict[str, Any] = {
        "cookies": {},
        "headers": {},
        "proxies": session.proxies.copy() if session.proxies else {},
    }

    # Cookies are keyed by name only: a later cookie with the same name
    # (e.g. for a different domain) overwrites an earlier one.
    if session.cookies:
        for cookie in session.cookies:
            session_data["cookies"][cookie.name] = {
                "value": cookie.value,
                "domain": cookie.domain,
                "path": cookie.path,
                "secure": cookie.secure,
                "expires": cookie.expires,
            }

    # Serialize headers, excluding proxy credentials which are
    # server-specific and must not leak to the client.
    if session.headers:
        for key, value in session.headers.items():
            if key.lower() not in ["proxy-authorization"]:
                session_data["headers"][key] = value

    return session_data


def deserialize_session(
    session_data: Dict[str, Any], target_session: Optional[requests.Session] = None
) -> requests.Session:
    """
    Deserialize session data into a requests.Session.

    Applies cookies and headers received from a remote server to a local
    session for downloading.

    Args:
        session_data: Dictionary containing serialized session data
        target_session: Optional existing session to update (creates new if None)

    Returns:
        requests.Session with applied session data
    """
    if target_session is None:
        target_session = requests.Session()

    # Apply cookies. Guard against explicit None domain/path values: the
    # serialized payload always contains these keys, so a plain
    # .get(key, default) would pass None straight through to cookies.set().
    for cookie_name, cookie_data in session_data.get("cookies", {}).items():
        target_session.cookies.set(
            name=cookie_name,
            value=cookie_data["value"],
            domain=cookie_data.get("domain") or "",
            path=cookie_data.get("path") or "/",
            secure=cookie_data.get("secure", False),
            expires=cookie_data.get("expires"),
        )

    # Apply headers
    if "headers" in session_data:
        target_session.headers.update(session_data["headers"])

    # Note: We don't apply proxies from remote as the local client
    # should use its own proxy configuration

    return target_session


def extract_session_tokens(session: requests.Session) -> Dict[str, Any]:
    """
    Extract authentication tokens and similar data from a session.

    Looks for common authentication patterns like Bearer tokens and
    API keys in headers.

    Args:
        session: The requests.Session to extract tokens from

    Returns:
        Dictionary containing extracted tokens
    """
    tokens: Dict[str, Any] = {}

    # Check for Authorization header
    if "Authorization" in session.headers:
        tokens["authorization"] = session.headers["Authorization"]

    # Check for common API key headers, stored under snake_case keys
    for key in ["X-API-Key", "Api-Key", "X-Auth-Token"]:
        if key in session.headers:
            tokens[key.lower().replace("-", "_")] = session.headers[key]

    return tokens


def apply_session_tokens(tokens: Dict[str, Any], target_session: requests.Session) -> None:
    """
    Apply authentication tokens to a session.

    Inverse of extract_session_tokens: maps the snake_case token keys back
    to their original header names.

    Args:
        tokens: Dictionary containing tokens to apply
        target_session: Session to apply tokens to
    """
    # Apply Authorization header
    if "authorization" in tokens:
        target_session.headers["Authorization"] = tokens["authorization"]

    # Apply other token headers
    token_header_map = {
        "x_api_key": "X-API-Key",
        "api_key": "Api-Key",
        "x_auth_token": "X-Auth-Token",
    }

    for token_key, header_name in token_header_map.items():
        if token_key in tokens:
            target_session.headers[header_name] = tokens[token_key]


def serialize_cookies(cookie_jar: Optional[CookieJar]) -> Dict[str, Any]:
    """
    Serialize a CookieJar into a JSON-serializable dictionary.

    Args:
        cookie_jar: The CookieJar to serialize

    Returns:
        Dictionary containing serialized cookies (keyed by cookie name;
        same-named cookies on different domains collide)
    """
    if not cookie_jar:
        return {}

    cookies: Dict[str, Any] = {}
    for cookie in cookie_jar:
        cookies[cookie.name] = {
            "value": cookie.value,
            "domain": cookie.domain,
            "path": cookie.path,
            "secure": cookie.secure,
            "expires": cookie.expires,
        }

    return cookies


def deserialize_cookies(cookies_data: Dict[str, Any]) -> CookieJar:
    """
    Deserialize cookies into a CookieJar.

    Args:
        cookies_data: Dictionary containing serialized cookies

    Returns:
        CookieJar with cookies
    """
    cookie_jar = CookieJar()

    for cookie_name, cookie_data in cookies_data.items():
        # Normalize a missing or None domain once, so domain_specified and
        # domain_initial_dot are derived from the same value.
        domain = cookie_data.get("domain") or ""
        cookie = Cookie(
            version=0,
            name=cookie_name,
            value=cookie_data["value"],
            port=None,
            port_specified=False,
            domain=domain,
            domain_specified=bool(domain),
            domain_initial_dot=domain.startswith("."),
            path=cookie_data.get("path") or "/",
            path_specified=True,
            secure=cookie_data.get("secure", False),
            expires=cookie_data.get("expires"),
            discard=False,
            comment=None,
            comment_url=None,
            rest={},
        )
        cookie_jar.set_cookie(cookie)

    return cookie_jar


def serialize_credential(credential: Optional[Credential]) -> Optional[Dict[str, str]]:
    """
    Serialize a Credential into a JSON-serializable dictionary.

    Args:
        credential: The Credential to serialize

    Returns:
        Dictionary containing username and password, or None
    """
    if not credential:
        return None

    return {"username": credential.username, "password": credential.password}


def deserialize_credential(credential_data: Optional[Dict[str, str]]) -> Optional[Credential]:
    """
    Deserialize credential data into a Credential object.

    Args:
        credential_data: Dictionary containing username and password

    Returns:
        Credential object or None
    """
    if not credential_data:
        return None

    return Credential(username=credential_data["username"], password=credential_data["password"])
+ """ + + def __init__(self, cache_dir: Path): + """ + Initialize local session cache. + + Args: + cache_dir: Directory to store session cache files + """ + self.cache_dir = cache_dir + self.cache_dir.mkdir(parents=True, exist_ok=True) + self.sessions_file = cache_dir / "remote_sessions.json" + + # Load existing sessions + self.sessions: Dict[str, Dict[str, Dict[str, Any]]] = self._load_sessions() + + def _load_sessions(self) -> Dict[str, Dict[str, Dict[str, Any]]]: + """Load sessions from cache file.""" + if not self.sessions_file.exists(): + return {} + + try: + data = json.loads(self.sessions_file.read_text(encoding="utf-8")) + log.debug(f"Loaded {len(data)} remote sessions from cache") + return data + except Exception as e: + log.error(f"Failed to load sessions cache: {e}") + return {} + + def _save_sessions(self) -> None: + """Save sessions to cache file.""" + try: + self.sessions_file.write_text( + json.dumps(self.sessions, indent=2, ensure_ascii=False), + encoding="utf-8" + ) + log.debug(f"Saved {len(self.sessions)} remote sessions to cache") + except Exception as e: + log.error(f"Failed to save sessions cache: {e}") + + def store_session( + self, + remote_url: str, + service_tag: str, + profile: str, + session_data: Dict[str, Any] + ) -> None: + """ + Store an authenticated session locally. 
+ + Args: + remote_url: Remote server URL (as key) + service_tag: Service tag + profile: Profile name + session_data: Authenticated session data + """ + # Create nested structure + if remote_url not in self.sessions: + self.sessions[remote_url] = {} + if service_tag not in self.sessions[remote_url]: + self.sessions[remote_url][service_tag] = {} + + # Store session with metadata + self.sessions[remote_url][service_tag][profile] = { + "session_data": session_data, + "cached_at": time.time(), + "service_tag": service_tag, + "profile": profile, + } + + self._save_sessions() + log.info(f"Cached session for {service_tag} (profile: {profile}, remote: {remote_url})") + + def get_session( + self, + remote_url: str, + service_tag: str, + profile: str + ) -> Optional[Dict[str, Any]]: + """ + Retrieve a cached session. + + Args: + remote_url: Remote server URL + service_tag: Service tag + profile: Profile name + + Returns: + Session data or None if not found/expired + """ + try: + session_entry = self.sessions[remote_url][service_tag][profile] + + # Check if expired (24 hours) + age = time.time() - session_entry["cached_at"] + if age > 86400: # 24 hours + log.info(f"Session expired for {service_tag} (age: {age:.0f}s)") + self.delete_session(remote_url, service_tag, profile) + return None + + log.debug(f"Using cached session for {service_tag} (profile: {profile})") + return session_entry["session_data"] + + except KeyError: + log.debug(f"No cached session for {service_tag} (profile: {profile})") + return None + + def has_session( + self, + remote_url: str, + service_tag: str, + profile: str + ) -> bool: + """ + Check if a valid session exists. 
+ + Args: + remote_url: Remote server URL + service_tag: Service tag + profile: Profile name + + Returns: + True if valid session exists + """ + session = self.get_session(remote_url, service_tag, profile) + return session is not None + + def delete_session( + self, + remote_url: str, + service_tag: str, + profile: str + ) -> bool: + """ + Delete a cached session. + + Args: + remote_url: Remote server URL + service_tag: Service tag + profile: Profile name + + Returns: + True if session was deleted + """ + try: + del self.sessions[remote_url][service_tag][profile] + + # Clean up empty nested dicts + if not self.sessions[remote_url][service_tag]: + del self.sessions[remote_url][service_tag] + if not self.sessions[remote_url]: + del self.sessions[remote_url] + + self._save_sessions() + log.info(f"Deleted cached session for {service_tag} (profile: {profile})") + return True + + except KeyError: + return False + + def list_sessions(self, remote_url: Optional[str] = None) -> list[Dict[str, Any]]: + """ + List all cached sessions. + + Args: + remote_url: Optional filter by remote URL + + Returns: + List of session metadata + """ + sessions = [] + + remotes = [remote_url] if remote_url else self.sessions.keys() + + for remote in remotes: + if remote not in self.sessions: + continue + + for service_tag, profiles in self.sessions[remote].items(): + for profile, entry in profiles.items(): + age = time.time() - entry["cached_at"] + + sessions.append({ + "remote_url": remote, + "service_tag": service_tag, + "profile": profile, + "cached_at": entry["cached_at"], + "age_seconds": int(age), + "expired": age > 86400, + "has_cookies": bool(entry["session_data"].get("cookies")), + "has_headers": bool(entry["session_data"].get("headers")), + }) + + return sessions + + def cleanup_expired(self) -> int: + """ + Remove expired sessions (older than 24 hours). 
+ + Returns: + Number of sessions removed + """ + removed = 0 + current_time = time.time() + + for remote_url in list(self.sessions.keys()): + for service_tag in list(self.sessions[remote_url].keys()): + for profile in list(self.sessions[remote_url][service_tag].keys()): + entry = self.sessions[remote_url][service_tag][profile] + age = current_time - entry["cached_at"] + + if age > 86400: # 24 hours + del self.sessions[remote_url][service_tag][profile] + removed += 1 + log.info(f"Removed expired session for {service_tag} (age: {age:.0f}s)") + + # Clean up empty dicts + if not self.sessions[remote_url][service_tag]: + del self.sessions[remote_url][service_tag] + if not self.sessions[remote_url]: + del self.sessions[remote_url] + + if removed > 0: + self._save_sessions() + + return removed + + +# Global instance +_local_session_cache: Optional[LocalSessionCache] = None + + +def get_local_session_cache() -> LocalSessionCache: + """ + Get the global local session cache instance. + + Returns: + LocalSessionCache instance + """ + global _local_session_cache + + if _local_session_cache is None: + from unshackle.core.config import config + cache_dir = config.directories.cache / "remote_sessions" + _local_session_cache = LocalSessionCache(cache_dir) + + # Clean up expired sessions on init + _local_session_cache.cleanup_expired() + + return _local_session_cache + + +__all__ = ["LocalSessionCache", "get_local_session_cache"] diff --git a/unshackle/core/proxies/__init__.py b/unshackle/core/proxies/__init__.py index ecb97de..4a53298 100644 --- a/unshackle/core/proxies/__init__.py +++ b/unshackle/core/proxies/__init__.py @@ -1,7 +1,8 @@ from .basic import Basic +from .gluetun import Gluetun from .hola import Hola from .nordvpn import NordVPN from .surfsharkvpn import SurfsharkVPN from .windscribevpn import WindscribeVPN -__all__ = ("Basic", "Hola", "NordVPN", "SurfsharkVPN", "WindscribeVPN") +__all__ = ("Basic", "Gluetun", "Hola", "NordVPN", "SurfsharkVPN", "WindscribeVPN") diff 
--git a/unshackle/core/proxies/gluetun.py b/unshackle/core/proxies/gluetun.py new file mode 100644 index 0000000..83986d6 --- /dev/null +++ b/unshackle/core/proxies/gluetun.py @@ -0,0 +1,1261 @@ +import atexit +import logging +import os +import re +import subprocess +import threading +import time +from typing import Optional + +import requests + +from unshackle.core import binaries +from unshackle.core.proxies.proxy import Proxy +from unshackle.core.utilities import get_country_code, get_country_name, get_debug_logger, get_ip_info + +# Global registry for cleanup on exit +_gluetun_instances: list["Gluetun"] = [] +_cleanup_lock = threading.Lock() +_cleanup_registered = False + + +def _cleanup_all_gluetun_containers(): + """Cleanup all Gluetun containers on exit.""" + # Get instances without holding the lock during cleanup + with _cleanup_lock: + instances = list(_gluetun_instances) + _gluetun_instances.clear() + + # Cleanup each instance (no lock held, so no deadlock possible) + for instance in instances: + try: + instance.cleanup() + except Exception: + pass + + +def _register_cleanup(): + """Register cleanup handlers (only once).""" + global _cleanup_registered + with _cleanup_lock: + if not _cleanup_registered: + # Only use atexit for cleanup - don't override signal handlers + # This allows Ctrl+C to work normally while still cleaning up on exit + atexit.register(_cleanup_all_gluetun_containers) + _cleanup_registered = True + + +class Gluetun(Proxy): + """ + Dynamic Gluetun VPN-to-HTTP Proxy Provider with multi-provider support. + + Automatically manages Docker containers running Gluetun for WireGuard/OpenVPN VPN connections. + Supports multiple VPN providers in a single configuration using query format: provider:region + + Supported VPN providers: windscribe, expressvpn, nordvpn, surfshark, protonvpn, mullvad, + privateinternetaccess, cyberghost, vyprvpn, torguard, and 50+ more. 
+ + Configuration example in unshackle.yaml: + proxy_providers: + gluetun: + providers: + windscribe: + vpn_type: wireguard + credentials: + private_key: YOUR_KEY + addresses: YOUR_ADDRESS + server_countries: + us: US + uk: GB + nordvpn: + vpn_type: wireguard + credentials: + private_key: YOUR_KEY + addresses: YOUR_ADDRESS + server_countries: + us: US + de: DE + # Global settings (optional) + base_port: 8888 + auto_cleanup: true + container_prefix: "unshackle-gluetun" + + Usage: + --proxy gluetun:windscribe:us + --proxy gluetun:nordvpn:de + """ + + # Mapping of common VPN provider names to Gluetun identifiers + PROVIDER_MAPPING = { + "windscribe": "windscribe", + "expressvpn": "expressvpn", + "nordvpn": "nordvpn", + "surfshark": "surfshark", + "protonvpn": "protonvpn", + "mullvad": "mullvad", + "pia": "private internet access", + "privateinternetaccess": "private internet access", + "cyberghost": "cyberghost", + "vyprvpn": "vyprvpn", + "torguard": "torguard", + "ipvanish": "ipvanish", + "purevpn": "purevpn", + } + + def __init__( + self, + providers: Optional[dict] = None, + base_port: int = 8888, + auto_cleanup: bool = True, + container_prefix: str = "unshackle-gluetun", + auth_user: Optional[str] = None, + auth_password: Optional[str] = None, + verify_ip: bool = True, + **kwargs, + ): + """ + Initialize Gluetun proxy provider with multi-provider support. 
+ + Args: + providers: Dict of VPN provider configurations + Format: { + "windscribe": { + "vpn_type": "wireguard", + "credentials": {"private_key": "...", "addresses": "..."}, + "server_countries": {"us": "US", "uk": "GB"} + }, + "nordvpn": {...} + } + base_port: Starting port for HTTP proxies (default: 8888) + auto_cleanup: Automatically remove stopped containers (default: True) + container_prefix: Docker container name prefix (default: "unshackle-gluetun") + auth_user: Optional HTTP proxy authentication username + auth_password: Optional HTTP proxy authentication password + verify_ip: Automatically verify IP and region after connection (default: True) + """ + # Check Docker availability using binaries module + if not binaries.Docker: + raise RuntimeError( + "Docker is not available. Please install Docker to use Gluetun proxy.\n" + "Visit: https://docs.docker.com/engine/install/" + ) + + self.providers = providers or {} + self.base_port = base_port + self.auto_cleanup = auto_cleanup + self.container_prefix = container_prefix + self.auth_user = auth_user + self.auth_password = auth_password + self.verify_ip = verify_ip + + # Track active containers: {query_key: {"container_name": ..., "port": ..., ...}} + self.active_containers = {} + + # Lock for thread-safe port allocation + self._port_lock = threading.Lock() + + # Validate provider configurations + for provider_name, config in self.providers.items(): + self._validate_provider_config(provider_name, config) + + # Register this instance for cleanup on exit + _register_cleanup() + with _cleanup_lock: + _gluetun_instances.append(self) + + # Log initialization + debug_logger = get_debug_logger() + if debug_logger: + debug_logger.log( + level="INFO", + operation="gluetun_init", + message=f"Gluetun proxy provider initialized with {len(self.providers)} provider(s)", + context={ + "providers": list(self.providers.keys()), + "base_port": base_port, + "auto_cleanup": auto_cleanup, + "verify_ip": verify_ip, + 
"container_prefix": container_prefix, + }, + ) + + def __repr__(self) -> str: + provider_count = len(self.providers) + return f"Gluetun ({provider_count} provider{['s', ''][provider_count == 1]})" + + def get_proxy(self, query: str) -> Optional[str]: + """ + Get an HTTP proxy URI for a Gluetun VPN connection. + + Args: + query: Query format: "provider:region" (e.g., "windscribe:us", "nordvpn:uk") + + Returns: + HTTP proxy URI or None if unavailable + """ + # Parse query + parts = query.split(":") + if len(parts) != 2: + raise ValueError( + f"Invalid query format: '{query}'. Expected 'provider:region' (e.g., 'windscribe:us')" + ) + + provider_name = parts[0].lower() + region = parts[1].lower() + + # Check if provider is configured + if provider_name not in self.providers: + available = ", ".join(self.providers.keys()) + raise ValueError( + f"VPN provider '{provider_name}' not configured. Available providers: {available}" + ) + + # Create query key for tracking + query_key = f"{provider_name}:{region}" + container_name = f"{self.container_prefix}-{provider_name}-{region}" + + debug_logger = get_debug_logger() + + # Check if container already exists (in memory OR in Docker) + # This handles multiple concurrent Unshackle sessions + if query_key in self.active_containers: + container = self.active_containers[query_key] + if self._is_container_running(container["container_name"]): + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_container_reuse", + message=f"Reusing existing container (in-memory): {query_key}", + context={ + "query_key": query_key, + "container_name": container["container_name"], + "port": container["port"], + }, + ) + # Re-verify if needed + if self.verify_ip: + self._verify_container(query_key) + return self._build_proxy_uri(container["port"]) + else: + # Not in memory, but might exist in Docker (from another session) + existing_info = self._get_existing_container_info(container_name) + if existing_info: + # Container exists 
in Docker, reuse it + self.active_containers[query_key] = existing_info + if debug_logger: + debug_logger.log( + level="INFO", + operation="gluetun_container_reuse_docker", + message=f"Reusing existing Docker container: {query_key}", + context={ + "query_key": query_key, + "container_name": container_name, + "port": existing_info["port"], + }, + ) + # Re-verify if needed + if self.verify_ip: + self._verify_container(query_key) + return self._build_proxy_uri(existing_info["port"]) + + # Get provider configuration + provider_config = self.providers[provider_name] + + # Determine server location + server_countries = provider_config.get("server_countries", {}) + server_cities = provider_config.get("server_cities", {}) + server_hostnames = provider_config.get("server_hostnames", {}) + + country = server_countries.get(region) + city = server_cities.get(region) + hostname = server_hostnames.get(region) + + # Check if region is a specific server pattern (e.g., us1239, uk5678) + # Format: 2-letter country code + number + specific_server_match = re.match(r"^([a-z]{2})(\d+)$", region, re.IGNORECASE) + + if specific_server_match and not country and not city and not hostname: + # Specific server requested (e.g., us1239) + country_code = specific_server_match.group(1).upper() + server_num = specific_server_match.group(2) + + # Build hostname based on provider + hostname = self._build_server_hostname(provider_name, country_code, server_num) + country = country_code # Set country for verification + + # If not explicitly mapped and not a specific server, try to use query as country code + elif not country and not city and not hostname: + if re.match(r"^[a-z]{2}$", region): + # Convert country code to full name for Gluetun + country = get_country_name(region) + if not country: + raise ValueError( + f"Country code '{region}' not recognized. " + f"Configure it in server_countries or use a valid ISO 3166-1 alpha-2 code." 
+ ) + else: + raise ValueError( + f"Region '{region}' not recognized for provider '{provider_name}'. " + f"Configure it in server_countries or server_cities, or use a 2-letter country code." + ) + + # Remove any stopped container with the same name + self._remove_stopped_container(container_name) + + # Find available port + port = self._get_available_port() + + # Create container (name already set above) + try: + self._create_container( + container_name=container_name, + port=port, + provider_name=provider_name, + provider_config=provider_config, + country=country, + city=city, + hostname=hostname, + ) + + # Store container info + self.active_containers[query_key] = { + "container_name": container_name, + "port": port, + "provider": provider_name, + "region": region, + "country": country, + "city": city, + "hostname": hostname, + } + + # Wait for container to be ready (60s timeout for VPN connection) + if not self._wait_for_container(container_name, timeout=60): + # Get container logs for better error message + logs = self._get_container_logs(container_name, tail=30) + error_msg = f"Gluetun container '{container_name}' failed to start" + if hasattr(self, '_last_wait_error') and self._last_wait_error: + error_msg += f": {self._last_wait_error}" + if logs: + # Extract last few relevant lines + log_lines = [line for line in logs.strip().split('\n') if line.strip()][-5:] + error_msg += "\nRecent logs:\n" + "\n".join(log_lines) + raise RuntimeError(error_msg) + + # Verify IP and region if enabled + if self.verify_ip: + self._verify_container(query_key) + + return self._build_proxy_uri(port) + + except Exception as e: + # Cleanup on failure + self._remove_container(container_name) + if query_key in self.active_containers: + del self.active_containers[query_key] + raise RuntimeError(f"Failed to create Gluetun container: {e}") + + def cleanup(self): + """Stop and remove all managed Gluetun containers.""" + debug_logger = get_debug_logger() + container_count = 
len(self.active_containers) + + if container_count > 0 and debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_cleanup_start", + message=f"Cleaning up {container_count} Gluetun container(s)", + context={ + "container_count": container_count, + "containers": list(self.active_containers.keys()), + }, + ) + + for query_key, container_info in list(self.active_containers.items()): + container_name = container_info["container_name"] + self._remove_container(container_name) + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_container_removed", + message=f"Removed Gluetun container: {container_name}", + context={ + "query_key": query_key, + "container_name": container_name, + }, + ) + + self.active_containers.clear() + + if container_count > 0 and debug_logger: + debug_logger.log( + level="INFO", + operation="gluetun_cleanup_complete", + message=f"Cleanup complete: removed {container_count} container(s)", + context={"container_count": container_count}, + success=True, + ) + + def _validate_provider_config(self, provider_name: str, config: dict): + """Validate a provider's configuration.""" + vpn_type = config.get("vpn_type", "wireguard").lower() + credentials = config.get("credentials", {}) + + if vpn_type not in ["wireguard", "openvpn"]: + raise ValueError( + f"Provider '{provider_name}': Invalid vpn_type '{vpn_type}'. 
Use 'wireguard' or 'openvpn'" + ) + + if vpn_type == "wireguard": + # private_key is always required for WireGuard + if "private_key" not in credentials: + raise ValueError( + f"Provider '{provider_name}': WireGuard requires 'private_key' in credentials" + ) + + # Provider-specific WireGuard requirements based on Gluetun wiki: + # - NordVPN, ProtonVPN: only private_key required + # - Windscribe: private_key, addresses, AND preshared_key required (preshared_key MUST be set) + # - Surfshark, Mullvad, IVPN: private_key AND addresses required + provider_lower = provider_name.lower() + + # Windscribe requires preshared_key (can be empty string, but must be set) + if provider_lower == "windscribe": + if "preshared_key" not in credentials: + raise ValueError( + f"Provider '{provider_name}': Windscribe WireGuard requires 'preshared_key' in credentials " + "(can be empty string, but must be set). Get it from windscribe.com/getconfig/wireguard" + ) + if "addresses" not in credentials: + raise ValueError( + f"Provider '{provider_name}': Windscribe WireGuard requires 'addresses' in credentials. 
" + "Get it from windscribe.com/getconfig/wireguard" + ) + + # Providers that require addresses (but not preshared_key) + elif provider_lower in ["surfshark", "mullvad", "ivpn"]: + if "addresses" not in credentials: + raise ValueError( + f"Provider '{provider_name}': WireGuard requires 'addresses' in credentials" + ) + + elif vpn_type == "openvpn": + if "username" not in credentials or "password" not in credentials: + raise ValueError( + f"Provider '{provider_name}': OpenVPN requires 'username' and 'password' in credentials" + ) + + def _get_available_port(self) -> int: + """Find an available port starting from base_port (thread-safe).""" + with self._port_lock: + used_ports = {info["port"] for info in self.active_containers.values()} + port = self.base_port + while port in used_ports or self._is_port_in_use(port): + port += 1 + return port + + def _is_port_in_use(self, port: int) -> bool: + """Check if a port is in use on the system or by any Docker container.""" + import socket + + # First check if the port is available on the system + try: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", port)) + except OSError: + # Port is in use by something on the system + return True + + # Also check Docker containers (in case of port forwarding) + try: + result = subprocess.run( + ["docker", "ps", "--format", "{{.Ports}}"], + capture_output=True, + text=True, + timeout=5, + ) + if result.returncode == 0: + return f":{port}->" in result.stdout or f"0.0.0.0:{port}" in result.stdout + return False + except (subprocess.TimeoutExpired, FileNotFoundError): + return False + + def _build_server_hostname(self, provider_name: str, country_code: str, server_num: str) -> str: + """ + Build a server hostname for specific server selection. 
+ + Args: + provider_name: VPN provider name (e.g., "nordvpn") + country_code: 2-letter country code (e.g., "US") + server_num: Server number (e.g., "1239") + + Returns: + Server hostname (e.g., "us1239.nordvpn.com") + """ + # Convert to lowercase for hostname + country_lower = country_code.lower() + + # Provider-specific hostname formats + hostname_formats = { + "nordvpn": f"{country_lower}{server_num}.nordvpn.com", + "surfshark": f"{country_lower}-{server_num}.prod.surfshark.com", + "expressvpn": f"{country_lower}-{server_num}.expressvpn.com", + "cyberghost": f"{country_lower}-s{server_num}.cg-dialup.net", + # Generic fallback for other providers + } + + # Get provider-specific format or use generic + if provider_name in hostname_formats: + return hostname_formats[provider_name] + else: + # Generic format: country_code + server_num + return f"{country_lower}{server_num}" + + def _ensure_image_available(self, image: str = "qmcgaw/gluetun:latest") -> bool: + """ + Ensure the Gluetun Docker image is available locally. + + If the image is not present, it will be pulled. This prevents + the container creation from timing out during the first run. 
+ + Args: + image: Docker image name with tag + + Returns: + True if image is available, False otherwise + """ + log = logging.getLogger("Gluetun") + + # Check if image exists locally + try: + result = subprocess.run( + ["docker", "image", "inspect", image], + capture_output=True, + text=True, + timeout=10, + encoding="utf-8", + errors="replace", + ) + if result.returncode == 0: + return True + log.debug(f"Image inspect failed: {result.stderr}") + except subprocess.TimeoutExpired: + log.warning("Docker image inspect timed out") + except FileNotFoundError: + log.error("Docker command not found - is Docker installed and in PATH?") + return False + + # Image not found, pull it + log.info(f"Pulling Docker image {image}...") + try: + result = subprocess.run( + ["docker", "pull", image], + capture_output=True, + text=True, + timeout=300, # 5 minutes for pull + encoding="utf-8", + errors="replace", + ) + if result.returncode == 0: + return True + log.error(f"Docker pull failed: {result.stderr}") + return False + except subprocess.TimeoutExpired: + raise RuntimeError(f"Timed out pulling Docker image '{image}'") + + def _create_container( + self, + container_name: str, + port: int, + provider_name: str, + provider_config: dict, + country: Optional[str] = None, + city: Optional[str] = None, + hostname: Optional[str] = None, + ): + """Create and start a Gluetun Docker container.""" + debug_logger = get_debug_logger() + start_time = time.time() + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_container_create_start", + message=f"Creating Gluetun container: {container_name}", + context={ + "container_name": container_name, + "port": port, + "provider": provider_name, + "country": country, + "city": city, + "hostname": hostname, + }, + ) + + # Ensure the Gluetun image is available (pulls if needed) + gluetun_image = "qmcgaw/gluetun:latest" + if not self._ensure_image_available(gluetun_image): + if debug_logger: + debug_logger.log( + level="ERROR", + 
operation="gluetun_image_pull_failed", + message=f"Failed to pull Docker image: {gluetun_image}", + success=False, + ) + raise RuntimeError(f"Failed to ensure Gluetun Docker image '{gluetun_image}' is available") + + vpn_type = provider_config.get("vpn_type", "wireguard").lower() + credentials = provider_config.get("credentials", {}) + extra_env = provider_config.get("extra_env", {}) + + # Normalize provider name + gluetun_provider = self.PROVIDER_MAPPING.get(provider_name.lower(), provider_name.lower()) + + # Build environment variables + env_vars = { + "VPN_SERVICE_PROVIDER": gluetun_provider, + "VPN_TYPE": vpn_type, + "HTTPPROXY": "on", + "HTTPPROXY_LISTENING_ADDRESS": ":8888", + "HTTPPROXY_LOG": "on", + "TZ": os.environ.get("TZ", "UTC"), + "LOG_LEVEL": "info", + } + + # Add credentials + if vpn_type == "wireguard": + env_vars["WIREGUARD_PRIVATE_KEY"] = credentials["private_key"] + # addresses is optional - not needed for some providers like NordVPN + if "addresses" in credentials: + env_vars["WIREGUARD_ADDRESSES"] = credentials["addresses"] + # preshared_key is required for Windscribe, optional for others + if "preshared_key" in credentials: + env_vars["WIREGUARD_PRESHARED_KEY"] = credentials["preshared_key"] + elif vpn_type == "openvpn": + env_vars["OPENVPN_USER"] = credentials.get("username", "") + env_vars["OPENVPN_PASSWORD"] = credentials.get("password", "") + + # Add server location + # Priority: hostname > country + city > country only + # Note: Different providers support different server selection variables + # - Most providers: SERVER_COUNTRIES, SERVER_CITIES + # - Windscribe, VyprVPN, VPN Secure: SERVER_REGIONS, SERVER_CITIES (no SERVER_COUNTRIES) + if hostname: + # Specific server hostname requested (e.g., us1239.nordvpn.com) + env_vars["SERVER_HOSTNAMES"] = hostname + else: + # Providers that use SERVER_REGIONS instead of SERVER_COUNTRIES + region_only_providers = {"windscribe", "vyprvpn", "vpn secure"} + uses_regions = gluetun_provider in 
region_only_providers + + # Use country/city selection + if country: + if uses_regions: + env_vars["SERVER_REGIONS"] = country + else: + env_vars["SERVER_COUNTRIES"] = country + if city: + env_vars["SERVER_CITIES"] = city + + # Add authentication if configured + if self.auth_user: + env_vars["HTTPPROXY_USER"] = self.auth_user + if self.auth_password: + env_vars["HTTPPROXY_PASSWORD"] = self.auth_password + + # Merge extra environment variables + env_vars.update(extra_env) + + # Build docker run command + cmd = [ + "docker", + "run", + "-d", + "--name", + container_name, + "--cap-add=NET_ADMIN", + "--device=/dev/net/tun", + "-p", + f"127.0.0.1:{port}:8888/tcp", + ] + + # Add environment variables + for key, value in env_vars.items(): + cmd.extend(["-e", f"{key}={value}"]) + + # Add Gluetun image + cmd.append("qmcgaw/gluetun:latest") + + # Execute docker run + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=30, + encoding="utf-8", + errors="replace", + ) + + if result.returncode != 0: + error_msg = result.stderr or "unknown error" + if debug_logger: + debug_logger.log( + level="ERROR", + operation="gluetun_container_create_failed", + message=f"Docker run failed for {container_name}", + context={ + "container_name": container_name, + "return_code": result.returncode, + "stderr": error_msg, + }, + success=False, + duration_ms=(time.time() - start_time) * 1000, + ) + raise RuntimeError(f"Docker run failed: {error_msg}") + + # Log successful container creation + if debug_logger: + duration_ms = (time.time() - start_time) * 1000 + debug_logger.log( + level="INFO", + operation="gluetun_container_created", + message=f"Gluetun container created: {container_name}", + context={ + "container_name": container_name, + "port": port, + "provider": provider_name, + "vpn_type": vpn_type, + "country": country, + "city": city, + "hostname": hostname, + "container_id": result.stdout.strip()[:12] if result.stdout else None, + }, + success=True, + 
duration_ms=duration_ms, + ) + + except subprocess.TimeoutExpired: + if debug_logger: + debug_logger.log( + level="ERROR", + operation="gluetun_container_create_timeout", + message=f"Docker run timed out for {container_name}", + context={"container_name": container_name}, + success=False, + duration_ms=(time.time() - start_time) * 1000, + ) + raise RuntimeError("Docker run command timed out") + + def _is_container_running(self, container_name: str) -> bool: + """Check if a Docker container is running.""" + try: + result = subprocess.run( + ["docker", "ps", "--filter", f"name={container_name}", "--format", "{{.Names}}"], + capture_output=True, + text=True, + timeout=5, + ) + return result.returncode == 0 and container_name in result.stdout + except (subprocess.TimeoutExpired, FileNotFoundError): + return False + + def _get_existing_container_info(self, container_name: str) -> Optional[dict]: + """ + Check if a container exists in Docker and get its info. + + This handles multiple Unshackle sessions - if another session already + created the container, we'll reuse it instead of trying to create a duplicate. 
+ + Args: + container_name: Name of the container to check + + Returns: + Dict with container info if exists and running, None otherwise + """ + try: + # Check if container is running + if not self._is_container_running(container_name): + return None + + # Get container port mapping + # Format: "127.0.0.1:8888->8888/tcp" + result = subprocess.run( + ["docker", "inspect", container_name, "--format", "{{.NetworkSettings.Ports}}"], + capture_output=True, + text=True, + timeout=5, + ) + + if result.returncode != 0: + return None + + # Parse port from output like "map[8888/tcp:[{127.0.0.1 8888}]]" + port_match = re.search(r'127\.0\.0\.1\s+(\d+)', result.stdout) + if not port_match: + return None + + port = int(port_match.group(1)) + + # Extract provider and region from container name + # Format: unshackle-gluetun-provider-region + name_pattern = f"{self.container_prefix}-(.+)-([^-]+)$" + name_match = re.match(name_pattern, container_name) + if not name_match: + return None + + provider_name = name_match.group(1) + region = name_match.group(2) + + # Get expected country and hostname from config (if available) + country = None + hostname = None + + # Check if region is a specific server (e.g., us1239) + specific_server_match = re.match(r"^([a-z]{2})(\d+)$", region, re.IGNORECASE) + if specific_server_match: + country_code = specific_server_match.group(1).upper() + server_num = specific_server_match.group(2) + hostname = self._build_server_hostname(provider_name, country_code, server_num) + country = country_code + + # Otherwise check config + elif provider_name in self.providers: + provider_config = self.providers[provider_name] + server_countries = provider_config.get("server_countries", {}) + country = server_countries.get(region) + + if not country and re.match(r"^[a-z]{2}$", region): + country = region.upper() + + return { + "container_name": container_name, + "port": port, + "provider": provider_name, + "region": region, + "country": country, + "city": None, + 
"hostname": hostname, + } + + except (subprocess.TimeoutExpired, FileNotFoundError, ValueError): + return None + + def _wait_for_container(self, container_name: str, timeout: int = 60) -> bool: + """ + Wait for Gluetun container to be ready by checking logs for proxy readiness. + + Gluetun logs "http proxy listening" when the HTTP proxy is ready to accept connections. + + Args: + container_name: Name of the container to wait for + timeout: Maximum time to wait in seconds (default: 60) + + Returns: + True if container is ready, False if it failed or timed out + """ + log = logging.getLogger("Gluetun") + debug_logger = get_debug_logger() + start_time = time.time() + last_error = None + last_status = None + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_container_wait_start", + message=f"Waiting for container to be ready: {container_name}", + context={"container_name": container_name, "timeout": timeout}, + ) + + while time.time() - start_time < timeout: + try: + # First check if container is still running + if not self._is_container_running(container_name): + # Container may have exited - check if it crashed + exit_info = self._get_container_exit_info(container_name) + if exit_info: + last_error = f"Container exited with code {exit_info.get('exit_code', 'unknown')}" + time.sleep(1) + continue + + # Check logs for readiness indicators + result = subprocess.run( + ["docker", "logs", container_name, "--tail", "100"], + capture_output=True, + text=True, + timeout=5, + encoding="utf-8", + errors="replace", + ) + + if result.returncode == 0: + # Combine stdout and stderr for checking (handle None values) + stdout = result.stdout or "" + stderr = result.stderr or "" + all_logs = (stdout + stderr).lower() + + # Gluetun needs both proxy listening AND VPN connected + # The proxy starts before VPN is ready, so we need to wait for VPN + proxy_ready = "[http proxy] listening" in all_logs + vpn_ready = "initialization sequence completed" in all_logs + 
+ # Log status changes to help debug slow connections + current_status = None + if vpn_ready: + current_status = "VPN connected" + elif "peer connection initiated" in all_logs: + current_status = "VPN connecting..." + elif "[openvpn]" in all_logs or "[wireguard]" in all_logs: + current_status = "Starting VPN..." + elif "[firewall]" in all_logs: + current_status = "Configuring firewall..." + + if current_status and current_status != last_status: + log.info(current_status) + last_status = current_status + + if proxy_ready and vpn_ready: + # Give a brief moment for the proxy to fully initialize + time.sleep(1) + duration_ms = (time.time() - start_time) * 1000 + if debug_logger: + debug_logger.log( + level="INFO", + operation="gluetun_container_ready", + message=f"Gluetun container is ready: {container_name}", + context={ + "container_name": container_name, + "proxy_ready": proxy_ready, + "vpn_ready": vpn_ready, + }, + success=True, + duration_ms=duration_ms, + ) + return True + + # Check for fatal errors that indicate VPN connection failure + error_indicators = [ + "fatal", + "cannot connect", + "authentication failed", + "invalid credentials", + "connection refused", + "no valid servers", + ] + + for error in error_indicators: + if error in all_logs: + # Extract the error line for better messaging + for line in (stdout + stderr).split('\n'): + if error in line.lower(): + last_error = line.strip() + break + # Fatal errors mean we should stop waiting + if "fatal" in all_logs or "invalid credentials" in all_logs: + return False + + except subprocess.TimeoutExpired: + pass + + time.sleep(2) + + # Store the last error for potential logging + if last_error: + self._last_wait_error = last_error + + # Log timeout/failure + duration_ms = (time.time() - start_time) * 1000 + if debug_logger: + debug_logger.log( + level="ERROR", + operation="gluetun_container_wait_timeout", + message=f"Gluetun container failed to become ready: {container_name}", + context={ + "container_name": 
container_name, + "timeout": timeout, + "last_error": last_error, + "last_status": last_status, + }, + success=False, + duration_ms=duration_ms, + ) + return False + + def _get_container_exit_info(self, container_name: str) -> Optional[dict]: + """Get exit information for a stopped container.""" + try: + result = subprocess.run( + [ + "docker", "inspect", container_name, + "--format", "{{.State.ExitCode}}:{{.State.Error}}" + ], + capture_output=True, + text=True, + timeout=5, + ) + if result.returncode == 0: + parts = result.stdout.strip().split(":", 1) + return { + "exit_code": int(parts[0]) if parts[0].isdigit() else -1, + "error": parts[1] if len(parts) > 1 else "" + } + return None + except (subprocess.TimeoutExpired, FileNotFoundError, ValueError): + return None + + def _get_container_logs(self, container_name: str, tail: int = 50) -> str: + """Get recent logs from a container for error reporting.""" + try: + result = subprocess.run( + ["docker", "logs", container_name, "--tail", str(tail)], + capture_output=True, + text=True, + timeout=10, + encoding="utf-8", + errors="replace", + ) + return (result.stdout or "") + (result.stderr or "") + except (subprocess.TimeoutExpired, FileNotFoundError): + return "" + + def _verify_container(self, query_key: str, max_retries: int = 3): + """ + Verify container's VPN IP and region using ipinfo.io lookup. + + Uses the shared get_ip_info function with a session configured to use + the Gluetun proxy. Retries with exponential backoff if the network + isn't ready immediately after the VPN connects. 
+ + Args: + query_key: The container query key (provider:region) + max_retries: Maximum number of retry attempts (default: 3) + + Raises: + RuntimeError: If verification fails after all retries + """ + debug_logger = get_debug_logger() + start_time = time.time() + + if query_key not in self.active_containers: + return + + container = self.active_containers[query_key] + proxy_url = self._build_proxy_uri(container["port"]) + expected_country = container.get("country", "").upper() + + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_verify_start", + message=f"Verifying VPN IP for: {query_key}", + context={ + "query_key": query_key, + "container_name": container.get("container_name"), + "expected_country": expected_country, + "max_retries": max_retries, + }, + ) + + last_error = None + + # Create a session with the proxy configured + session = requests.Session() + session.proxies = {"http": proxy_url, "https": proxy_url} + + # Retry with exponential backoff + for attempt in range(max_retries): + try: + # Get external IP through the proxy using shared utility + ip_info = get_ip_info(session) + + if ip_info: + actual_country = ip_info.get("country", "").upper() + + # Check if country matches (if we have an expected country) + # ipinfo.io returns country codes (CA), but we may have full names (Canada) + # Normalize both to country codes for comparison using shared utility + if expected_country: + # Convert expected country name to code if it's a full name + expected_code = get_country_code(expected_country) or expected_country + expected_code = expected_code.upper() + + if actual_country != expected_code: + duration_ms = (time.time() - start_time) * 1000 + if debug_logger: + debug_logger.log( + level="ERROR", + operation="gluetun_verify_mismatch", + message=f"Region mismatch for {query_key}", + context={ + "query_key": query_key, + "expected_country": expected_code, + "actual_country": actual_country, + "ip": ip_info.get("ip"), + "city": 
ip_info.get("city"), + "org": ip_info.get("org"), + }, + success=False, + duration_ms=duration_ms, + ) + raise RuntimeError( + f"Region mismatch for {container['provider']}:{container['region']}: " + f"Expected '{expected_code}' but got '{actual_country}' " + f"(IP: {ip_info.get('ip')}, City: {ip_info.get('city')})" + ) + + # Verification successful + duration_ms = (time.time() - start_time) * 1000 + if debug_logger: + debug_logger.log( + level="INFO", + operation="gluetun_verify_success", + message=f"VPN IP verified for: {query_key}", + context={ + "query_key": query_key, + "ip": ip_info.get("ip"), + "country": actual_country, + "city": ip_info.get("city"), + "org": ip_info.get("org"), + "attempts": attempt + 1, + }, + success=True, + duration_ms=duration_ms, + ) + return + + # ip_info was None, retry + last_error = "Failed to get IP info from ipinfo.io" + + except RuntimeError: + raise # Re-raise region mismatch errors immediately + except Exception as e: + last_error = str(e) + if debug_logger: + debug_logger.log( + level="DEBUG", + operation="gluetun_verify_retry", + message=f"Verification attempt {attempt + 1} failed, retrying", + context={ + "query_key": query_key, + "attempt": attempt + 1, + "error": last_error, + }, + ) + + # Wait before retry (exponential backoff) + if attempt < max_retries - 1: + wait_time = 2 ** attempt # 1, 2, 4 seconds + time.sleep(wait_time) + + # All retries exhausted + duration_ms = (time.time() - start_time) * 1000 + if debug_logger: + debug_logger.log( + level="ERROR", + operation="gluetun_verify_failed", + message=f"VPN verification failed after {max_retries} attempts", + context={ + "query_key": query_key, + "max_retries": max_retries, + "last_error": last_error, + }, + success=False, + duration_ms=duration_ms, + ) + raise RuntimeError( + f"Failed to verify VPN IP for {container['provider']}:{container['region']} " + f"after {max_retries} attempts. 
Last error: {last_error}" + ) + + def _remove_stopped_container(self, container_name: str) -> bool: + """ + Remove a stopped container with the given name if it exists. + + This prevents "container name already in use" errors when a previous + container wasn't properly cleaned up. + + Args: + container_name: Name of the container to check and remove + + Returns: + True if a container was removed, False otherwise + """ + try: + # Check if container exists (running or stopped) + result = subprocess.run( + ["docker", "ps", "-a", "--filter", f"name=^{container_name}$", "--format", "{{.Names}}:{{.Status}}"], + capture_output=True, + text=True, + timeout=5, + ) + + if result.returncode != 0 or not result.stdout.strip(): + return False + + # Parse status - format is "name:Up 2 hours" or "name:Exited (0) 2 hours ago" + output = result.stdout.strip() + if container_name not in output: + return False + + # Check if container is stopped (not running) + if "Exited" in output or "Created" in output or "Dead" in output: + # Container exists but is stopped - remove it + subprocess.run( + ["docker", "rm", "-f", container_name], + capture_output=True, + text=True, + timeout=10, + ) + return True + + return False + + except (subprocess.TimeoutExpired, FileNotFoundError): + return False + + def _remove_container(self, container_name: str): + """Stop and remove a Docker container.""" + try: + if self.auto_cleanup: + # Use docker rm -f to force remove (stops and removes in one command) + subprocess.run( + ["docker", "rm", "-f", container_name], + capture_output=True, + text=True, + timeout=10, + ) + else: + # Just stop the container + subprocess.run( + ["docker", "stop", container_name], + capture_output=True, + text=True, + timeout=10, + ) + except subprocess.TimeoutExpired: + # Force kill if timeout + try: + subprocess.run( + ["docker", "rm", "-f", container_name], + capture_output=True, + text=True, + timeout=5, + ) + except subprocess.TimeoutExpired: + pass + + def 
_build_proxy_uri(self, port: int) -> str: + """Build HTTP proxy URI.""" + if self.auth_user and self.auth_password: + return f"http://{self.auth_user}:{self.auth_password}@localhost:{port}" + return f"http://localhost:{port}" + + def __del__(self): + """Cleanup containers on object destruction.""" + if hasattr(self, 'auto_cleanup') and self.auto_cleanup: + try: + if hasattr(self, 'active_containers') and self.active_containers: + self.cleanup() + except Exception: + pass diff --git a/unshackle/core/proxies/nordvpn.py b/unshackle/core/proxies/nordvpn.py index a50891d..33418ff 100644 --- a/unshackle/core/proxies/nordvpn.py +++ b/unshackle/core/proxies/nordvpn.py @@ -1,4 +1,5 @@ import json +import random import re from typing import Optional @@ -46,8 +47,21 @@ class NordVPN(Proxy): HTTP proxies under port 80 were disabled on the 15th of Feb, 2021: https://nordvpn.com/blog/removing-http-proxies + + Supports: + - Country code: "us", "ca", "gb" + - Country ID: "228" + - Specific server: "us1234" + - City selection: "us:seattle", "ca:calgary" """ query = query.lower() + city = None + + # Check if query includes city specification (e.g., "ca:calgary") + if ":" in query: + query, city = query.split(":", maxsplit=1) + city = city.strip() + if re.match(r"^[a-z]{2}\d+$", query): # country and nordvpn server id, e.g., us1, fr1234 hostname = f"{query}.nordvpn.com" @@ -64,7 +78,12 @@ class NordVPN(Proxy): # NordVPN doesnt have servers in this region return - server_mapping = self.server_map.get(country["code"].lower()) + # Check server_map for pinned servers (can include city) + server_map_key = f"{country['code'].lower()}:{city}" if city else country["code"].lower() + server_mapping = self.server_map.get(server_map_key) or ( + self.server_map.get(country["code"].lower()) if not city else None + ) + if server_mapping: # country was set to a specific server ID in config hostname = f"{country['code'].lower()}{server_mapping}.nordvpn.com" @@ -76,7 +95,19 @@ class NordVPN(Proxy): 
f"The NordVPN Country {query} currently has no recommended servers. " "Try again later. If the issue persists, double-check the query." ) - hostname = recommended_servers[0]["hostname"] + + # Filter by city if specified + if city: + city_servers = self.filter_servers_by_city(recommended_servers, city) + if not city_servers: + raise ValueError( + f"No servers found in city '{city}' for country '{country['name']}'. " + "Try a different city or check the city name spelling." + ) + recommended_servers = city_servers + + # Pick a random server from the filtered list + hostname = random.choice(recommended_servers)["hostname"] if hostname.startswith("gb"): # NordVPN uses the alpha2 of 'GB' in API responses, but 'UK' in the hostname @@ -95,6 +126,41 @@ class NordVPN(Proxy): ): return country + @staticmethod + def filter_servers_by_city(servers: list[dict], city: str) -> list[dict]: + """ + Filter servers by city name. + + The API returns servers with location data that includes city information. + This method filters servers to only those in the specified city. 
+ + Args: + servers: List of server dictionaries from the NordVPN API + city: City name to filter by (case-insensitive) + + Returns: + List of servers in the specified city + """ + city_lower = city.lower() + filtered = [] + + for server in servers: + # Each server has a 'locations' list with location data + locations = server.get("locations", []) + for location in locations: + # City data can be in different formats: + # - {"city": {"name": "Seattle", ...}} + # - {"city": "Seattle"} + city_data = location.get("city") + if city_data: + # Handle both dict and string formats + city_name = city_data.get("name") if isinstance(city_data, dict) else city_data + if city_name and city_name.lower() == city_lower: + filtered.append(server) + break # Found a match, no need to check other locations for this server + + return filtered + @staticmethod def get_recommended_servers(country_id: int) -> list[dict]: """ diff --git a/unshackle/core/proxies/surfsharkvpn.py b/unshackle/core/proxies/surfsharkvpn.py index 32bf518..491906d 100644 --- a/unshackle/core/proxies/surfsharkvpn.py +++ b/unshackle/core/proxies/surfsharkvpn.py @@ -44,8 +44,21 @@ class SurfsharkVPN(Proxy): def get_proxy(self, query: str) -> Optional[str]: """ Get an HTTP(SSL) proxy URI for a SurfsharkVPN server. 
+ + Supports: + - Country code: "us", "ca", "gb" + - Country ID: "228" + - Specific server: "us-bos" (Boston) + - City selection: "us:seattle", "ca:toronto" """ query = query.lower() + city = None + + # Check if query includes city specification (e.g., "us:seattle") + if ":" in query: + query, city = query.split(":", maxsplit=1) + city = city.strip() + if re.match(r"^[a-z]{2}\d+$", query): # country and surfsharkvpn server id, e.g., au-per, be-anr, us-bos hostname = f"{query}.prod.surfshark.com" @@ -62,13 +75,18 @@ class SurfsharkVPN(Proxy): # SurfsharkVPN doesnt have servers in this region return - server_mapping = self.server_map.get(country["countryCode"].lower()) + # Check server_map for pinned servers (can include city) + server_map_key = f"{country['countryCode'].lower()}:{city}" if city else country["countryCode"].lower() + server_mapping = self.server_map.get(server_map_key) or ( + self.server_map.get(country["countryCode"].lower()) if not city else None + ) + if server_mapping: # country was set to a specific server ID in config hostname = f"{country['code'].lower()}{server_mapping}.prod.surfshark.com" else: # get the random server ID - random_server = self.get_random_server(country["countryCode"]) + random_server = self.get_random_server(country["countryCode"], city) if not random_server: raise ValueError( f"The SurfsharkVPN Country {query} currently has no random servers. " @@ -92,18 +110,44 @@ class SurfsharkVPN(Proxy): ): return country - def get_random_server(self, country_id: str): + def get_random_server(self, country_id: str, city: Optional[str] = None): """ - Get the list of random Server for a Country. + Get a random server for a Country, optionally filtered by city. - Note: There may not always be more than one recommended server. + Args: + country_id: The country code (e.g., "US", "CA") + city: Optional city name to filter by (case-insensitive) + + Note: The API may include a 'location' field with city information. 
+ If not available, this will return any server from the country. """ - country = [x["connectionName"] for x in self.countries if x["countryCode"].lower() == country_id.lower()] + servers = [x for x in self.countries if x["countryCode"].lower() == country_id.lower()] + + # Filter by city if specified + if city: + city_lower = city.lower() + # Check if servers have a 'location' field for city filtering + city_servers = [ + x + for x in servers + if x.get("location", "").lower() == city_lower or x.get("city", "").lower() == city_lower + ] + + if city_servers: + servers = city_servers + else: + raise ValueError( + f"No servers found in city '{city}' for country '{country_id}'. " + "Try a different city or check the city name spelling." + ) + + # Get connection names from filtered servers + connection_names = [x["connectionName"] for x in servers] + try: - country = random.choice(country) - return country - except Exception: - raise ValueError("Could not get random countrycode from the countries list.") + return random.choice(connection_names) + except (IndexError, KeyError): + raise ValueError(f"Could not get random server for country '{country_id}'.") @staticmethod def get_countries() -> list[dict]: diff --git a/unshackle/core/remote_auth.py b/unshackle/core/remote_auth.py new file mode 100644 index 0000000..3b2a947 --- /dev/null +++ b/unshackle/core/remote_auth.py @@ -0,0 +1,279 @@ +"""Client-side authentication for remote services. + +This module handles authenticating services locally on the client side, +then sending the authenticated session to the remote server. + +This approach allows: +- Interactive browser-based logins +- 2FA/CAPTCHA handling +- OAuth flows +- Any authentication that requires user interaction + +The server NEVER sees credentials - only authenticated sessions. 
+""" + +import logging +from typing import Any, Dict, Optional + +import click +import requests +import yaml + +from unshackle.core.api.session_serializer import serialize_session +from unshackle.core.config import config +from unshackle.core.console import console +from unshackle.core.credential import Credential +from unshackle.core.local_session_cache import get_local_session_cache +from unshackle.core.services import Services +from unshackle.core.utils.click_types import ContextData +from unshackle.core.utils.collections import merge_dict + +log = logging.getLogger("RemoteAuth") + + +class RemoteAuthenticator: + """ + Handles client-side authentication for remote services. + + Workflow: + 1. Load service locally + 2. Authenticate using local credentials/cookies (can show browser, handle 2FA) + 3. Extract authenticated session + 4. Upload session to remote server + 5. Server uses the pre-authenticated session + """ + + def __init__(self, remote_url: str, api_key: str): + """ + Initialize remote authenticator. + + Args: + remote_url: Base URL of remote server + api_key: API key for remote server + """ + self.remote_url = remote_url.rstrip("/") + self.api_key = api_key + self.session = requests.Session() + self.session.headers.update({"X-API-Key": self.api_key, "Content-Type": "application/json"}) + + def authenticate_service_locally( + self, service_tag: str, profile: Optional[str] = None, force_reauth: bool = False + ) -> Dict[str, Any]: + """ + Authenticate a service locally and extract the session. + + This runs the service authentication on the CLIENT side where browsers, + 2FA, and interactive prompts can work. 
+ + Args: + service_tag: Service to authenticate (e.g., "DSNP", "NF") + profile: Optional profile to use for credentials + force_reauth: Force re-authentication even if session exists + + Returns: + Serialized session data + + Raises: + ValueError: If service not found or authentication fails + """ + console.print(f"[cyan]Authenticating {service_tag} locally...[/cyan]") + + # Validate service exists + if service_tag not in Services.get_tags(): + raise ValueError(f"Service {service_tag} not found locally") + + # Load service + service_module = Services.load(service_tag) + + # Load service config + service_config_path = Services.get_path(service_tag) / config.filenames.config + if service_config_path.exists(): + service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8")) + else: + service_config = {} + merge_dict(config.services.get(service_tag), service_config) + + # Create Click context + @click.command() + @click.pass_context + def dummy_command(ctx: click.Context) -> None: + pass + + ctx = click.Context(dummy_command) + ctx.obj = ContextData(config=service_config, cdm=None, proxy_providers=[], profile=profile) + + # Create service instance + try: + # Get service initialization parameters + import inspect + + service_init_params = inspect.signature(service_module.__init__).parameters + service_kwargs = {} + + # Extract defaults from click command + if hasattr(service_module, "cli") and hasattr(service_module.cli, "params"): + for param in service_module.cli.params: + if hasattr(param, "name") and param.name not in service_kwargs: + if hasattr(param, "default") and param.default is not None: + service_kwargs[param.name] = param.default + + # Filter to only valid parameters + filtered_kwargs = {k: v for k, v in service_kwargs.items() if k in service_init_params} + + # Create service instance + service_instance = service_module(ctx, **filtered_kwargs) + + # Get credentials and cookies + cookies = self._get_cookie_jar(service_tag, profile) + 
credential = self._get_credentials(service_tag, profile) + + # Authenticate the service + console.print("[yellow]Authenticating... (this may show browser or prompts)[/yellow]") + service_instance.authenticate(cookies=cookies, credential=credential) + + # Serialize the authenticated session + session_data = serialize_session(service_instance.session) + + # Add metadata + session_data["service_tag"] = service_tag + session_data["profile"] = profile + session_data["authenticated"] = True + + console.print(f"[green]✓ {service_tag} authenticated successfully![/green]") + log.info(f"Authenticated {service_tag} (profile: {profile or 'default'})") + + return session_data + + except Exception as e: + console.print(f"[red]✗ Authentication failed: {e}[/red]") + log.error(f"Failed to authenticate {service_tag}: {e}") + raise ValueError(f"Authentication failed for {service_tag}: {e}") + + def save_session_locally(self, session_data: Dict[str, Any]) -> bool: + """ + Save authenticated session to local cache. + + The session is stored only on the client machine, never on the server. + The server is completely stateless. 
+ + Args: + session_data: Serialized session data + + Returns: + True if save successful + """ + service_tag = session_data.get("service_tag") + profile = session_data.get("profile", "default") + + console.print("[cyan]Saving session to local cache...[/cyan]") + + try: + # Get local session cache + cache = get_local_session_cache() + + # Store session locally + cache.store_session( + remote_url=self.remote_url, + service_tag=service_tag, + profile=profile, + session_data=session_data + ) + + console.print("[green]✓ Session saved locally![/green]") + log.info(f"Saved session for {service_tag} (profile: {profile}) to local cache") + return True + + except Exception as e: + console.print(f"[red]✗ Save failed: {e}[/red]") + log.error(f"Failed to save session locally: {e}") + return False + + def authenticate_and_save(self, service_tag: str, profile: Optional[str] = None) -> bool: + """ + Authenticate locally and save session to local cache in one step. + + Args: + service_tag: Service to authenticate + profile: Optional profile + + Returns: + True if successful + """ + try: + # Authenticate locally + session_data = self.authenticate_service_locally(service_tag, profile) + + # Save to local cache + return self.save_session_locally(session_data) + + except Exception as e: + console.print(f"[red]Authentication and save failed: {e}[/red]") + return False + + def check_local_session_status(self, service_tag: str, profile: Optional[str] = None) -> Dict[str, Any]: + """ + Check if a session exists in local cache. 
+ + Args: + service_tag: Service tag + profile: Optional profile + + Returns: + Session status info + """ + try: + cache = get_local_session_cache() + session_data = cache.get_session(self.remote_url, service_tag, profile or "default") + + if session_data: + # Get metadata + sessions = cache.list_sessions(self.remote_url) + for session in sessions: + if session["service_tag"] == service_tag and session["profile"] == (profile or "default"): + return { + "status": "success", + "exists": True, + "session_info": session + } + + return { + "status": "success", + "exists": False, + "message": f"No session found for {service_tag} (profile: {profile or 'default'})" + } + + except Exception as e: + log.error(f"Failed to check session status: {e}") + return {"status": "error", "message": "Failed to check session status"} + + def _get_cookie_jar(self, service_tag: str, profile: Optional[str]): + """Get cookie jar for service and profile.""" + from unshackle.commands.dl import dl + + return dl.get_cookie_jar(service_tag, profile) + + def _get_credentials(self, service_tag: str, profile: Optional[str]) -> Optional[Credential]: + """Get credentials for service and profile.""" + from unshackle.commands.dl import dl + + return dl.get_credentials(service_tag, profile) + + +def authenticate_remote_service(remote_url: str, api_key: str, service_tag: str, profile: Optional[str] = None) -> bool: + """ + Helper function to authenticate a remote service. 
+ + Args: + remote_url: Remote server URL + api_key: API key + service_tag: Service to authenticate + profile: Optional profile + + Returns: + True if successful + """ + authenticator = RemoteAuthenticator(remote_url, api_key) + return authenticator.authenticate_and_save(service_tag, profile) + + +__all__ = ["RemoteAuthenticator", "authenticate_remote_service"] diff --git a/unshackle/core/remote_service.py b/unshackle/core/remote_service.py new file mode 100644 index 0000000..23d178c --- /dev/null +++ b/unshackle/core/remote_service.py @@ -0,0 +1,593 @@ +"""Remote service implementation for connecting to remote unshackle servers.""" + +import logging +import time +from collections.abc import Generator +from http.cookiejar import CookieJar +from typing import Any, Dict, Optional, Union + +import click +import requests +from rich.padding import Padding +from rich.rule import Rule + +from unshackle.core.api.session_serializer import deserialize_session +from unshackle.core.console import console +from unshackle.core.credential import Credential +from unshackle.core.local_session_cache import get_local_session_cache +from unshackle.core.search_result import SearchResult +from unshackle.core.titles import Episode, Movie, Movies, Series +from unshackle.core.tracks import Chapter, Chapters, Tracks +from unshackle.core.tracks.audio import Audio +from unshackle.core.tracks.subtitle import Subtitle +from unshackle.core.tracks.video import Video + + +class RemoteService: + """ + Remote Service wrapper that connects to a remote unshackle server. + + This class mimics the Service interface but delegates all operations + to a remote unshackle server via API calls. It receives session data + from the remote server which is then used locally for downloading. + """ + + ALIASES: tuple[str, ...] = () + GEOFENCE: tuple[str, ...] 
= () + + def __init__( + self, + ctx: click.Context, + remote_url: str, + api_key: str, + service_tag: str, + service_metadata: Dict[str, Any], + **kwargs, + ): + """ + Initialize remote service. + + Args: + ctx: Click context + remote_url: Base URL of the remote unshackle server + api_key: API key for authentication + service_tag: The service tag on the remote server (e.g., "DSNP") + service_metadata: Metadata about the service from remote discovery + **kwargs: Additional service-specific parameters + """ + console.print(Padding(Rule(f"[rule.text]Remote Service: {service_tag}"), (1, 2))) + + self.log = logging.getLogger(f"RemoteService.{service_tag}") + self.remote_url = remote_url.rstrip("/") + self.api_key = api_key + self.service_tag = service_tag + self.service_metadata = service_metadata + self.ctx = ctx + self.kwargs = kwargs + + # Set GEOFENCE and ALIASES from metadata + if "geofence" in service_metadata: + self.GEOFENCE = tuple(service_metadata["geofence"]) + if "aliases" in service_metadata: + self.ALIASES = tuple(service_metadata["aliases"]) + + # Create a session for API calls to the remote server + self.api_session = requests.Session() + self.api_session.headers.update({"X-API-Key": self.api_key, "Content-Type": "application/json"}) + + # This session will receive data from remote for actual downloading + self.session = requests.Session() + + # Store authentication state + self.authenticated = False + self.credential = None + self.cookies_content = None # Raw cookie file content to send to remote + + # Get profile from context if available + self.profile = "default" + if hasattr(ctx, "obj") and hasattr(ctx.obj, "profile"): + self.profile = ctx.obj.profile or "default" + + # Initialize proxy providers for resolving proxy credentials + self._proxy_providers = None + if hasattr(ctx, "obj") and hasattr(ctx.obj, "proxy_providers"): + self._proxy_providers = ctx.obj.proxy_providers + + def _resolve_proxy_locally(self, proxy: str) -> Optional[str]: + """ + 
Resolve proxy parameter locally using client's proxy providers. + + This allows the client to resolve proxy providers (like NordVPN) and + send the full proxy URI with credentials to the server. + + Args: + proxy: Proxy parameter (e.g., "nordvpn:ca1066", "us2104", or full URI) + + Returns: + Resolved proxy URI with credentials, or None if no_proxy + """ + if not proxy: + return None + + import re + + # If already a full URI, return as-is + if re.match(r"^https?://", proxy): + self.log.debug(f"Using explicit proxy URI: {proxy}") + return proxy + + # Try to resolve using local proxy providers + if self._proxy_providers: + try: + from unshackle.core.api.handlers import resolve_proxy + + resolved = resolve_proxy(proxy, self._proxy_providers) + self.log.info(f"Resolved proxy '{proxy}' to: {resolved}") + return resolved + except Exception as e: + self.log.warning(f"Failed to resolve proxy locally: {e}") + # Fall back to sending proxy parameter as-is for server to resolve + return proxy + else: + self.log.debug(f"No proxy providers available, sending proxy as-is: {proxy}") + return proxy + + def _add_proxy_to_request(self, data: Dict[str, Any]) -> None: + """ + Add resolved proxy information to request data. + + Resolves proxy using local proxy providers and adds to request. + Server will use the resolved proxy URI (with credentials). 
+ + Args: + data: Request data dictionary to modify + """ + if hasattr(self.ctx, "params"): + no_proxy = self.ctx.params.get("no_proxy", False) + proxy_param = self.ctx.params.get("proxy") + + if no_proxy: + data["no_proxy"] = True + elif proxy_param: + # Resolve proxy locally to get credentials + resolved_proxy = self._resolve_proxy_locally(proxy_param) + if resolved_proxy: + data["proxy"] = resolved_proxy + self.log.debug(f"Sending resolved proxy to server: {resolved_proxy}") + + def _make_request(self, endpoint: str, data: Optional[Dict[str, Any]] = None, retry_count: int = 0) -> Dict[str, Any]: + """ + Make an API request to the remote server with retry logic. + + Automatically handles authentication: + 1. Check for cached session - send with request if found + 2. If session expired, re-authenticate automatically + 3. If no session, send credentials (server tries to auth) + 4. If server returns AUTH_REQUIRED, authenticate locally + 5. Retry request with new session + + Args: + endpoint: API endpoint path (e.g., "/api/remote/DSNP/titles") + data: Optional JSON data to send + retry_count: Current retry attempt (for internal use) + + Returns: + Response JSON data + + Raises: + ConnectionError: If the request fails after all retries + """ + url = f"{self.remote_url}{endpoint}" + max_retries = 3 # Max network retries + retry_delays = [2, 4, 8] # Exponential backoff in seconds + + # Ensure data is a dictionary + if data is None: + data = {} + + # Priority 1: Check for pre-authenticated session in local cache + cache = get_local_session_cache() + cached_session = cache.get_session(self.remote_url, self.service_tag, self.profile) + + if cached_session: + # Send pre-authenticated session data (server never stores it) + self.log.debug(f"Using cached session for {self.service_tag}") + data["pre_authenticated_session"] = cached_session + else: + # Priority 2: Fallback to credentials/cookies (old behavior) + # This allows server to authenticate if no local session exists + 
if self.cookies_content: + data["cookies"] = self.cookies_content + + if self.credential: + data["credential"] = {"username": self.credential.username, "password": self.credential.password} + + try: + if data: + response = self.api_session.post(url, json=data) + else: + response = self.api_session.get(url) + + response.raise_for_status() + result = response.json() + + # Check if session expired - re-authenticate automatically + if result.get("error_code") == "SESSION_EXPIRED": + console.print(f"[yellow]Session expired for {self.service_tag}[/yellow]") + console.print("[cyan]Re-authenticating...[/cyan]") + + # Delete expired session from cache + cache.delete_session(self.remote_url, self.service_tag, self.profile) + + # Perform local authentication + session_data = self._authenticate_locally() + + if session_data: + # Save to cache for future requests + cache.store_session( + remote_url=self.remote_url, + service_tag=self.service_tag, + profile=self.profile, + session_data=session_data + ) + + # Retry request with new session + data["pre_authenticated_session"] = session_data + # Remove old auth data + data.pop("cookies", None) + data.pop("credential", None) + + # Retry the request + response = self.api_session.post(url, json=data) + response.raise_for_status() + result = response.json() + + # Check if server requires authentication + elif result.get("error_code") == "AUTH_REQUIRED" and not cached_session: + console.print(f"[yellow]Authentication required for {self.service_tag}[/yellow]") + console.print("[cyan]Authenticating locally...[/cyan]") + + # Perform local authentication + session_data = self._authenticate_locally() + + if session_data: + # Save to cache for future requests + cache.store_session( + remote_url=self.remote_url, + service_tag=self.service_tag, + profile=self.profile, + session_data=session_data + ) + + # Retry request with authenticated session + data["pre_authenticated_session"] = session_data + # Remove old auth data + data.pop("cookies", 
None) + data.pop("credential", None) + + # Retry the request + response = self.api_session.post(url, json=data) + response.raise_for_status() + result = response.json() + + # Apply session data if present + if "session" in result: + deserialize_session(result["session"], self.session) + + return result + + except requests.RequestException as e: + # Retry on network errors with exponential backoff + if retry_count < max_retries: + delay = retry_delays[retry_count] + self.log.warning(f"Request failed (attempt {retry_count + 1}/{max_retries + 1}): {e}") + self.log.info(f"Retrying in {delay} seconds...") + time.sleep(delay) + return self._make_request(endpoint, data, retry_count + 1) + else: + self.log.error(f"Remote API request failed after {max_retries + 1} attempts: {e}") + raise ConnectionError(f"Failed to communicate with remote server after {max_retries + 1} attempts: {e}") + + def _authenticate_locally(self) -> Optional[Dict[str, Any]]: + """ + Authenticate the service locally when server requires it. + + This performs interactive authentication (browser, 2FA, etc.) + and returns the authenticated session. + + Returns: + Serialized session data or None if authentication fails + """ + from unshackle.core.remote_auth import RemoteAuthenticator + + try: + authenticator = RemoteAuthenticator(self.remote_url, self.api_key) + session_data = authenticator.authenticate_service_locally(self.service_tag, self.profile) + console.print("[green]✓ Authentication successful![/green]") + return session_data + + except Exception as e: + console.print(f"[red]✗ Authentication failed: {e}[/red]") + self.log.error(f"Local authentication failed: {e}") + return None + + def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None: + """ + Prepare authentication data to send to remote service. + + Stores cookies and credentials to send with each API request. + The remote server will use these for authentication. 
+ + Args: + cookies: Cookie jar from local configuration + credential: Credentials from local configuration + """ + self.log.info("Preparing authentication for remote server...") + self.credential = credential + + # Read cookies file content if cookies provided + if cookies and hasattr(cookies, "filename") and cookies.filename: + try: + from pathlib import Path + + cookie_file = Path(cookies.filename) + if cookie_file.exists(): + self.cookies_content = cookie_file.read_text() + self.log.info(f"Loaded cookies from {cookie_file}") + except Exception as e: + self.log.warning(f"Could not read cookie file: {e}") + + self.authenticated = True + self.log.info("Authentication data ready for remote server") + + def search(self, query: Optional[str] = None) -> Generator[SearchResult, None, None]: + """ + Search for content on the remote service. + + Args: + query: Search query string + + Yields: + SearchResult objects + """ + if query is None: + query = self.kwargs.get("query", "") + + self.log.info(f"Searching remote service for: {query}") + + data = {"query": query} + + # Add proxy information (resolved locally with credentials) + self._add_proxy_to_request(data) + + response = self._make_request(f"/api/remote/{self.service_tag}/search", data) + + if response.get("status") == "success" and "results" in response: + for result in response["results"]: + yield SearchResult( + id_=result["id"], + title=result["title"], + description=result.get("description"), + label=result.get("label"), + url=result.get("url"), + ) + + def get_titles(self) -> Union[Movies, Series]: + """ + Get titles from the remote service. 
+ + Returns: + Movies or Series object containing title information + """ + title = self.kwargs.get("title") + + if not title: + raise ValueError("No title provided") + + self.log.info(f"Getting titles from remote service for: {title}") + + data = {"title": title} + + # Add additional parameters + for key, value in self.kwargs.items(): + if key not in ["title"]: + data[key] = value + + # Add proxy information (resolved locally with credentials) + self._add_proxy_to_request(data) + + response = self._make_request(f"/api/remote/{self.service_tag}/titles", data) + + if response.get("status") != "success" or "titles" not in response: + raise ValueError(f"Failed to get titles from remote: {response.get('message', 'Unknown error')}") + + titles_data = response["titles"] + + # Deserialize titles + titles = [] + for title_info in titles_data: + if title_info["type"] == "movie": + titles.append( + Movie( + id_=title_info.get("id", title), + service=self.__class__, + name=title_info["name"], + year=title_info.get("year"), + data=title_info, + ) + ) + elif title_info["type"] == "episode": + titles.append( + Episode( + id_=title_info.get("id", title), + service=self.__class__, + title=title_info.get("series_title", title_info["name"]), + season=title_info.get("season", 0), + number=title_info.get("number", 0), + name=title_info.get("name"), + year=title_info.get("year"), + data=title_info, + ) + ) + + # Return appropriate container + if titles and isinstance(titles[0], Episode): + return Series(titles) + else: + return Movies(titles) + + def get_tracks(self, title: Union[Movie, Episode]) -> Tracks: + """ + Get tracks from the remote service. 
+ + Args: + title: Title object to get tracks for + + Returns: + Tracks object containing video, audio, and subtitle tracks + """ + self.log.info(f"Getting tracks from remote service for: {title}") + + title_input = self.kwargs.get("title") + data = {"title": title_input} + + # Add episode information if applicable + if isinstance(title, Episode): + data["season"] = title.season + data["episode"] = title.number + + # Add additional parameters + for key, value in self.kwargs.items(): + if key not in ["title"]: + data[key] = value + + # Add proxy information (resolved locally with credentials) + self._add_proxy_to_request(data) + + response = self._make_request(f"/api/remote/{self.service_tag}/tracks", data) + + if response.get("status") != "success": + raise ValueError(f"Failed to get tracks from remote: {response.get('message', 'Unknown error')}") + + # Handle multiple episodes response + if "episodes" in response: + # For multiple episodes, return tracks for the matching title + for episode_data in response["episodes"]: + episode_title = episode_data["title"] + if ( + isinstance(title, Episode) + and episode_title.get("season") == title.season + and episode_title.get("number") == title.number + ): + return self._deserialize_tracks(episode_data, title) + + raise ValueError(f"Could not find tracks for {title.season}x{title.number} in remote response") + + # Single title response + return self._deserialize_tracks(response, title) + + def _deserialize_tracks(self, data: Dict[str, Any], title: Union[Movie, Episode]) -> Tracks: + """ + Deserialize tracks from API response. 
+ + Args: + data: Track data from API + title: Title object these tracks belong to + + Returns: + Tracks object + """ + tracks = Tracks() + + # Deserialize video tracks + for video_data in data.get("video", []): + video = Video( + id_=video_data["id"], + url="", # URL will be populated during download from manifests + codec=Video.Codec[video_data["codec"]], + bitrate=video_data.get("bitrate", 0) * 1000 if video_data.get("bitrate") else None, + width=video_data.get("width"), + height=video_data.get("height"), + fps=video_data.get("fps"), + range_=Video.Range[video_data["range"]] if video_data.get("range") else None, + language=video_data.get("language"), + drm=video_data.get("drm"), + ) + tracks.add(video) + + # Deserialize audio tracks + for audio_data in data.get("audio", []): + audio = Audio( + id_=audio_data["id"], + url="", # URL will be populated during download + codec=Audio.Codec[audio_data["codec"]], + bitrate=audio_data.get("bitrate", 0) * 1000 if audio_data.get("bitrate") else None, + channels=audio_data.get("channels"), + language=audio_data.get("language"), + descriptive=audio_data.get("descriptive", False), + drm=audio_data.get("drm"), + ) + if audio_data.get("atmos"): + audio.atmos = True + tracks.add(audio) + + # Deserialize subtitle tracks + for subtitle_data in data.get("subtitles", []): + subtitle = Subtitle( + id_=subtitle_data["id"], + url="", # URL will be populated during download + codec=Subtitle.Codec[subtitle_data["codec"]], + language=subtitle_data.get("language"), + forced=subtitle_data.get("forced", False), + sdh=subtitle_data.get("sdh", False), + cc=subtitle_data.get("cc", False), + ) + tracks.add(subtitle) + + return tracks + + def get_chapters(self, title: Union[Movie, Episode]) -> Chapters: + """ + Get chapters from the remote service. 
+ + Args: + title: Title object to get chapters for + + Returns: + Chapters object + """ + self.log.info(f"Getting chapters from remote service for: {title}") + + title_input = self.kwargs.get("title") + data = {"title": title_input} + + # Add episode information if applicable + if isinstance(title, Episode): + data["season"] = title.season + data["episode"] = title.number + + # Add proxy information (resolved locally with credentials) + self._add_proxy_to_request(data) + + response = self._make_request(f"/api/remote/{self.service_tag}/chapters", data) + + if response.get("status") != "success": + self.log.warning(f"Failed to get chapters from remote: {response.get('message', 'Unknown error')}") + return Chapters() + + chapters = Chapters() + for chapter_data in response.get("chapters", []): + chapters.add(Chapter(timestamp=chapter_data["timestamp"], name=chapter_data.get("name"))) + + return chapters + + @staticmethod + def get_session() -> requests.Session: + """ + Create a session for the remote service. + + Returns: + A requests.Session object + """ + session = requests.Session() + return session diff --git a/unshackle/core/remote_services.py b/unshackle/core/remote_services.py new file mode 100644 index 0000000..cca45a3 --- /dev/null +++ b/unshackle/core/remote_services.py @@ -0,0 +1,245 @@ +"""Remote service discovery and management.""" + +import logging +from pathlib import Path +from typing import Any, Dict, List, Optional + +import requests + +from unshackle.core.config import config +from unshackle.core.remote_service import RemoteService + +log = logging.getLogger("RemoteServices") + + +class RemoteServiceManager: + """ + Manages discovery and registration of remote services. + + This class connects to configured remote unshackle servers, + discovers available services, and creates RemoteService instances + that can be used like local services. 
+ """ + + def __init__(self): + """Initialize the remote service manager.""" + self.remote_services: Dict[str, type] = {} + self.remote_configs: List[Dict[str, Any]] = [] + + def discover_services(self) -> None: + """ + Discover services from all configured remote servers. + + Reads the remote_services configuration, connects to each server, + retrieves available services, and creates RemoteService classes + for each discovered service. + """ + if not config.remote_services: + log.debug("No remote services configured") + return + + log.info(f"Discovering services from {len(config.remote_services)} remote server(s)...") + + for remote_config in config.remote_services: + try: + self._discover_from_server(remote_config) + except Exception as e: + log.error(f"Failed to discover services from {remote_config.get('url')}: {e}") + continue + + log.info(f"Discovered {len(self.remote_services)} remote service(s)") + + def _discover_from_server(self, remote_config: Dict[str, Any]) -> None: + """ + Discover services from a single remote server. 
+ + Args: + remote_config: Configuration for the remote server + (must contain 'url' and 'api_key') + """ + url = remote_config.get("url", "").rstrip("/") + api_key = remote_config.get("api_key", "") + server_name = remote_config.get("name", url) + + if not url: + log.warning("Remote service configuration missing 'url', skipping") + return + + if not api_key: + log.warning(f"Remote service {url} missing 'api_key', skipping") + return + + log.info(f"Connecting to remote server: {server_name}") + + try: + # Query the remote server for available services + response = requests.get( + f"{url}/api/remote/services", + headers={"X-API-Key": api_key, "Content-Type": "application/json"}, + timeout=10, + ) + + response.raise_for_status() + data = response.json() + + if data.get("status") != "success" or "services" not in data: + log.error(f"Invalid response from {url}: {data}") + return + + services = data["services"] + log.info(f"Found {len(services)} service(s) on {server_name}") + + # Create RemoteService classes for each service + for service_info in services: + self._register_remote_service(url, api_key, service_info, server_name) + + except requests.RequestException as e: + log.error(f"Failed to connect to remote server {url}: {e}") + raise + + def _register_remote_service( + self, remote_url: str, api_key: str, service_info: Dict[str, Any], server_name: str + ) -> None: + """ + Register a remote service as a local service class. 
+ + Args: + remote_url: Base URL of the remote server + api_key: API key for authentication + service_info: Service metadata from the remote server + server_name: Friendly name of the remote server + """ + service_tag = service_info.get("tag") + if not service_tag: + log.warning(f"Service info missing 'tag': {service_info}") + return + + # Create a unique tag for the remote service + # Use "remote_" prefix to distinguish from local services + remote_tag = f"remote_{service_tag}" + + # Check if this remote service is already registered + if remote_tag in self.remote_services: + log.debug(f"Remote service {remote_tag} already registered, skipping") + return + + log.info(f"Registering remote service: {remote_tag} from {server_name}") + + # Create a dynamic class that inherits from RemoteService + # This allows us to create instances with the cli() method for Click integration + class DynamicRemoteService(RemoteService): + """Dynamically created remote service class.""" + + def __init__(self, ctx, **kwargs): + super().__init__( + ctx=ctx, + remote_url=remote_url, + api_key=api_key, + service_tag=service_tag, + service_metadata=service_info, + **kwargs, + ) + + @staticmethod + def cli(): + """CLI method for Click integration.""" + import click + + # Create a dynamic Click command for this service + @click.command( + name=remote_tag, + short_help=f"Remote: {service_info.get('help', service_tag)}", + help=service_info.get("help", f"Remote service for {service_tag}"), + ) + @click.argument("title", type=str, required=False) + @click.option("-q", "--query", type=str, help="Search query") + @click.pass_context + def remote_service_cli(ctx, title=None, query=None, **kwargs): + # Combine title and kwargs + params = {**kwargs} + if title: + params["title"] = title + if query: + params["query"] = query + + return DynamicRemoteService(ctx, **params) + + return remote_service_cli + + # Set class name for better debugging + DynamicRemoteService.__name__ = remote_tag + 
DynamicRemoteService.__module__ = "unshackle.remote_services" + + # Set GEOFENCE and ALIASES + if "geofence" in service_info: + DynamicRemoteService.GEOFENCE = tuple(service_info["geofence"]) + if "aliases" in service_info: + # Add "remote_" prefix to aliases too + DynamicRemoteService.ALIASES = tuple(f"remote_{alias}" for alias in service_info["aliases"]) + + # Register the service + self.remote_services[remote_tag] = DynamicRemoteService + + def get_service(self, tag: str) -> Optional[type]: + """ + Get a remote service class by tag. + + Args: + tag: Service tag (e.g., "remote_DSNP") + + Returns: + RemoteService class or None if not found + """ + return self.remote_services.get(tag) + + def get_all_services(self) -> Dict[str, type]: + """ + Get all registered remote services. + + Returns: + Dictionary mapping service tags to RemoteService classes + """ + return self.remote_services.copy() + + def get_service_path(self, tag: str) -> Optional[Path]: + """ + Get the path for a remote service. + + Remote services don't have local paths, so this returns None. + This method exists for compatibility with the Services interface. + + Args: + tag: Service tag + + Returns: + None (remote services have no local path) + """ + return None + + +# Global instance +_remote_service_manager: Optional[RemoteServiceManager] = None + + +def get_remote_service_manager() -> RemoteServiceManager: + """ + Get the global RemoteServiceManager instance. + + Creates the instance on first call and discovers services. 
+ + Returns: + RemoteServiceManager instance + """ + global _remote_service_manager + + if _remote_service_manager is None: + _remote_service_manager = RemoteServiceManager() + try: + _remote_service_manager.discover_services() + except Exception as e: + log.error(f"Failed to discover remote services: {e}") + + return _remote_service_manager + + +__all__ = ("RemoteServiceManager", "get_remote_service_manager") diff --git a/unshackle/core/service.py b/unshackle/core/service.py index dd748ad..d39cb55 100644 --- a/unshackle/core/service.py +++ b/unshackle/core/service.py @@ -53,8 +53,55 @@ class Service(metaclass=ABCMeta): if not ctx.parent or not ctx.parent.params.get("no_proxy"): if ctx.parent: proxy = ctx.parent.params["proxy"] + proxy_query = ctx.parent.params.get("proxy_query") + proxy_provider_name = ctx.parent.params.get("proxy_provider") else: proxy = None + proxy_query = None + proxy_provider_name = None + + # Check for service-specific proxy mapping + service_name = self.__class__.__name__ + service_config_dict = config.services.get(service_name, {}) + proxy_map = service_config_dict.get("proxy_map", {}) + + if proxy_map and proxy_query: + # Build the full proxy query key (e.g., "nordvpn:ca" or "us") + if proxy_provider_name: + full_proxy_key = f"{proxy_provider_name}:{proxy_query}" + else: + full_proxy_key = proxy_query + + # Check if there's a mapping for this query + mapped_value = proxy_map.get(full_proxy_key) + if mapped_value: + self.log.info(f"Found service-specific proxy mapping: {full_proxy_key} -> {mapped_value}") + # Query the proxy provider with the mapped value + if proxy_provider_name: + # Specific provider requested + proxy_provider = next( + (x for x in ctx.obj.proxy_providers if x.__class__.__name__.lower() == proxy_provider_name), + None, + ) + if proxy_provider: + mapped_proxy_uri = proxy_provider.get_proxy(mapped_value) + if mapped_proxy_uri: + proxy = mapped_proxy_uri + self.log.info(f"Using mapped proxy from 
{proxy_provider.__class__.__name__}: {proxy}") + else: + self.log.warning(f"Failed to get proxy for mapped value '{mapped_value}', using default") + else: + self.log.warning(f"Proxy provider '{proxy_provider_name}' not found, using default proxy") + else: + # No specific provider, try all providers + for proxy_provider in ctx.obj.proxy_providers: + mapped_proxy_uri = proxy_provider.get_proxy(mapped_value) + if mapped_proxy_uri: + proxy = mapped_proxy_uri + self.log.info(f"Using mapped proxy from {proxy_provider.__class__.__name__}: {proxy}") + break + else: + self.log.warning(f"No provider could resolve mapped value '{mapped_value}', using default") if not proxy: # don't override the explicit proxy set by the user, even if they may be geoblocked diff --git a/unshackle/core/services.py b/unshackle/core/services.py index 0ba317f..97f64bf 100644 --- a/unshackle/core/services.py +++ b/unshackle/core/services.py @@ -25,6 +25,17 @@ class Services(click.MultiCommand): # Click-specific methods + @staticmethod + def _get_remote_services(): + """Get remote services from the manager (lazy import to avoid circular dependency).""" + try: + from unshackle.core.remote_services import get_remote_service_manager + + manager = get_remote_service_manager() + return manager.get_all_services() + except Exception: + return {} + def list_commands(self, ctx: click.Context) -> list[str]: """Returns a list of all available Services as command names for Click.""" return Services.get_tags() @@ -51,13 +62,25 @@ class Services(click.MultiCommand): @staticmethod def get_tags() -> list[str]: - """Returns a list of service tags from all available Services.""" - return [x.parent.stem for x in _SERVICES] + """Returns a list of service tags from all available Services (local + remote).""" + local_tags = [x.parent.stem for x in _SERVICES] + remote_services = Services._get_remote_services() + remote_tags = list(remote_services.keys()) + return local_tags + remote_tags @staticmethod def get_path(name: 
str) -> Path: """Get the directory path of a command.""" tag = Services.get_tag(name) + + # Check if it's a remote service + remote_services = Services._get_remote_services() + if tag in remote_services: + # Remote services don't have local paths + # Return a dummy path or raise an appropriate error + # For now, we'll raise KeyError to indicate no path exists + raise KeyError(f"Remote service '{tag}' has no local path") + for service in _SERVICES: if service.parent.stem == tag: return service.parent @@ -72,19 +95,38 @@ class Services(click.MultiCommand): """ original_value = value value = value.lower() + + # Check local services for path in _SERVICES: tag = path.parent.stem if value in (tag.lower(), *_ALIASES.get(tag, [])): return tag + + # Check remote services + remote_services = Services._get_remote_services() + for tag, service_class in remote_services.items(): + if value == tag.lower(): + return tag + if hasattr(service_class, "ALIASES"): + if value in (alias.lower() for alias in service_class.ALIASES): + return tag + return original_value @staticmethod def load(tag: str) -> Service: - """Load a Service module by Service tag.""" + """Load a Service module by Service tag (local or remote).""" + # Check local services first module = _MODULES.get(tag) - if not module: - raise KeyError(f"There is no Service added by the Tag '{tag}'") - return module + if module: + return module + + # Check remote services + remote_services = Services._get_remote_services() + if tag in remote_services: + return remote_services[tag] + + raise KeyError(f"There is no Service added by the Tag '{tag}'") __all__ = ("Services",) diff --git a/unshackle/core/utilities.py b/unshackle/core/utilities.py index 5aaf6f0..69322e5 100644 --- a/unshackle/core/utilities.py +++ b/unshackle/core/utilities.py @@ -19,6 +19,7 @@ from urllib.parse import ParseResult, urlparse from uuid import uuid4 import chardet +import pycountry import requests from construct import ValidationError from fontTools import 
# Common country code aliases that differ from ISO 3166-1 alpha-2
COUNTRY_CODE_ALIASES = {
    "uk": "gb",  # United Kingdom -> Great Britain
}


def get_country_name(code: str) -> Optional[str]:
    """
    Convert a 2-letter country code to full country name.

    Args:
        code: ISO 3166-1 alpha-2 country code (e.g., 'ca', 'us', 'gb', 'uk')

    Returns:
        Full country name (e.g., 'Canada', 'United States', 'United Kingdom') or None if not found

    Examples:
        >>> get_country_name('ca')
        'Canada'
        >>> get_country_name('US')
        'United States'
        >>> get_country_name('uk')
        'United Kingdom'
    """
    # Normalize common aliases (e.g., 'uk' -> 'gb') before the ISO lookup.
    code = COUNTRY_CODE_ALIASES.get(code.lower(), code.lower())

    try:
        country = pycountry.countries.get(alpha_2=code.upper())
        if country:
            return country.name
    except (KeyError, LookupError):
        pass
    return None


def get_country_code(name: str) -> Optional[str]:
    """
    Convert a country name to its 2-letter ISO 3166-1 alpha-2 code.

    Exact lookups are tried with the name as given and title-cased (so both
    'united states' and canonical names like 'United States of America' match
    without falling back to fuzzy search), then by common name, and finally
    via pycountry's fuzzy search.

    Args:
        name: Full country name (e.g., 'Canada', 'United States', 'United Kingdom')

    Returns:
        2-letter country code in uppercase (e.g., 'CA', 'US', 'GB') or None if not found

    Examples:
        >>> get_country_code('Canada')
        'CA'
        >>> get_country_code('united states')
        'US'
        >>> get_country_code('United Kingdom')
        'GB'
    """
    try:
        # Try exact matches first: as given, then title-cased. Title-casing
        # alone would break canonical names such as 'United States of America'
        # ('of' -> 'Of'), so both spellings are attempted.
        for candidate in dict.fromkeys((name, name.title())):
            country = pycountry.countries.get(name=candidate)
            if country:
                return country.alpha_2.upper()

            # Try common name (e.g., "Bolivia" vs "Bolivia, Plurinational State of")
            country = pycountry.countries.get(common_name=candidate)
            if country:
                return country.alpha_2.upper()

        # Fuzzy search as a last resort (raises LookupError on no match).
        results = pycountry.countries.search_fuzzy(name)
        if results:
            return results[0].alpha_2.upper()
    except (KeyError, LookupError):
        pass
    return None
random selection + # NEW: Configuration overrides (can be combined with profiles and certificates) # Override dl command defaults for this service dl: @@ -478,8 +491,15 @@ proxy_providers: nordvpn: username: username_from_service_credentials password: password_from_service_credentials + # server_map: global mapping that applies to ALL services + # Difference from service-specific proxy_map: + # - server_map: applies to ALL services when --proxy nordvpn:us is used + # - proxy_map: only applies to the specific service configured (see services: EXAMPLE: proxy_map above) + # - proxy_map takes precedence over server_map for that service server_map: us: 12 # force US server #12 for US proxies + ca:calgary: 2534 # force CA server #2534 for Calgary proxies + us:seattle: 7890 # force US server #7890 for Seattle proxies surfsharkvpn: username: your_surfshark_service_username # Service credentials from https://my.surfshark.com/vpn/manual-setup/main/openvpn password: your_surfshark_service_password # Service credentials (not your login password) @@ -487,12 +507,81 @@ proxy_providers: us: 3844 # force US server #3844 for US proxies gb: 2697 # force GB server #2697 for GB proxies au: 4621 # force AU server #4621 for AU proxies + us:seattle: 5678 # force US server #5678 for Seattle proxies + ca:toronto: 1234 # force CA server #1234 for Toronto proxies windscribevpn: username: your_windscribe_username # Service credentials from https://windscribe.com/getconfig/openvpn password: your_windscribe_password # Service credentials (not your login password) server_map: us: "us-central-096.totallyacdn.com" # force US server gb: "uk-london-055.totallyacdn.com" # force GB server + us:seattle: "us-west-011.totallyacdn.com" # force US Seattle server + ca:toronto: "ca-toronto-012.totallyacdn.com" # force CA Toronto server + + # Gluetun: Dynamic Docker-based VPN proxy (supports 50+ VPN providers) + # Creates Docker containers running Gluetun to bridge VPN connections to HTTP proxies + # Requires 
Docker to be installed and running + # Usage: --proxy gluetun:windscribe:us or --proxy gluetun:nordvpn:de + gluetun: + # Global settings + base_port: 8888 # Starting port for HTTP proxies (increments for each container) + auto_cleanup: true # Automatically remove containers when done + container_prefix: "unshackle-gluetun" # Docker container name prefix + verify_ip: true # Verify VPN IP matches expected region + # Optional HTTP proxy authentication (for the proxy itself, not VPN) + # auth_user: proxy_user + # auth_password: proxy_password + + # VPN provider configurations + providers: + # Windscribe (WireGuard) - Get credentials from https://windscribe.com/getconfig/wireguard + windscribe: + vpn_type: wireguard + credentials: + private_key: "YOUR_WIREGUARD_PRIVATE_KEY" + addresses: "YOUR_WIREGUARD_ADDRESS" # e.g., "10.x.x.x/32" + # Map friendly names to country codes + server_countries: + us: US + uk: GB + ca: CA + de: DE + + # NordVPN (OpenVPN) - Get service credentials from https://my.nordaccount.com/dashboard/nordvpn/manual-configuration/ + # Note: Service credentials are NOT your email+password - generate them from the link above + # nordvpn: + # vpn_type: openvpn + # credentials: + # username: "YOUR_NORDVPN_SERVICE_USERNAME" + # password: "YOUR_NORDVPN_SERVICE_PASSWORD" + # server_countries: + # us: US + # uk: GB + + # ExpressVPN (OpenVPN) - Get credentials from ExpressVPN setup page + # expressvpn: + # vpn_type: openvpn + # credentials: + # username: "YOUR_EXPRESSVPN_USERNAME" + # password: "YOUR_EXPRESSVPN_PASSWORD" + # server_countries: + # us: US + # uk: GB + + # Surfshark (WireGuard) - Get credentials from https://my.surfshark.com/vpn/manual-setup/main/wireguard + # surfshark: + # vpn_type: wireguard + # credentials: + # private_key: "YOUR_SURFSHARK_PRIVATE_KEY" + # addresses: "YOUR_SURFSHARK_ADDRESS" + # server_countries: + # us: US + # uk: GB + + # Specific server selection: Use format like "us1239" to select specific servers + # Example: --proxy 
gluetun:nordvpn:us1239 connects to us1239.nordvpn.com + # Supported providers: nordvpn, surfshark, expressvpn, cyberghost + basic: GB: - "socks5://username:password@bhx.socks.ipvanish.com:1080" # 1 (Birmingham) diff --git a/uv.lock b/uv.lock index c426e06..b1fa8a6 100644 --- a/uv.lock +++ b/uv.lock @@ -1070,6 +1070,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/c7/d13c57e5a3408df2e5d910853e957ec8e253b41ba531e0f32036c8321240/pycaption-2.2.19-py3-none-any.whl", hash = "sha256:7eb84a05d40bb80400689f9431d05d8b77dec6535938b419ebed2c9d67283a4f", size = 124970, upload-time = "2025-09-30T07:15:21.945Z" }, ] +[[package]] +name = "pycountry" +version = "24.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/57/c389fa68c50590881a75b7883eeb3dc15e9e73a0fdc001cdd45c13290c92/pycountry-24.6.1.tar.gz", hash = "sha256:b61b3faccea67f87d10c1f2b0fc0be714409e8fcdcc1315613174f6466c10221", size = 6043910, upload-time = "2024-06-01T04:12:15.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/ec/1fb891d8a2660716aadb2143235481d15ed1cbfe3ad669194690b0604492/pycountry-24.6.1-py3-none-any.whl", hash = "sha256:f1a4fb391cd7214f8eefd39556d740adcc233c778a27f8942c8dca351d6ce06f", size = 6335189, upload-time = "2024-06-01T04:11:49.711Z" }, +] + [[package]] name = "pycparser" version = "2.22" @@ -1585,6 +1594,7 @@ dependencies = [ { name = "pproxy" }, { name = "protobuf" }, { name = "pycaption" }, + { name = "pycountry" }, { name = "pycryptodomex" }, { name = "pyexecjs" }, { name = "pyjwt" }, @@ -1638,6 +1648,7 @@ requires-dist = [ { name = "pproxy", specifier = ">=2.7.9,<3" }, { name = "protobuf", specifier = ">=4.25.3,<7" }, { name = "pycaption", specifier = ">=2.2.6,<3" }, + { name = "pycountry", specifier = ">=24.6.1" }, { name = "pycryptodomex", specifier = ">=3.20.0,<4" }, { name = "pyexecjs", specifier = ">=1.5.1,<2" }, { name = "pyjwt", specifier = ">=2.8.0,<3" },