class MSLObject:
    """Base type for MSL data objects; repr renders the object's fields as JSON."""

    def __repr__(self):
        # jsonpickle flattens the instance attributes into plain JSON for debugging.
        return f"<{self.__class__.__name__} {jsonpickle.encode(self, unpicklable=False)}>"


class MSLKeys(MSLObject):
    """Container for the crypto material negotiated during an MSL handshake."""

    def __init__(self, encryption=None, sign=None, rsa=None, mastertoken=None, cdm_session=None):
        self.encryption = encryption    # AES encryption key
        self.sign = sign                # HMAC signing key
        self.rsa = rsa                  # ephemeral RSA key pair (asymmetric-wrapped exchange)
        self.mastertoken = mastertoken  # Netflix-issued master token
        self.cdm_session = cdm_session  # Widevine CDM session, when that scheme is used
class MSL:
    """
    Netflix MSL (Message Security Layer) client.

    Performs the key-exchange handshake, then encrypts, signs, sends and
    parses MSL messages exchanged with Netflix endpoints.
    https://github.com/Netflix/msl/wiki
    """

    log = logging.getLogger("MSL")

    def __init__(self, session, endpoint, sender, keys, message_id, user_auth=None):
        """
        :param session: requests Session used for all MSL HTTP calls
        :param endpoint: MSL endpoint URL
        :param sender: ESN identifying this client
        :param keys: negotiated MSLKeys (encryption/sign keys + master token)
        :param message_id: last used message id; incremented per message
        :param user_auth: optional user authentication data
        """
        self.session = session
        self.endpoint = endpoint
        self.sender = sender
        self.keys = keys
        self.user_auth = user_auth
        self.message_id = message_id

    @classmethod
    def handshake(cls, scheme: KeyExchangeSchemes, session: requests.Session, endpoint: str, sender: str, cache: Cacher):
        """
        Perform the MSL key-exchange handshake, or restore keys from cache.

        :param scheme: key exchange scheme; only AsymmetricWrapped is implemented
        :param session: requests Session to POST the handshake with
        :param endpoint: MSL endpoint URL
        :param sender: ESN
        :param cache: Cacher used to persist negotiated keys per sender
        :raises RuntimeError: if the exchange fails or the scheme mismatches
        """
        cache = cache.get(sender)
        message_id = random.randint(0, pow(2, 52))
        msl_keys = MSL.load_cache_data(cache)

        if msl_keys is not None:
            cls.log.info("Using cached MSL data")
        else:
            msl_keys = MSLKeys()
            if scheme != KeyExchangeSchemes.Widevine:
                msl_keys.rsa = RSA.generate(2048)

            # The Widevine key-exchange scheme is currently not implemented;
            # only asymmetric-wrapped exchange via an ephemeral RSA key pair.
            keyrequestdata = KeyExchangeRequest.AsymmetricWrapped(
                keypairid="superKeyPair",
                mechanism="JWK_RSA",
                publickey=msl_keys.rsa.publickey().exportKey(format="DER")
            )

            data = jsonpickle.encode({
                "entityauthdata": EntityAuthentication.Unauthenticated(sender),
                "headerdata": base64.b64encode(MSL.generate_msg_header(
                    message_id=message_id,
                    sender=sender,
                    is_handshake=True,
                    keyrequestdata=keyrequestdata
                ).encode("utf-8")).decode("utf-8"),
                "signature": ""
            }, unpicklable=False)
            data += json.dumps({
                "payload": base64.b64encode(json.dumps({
                    "messageid": message_id,
                    "data": "",
                    "sequencenumber": 1,
                    "endofmsg": True
                }).encode("utf-8")).decode("utf-8"),
                "signature": ""
            })

            try:
                r = session.post(
                    url=endpoint,
                    data=data
                )
            except requests.HTTPError as e:
                # BUGFIX: logging.Logger has no exit(); `raise cls.log.exit(...)`
                # raised AttributeError and masked the real failure.
                cls.log.error("Key exchange failed, response data is unexpected: %s", e.response.text)
                raise RuntimeError("MSL key exchange failed") from e

            key_exchange = r.json()  # expecting no payloads, so this is fine
            if "errordata" in key_exchange:
                error_msg = json.loads(base64.b64decode(key_exchange["errordata"]).decode())["errormsg"]
                cls.log.error("Key exchange failed: %s", error_msg)
                raise RuntimeError(f"MSL key exchange failed: {error_msg}")

            # parse the crypto keys
            key_response_data = json.loads(base64.b64decode(
                key_exchange["headerdata"]
            ).decode("utf-8"))["keyresponsedata"]

            if key_response_data["scheme"] != str(scheme):
                raise RuntimeError("MSL key exchange scheme mismatch occurred")

            key_data = key_response_data["keydata"]
            # The returned session keys are RSA-OAEP-wrapped JWKs; unwrap them
            # with the ephemeral private key generated above.
            cipher_rsa = PKCS1_OAEP.new(msl_keys.rsa)
            msl_keys.encryption = MSL.base64key_decode(
                json.loads(cipher_rsa.decrypt(
                    base64.b64decode(key_data["encryptionkey"])
                ).decode("utf-8"))["k"]
            )
            msl_keys.sign = MSL.base64key_decode(
                json.loads(cipher_rsa.decrypt(
                    base64.b64decode(key_data["hmackey"])
                ).decode("utf-8"))["k"]
            )
            msl_keys.mastertoken = key_response_data["mastertoken"]

            MSL.cache_keys(msl_keys, cache)
        cls.log.info("MSL handshake successful")
        return cls(
            session=session,
            endpoint=endpoint,
            sender=sender,
            keys=msl_keys,
            message_id=message_id
        )

    @staticmethod
    def load_cache_data(cacher: Cacher):
        """
        Load previously negotiated MSL keys from cache.

        Returns None when no usable cache exists, or when the master token has
        expired or is within ~10 hours of expiring.
        """
        if not cacher or cacher == {}:
            return None
        msl_keys = jsonpickle.decode(cacher.data)
        if msl_keys.rsa:
            # noinspection PyTypeChecker
            # jsonpickle cannot pickle an RsaKey object, so the key is cached as
            # exported PEM and re-imported into an RsaKey here.
            msl_keys.rsa = RSA.importKey(msl_keys.rsa)
        if msl_keys.mastertoken:
            expiration = datetime.utcfromtimestamp(int(json.loads(
                base64.b64decode(msl_keys.mastertoken["tokendata"]).decode("utf-8")
            )["expiration"]))
            # BUGFIX: compare UTC against UTC; the original subtracted the
            # local-time datetime.now() from a UTC timestamp, skewing the
            # expiry check by the local UTC offset.
            if (expiration - datetime.utcnow()).total_seconds() / 60 / 60 < 10:
                # Expired or close to it; unusable.
                return None
        return msl_keys

    @staticmethod
    def cache_keys(msl_keys, cache: Cacher):
        """Persist the negotiated MSL keys via the Cacher."""
        if msl_keys.rsa:
            # jsonpickle can't pickle RsaKey objects, so export to PEM first.
            msl_keys.rsa = msl_keys.rsa.export_key()
        cache.set(jsonpickle.encode(msl_keys))
        if msl_keys.rsa:
            # re-import now so the in-memory object stays usable
            msl_keys.rsa = RSA.importKey(msl_keys.rsa)

    @staticmethod
    def generate_msg_header(message_id, sender, is_handshake, userauthdata=None, keyrequestdata=None,
                            compression="GZIP"):
        """
        The MSL header carries all MSL data used for entity and user authentication, message encryption
        and verification, and service tokens. Portions of the MSL header are encrypted.
        https://github.com/Netflix/msl/wiki/Messages#header-data

        :param message_id: number against which payload chunks are bound to protect against replay.
        :param sender: ESN
        :param is_handshake: This flag is set true if the message is a handshake message and will not include any
            payload chunks. It will include keyrequestdata.
        :param userauthdata: UserAuthData
        :param keyrequestdata: KeyRequestData
        :param compression: Supported compression algorithms.

        :return: JSON string of the header data
        """
        header_data = {
            "messageid": message_id,
            "renewable": True,  # MUST be True if is_handshake
            "handshake": is_handshake,
            "capabilities": {
                "compressionalgos": [compression] if compression else [],
                "languages": ["en-US"],  # bcp-47
                "encoderformats": ["JSON"]
            },
            "timestamp": int(time.time()),
            # undocumented or unused:
            "sender": sender,
            "nonreplayable": False,
            "recipient": "Netflix",
        }
        if userauthdata:
            header_data["userauthdata"] = userauthdata
        if keyrequestdata:
            header_data["keyrequestdata"] = [keyrequestdata]
        return jsonpickle.encode(header_data, unpicklable=False)

    @classmethod
    def get_widevine_key(cls, kid, keys, permissions):
        """
        Find the OPERATOR_SESSION key matching `kid` that has all `permissions`.

        Returns the key bytes, or None if no suitable key is present.
        """
        for key in keys:
            if key.kid != kid:
                continue
            if key.type != "OPERATOR_SESSION":
                cls.log.warning(f"Widevine Key Exchange: Wrong key type (not operator session) key {key}")
                continue
            if not set(permissions) <= set(key.permissions):
                cls.log.warning(f"Widevine Key Exchange: Incorrect permissions, key {key}, needed perms {permissions}")
                continue
            return key.key
        return None

    def send_message(self, endpoint, params, application_data, userauthdata=None):
        """
        Encrypt and send an MSL message, returning the parsed (header, payload).

        :raises RuntimeError: if the response header contains errordata
        """
        message = self.create_message(application_data, userauthdata)
        res = self.session.post(url=endpoint, data=message, params=params)
        header, payload_data = self.parse_message(res.text)
        if "errordata" in header:
            error = json.loads(base64.b64decode(header["errordata"].encode("utf-8")).decode("utf-8"))
            # BUGFIX: self.log.exit() does not exist on logging.Logger and
            # raised AttributeError; log the error and raise explicitly.
            self.log.error("MSL response message contains an error: %s", error)
            raise RuntimeError(f"MSL response message contains an error: {error}")
        return header, payload_data

    def create_message(self, application_data, userauthdata=None):
        """Build a signed+encrypted MSL message (header + one payload chunk)."""
        self.message_id += 1  # a new message must use a new message id
        headerdata = self.encrypt(self.generate_msg_header(
            message_id=self.message_id,
            sender=self.sender,
            is_handshake=False,
            userauthdata=userauthdata
        ))

        header = json.dumps({
            "headerdata": base64.b64encode(headerdata.encode("utf-8")).decode("utf-8"),
            "signature": self.sign(headerdata).decode("utf-8"),
            "mastertoken": self.keys.mastertoken
        })

        payload_chunks = [self.encrypt(json.dumps({
            "messageid": self.message_id,
            "data": self.gzip_compress(json.dumps(application_data).encode("utf-8")).decode("utf-8"),
            "compressionalgo": "GZIP",
            "sequencenumber": 1,  # todo ; use sequence_number from master token instead?
            "endofmsg": True
        }))]

        message = header
        for payload_chunk in payload_chunks:
            message += json.dumps({
                "payload": base64.b64encode(payload_chunk.encode("utf-8")).decode("utf-8"),
                "signature": self.sign(payload_chunk).decode("utf-8")
            })

        return message

    def decrypt_payload_chunks(self, payload_chunks):
        """
        Decrypt and extract data from payload chunks

        :param payload_chunks: List of payload chunks
        :return: the "result" object of the decrypted response
        """
        raw_data = ""

        for payload_chunk in payload_chunks:
            # todo ; verify signature of payload_chunk["signature"] against payload_chunk["payload"]
            # expecting base64-encoded json string
            payload_chunk = json.loads(base64.b64decode(payload_chunk["payload"]).decode("utf-8"))
            # decrypt the payload with the session AES key
            payload_decrypted = AES.new(
                key=self.keys.encryption,
                mode=AES.MODE_CBC,
                iv=base64.b64decode(payload_chunk["iv"])
            ).decrypt(base64.b64decode(payload_chunk["ciphertext"]))
            payload_decrypted = Padding.unpad(payload_decrypted, 16)
            payload_decrypted = json.loads(payload_decrypted.decode("utf-8"))
            # decode and uncompress data if compressed
            payload_data = base64.b64decode(payload_decrypted["data"])
            if payload_decrypted.get("compressionalgo") == "GZIP":
                # 16 + MAX_WBITS tells zlib to expect a gzip header
                payload_data = zlib.decompress(payload_data, 16 + zlib.MAX_WBITS)
            raw_data += payload_data.decode("utf-8")

        data = json.loads(raw_data)
        if "error" in data:
            error = data["error"]
            error_display = error.get("display")
            # strip Netflix-internal error codes like "(E3-xxxx)" from the detail
            error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

            if error_display:
                self.log.critical(f"- {error_display}")
            if error_detail:
                self.log.critical(f"- {error_detail}")

            if not (error_display or error_detail):
                self.log.critical(f"- {error}")

            # NOTE: deliberately does not exit here; falls through to "result"
            # which will KeyError if the response carried no result at all.

        return data["result"]

    def parse_message(self, message):
        """
        Parse an MSL message into a header and list of payload chunks

        :param message: MSL message
        :returns: a 2-item tuple containing message and list of payload chunks if available
        """
        # MSL responses are concatenated JSON objects; wrap them into an array
        parsed_message = json.loads("[{}]".format(message.replace("}{", "},{")))

        header = parsed_message[0]
        encrypted_payload_chunks = parsed_message[1:] if len(parsed_message) > 1 else []
        if encrypted_payload_chunks:
            payload_chunks = self.decrypt_payload_chunks(encrypted_payload_chunks)
        else:
            payload_chunks = {}

        return header, payload_chunks

    @staticmethod
    def gzip_compress(data):
        """Gzip-compress `data` and return it base64-encoded."""
        out = BytesIO()
        with gzip.GzipFile(fileobj=out, mode="w") as fd:
            fd.write(data)
        return base64.b64encode(out.getvalue())

    @staticmethod
    def base64key_decode(payload):
        """Decode an (unpadded) base64url JWK key value into raw bytes."""
        length = len(payload) % 4
        if length == 2:
            payload += "=="
        elif length == 3:
            payload += "="
        elif length != 0:
            raise ValueError("Invalid base64 string")
        return base64.urlsafe_b64decode(payload.encode("utf-8"))

    def encrypt(self, plaintext):
        """
        Encrypt the given Plaintext with the encryption key
        :param plaintext:
        :return: Serialized JSON String of the encryption Envelope
        """
        iv = get_random_bytes(16)
        return json.dumps({
            "ciphertext": base64.b64encode(
                AES.new(
                    self.keys.encryption,
                    AES.MODE_CBC,
                    iv
                ).encrypt(
                    Padding.pad(plaintext.encode("utf-8"), 16)
                )
            ).decode("utf-8"),
            # key id is "<esn>_<mastertoken sequencenumber>"
            "keyid": "{}_{}".format(self.sender, json.loads(
                base64.b64decode(self.keys.mastertoken["tokendata"]).decode("utf-8")
            )["sequencenumber"]),
            "sha256": "AA==",
            "iv": base64.b64encode(iv).decode("utf-8")
        })

    def sign(self, text):
        """
        Calculates the HMAC signature for the given text with the current sign key and SHA256
        :param text:
        :return: Base64 encoded signature
        """
        return base64.b64encode(HMAC.new(self.keys.sign, text.encode("utf-8"), SHA256).digest())
+ """ + return cls( + scheme=EntityAuthenticationSchemes.Unauthenticated, + authdata={"identity": identity} + ) + + @classmethod + def Widevine(cls, devtype, keyrequest): + """ + The Widevine entity authentication scheme is used by devices with the Widevine CDM. It does not provide + encryption or authentication and only identifies the entity. Therefore entity identities can be harvested + and spoofed. The entity identity is composed from the provided device type and Widevine key request data. The + Widevine CDM properties can be extracted from the key request data. + + When coupled with the Widevine key exchange scheme, the entity identity can be cryptographically validated by + comparing the entity authentication key request data against the key exchange key request data. + + Note that the local entity will not know its entity identity when using this scheme. + + > Devtype + + An arbitrary value identifying the device type the local entity wishes to assume. The data inside the Widevine + key request may be optionally used to validate the claimed device type. + + :param devtype: Local entity device type + :param keyrequest: Widevine key request + """ + return cls( + scheme=EntityAuthenticationSchemes.Widevine, + authdata={ + "devtype": devtype, + "keyrequest": keyrequest + } + ) diff --git a/unshackle/services/Netflix/MSL/schemes/KeyExchangeRequest.py b/unshackle/services/Netflix/MSL/schemes/KeyExchangeRequest.py new file mode 100644 index 0000000..4c3dda5 --- /dev/null +++ b/unshackle/services/Netflix/MSL/schemes/KeyExchangeRequest.py @@ -0,0 +1,80 @@ +import base64 + +from .. import KeyExchangeSchemes +from ..MSLObject import MSLObject + + +# noinspection PyPep8Naming +class KeyExchangeRequest(MSLObject): + def __init__(self, scheme, keydata): + """ + Session key exchange data from a requesting entity. 
# noinspection PyPep8Naming
class KeyExchangeRequest(MSLObject):
    """
    Session key exchange data from a requesting entity.
    https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29
    """

    def __init__(self, scheme, keydata):
        """
        :param scheme: Key Exchange Scheme identifier
        :param keydata: Key Request data
        """
        # Scheme enums serialize to their wire identifier string.
        self.scheme = str(scheme)
        self.keydata = keydata

    @classmethod
    def AsymmetricWrapped(cls, keypairid, mechanism, publickey):
        """
        Exchange session keys wrapped with a generated ephemeral asymmetric
        key pair.

        Typically used when there is no other key material to base secure key
        exchange on. Provides perfect forward secrecy, but cannot guarantee
        the session keys are only available to the requesting entity if the
        requesting MSL stack acts on behalf of a third party.

        The key pair ID is included as a sanity check. Supported mechanisms
        (all with SPKI public key format):

          RSA       RSA-OAEP encrypt/decrypt
          ECC       ECIES encrypt/decrypt
          JWEJS_RSA RSA-OAEP JSON Web Encryption JSON Serialization
          JWE_RSA   RSA-OAEP JSON Web Encryption Compact Serialization
          JWK_RSA   RSA-OAEP JSON Web Key
          JWK_RSAES RSA PKCS#1 JSON Web Key

        :param keypairid: key pair ID
        :param mechanism: asymmetric key type
        :param publickey: public key (raw DER bytes; base64-encoded on the wire)
        """
        keydata = {
            "keypairid": keypairid,
            "mechanism": mechanism,
            "publickey": base64.b64encode(publickey).decode("utf-8"),
        }
        return cls(scheme=KeyExchangeSchemes.AsymmetricWrapped, keydata=keydata)

    @classmethod
    def Widevine(cls, keyrequest):
        """
        Exchange session keys via a one-time Widevine CDM key request.

        The Widevine server library authenticates the request and returns
        randomly generated symmetric keys in a protected response bound to the
        request and client library, which shields the keys from inspection or
        misuse. The request also carries a model identifier and a long-lived
        unique device identifier retrievable server-side.

        :param keyrequest: Widevine CDM license challenge, as raw bytes or an
            already base64-encoded string (PSSH: b'\\x0A\\x7A\\x00\\x6C\\x38\\x2B')
        """
        if not isinstance(keyrequest, str):
            keyrequest = base64.b64encode(keyrequest).decode()
        return cls(scheme=KeyExchangeSchemes.Widevine, keydata={"keyrequest": keyrequest})
# noinspection PyPep8Naming
class UserAuthentication(MSLObject):
    """
    Data used to identify and authenticate the user associated with a message.
    https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29
    """

    def __init__(self, scheme, authdata):
        """
        :param scheme: User Authentication Scheme identifier
        :param authdata: User Authentication data
        """
        # Scheme enums serialize to their wire identifier string.
        self.scheme = str(scheme)
        self.authdata = authdata

    @classmethod
    def EmailPassword(cls, email, password):
        """
        Authenticate with a plain email address and password pair.

        :param email: user email address
        :param password: user password
        """
        credentials = {
            "email": email,
            "password": password,
        }
        return cls(scheme=UserAuthenticationSchemes.EmailPassword, authdata=credentials)

    @classmethod
    def NetflixIDCookies(cls, netflixid, securenetflixid):
        """
        Authenticate with the Netflix ID HTTP cookies issued after web login.

        The Netflix ID cookie is encrypted, identifies the subscriber, and is
        analogous to a username; the Secure Netflix ID cookie is tied to it,
        only sent over HTTPS, and analogous to a password. Possession of both
        proves user identity just as it does toward the web site.

        If either cookie is absent from this structure, the HTTP cookie
        headers will be queried for it; that is only acceptable when HTTPS is
        the underlying transport.

        :param netflixid: Netflix ID cookie
        :param securenetflixid: Secure Netflix ID cookie
        """
        cookies = {
            "netflixid": netflixid,
            "securenetflixid": securenetflixid,
        }
        return cls(scheme=UserAuthenticationSchemes.NetflixIDCookies, authdata=cookies)
from enum import Enum


class Scheme(Enum):
    """Base for MSL scheme enums; str() yields the scheme's wire identifier."""

    def __str__(self):
        # All members use string values, so return the value directly.
        return self.value


class EntityAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Entity-Authentication-%28Configuration%29"""
    Unauthenticated = "NONE"
    Widevine = "WIDEVINE"


class UserAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29"""
    EmailPassword = "EMAIL_PASSWORD"
    NetflixIDCookies = "NETFLIXID"


class KeyExchangeSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29"""
    AsymmetricWrapped = "ASYMMETRIC_WRAPPED"
    Widevine = "WIDEVINE"
class Netflix(Service):
    """
    Service for https://netflix.com
    Version: 1.0.0

    Authorization: Cookies
    Security: UHD@SL3000/L1 FHD@SL3000/L1
    """
    # BUGFIX: the named groups were written as "(?P\d+)" which is invalid
    # regex syntax and raises re.error on compile; restored "(?P<id>\d+)".
    TITLE_RE = [
        r"^(?:https?://(?:www\.)?netflix\.com(?:/[a-z0-9]{2})?/(?:title/|watch/|.+jbv=))?(?P<id>\d+)",
        r"^https?://(?:www\.)?unogs\.com/title/(?P<id>\d+)",
    ]
    ALIASES = ("NF", "Netflix")
    # Netflix uses some bare language tags that must be widened for matching.
    NF_LANG_MAP = {
        "es": "es-419",
        "pt": "pt-PT",
    }

    @staticmethod
    @click.command(name="Netflix", short_help="https://netflix.com")
    @click.argument("title", type=str)
    @click.option("-drm", "--drm-system", type=click.Choice(["widevine", "playready"], case_sensitive=False),
                  default="widevine",
                  help="which drm system to use")
    @click.option("-p", "--profile",
                  type=click.Choice(["MPL", "HPL", "QC", "MPL+HPL", "MPL+HPL+QC", "MPL+QC"], case_sensitive=False),
                  default=None,
                  help="H.264 profile to use. Default is best available.")
    @click.option("--meta-lang", type=str, help="Language to use for metadata")
    @click.option("-ht", "--hydrate-track", is_flag=True, default=False, help="Hydrate missing audio and subtitle.")
    @click.option("-hb", "--high-bitrate", is_flag=True, default=False, help="Get more video bitrate")
    @click.pass_context
    def cli(ctx, **kwargs):
        return Netflix(ctx, **kwargs)

    def __init__(self, ctx: click.Context, title: str, drm_system: Literal["widevine", "playready"], profile: str,
                 meta_lang: str, hydrate_track: bool, high_bitrate: bool):
        super().__init__(ctx)
        # General
        self.title = title
        self.profile = profile
        self.meta_lang = meta_lang
        self.hydrate_track = hydrate_track
        self.drm_system = drm_system
        self.profiles: List[str] = []
        self.requested_profiles: List[str] = []
        self.high_bitrate = high_bitrate

        # MSL
        self.esn = self.cache.get("ESN")
        self.msl: Optional[MSL] = None
        self.userauthdata = None

        # Download options (inherited from the parent dl command)
        self.range = ctx.parent.params.get("range_") or [Video.Range.SDR]
        self.vcodec = ctx.parent.params.get("vcodec") or Video.Codec.AVC  # Defaults to H264
        self.acodec: Audio.Codec = ctx.parent.params.get("acodec") or Audio.Codec.EC3
        self.quality: List[int] = ctx.parent.params.get("quality")
        self.audio_only = ctx.parent.params.get("audio_only")
        self.subs_only = ctx.parent.params.get("subs_only")
        self.chapters_only = ctx.parent.params.get("chapters_only")

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Authenticate with cookies, then configure profiles and the MSL client."""
        self.log.debug("Authenticating Netflix service")
        auth = super().authenticate(cookies, credential)
        if not cookies:
            raise EnvironmentError("Service requires Cookies for Authentication.")
        self.configure()
        return auth

    def get_titles(self) -> Titles_T:
        """Fetch metadata for self.title and build Movie/Series title objects."""
        metadata = self.get_metadata(self.title)
        if "video" not in metadata:
            self.log.error(f"Failed to get metadata: {metadata}")
            sys.exit(1)
        if metadata["video"]["type"] == "movie":
            movie = Movie(
                id_=self.title,
                name=metadata["video"]["title"],
                year=metadata["video"]["year"],
                service=self.__class__,
                data=metadata["video"],
                description=metadata["video"]["synopsis"]
            )
            # The original language is only exposed through a manifest request.
            movie.language = self.get_original_language(self.get_manifest(movie, self.profiles))
            return Movies([movie])

        episode_list: List[Episode] = []
        for season in metadata["video"]["seasons"]:
            for ep in season["episodes"]:
                episode = Episode(
                    id_=self.title,
                    title=metadata["video"]["title"],
                    year=season["year"],
                    service=self.__class__,
                    season=season["seq"],
                    number=ep["seq"],
                    name=ep["title"],
                    data=ep,
                    description=ep["synopsis"],
                )
                try:
                    episode.language = self.get_original_language(self.get_manifest(episode, self.profiles))
                    self.log.debug(f"Episode S{episode.season:02d}E{episode.number:02d}: {episode.language}")
                except Exception as e:
                    self.log.warning(
                        f"Failed to get original language for episode S{season['seq']:02d}E{ep['seq']:02d}: {e}")
                    # Fallback: reuse the first successfully determined language,
                    # or default to English if none worked.
                    if episode_list and getattr(episode_list[0], "language", None):
                        episode.language = episode_list[0].language
                    else:
                        episode.language = Language.get("en")
                    self.log.info(f"Using fallback language for episode: {episode.language}")
                episode_list.append(episode)
        return Series(episode_list)

    def get_tracks(self, title: Title_T) -> Tracks:
        """Request manifests for the selected profiles and build the track list."""
        tracks = Tracks()
        # BUGFIX: self.quality may be None when --quality was not passed;
        # "720 in None" raised TypeError.
        quality = self.quality or []

        if self.vcodec == Video.Codec.AVC:
            # H.264: when no explicit profile was requested, the QC and MPL
            # profile sets must be requested in separate manifests.
            try:
                manifest = self.get_manifest(title, self.profiles)
                tracks.add(self.manifest_as_tracks(manifest, title, self.hydrate_track))

                if self.profile is not None:
                    self.log.info(f"Requested profiles: {self.profile}")
                else:
                    qc_profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()]["QC"]
                    if 720 in quality:
                        # 720p does not use the l40 (level 4.0) profiles
                        qc_profiles = [x for x in qc_profiles if "l40" not in x]
                    qc_manifest = self.get_manifest(title, qc_profiles)
                    tracks.add(self.manifest_as_tracks(qc_manifest, title, False).videos)

                    mpl_profiles = [
                        x for x in self.config["profiles"]["video"][self.vcodec.extension.upper()]["MPL"]
                        if "l40" not in x
                    ]
                    mpl_manifest = self.get_manifest(title, mpl_profiles)
                    tracks.add(self.manifest_as_tracks(mpl_manifest, title, False).videos)
            except Exception as e:
                self.log.error(e)
        elif self.high_bitrate:
            # Request each codec-level group separately to coax higher bitrates.
            for index, profile_list in enumerate(self.split_profiles(self.profiles)):
                try:
                    self.log.debug(f"Index: {index}. Getting profiles: {profile_list}")
                    manifest = self.get_manifest(title, profile_list)
                    manifest_tracks = self.manifest_as_tracks(
                        manifest, title, self.hydrate_track if index == 0 else False)
                    # only the first group contributes audio/subs; the rest add video
                    tracks.add(manifest_tracks if index == 0 else manifest_tracks.videos)
                except Exception:
                    self.log.error(f"Error getting profile: {profile_list}. Skipping")
                    continue
        else:
            try:
                manifest = self.get_manifest(title, self.profiles)
                tracks.add(self.manifest_as_tracks(manifest, title, self.hydrate_track))
            except Exception as e:
                self.log.error(e)

        # Attach cover art for the title, if available.
        if isinstance(title, Movie):
            if title.data and title.data.get("boxart"):
                tracks.add(Attachment.from_url(url=title.data["boxart"][0]["url"]))
        elif title.data and title.data.get("stills"):
            tracks.add(Attachment.from_url(title.data["stills"][0]["url"]))

        return tracks

    def split_profiles(self, profiles: List[str]) -> List[List[str]]:
        """
        Group profiles by codec-level substring.

        For H264 the level markers are lowercase ("l30", "l31", "l40"); for
        other codecs uppercase ("L30".."L51"). Only non-empty groups are
        returned, in level order.
        """
        if self.vcodec == Video.Codec.AVC:  # H264
            patterns = ["l30", "l31", "l40"]
        else:
            patterns = ["L30", "L31", "L40", "L41", "L50", "L51"]

        result: List[List[str]] = []
        for pattern in patterns:
            group = [profile for profile in profiles if pattern in profile]
            if group:  # only add non-empty groups
                result.append(group)
        return result

    def get_chapters(self, title: Title_T) -> Chapters:
        """Build Intro/Credits chapters from the title's skip markers."""
        chapters: Chapters = Chapters()

        if not title.data:
            return chapters

        try:
            if "skipMarkers" in title.data and "credit" in title.data["skipMarkers"]:
                credits_marker = title.data["skipMarkers"]["credit"]
                if credits_marker.get("start", 0) > 0 and credits_marker.get("end", 0) > 0:
                    chapters.add(Chapter(
                        timestamp=credits_marker["start"],  # Milliseconds
                        name="Intro"
                    ))
                    # unnamed chapter marking the end of the intro
                    chapters.add(Chapter(
                        timestamp=credits_marker["end"],  # Milliseconds
                    ))

            if "creditsOffset" in title.data and title.data["creditsOffset"] is not None:
                chapters.add(Chapter(
                    timestamp=float(title.data["creditsOffset"]),  # seconds, so cast to float
                    name="Credits"
                ))
        except Exception as e:
            self.log.warning(f"Failed to process chapters: {e}")

        return chapters

    def get_widevine_license(self, *, challenge: bytes, title: Movie | Episode | Song, track: AnyTrack) -> bytes | str | None:
        """Send the Widevine challenge over MSL and return the license response."""
        if not self.msl:
            self.log.error("MSL Client is not initialized!")
            sys.exit(1)
        application_data = {
            "version": 2,
            "url": track.data["license_url"],
            "id": int(time.time() * 10000),
            "esn": self.esn.data,
            "languages": ["en-US"],
            "clientVersion": "6.0026.291.011",
            "params": [{
                "sessionId": base64.b64encode(get_random_bytes(16)).decode("utf-8"),
                "clientTime": int(time.time()),
                "challengeBase64": base64.b64encode(challenge).decode("utf-8"),
                "xid": str(int((int(time.time()) + 0.1612) * 1000)),
            }],
            "echo": "sessionId"
        }
        header, payload_data = self.msl.send_message(
            endpoint=self.config["endpoints"]["license"],
            params={
                "reqAttempt": 1,
                "reqName": "license",
            },
            application_data=application_data,
            userauthdata=self.userauthdata
        )
        if not payload_data:
            self.log.error(f" - Failed to get license: {header['message']} [{header['code']}]")
            sys.exit(1)
        if "error" in payload_data[0]:
            error = payload_data[0]["error"]
            error_display = error.get("display")
            # strip Netflix-internal error codes like "(E3-xxxx)" from the detail
            error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

            if error_display:
                self.log.critical(f" - {error_display}")
            if error_detail:
                self.log.critical(f" - {error_detail}")

            if not (error_display or error_detail):
                self.log.critical(f" - {error}")

            sys.exit(1)
        return payload_data[0]["licenseResponseBase64"]
bytes | str | None: + return None + # return super().get_widevine_license(challenge=challenge, title=title, track=track) + + def configure(self): + # self.log.info(ctx) + # if profile is none from argument let's use them all profile in video codec scope + # self.log.info(f"Requested profiles: {self.profile}") + if self.profile is None: + self.profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()] + + + if self.profile is not None: + self.requested_profiles = self.profile.split('+') + self.log.info(f"Requested profile: {self.requested_profiles}") + else: + # self.log.info(f"Video Range: {self.range}") + self.requested_profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()] + # Make sure video codec is supported by Netflix + if self.vcodec.extension.upper() not in self.config["profiles"]["video"]: + raise ValueError(f"Video Codec {self.vcodec} is not supported by Netflix") + + if self.range[0].name not in list(self.config["profiles"]["video"][self.vcodec.extension.upper()].keys()) and self.vcodec != Video.Codec.AVC and self.vcodec != Video.Codec.VP9: + self.log.error(f"Video range {self.range[0].name} is not supported by Video Codec: {self.vcodec}") + sys.exit(1) + + if len(self.range) > 1: + self.log.error(f"Multiple video range is not supported right now.") + sys.exit(1) + + if self.vcodec == Video.Codec.AVC and self.range[0] != Video.Range.SDR: + self.log.error(f"H.264 Video Codec only supports SDR") + sys.exit(1) + + self.profiles = self.get_profiles() + self.log.info("Intializing a MSL client") + self.get_esn() + scheme = KeyExchangeSchemes.AsymmetricWrapped + self.log.info(f"Scheme: {scheme}") + + + self.msl = MSL.handshake( + scheme=scheme, + session=self.session, + endpoint=self.config["endpoints"]["manifest"], + sender=self.esn.data, + cache=self.cache.get("MSL") + ) + cookie = self.session.cookies.get_dict() + self.userauthdata = UserAuthentication.NetflixIDCookies( + netflixid=cookie["NetflixId"], + 
securenetflixid=cookie["SecureNetflixId"] + ) + + + def get_profiles(self): + result_profiles = [] + + if self.vcodec == Video.Codec.AVC: + if self.requested_profiles is not None: + for requested_profiles in self.requested_profiles: + result_profiles.extend(flatten(list(self.config["profiles"]["video"][self.vcodec.extension.upper()][requested_profiles]))) + return result_profiles + + result_profiles.extend(flatten(list(self.config["profiles"]["video"][self.vcodec.extension.upper()].values()))) + return result_profiles + + # Handle case for codec VP9 + if self.vcodec == Video.Codec.VP9 and self.range[0] != Video.Range.HDR10: + result_profiles.extend(self.config["profiles"]["video"][self.vcodec.extension.upper()].values()) + return result_profiles + for profiles in self.config["profiles"]["video"][self.vcodec.extension.upper()]: + for range in self.range: + if range in profiles: + result_profiles.extend(self.config["profiles"]["video"][self.vcodec.extension.upper()][range.name]) + # sys.exit(1) + self.log.debug(f"Result_profiles: {result_profiles}") + return result_profiles + + def get_esn(self): + ESN_GEN = "".join(random.choice("0123456789ABCDEF") for _ in range(30)) + esn_value = f"NFCDIE-03-{ESN_GEN}" + # Check if ESN is expired or doesn't exist + if self.esn.data is None or self.esn.data == {} or (hasattr(self.esn, 'expired') and self.esn.expired): + # Set new ESN with 6-hour expiration + self.esn.set(esn_value, 1 * 60 * 60) # 6 hours in seconds + self.log.info(f"Generated new ESN with 1-hour expiration") + else: + self.log.info(f"Using cached ESN.") + self.log.info(f"ESN: {self.esn.data}") + + + def get_metadata(self, title_id: str): + """ + Obtain Metadata information about a title by it's ID. + :param title_id: Title's ID. + :returns: Title Metadata. 
+ """ + + try: + metadata = self.session.get( + self.config["endpoints"]["metadata"].format(build_id="release"), + params={ + "movieid": title_id, + "drmSystem": self.config["configuration"]["drm_system"], + "isWatchlistEnabled": False, + "isShortformEnabled": False, + "languages": self.meta_lang + } + ).json() + except requests.HTTPError as e: + if e.response.status_code == 500: + self.log.warning( + " - Recieved a HTTP 500 error while getting metadata, deleting cached reactContext data" + ) + # self.cache. + # os.unlink(self.get_cache("web_data.json")) + # return self.get_metadata(self, title_id) + raise Exception(f"Error getting metadata: {e}") + except json.JSONDecodeError: + self.log.error(" - Failed to get metadata, title might not be available in your region.") + sys.exit(1) + else: + if "status" in metadata and metadata["status"] == "error": + self.log.error( + f" - Failed to get metadata, cookies might be expired. ({metadata['message']})" + ) + sys.exit(1) + return metadata + + def _get_empty_manifest(self): + """Return an empty manifest structure to prevent crashes when manifest retrieval fails""" + return { + "video_tracks": [{ + "streams": [], + "drmHeader": {"bytes": b""} + }], + "audio_tracks": [], + "timedtexttracks": [], + "links": { + "license": {"href": ""} + } + } + + def get_manifest(self, title: Title_T, video_profiles: List[str], required_text_track_id: Optional[str] = None, required_audio_track_id: Optional[str] = None): + try: + # Log context information for debugging + title_id = title.data.get('episodeId', title.data.get('id', 'unknown')) if title.data else 'unknown' + self.log.debug(f"Getting manifest for title_id: {title_id}, video_profiles_count: {len(video_profiles)}, required_audio_track_id: {required_audio_track_id}, required_text_track_id: {required_text_track_id}") + + audio_profiles = self.config["profiles"]["audio"].values() + video_profiles = sorted(set(flatten(as_list( + video_profiles, + audio_profiles, + 
self.config["profiles"]["video"]["H264"]["BPL"] if self.vcodec == Video.Codec.AVC else [], + self.config["profiles"]["subtitles"], + )))) + + + + self.log.debug("Profiles:\n\t" + "\n\t".join(video_profiles)) + + if not self.msl: + self.log.error(f"MSL Client is not initialized for title_id: {title_id}") + return self._get_empty_manifest() + + params = { + "reqAttempt": 1, + "reqPriority": 10, + "reqName": "manifest", + } + _, payload_chunks = self.msl.send_message( + endpoint=self.config["endpoints"]["manifest"], + params=params, + application_data={ + "version": 2, + "url": "manifest", + "id": int(time.time()), + "esn": self.esn.data, + "languages": ["en-US"], + "clientVersion": "6.0026.291.011", + "params": { + "clientVersion": "6.0051.090.911", + "challenge": self.config["payload_challenge_pr"] if self.drm_system == 'playready' else self.config["payload_challenge"], + "challanges": { + "default": self.config["payload_challenge_pr"] if self.drm_system == 'playready' else self.config["payload_challenge"] + }, + "contentPlaygraph": ["v2"], + "deviceSecurityLevel": "3000", + "drmVersion": 25, + "desiredVmaf": "plus_lts", + "desiredSegmentVmaf": "plus_lts", + "flavor": "STANDARD", # ? PRE_FETCH, SUPPLEMENTAL + "drmType": self.drm_system, + "imageSubtitleHeight": 1080, + "isBranching": False, + "isNonMember": False, + "isUIAutoPlay": False, + "licenseType": "standard", + "liveAdsCapability": "replace", + "liveMetadataFormat": "INDEXED_SEGMENT_TEMPLATE", + "manifestVersion": "v2", + "osName": "windows", + "osVersion": "10.0", + "platform": "138.0.0.0", + "profilesGroups": [{ + "name": "default", + "profiles": video_profiles + }], + "profiles": video_profiles, + "preferAssistiveAudio": False, + "requestSegmentVmaf": False, + "requiredAudioTrackId": required_audio_track_id, # This is for getting missing audio tracks (value get from `new_track_id``) + "requiredTextTrackId": required_text_track_id, # This is for getting missing subtitle. 
(value get from `new_track_id``) + "supportsAdBreakHydration": False, + "supportsNetflixMediaEvents": True, + "supportsPartialHydration": True, # This is important if you want get available all tracks. but you must fetch each missing url tracks with "requiredAudioTracksId" or "requiredTextTrackId" + "supportsPreReleasePin": True, + "supportsUnequalizedDownloadables": True, + "supportsWatermark": True, + "titleSpecificData": { + (title.data.get("episodeId") if title.data else None) or (title.data.get("id") if title.data else "unknown"): {"unletterboxed": False} + }, + "type": "standard", # ? PREPARE + "uiPlatform": "SHAKTI", + "uiVersion": "shakti-v49577320", + "useBetterTextUrls": True, + "useHttpsStreams": True, + "usePsshBox": True, + "videoOutputInfo": [{ + # todo ; make this return valid, but "secure" values, maybe it helps + "type": "DigitalVideoOutputDescriptor", + "outputType": "unknown", + "supportedHdcpVersions": self.config["configuration"]["supported_hdcp_versions"], + "isHdcpEngaged": self.config["configuration"]["is_hdcp_engaged"] + }], + "viewableId": (title.data.get("episodeId") if title.data else None) or (title.data.get("id") if title.data else "unknown"), + "xid": str(int((int(time.time()) + 0.1612) * 1000)), + "showAllSubDubTracks": True, + } + }, + userauthdata=self.userauthdata + ) + if "errorDetails" in payload_chunks: + self.log.error(f"Manifest call failed for title_id: {title_id}, required_audio_track_id: {required_audio_track_id}, required_text_track_id: {required_text_track_id}, error: {payload_chunks['errorDetails']}") + return self._get_empty_manifest() + # with open(f"./manifest_{"+".join(video_profiles)}.json", mode='w') as r: + # r.write(jsonpickle.encode(payload_chunks, indent=4)) + return payload_chunks + except Exception as e: + title_id = title.data.get('episodeId', title.data.get('id', 'unknown')) if title.data else 'unknown' + profile_count = len(video_profiles) if 'video_profiles' in locals() else 0 + 
self.log.error(f"Exception in get_manifest: {e}") + self.log.error(f"Context - title_id: {title_id}, video_profiles_count: {profile_count}, required_audio_track_id: {required_audio_track_id or 'None'}, required_text_track_id: {required_text_track_id or 'None'}") + if 'video_profiles' in locals() and video_profiles: + self.log.error(f"Video profiles being processed: {', '.join(video_profiles[:5])}{'...' if len(video_profiles) > 5 else ''}") + return self._get_empty_manifest() + + @staticmethod + def get_original_language(manifest) -> Language: + try: + # First, try to find the original language from audio tracks + if "audio_tracks" in manifest and manifest["audio_tracks"]: + for language in manifest["audio_tracks"]: + if "languageDescription" in language and language["languageDescription"].endswith(" [Original]"): + return Language.get(language["language"]) + + # Fallback 1: Try to parse from defaultTrackOrderList + if "defaultTrackOrderList" in manifest and manifest["defaultTrackOrderList"]: + try: + media_id = manifest["defaultTrackOrderList"][0]["mediaId"] + lang_code = media_id.split(";")[2] + if lang_code: + return Language.get(lang_code) + except (IndexError, KeyError, AttributeError): + pass + + # Fallback 2: Try to get the first available audio track language + if "audio_tracks" in manifest and manifest["audio_tracks"]: + for audio_track in manifest["audio_tracks"]: + if "language" in audio_track and audio_track["language"]: + return Language.get(audio_track["language"]) + + # Fallback 3: Default to English if all else fails + return Language.get("en") + + except Exception as e: + # If anything goes wrong, default to English + return Language.get("en") + + def get_widevine_service_certificate(self, *, challenge: bytes, title: Movie | Episode | Song, track: AnyTrack) -> bytes | str: + return self.config["certificate"] + + def manifest_as_tracks(self, manifest, title: Title_T, hydrate_tracks = False) -> Tracks: + + tracks = Tracks() + + try: + # Handle empty 
or invalid manifest + if not manifest or not isinstance(manifest, dict): + self.log.warning("Empty or invalid manifest received, returning empty tracks") + return tracks + + # Check if manifest has required structure + if "video_tracks" not in manifest or not manifest["video_tracks"]: + self.log.warning("No video tracks in manifest, returning empty tracks") + return tracks + + if "links" not in manifest or "license" not in manifest["links"]: + self.log.warning("No license URL in manifest, cannot process tracks") + return tracks + + original_language = self.get_original_language(manifest) + self.log.debug(f"Original language: {original_language}") + license_url = manifest["links"]["license"]["href"] + + # Process video tracks + if "streams" in manifest["video_tracks"][0] and manifest["video_tracks"][0]["streams"]: + # self.log.info(f"Video: {jsonpickle.encode(manifest["video_tracks"], indent=2)}") + # self.log.info() + for video_index, video in enumerate(reversed(manifest["video_tracks"][0]["streams"])): + try: + # self.log.info(video) + id = video["downloadable_id"] + # self.log.info(f"Adding video {video["res_w"]}x{video["res_h"]}, bitrate: {(float(video["framerate_value"]) / video["framerate_scale"]) if "framerate_value" in video else None} with profile {video["content_profile"]}. 
kid: {video["drmHeaderId"]}") + tracks.add( + Video( + id_=video["downloadable_id"], + url=video["urls"][0]["url"], + codec=Video.Codec.from_netflix_profile(video["content_profile"]), + bitrate=video["bitrate"] * 1000, + width=video["res_w"], + height=video["res_h"], + fps=(float(video["framerate_value"]) / video["framerate_scale"]) if "framerate_value" in video else None, + language=Language.get(original_language), + edition=video["content_profile"], + range_=self.parse_video_range_from_profile(video["content_profile"]), + is_original_lang=True, + drm=[Widevine( + pssh=PSSH( + manifest["video_tracks"][0]["drmHeader"]["bytes"] + ), + kid=video["drmHeaderId"] + )] if manifest["video_tracks"][0].get("drmHeader", {}).get("bytes") else [], + data={ + 'license_url': license_url + } + ) + ) + except Exception as e: + video_id = video.get("downloadable_id", "unknown") if isinstance(video, dict) else "unknown" + self.log.warning(f"Failed to process video track at index {video_index}, video_id: {video_id}, error: {e}") + continue + + # Process audio tracks + unavailable_audio_tracks: List[Tuple[str, str]] = [] + if "audio_tracks" in manifest: + for audio_index, audio in enumerate(manifest["audio_tracks"]): + try: + audio_id = audio.get("id", "unknown") + audio_lang = audio.get("language", "unknown") + if len(audio.get("streams", [])) < 1: + # This + # self.log.debug(f"Audio lang {audio["languageDescription"]} is available but no stream available.") + if "new_track_id" in audio and "id" in audio: + unavailable_audio_tracks.append((audio["new_track_id"], audio["id"])) # Assign to `unavailable_subtitle` for request missing audio tracks later + self.log.debug(f"Audio track at index {audio_index}, audio_id: {audio_id}, language: {audio_lang} has no streams available") + continue + # self.log.debug(f"Adding audio lang: {audio["language"]} with profile: {audio["content_profile"]}") + is_original_lang = audio.get("language") == original_language.language + # self.log.info(f"is 
audio {audio["languageDescription"]} original language: {is_original_lang}") + for stream_index, stream in enumerate(audio["streams"]): + try: + stream_id = stream.get("downloadable_id", "unknown") + tracks.add( + Audio( + id_=stream["downloadable_id"], + url=stream["urls"][0]["url"], + codec=Audio.Codec.from_netflix_profile(stream["content_profile"]), + language=Language.get(self.NF_LANG_MAP.get(audio["language"]) or audio["language"]), + is_original_lang=is_original_lang, + bitrate=stream["bitrate"] * 1000, + channels=stream["channels"], + descriptive=audio.get("rawTrackType", "").lower() == "assistive", + name="[Original]" if Language.get(audio["language"]).language == original_language.language else None, + joc=6 if "atmos" in stream["content_profile"] else None + ) + ) + except Exception as e: + stream_id = stream.get("downloadable_id", "unknown") if isinstance(stream, dict) else "unknown" + self.log.warning(f"Failed to process audio stream at audio_index {audio_index}, stream_index {stream_index}, audio_id: {audio_id}, stream_id: {stream_id}, language: {audio_lang}, error: {e}") + continue + except Exception as e: + audio_id = audio.get("id", "unknown") if isinstance(audio, dict) else "unknown" + audio_lang = audio.get("language", "unknown") if isinstance(audio, dict) else "unknown" + self.log.warning(f"Failed to process audio track at index {audio_index}, audio_id: {audio_id}, language: {audio_lang}, error: {e}") + continue + + + + + # Process subtitle tracks + unavailable_subtitle: List[Tuple[str, str]] = [] + if "timedtexttracks" in manifest: + for subtitle_index, subtitle in enumerate(manifest["timedtexttracks"]): + try: + subtitle_id = subtitle.get("id", "unknown") + subtitle_lang = subtitle.get("language", "unknown") + if "isNoneTrack" in subtitle and subtitle["isNoneTrack"] == True: + continue + if subtitle.get("hydrated") == False: + # This subtitles is there but has to request stream first + if "new_track_id" in subtitle and "id" in subtitle: + 
unavailable_subtitle.append((subtitle["new_track_id"], subtitle["id"])) # Assign to `unavailable_subtitle` for request missing subtitles later + # self.log.debug(f"Audio language: {subtitle["languageDescription"]} id: {subtitle["new_track_id"]} is not hydrated.") + self.log.debug(f"Subtitle track at index {subtitle_index}, subtitle_id: {subtitle_id}, language: {subtitle_lang} is not hydrated") + continue + + if subtitle.get("languageDescription") == 'Off': + # I don't why this subtitles is requested, i consider for skip these subtitles for now + continue + # pass + + if "downloadableIds" not in subtitle or not subtitle["downloadableIds"]: + self.log.debug(f"Subtitle track at index {subtitle_index}, subtitle_id: {subtitle_id}, language: {subtitle_lang} has no downloadableIds") + continue + + id = list(subtitle["downloadableIds"].values()) + if not id: + self.log.debug(f"Subtitle track at index {subtitle_index}, subtitle_id: {subtitle_id}, language: {subtitle_lang} has empty downloadableIds") + continue + + language = Language.get(subtitle["language"]) + + if "ttDownloadables" not in subtitle or not subtitle["ttDownloadables"]: + self.log.debug(f"Subtitle track at index {subtitle_index}, subtitle_id: {subtitle_id}, language: {subtitle_lang} has no ttDownloadables") + continue + + profile = next(iter(subtitle["ttDownloadables"].keys())) + tt_downloadables = next(iter(subtitle["ttDownloadables"].values())) + is_original_lang = subtitle.get("language") == original_language.language + # self.log.info(f"is subtitle {subtitle["languageDescription"]} original language {is_original_lang}") + # self.log.info(f"ddd") + tracks.add( + Subtitle( + id_=id[0], + url=tt_downloadables["urls"][0]["url"], + codec=Subtitle.Codec.from_netflix_profile(profile), + language=language, + forced=subtitle.get("isForcedNarrative", False), + cc=subtitle.get("rawTrackType") == "closedcaptions", + sdh=subtitle.get("trackVariant") == 'STRIPPED_SDH' if "trackVariant" in subtitle else False, + 
is_original_lang=is_original_lang, + name=("[Original]" if language.language == original_language.language else None or "[Dubbing]" if "trackVariant" in subtitle and subtitle["trackVariant"] == "DUBTITLE" else None), + ) + ) + except Exception as e: + subtitle_id = subtitle.get("id", "unknown") if isinstance(subtitle, dict) else "unknown" + subtitle_lang = subtitle.get("language", "unknown") if isinstance(subtitle, dict) else "unknown" + self.log.warning(f"Failed to process subtitle track at index {subtitle_index}, subtitle_id: {subtitle_id}, language: {subtitle_lang}, error: {e}") + continue + + if hydrate_tracks == False: + return tracks + + # Hydrate missing tracks + self.log.info(f"Getting all missing audio and subtitle tracks") + + # Handle mismatched lengths - use last successful subtitle track when needed + last_successful_subtitle = ("N/A", "N/A") if not unavailable_subtitle else unavailable_subtitle[-1] + + # Process audio tracks first, then handle subtitles separately if needed + max_length = max(len(unavailable_audio_tracks), len(unavailable_subtitle)) + + for hydration_index in range(max_length): + # Get audio track info for this index + audio_hydration = unavailable_audio_tracks[hydration_index] if hydration_index < len(unavailable_audio_tracks) else ("N/A", "N/A") + + # Get subtitle track info for this index, or use last successful one if available + if hydration_index < len(unavailable_subtitle): + subtitle_hydration = unavailable_subtitle[hydration_index] + is_real_subtitle_request = True # This is a real subtitle to be added to tracks + elif unavailable_subtitle: # Use last successful subtitle track for context only + subtitle_hydration = last_successful_subtitle + is_real_subtitle_request = False # This is just for context, don't add to tracks + else: + subtitle_hydration = ("N/A", "N/A") + is_real_subtitle_request = False + + try: + # Log what we're trying to hydrate + self.log.debug(f"Hydrating tracks at index {hydration_index}, audio_track_id: 
{audio_hydration[1] if audio_hydration[1] != 'N/A' else 'N/A'}, subtitle_track_id: {subtitle_hydration[1] if subtitle_hydration[1] != 'N/A' else 'N/A'}, is_real_subtitle: {is_real_subtitle_request}") + + # Only call get_manifest if we have audio to hydrate + should_hydrate_audio = audio_hydration[0] != 'N/A' and audio_hydration[1] != 'N/A' + + if not should_hydrate_audio: + self.log.debug(f"Skipping hydration at index {hydration_index} - no audio tracks to hydrate") + continue + + # Always use a valid subtitle track ID for the manifest request to avoid API errors + # Use the subtitle track (real or reused) if available, otherwise use N/A + subtitle_track_for_request = subtitle_hydration[0] if subtitle_hydration[0] != 'N/A' else None + + # If we still don't have a subtitle track ID, skip this hydration to avoid API error + if subtitle_track_for_request is None: + self.log.warning(f"Skipping hydration at index {hydration_index} - no subtitle track available for API request context") + continue + + hydrated_manifest = self.get_manifest(title, self.profiles, subtitle_track_for_request, audio_hydration[0]) + + # Handle hydrated audio tracks + if should_hydrate_audio and "audio_tracks" in hydrated_manifest: + try: + audios = next((item for item in hydrated_manifest["audio_tracks"] if 'id' in item and item["id"] == audio_hydration[1]), None) + if audios and "streams" in audios: + audio_lang = audios.get("language", "unknown") + self.log.debug(f"Processing hydrated audio track_id: {audio_hydration[1]}, language: {audio_lang}, streams_count: {len(audios['streams'])}") + for stream_index, stream in enumerate(audios["streams"]): + try: + stream_id = stream.get("downloadable_id", "unknown") + tracks.add( + Audio( + id_=stream["downloadable_id"], + url=stream["urls"][0]["url"], + codec=Audio.Codec.from_netflix_profile(stream["content_profile"]), + language=Language.get(self.NF_LANG_MAP.get(audios["language"]) or audios["language"]), + is_original_lang=audios["language"] == 
original_language.language, + bitrate=stream["bitrate"] * 1000, + channels=stream["channels"], + descriptive=audios.get("rawTrackType", "").lower() == "assistive", + name="[Original]" if Language.get(audios["language"]).language == original_language.language else None, + joc=16 if "atmos" in stream["content_profile"] else None + ) + ) + except Exception as e: + stream_id = stream.get("downloadable_id", "unknown") if isinstance(stream, dict) else "unknown" + self.log.warning(f"Failed to process hydrated audio stream at hydration_index {hydration_index}, stream_index {stream_index}, audio_track_id: {audio_hydration[1]}, stream_id: {stream_id}, error: {e}") + continue + else: + self.log.warning(f"No audio streams found for hydrated audio_track_id: {audio_hydration[1]} at hydration_index {hydration_index}") + except Exception as e: + self.log.warning(f"Failed to find hydrated audio track at hydration_index {hydration_index}, audio_track_id: {audio_hydration[1]}, error: {e}") + + # Handle hydrated subtitle tracks (only if it's a real subtitle request, not reused) + if is_real_subtitle_request and subtitle_hydration[0] != 'N/A' and subtitle_hydration[1] != 'N/A' and "timedtexttracks" in hydrated_manifest: + try: + subtitles = next((item for item in hydrated_manifest["timedtexttracks"] if 'id' in item and item["id"] == subtitle_hydration[1]), None) + if subtitles and "downloadableIds" in subtitles and "ttDownloadables" in subtitles: + subtitle_lang = subtitles.get("language", "unknown") + self.log.debug(f"Processing hydrated subtitle track_id: {subtitle_hydration[1]}, language: {subtitle_lang}") + # self.log.info(jsonpickle.encode(subtitles, indent=2)) + # sel + + id = list(subtitles["downloadableIds"].values()) + if id: + language = Language.get(subtitles["language"]) + profile = next(iter(subtitles["ttDownloadables"].keys())) + tt_downloadables = next(iter(subtitles["ttDownloadables"].values())) + tracks.add( + Subtitle( + id_=id[0], + 
url=tt_downloadables["urls"][0]["url"], + codec=Subtitle.Codec.from_netflix_profile(profile), + language=language, + forced=subtitles.get("isForcedNarrative", False), + cc=subtitles.get("rawTrackType") == "closedcaptions", + sdh=subtitles.get("trackVariant") == 'STRIPPED_SDH' if "trackVariant" in subtitles else False, + is_original_lang=subtitles.get("language") == original_language.language, + name=("[Original]" if language.language == original_language.language else None or "[Dubbing]" if "trackVariant" in subtitles and subtitles["trackVariant"] == "DUBTITLE" else None), + ) + ) + else: + self.log.warning(f"No downloadable IDs found for hydrated subtitle_track_id: {subtitle_hydration[1]} at hydration_index {hydration_index}") + else: + self.log.warning(f"No subtitle data found for hydrated subtitle_track_id: {subtitle_hydration[1]} at hydration_index {hydration_index}") + except Exception as e: + self.log.warning(f"Failed to process hydrated subtitle track at hydration_index {hydration_index}, subtitle_track_id: {subtitle_hydration[1]}, error: {e}") + elif not is_real_subtitle_request and subtitle_hydration[1] != 'N/A': + self.log.debug(f"Used subtitle track context for API request at hydration_index {hydration_index}, subtitle_track_id: {subtitle_hydration[1]} (not adding to tracks)") + + except Exception as e: + self.log.warning(f"Failed to hydrate tracks at hydration_index {hydration_index}, audio_track_id: {audio_hydration[1] if audio_hydration[1] != 'N/A' else 'N/A'}, subtitle_track_id: {subtitle_hydration[1] if subtitle_hydration[1] != 'N/A' else 'N/A'}, error: {e}") + continue + + except Exception as e: + self.log.error(f"Exception in manifest_as_tracks: {e}") + self.log.debug(f"Failed to process manifest for title: {title.data.get('episodeId', title.data.get('id', 'unknown')) if title.data else 'unknown'}") + # Return empty tracks on any critical error + + return tracks + + + def parse_video_range_from_profile(self, profile: str) -> Video.Range: + """ + 
Parse the video range from a Netflix profile string. + + Args: + profile (str): The Netflix profile string (e.g., "hevc-main10-L30-dash-cenc") + + Returns: + Video.Range: The corresponding Video.Range enum value + + Examples: + >>> parse_video_range_from_profile("hevc-main10-L30-dash-cenc") + + >>> parse_video_range_from_profile("hevc-dv5-main10-L30-dash-cenc") + + """ + + # Get video profiles from config + video_profiles = self.config.get("profiles", {}).get("video", {}) + + # Search through all codecs and ranges to find the profile + for codec, ranges in video_profiles.items(): + # if codec == 'H264': + # return Video.Range.SDR # for H264 video always return SDR + for range_name, profiles in ranges.items(): + # self.log.info(f"Checking range {range_name}") + if profile in profiles: + # Return the corresponding Video.Range enum value + try: + # self.log.info(f"Found {range_name}") + return Video.Range(range_name) + except ValueError: + # If range_name is not a valid Video.Range, return SDR as default + self.log.debug(f"Video range is not valid {range_name}") + return Video.Range.SDR + + # If profile not found, return SDR as default + return Video.Range.SDR \ No newline at end of file diff --git a/unshackle/services/Netflix/config.yaml b/unshackle/services/Netflix/config.yaml new file mode 100644 index 0000000..b988fe1 --- /dev/null +++ b/unshackle/services/Netflix/config.yaml @@ -0,0 +1,216 @@ +certificate: | + CAUSwwUKvQIIAxIQ5US6QAvBDzfTtjb4tU/7QxiH8c+TBSKOAjCCAQoCggEBAObzvlu2hZRsapAPx4Aa4GUZj4/GjxgXUtBH4THSkM40x63wQeyVxlEEo + 1D/T1FkVM/S+tiKbJiIGaT0Yb5LTAHcJEhODB40TXlwPfcxBjJLfOkF3jP6wIlqbb6OPVkDi6KMTZ3EYL6BEFGfD1ag/LDsPxG6EZIn3k4S3ODcej6YSz + G4TnGD0szj5m6uj/2azPZsWAlSNBRUejmP6Tiota7g5u6AWZz0MsgCiEvnxRHmTRee+LO6U4dswzF3Odr2XBPD/hIAtp0RX8JlcGazBS0GABMMo2qNfCi + SiGdyl2xZJq4fq99LoVfCLNChkn1N2NIYLrStQHa35pgObvhwi7ECAwEAAToQdGVzdC5uZXRmbGl4LmNvbRKAA4TTLzJbDZaKfozb9vDv5qpW5A/DNL9g + 
bnJJi/AIZB3QOW2veGmKT3xaKNQ4NSvo/EyfVlhc4ujd4QPrFgYztGLNrxeyRF0J8XzGOPsvv9Mc9uLHKfiZQuy21KZYWF7HNedJ4qpAe6gqZ6uq7Se7f + 2JbelzENX8rsTpppKvkgPRIKLspFwv0EJQLPWD1zjew2PjoGEwJYlKbSbHVcUNygplaGmPkUCBThDh7p/5Lx5ff2d/oPpIlFvhqntmfOfumt4i+ZL3fFa + ObvkjpQFVAajqmfipY0KAtiUYYJAJSbm2DnrqP7+DmO9hmRMm9uJkXC2MxbmeNtJHAHdbgKsqjLHDiqwk1JplFMoC9KNMp2pUNdX9TkcrtJoEDqIn3zX9 + p+itdt3a9mVFc7/ZL4xpraYdQvOwP5LmXj9galK3s+eQJ7bkX6cCi+2X+iBmCMx4R0XJ3/1gxiM5LiStibCnfInub1nNgJDojxFA3jH/IuUcblEf/5Y0s + 1SzokBnR8V0KbA== +payload_challenge: "CAES+h8SLAoqChQIARIQAAAAAAPSZ0kAAAAAAAAAABABGhAxh+ZXIBStq5I1WvKvRokMGAEgp4eQwQYwFjjP4NqWCUKsHwoQdGVzdC5uZXRmbGl4LmNvbRIQ5US6QAvBDzfTtjb4tU/7QxrwHLV4JwLRNIOHqbl510Lr7rMQp3dDWG281aju8rtYMlq6+JCsyqa66pZqOW6tNYZ2QuD6yhbTBqPZ89lHbSENkNaySCSQfuOKi/19Kbij0BMABsCg0Yvp9DpjtYlo95Xk09KrKPDlk5Us/21fxqsE6preIgyf8T/ZR8WngqEfWu3OqRkRKLOitr5SGDVi5lmyQo/FbeYQ+ebhYbkJ0uXNTtxT9M2X+n3v/3xmzn6SK45v2+LnHJcYcoUz8EPnXZfTwyGVNDOYp8tqtLQ9Cm9W0ZGZKnUS6fVaDlHM/6rZDVmtCh4WvWmOInoab0uZqudN9KuzvB7Gh6hAbgq/lkPWTJ53HoHKXcnIrcE1rAIIYG2UZoh3IkLXMKg3UPDZP4FEyT0DmJa28FliY6g84LAPmw/+x1cjEOutyfd/cZXy6KPrqNhszK/DjqokzOpOE/usL9wQNR9lcHUhLY6CUP6QBIfnJXN7wmn/PGFqnMnuL7y63Di3Da5GTzTiH8niHKBkDxD1XwlR7jvB1Z7LT6oHxiacVRuWilcRDyPbJQjfbUWV61s2LWiIRChqaNabsY5zQPL376PXfDCVcnDd9xt/V5KImiYxJg9IeUvvPoMUN7x8CwzzwkAXnhRicOqruZtAfSkq/NlUgGY7soLEXDFkGDk9rGHGdhIj5wY28T00d2Zli4WU7DFNU8oRsRrYQlzEFTbPnUfH9tWhUGnpVtYeBIXnU/tlo2UohQ/x10FMyPTPo98N+bMPLVgud3Lt8BH3+4xEE8QWcH8T7LXosuZn3XJv3jBlfRBnF3Q2nX774wBnWGtGZxLfmoPsr+Otb5GhNeo/BfXNFHLEJqdcoFtz4yBwup0mMD0L4wlPaXPzi0YJx3vFByTYNsT5LUoafOQChk29WgcKbuYtIhY3SyYWshW0DCgOrJwavonjB6AMJUydk1dJXXfXCw39IN+2ZhPopGJ2FuQ+kuOH04i8fRAuKjQ2QjI3bFiIVdwUktzNEp1s5nAXS51Tv5+vWHBdUKDb9avz5C2uoJ3ey54t79aV+/cAe7NVZmTabXa34wuKONXMOCBuNME9od822AjuamkU7Qx3hwbGnZThaPkk+K2/cMD7vtqii9hFyeSU7sQeB7zXfuA5c5leQx94KHF0PIDKNl2WumEPcz6Xd98n6UeY1z/D4Z3f+/82GfCbQIz+UqNlWXnSmdM8Orw/6V57WJQudjC/3JWZ7Spmf22+XP9NCfMf/frPn0FA3OSn5x3CsIV9CLwUvTg+02LdWCNXm2fF6wAZM/DaC8ha645SpeBcjl6yNOCsSFTidjZ/soR9nbx96II0v6ZVrMFi03
+C0UAAgL1p9cLU1SvCkIqva44ouSKNTZDjVrBSc7jg10Rfet8ftnk5ahe5bqAhNCqSmUaslxyMf7Fgao+Qr8Nw47FiDU0ZoqXzCXHFltzLxioebEqr5BXAo2CwaK4GaT6wOYO/HtosHVlkhIehA5sRHPPuMXbOeajzLE4xqI+x9JM21J/gw2iVJU2ZJM+OrG6OrK6fEPhxSfVWErskeZw/NJvihSOkVRFxlJ8xSShEV3jd2zq5EN9frckWc+lNG0nz+DtHjfmmghs26uvvMTBX4mmboHVa1SRcSGc7HGBl+wnojfJvrKnIR/EauTkWPnMN+sKP1Eed9tPQaOH1JErkjcQW+EKgbpVesuPPuGjqR0+LPeMwgqvQbg4ES5i/9t2hwmW/1AXyJlaC7awEDnIUD0GmaIWYedkarklIO5pAfDJGemXrE2pCjcNekrGesXzgcL405UX7vUGz4yg1w341+8YAUHjYEtYXqWA8Zs2tK+BTX6bCiJ/7MVfHNOcxRm3RWpufAhqDkXvEfDsC63falxOtyGrlRHGK9A+D3p0s1fGfV7Zg9wSlbmWN3xEbULNDyxgYBnz4dImoeb12KlL2VK/gezbanOaXoGmFXPIt+ddQCNGD4NC+LIcJLPvayaErKzs6Uxi1i6Kw1xSehZLdxzBmkvv2U7pA5ClsRffho7jyGb6jv00dafdPWGCJ28OxRtZM6kQFU2wj2ldEtXZR4HNNnjCFwgxcrTsVuNjt71+oKRcWvk8P04GH+Fm5moIync8vnkjq/9N4KFreojV6HUalDyyAWy3jW9sFebJCyaxVLwctU7GUO3dXkfODz5gNgvV4baLb2LOArMFgQvbDGAnvWb+Ql95m65bY+7KdcV6m0eh9w6FkqTtXFU8n9/lns1vqxRxKLrPaDh3bJ5pOBUibhIsAS/Zx+xk5kE0aGy7RiIH6GtvldLoNjuZbn5VqujT3ARC7NpmzB7QcwM4FuOzsd48KkeiWXro0AKx4GadIXeeTrr3K6vJUxOgmLIZv/v/pZcn7uzBkwTrjAJ1xivgEt62iycXvG5AnbYklHjkJpZTg2X+nCCY45KwUGiyATQtpyI3xTZ8XsB3mL4l78uOT0IIk43Q/tkNyKm94HH+kS4OQXE8UdvWNcmnDD/knFCXbIOptc14tWhZ4eixwNVrjGqEXCDConnTBatKsL8lHzpbCuIcbGnzbme+b2OWKD9XlQKo+5atSRV2YrdViAMj8HeFMqhIeBUSglJWN7wgtzghglSgwObmYt6uw/yLzWm6fUVn2Bu2VPoQ4/q/63eSB8mPBEyZJ328dfQtxj2nJZfBMzGWN1I6TZ8wnoUYxfDQ3Omsn2/geXA2kLu1RGgcxxOANkBRDqNChQG3uODSVQfTblj/xr1lm+/ogTyXXGHw0Or9NaS5cmVAUWUGONMv2Eac1a6YwFLQ97cINl2bcsoTLRsKWDuG05gQNfeEDpgeNhAXzJp/o564fzsQoWBO5InaZ1/GJH4gCM/SlSHt8MJTqtKl15zDcMbgNyIqdttshDO8fngQ/0eN3fjWNB/pdiNiJZ4HQNri+u3QkCffqEai8funtXRMXSKqSPJAYKPWZvebSCveiz289Ibb76ZbQHL5jb0ynajPBVquhKAEpEovIQCLlwquA/IChiuDsLbnNDHjJY92rfnV6DFvdAfy9bORcPvxd550GDolZHcmq6GiisXhcsyYb+xgmGsGHjpUDvTG22P0clxgjp/dSzyjOZvX3zJGrXyHZKtWxBafKmcFFnk0L603tqmbSYi6bZNkIpGQrBnMoiAuBwKqnBPYfplSD7TS1gfB7ccEguKj5eLr9kC53o/avdcWatnjBpCKqFJTL3rkzL+7crJ39Mh28AsgAT1XxrCSXDOxt3YQoPjWSgC6ceZWtbXd1o77FFVF+UGCC/geTny1siAcwZ6POctGGMm/nHnY9aoZPl9NTLpXMuyhR4F7IgnU6uhkt/BBHq5CsLeF1b7
VrR1A9LJaLgaoPsVrisXsMZDmVRjZtUNdSYsTYQNAdGDCLjGt9hHpzZXub4b62ylOIBJrtN6mp5JK6VJOGAqq7rJKjRw0neurBVxSf/8HjMFx2Ei/zUD65l7qTgLR3VnDEUL/00C6xfd1YEYs0TcK7jqFhUa50+ePeHErKGl548O2RLr8A2qe7EiY1wT78YivhfwVlkFJrO8GuRrkjM8CiWieBlEIILQydtMkfqMsjvFmcm2CUwbspR7mvzvPJrjj3xq8G2JZwfyize6QoVTlNiaduWO/MSIQIVbO6FIqv/pr+aD9EmKBvJFZGdrSXSsnKXojRV7JNnxsTusupFOja11nboeyR3hbq/p9kzP+tFjgXZbdrMBM1JFJNR74T74vGdCVqYhQZbygCZ9he0PqMPKY30NlIeVZ69bPnlRmMkm7yUveRL3qBR3hrd5Gb4Gvgz3jy7b74nkOqtV+v/ch+wMUBqQc7Ja/Jz5pESKfsUsvt6GyKmXH6SDzdXjBhIKxg5iRuDSuwsqJA5D3TcIzhxJHWA0J2XxZ9YFSA8K9uNyLwxqGIvAw08iFXCoHtnO2coQMHahvQEhbpwGgeBntCxT41/bmantccYvWGPHEBeXdfeOMqiRtbMAqUXVYonMm9OrUBAkyePGOFYF3N6WQlkOln16/vz/O+qRQeyjEFAjr0+8NRVzhurMGmltqaHXUPjGeTEnLmvbEfeCaOjcxytrTZmnQ39we/n0JigeMSPtG2WV0Nb5P38SuLZQlgR/uZMq25DY7zMZs9S5OyaX0li9Tf86G8Eyga0xAnRnggGwdLTXC/3+D6sXUC7DyYxKIbs1a9/GWpOwoouKTVQv6j71bn3GgiumpdP0n6lO1kmMke1ibTAXaBqX+BIF6awQDTO+bHr//72z1YdEXFaPSpt0Fuo9CrymHo+PMH0v1yJW8yFe9u//OV3zrjZzQffjLdo/w2yGM1JBtcl7h+/WejnCZVGGQygfvr0xgrw0viG1uwvXB0QQSfXeGNAvJ22+oOiZaion8AJeZDrvGGt9a6aLlECqT35OgVwRcFqP4kfLh7h6pzE+H3pavBN+H7xz4HO6cc3uPUuL4sbcIgEm3v+4m5++44ejSsOcuwWzJcnCYmPsVtR8lyVq7xp/x/xtmgfDHMMEB+tVhJ0rh+Wue54uTsmuW7xuPxwvB1iU45/p6pBOAXLkKyZI67EMdpILcxUhvQPw3rK2LRmZXLErCiseWKEvSLmI6e5/E2sleU0Z+G8ltPtjQaadURPkGMvYFWHxiJH1sJnN7BhNOoHAggDuco61HWndlzLw3iZ0H4FNHHpUEQQ/m18EgKkq7k4LX1+rir67RdjxtWKOjZ+LSzo9WGN2phfqeKjcXJO10VzMYqs7VVgvUQkxymDoTtRLAHniYMTwAhSIknQoTlr1wFWRmrvblK0zzibbhXCbtlTwcHBfvENLr0Zkfa8b6im1ERFhwm3JMlEOyz9PuALTaw+zXBC+KCVKgQN3dql9UWLUy2Zuw/U97tMvesLzCSw1uMnqBG/Obiqus09V01peVOG8a4F9nqDPcToY8gwILKwPhjQ9ql1NtPziIQ9IGw6pFHdv0DB2veniuwiSqAAod+KFvACWPqrmFlzZkpr8Ifxg54ngr87Zqy/+hWjg3Au7cNLiVNNHjUtc9eHRAG9XhEkZczijtSk9p9SxJIrL6db32+MineczAiXu8OZn75Y2a52bm/n4z8ij1yuPx1HuNiJKTrmYWZi5E3RDURYKX4FIzQU9bjYXscfxsUMAd9cxTfSlfPdniv3wNyRkKiUEyTa4oTj0y/2YiEUvU4nTGNvo4ucHTro9P1MpRPvzwPql0tkVw8DKGd6+RScVR6eP8LWRCtaYiuMeT1W4TfHLZGjYOvub9zCW3Kr3mcr+nV+u8F67D4WW/O6nnEKTK/VppejXyfH/uR9Gx4IXQMkJdKCzQuMTAuMjg4NS4wGoABKyt/5RjgPtImFkqVB5
RRfIb+hQzcOKCTwMNckP0SMXa++uIm/VjSuPU6CIvPSpyRIAuPfZYWVMiR3nQLPCPwlC3tCVwD3iKm3Ls1SaDvKooCng87N6RTWqUvn1eVtrMetIoy50TUnxRSCiQQbQldbn96tpzg1GBQVt3vg2kU9OlKFAAAAAEAAAAUAAUAEJLWsE/8GMtB" +payload_challenge_pr: "<?xml version="1.0" encoding="utf-8"?><soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"><soap:Body><AcquireLicense xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols"><challenge><Challenge xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols/messages"><LA xmlns="http://schemas.microsoft.com/DRM/2007/03/protocols" Id="SignedData" xml:space="preserve"><Version>4</Version><ContentHeader><WRMHEADER xmlns="http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader" version="4.2.0.0"><DATA><PROTECTINFO><KIDS><KID ALGID="AESCTR" VALUE="AAAAAMYExIEAAAAAAAAAAA=="></KID></KIDS></PROTECTINFO><LA_URL>http://capprsvr06/silverlight5/rightsmanager.asmx</LA_URL><LUI_URL>http://capprsvr06/silverlight5/rightsmanager.asmx</LUI_URL><DECRYPTORSETUP>ONDEMAND</DECRYPTORSETUP></DATA></WRMHEADER></ContentHeader><CLIENTINFO><CLIENTVERSION>10.0.16384.10011</CLIENTVERSION></CLIENTINFO><LicenseNonce>2wpHvxrtvGAWMHf/UHGhdg==</LicenseNonce><ClientTime>1742927921</ClientTime><EncryptedData xmlns="http://www.w3.org/2001/04/xmlenc#" Type="http://www.w3.org/2001/04/xmlenc#Element"><EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"></EncryptionMethod><KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#"><EncryptedKey xmlns="http://www.w3.org/2001/04/xmlenc#"><EncryptionMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#ecc256"></EncryptionMethod><KeyInfo 
xmlns="http://www.w3.org/2000/09/xmldsig#"><KeyName>WMRMServer</KeyName></KeyInfo><CipherData><CipherValue>YlEXdgVUxlkbwigEGwDIBBFWiBybSgIDozY+G9x31xFGsnnyKJ9GBUdsHkzvHLiYFply25UJrvRUT7tOve96HriocKg+hHfp1WtOFhEjI2dXpyk4jfcKmCFiQ7w/LzCNPTVHAWIb4Yvg3lmqg6S5GWXIh08DZVwm7SV6d7GNbLQ=</CipherValue></CipherData></EncryptedKey></KeyInfo><CipherData><CipherValue>OyQ2VIyPB4hApX6ztOFrh7x0ter8d0WnOkBpzAd9/85W4tIHiJD2nvR6SjaVT5HqeI0c1D+OPggJkePbZjWvnVjt6Ev1RyH48Qjy4VYRsSnXonBCw7FW1rLXg1wzBp+gjsNfCpvjP+EOJoz4EGn0wUMC+f/Vx9KuVPA4fNL1n2aHhjxBhdxn3XmY/NijfWDJauWk8BYgIg6W3/i+wGHuaUmLCypjq+3yWpCbr7UlNqfUGZFJwPXD6/6mUe+/6ebuVVEsokqlXjJjukBPOkq3JOSg/Fw2U2kwLjXp5Ke4xeNAyPMWJ5XvwY/HqB/usaTHfTM8vSfH5OrEdMxvD0S8eevN/GUWv7nFiSe9vWATdJ3+Y687Y3X6+BEqYfxReNFrhdXz2+ywWgU+V1RrjuigMPPDlD5JCUPFt+bqNz249Un1bIpn/98CDY9Y2VQ0yiGOuX83fnD7IpM+VsJn6aEKpllXNZPHAs3ZmerdisMoaxD/TdZAPJ8K5nu+SKWHjRO/VTO4G5uZLc7HCGTOrmzTXXbdO8BTbYG0EUbnsfQUOBJb0IXr09QyKgtJB9jqho7yWpE2egG1yeJeuBWRC49cSkXrF9RkRWYgrB2kFzGuyz9wFTf7LCqtaX3qtWl9B12OI+ig/MlcLxlvU66KMXjt7MLRKEe7/5M3R7CYFr0uKw1K748M3EuN+uFznhK2+v0oIOXahxFxxswFUvbc/jmgu2qfnIUncHIPGXN24w8GN5ZGBbYG6kHCSdONnjK+F6VIdR3OmNP368aU+d+i9Um8hPb4NgfD+QfwZ+NvWgMmip9JdU7Xr2g2NrE2qh7fey/1/3HNU1M7GYeM9+13u197seYLx6r9DOqhYOv2aLaRuoM/06j9mQFlalO7NTxkWmfBim1xWjwQz1xfrY70huoWDpUkvIoWxguGgCr4beA/OOq22CGQbpETG8sKlRMUJaG0xJsepls9V5mWImBulQi/yNO7lGBlfdbXb3qV8GufrsYWKPs3LkelKcQ6/wIC56Rvs/bD3E8YwA9UHZS2LTtVu675UekUroppjyJJFCmGpR0+h2ARF0DJt2BBfsVwZOmL0/vMQtl2TDJVivV8ZpNOKIvtqR2zgz9EiZFWlp/NipyTZ3u5AYPe/4Cak4N3hWPUz4L7vIefhx5H+IyDeemSSQtvdAKLJfs05R32GBigLgXiyjvd5k6XI9c42cWIbFdz38WTkyiJDxpAadPDMaKMuDe5jU1QvoTKOSMaNFaaSxSM36hKEPwOlb8alzuCN7ldEpo1iBZ5HyRK8Q4QwZvvunltH0ld1NuK9zOkSOUudRRC+bEBBuRgkkFqD4cwaDJUWo6qetp/ESIy2fP2cpdwXFU6/4jJEYzGg5jcwu6g2URhoLwhdpTJgKD4jUondmizFcpdOiwPhx0q0C7nt8f9gaiZ/1rI+4Q1SAmng+jzs3DRUsPjaoZIZN7mg0fei3ZZtZezUzEQxHDAJwPYXaoHAbiTX+tkYphMa4e3pj8I6ZuPRis8sbjaEixFYolL1eGCuYBodv4htBH7Knw/1XpaUYqQKuPIgZ9ZuKqQuUJlnX1YRP+RUBd478f8Up1DLwecy3kv0O/UtHQQ7jLPD1EahOHpWvOujxDe99odRwopTLzl
TnRaOzDDkDTUz4ybXjz+zcLKDKg8EHPDTinzwWxISayVYOb8vChX24FdQBl48zbPxASd8CKO6yoBDOttAINEXajvIFYWhr+H+VJfspo4rHzczLW4mEKQRWXcaU3k3G9m3rilAsnRUQPNvOE8z/NU5wzqCs1JWc1KwXsT3onDrZydHh2AFWQth/r0OEuNtOwiCO92zva3qpNoXPVmfJV3/EQjSGqKWGRxnqz1otESbc/siMQ+RgNWDa9PnxECb9heQZpIDW0ryxwcMuH07QtEtLXvBPPTEyJVYJduonsPt2T7cf8SALQOnhwc9aompCf0dHpmG+MYF82iUClgsn2rZuGg66VtcuDW1uK/X5Elvn6CI5s9MmljugORHjDzVHJN2CncNih0UMp2cbdaNeyoZ6fhsj4lYTX0zpp2CeIN8aVqw4cAH+VVZqN83d5AEaLGURol93dT1+2tTab+0Hq/MF7yMy2dnlyUHogYe32T45k6zuLYjK2ygZ4yfNfBVW89jVAkrvohNcBIPw1+e2qcpol8+LT3Dk+3TEv/RvLiKMYxV/KyS+jQoF96Sr4usiVKUx4ImdJ5oU6ZfNZjHV6xxa8svQWyh3t6K/kPQGQTDXthjLMwcTjveLAD59EgarSZC1cukVxgrsbFGUvAssOn3V7zK0Yhab55aLgZguO5bumdtGdzJ773+zg3v27Vq1tHdyAi6wAgJd38H7yXJSy9Z/auK3rP+tabLwPUDpQRJ9xQ5S0Mn6iIc5MKvoqwjq0vMo8eaqCSNq40fpQIaY7QJdxWZ0WpyfzrBvLPJNT+d8YBVH/VfQ9Pe1KfVwz86+8zckVCgAQrUfiv4PbDe6wEtIAb+//UK48xwgUghPjpvihAMHi8zqkDWzmndDZfe1I46eictYBFez87ebhlFKQYzdXkqjkO4lr6tXICrW0uHTzgvRmbpcaFtGhvujJZrYrmaibQ7iGP4IQlNspNytq2u8HRM9AGYo0b02pjCXYMVZ+mo3Vc4hrQwZBr8c3UR7g47UIzhtU6BhCU8LGk0uU6MgsnDGXQ3C7dnrHjDZaxpIZTeF/90xiJ5G3YCZp5iUp28dotq+orJFLicKLlWaZeWT3mlFUxmjcHX9O1Et/uvJZyEcVvdZdf0NprnCJEeNc+a08yKgtfB4VKJTCOkYJWv41uMj5x3fRbZRbLUuq44+T7cpy5FPy66U+8AFq47ilTMoeQk2s8Z0rjbogDhGJ7KmD8ve0hGUyzPRg0DPw3YaFlpikMudM6olND9NwC5Ue/JqNu4CbHzLy/T08uKmtmocxXp6uHgq7euz+rpv9lUVy/drKU2S5ZCsebh2ve+ZQX0yJarw08T+a0l6PA2aQCMw3m7EaTzYnx2oKJbDXkzuZtW39J6OZVBD2GJZWZqa8YuvXNWj6H+d3VzMvMyUFfz3L1vlLyB/a0ZL2Vqpni+qlUNHuoRu5desTLrSWwmoEt/MUtkBRAEVILCgsWq5IgYszDcLoqWDY9FOoDPe/zojm3ds8dqnIIheWlBNNxEG/2gHkERBSjg3cFsLEQvdtBFrchbv7g2g5+106fzMhpxbGfcksRFh5+Q80PEIOwG0YOkg0tqD6PJRGOTaLWiqwiVN+TTQ6ePlCX64v6RMFHOILUVvL/30wPDOU5HkAwVkw+Hc8QyO86pE6IJnnZyZdQUDOCDGwLzJw7Jkun2xBWoT8tSb8AShO40ERd1F8/VHhB75oMkKlJRZieDT8s3If4PDbWzsQH0bWOnpa/kaoxZZY28FvdaZXKhqxWM0g+pyyG0G9yIElCGe2jJaZWzrhElhUb1TBoSB7+6A7k4G/axg==</CipherValue></CipherData></EncryptedData></LA><Signature xmlns="http://www.w3.org/2000/09/xmldsig#"><SignedInfo 
xmlns="http://www.w3.org/2000/09/xmldsig#"><CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"></CanonicalizationMethod><SignatureMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#ecdsa-sha256"></SignatureMethod><Reference URI="#SignedData"><DigestMethod Algorithm="http://schemas.microsoft.com/DRM/2007/03/protocols#sha256"></DigestMethod><DigestValue>br1PtJ3OjeDZSJ8BXVLrydHfytZh8N7RENDBe2gLsVs=</DigestValue></Reference></SignedInfo><SignatureValue>SYIPsr443B4koBXhViEKqLYPsVM65Er0a27SA1VkxMnfwc300o7R+SmJFud540lMNx77/vPEBbZxuKeji5friw==</SignatureValue><KeyInfo xmlns="http://www.w3.org/2000/09/xmldsig#"><KeyValue><ECCKeyValue><PublicKey>Xo25Q2ub9RjCg6mZyGGXzZd/6GlWS6JmuxttF/oYyDywe7M4fbnC6s8AVjwe/n/9uBjINCthICqkixZliKl7qA==</PublicKey></ECCKeyValue></KeyValue></KeyInfo></Signature></Challenge></challenge></AcquireLicense></soap:Body></soap:Envelope>" + +esn_map: + # key map of CDM WVD `SystemID = 'ESN you want to use for that CDM WVD'` + 8159: "NFANDROID1-PRV-P-GOOGLEPIXEL" +endpoints: + website: "https://www.netflix.com/nq/website/memberapi/{build_id}/pathEvaluator" + manifest: "https://www.netflix.com/msl/playapi/cadmium/licensedmanifest/1" + license: "https://www.netflix.com/nq/msl_v1/cadmium/pbo_licenses/%5E1.0.0/router" + metadata: "https://www.netflix.com/nq/website/memberapi/release/metadata" + # https://www.netflix.com/msl/playapi/cadmium/logblob/1?reqAttempt=1&reqPriority=0&reqName=logblob + # possibly newer api? 
seems to work fine on handshaking, rest untested + +configuration: + drm_system: "playready" # chrome and android: widevine, edge: playready + drm_version: 30 # widevine: 25, playready: 30 + supported_hdcp_versions: ["2.2"] # 720p-max: 1.4, chrome: empty, 4k: 2.2 + is_hdcp_engaged: true # chrome: false + +profiles: + video: + H264: + BPL: + - "playready-h264bpl30-dash" + + MPL: + - "playready-h264mpl30-dash" + - "playready-h264mpl31-dash" + - "playready-h264mpl40-dash" + + HPL: + - "playready-h264hpl22-dash" + - "playready-h264hpl30-dash" + - "playready-h264hpl31-dash" + - "playready-h264hpl40-dash" + - "h264hpl22-dash-playready-live" + - "h264hpl30-dash-playready-live" + - "h264hpl31-dash-playready-live" + - "h264hpl40-dash-playready-live" + + QC: + - "h264mpl30-dash-playready-prk-qc" + - "h264mpl31-dash-playready-prk-qc" + - "h264mpl40-dash-playready-prk-qc" + H265: + SDR: + - "hevc-main10-L30-dash-cenc" + - "hevc-main10-L30-dash-cenc-prk" + - "hevc-main10-L30-dash-cenc-prk-do" + - "hevc-main10-L31-dash-cenc" + - "hevc-main10-L31-dash-cenc-prk" + - "hevc-main10-L31-dash-cenc-prk-do" + - "hevc-main10-L40-dash-cenc" + - "hevc-main10-L40-dash-cenc-prk" + - "hevc-main10-L40-dash-cenc-prk-do" + - "hevc-main10-L41-dash-cenc" + - "hevc-main10-L41-dash-cenc-prk" + - "hevc-main10-L41-dash-cenc-prk-do" + - "hevc-main10-L50-dash-cenc" + - "hevc-main10-L50-dash-cenc-prk" + - "hevc-main10-L50-dash-cenc-prk-do" + - "hevc-main10-L51-dash-cenc" + - "hevc-main10-L51-dash-cenc-prk" + - "hevc-main10-L51-dash-cenc-prk-do" + - "hevc-main10-L30-dash-cenc-live" + - "hevc-main10-L31-dash-cenc-live" + - "hevc-main10-L40-dash-cenc-live" + - "hevc-main10-L41-dash-cenc-live" + - "hevc-main10-L50-dash-cenc-live" + - "hevc-main10-L51-dash-cenc-live" + + HDR10: + - "hevc-hdr-main10-L30-dash-cenc" + - "hevc-hdr-main10-L30-dash-cenc-prk" + - "hevc-hdr-main10-L30-dash-cenc-prk-do" + - "hevc-hdr-main10-L31-dash-cenc" + - "hevc-hdr-main10-L31-dash-cenc-prk" + - 
"hevc-hdr-main10-L31-dash-cenc-prk-do" + - "hevc-hdr-main10-L40-dash-cenc" + - "hevc-hdr-main10-L40-dash-cenc-prk" + - "hevc-hdr-main10-L40-dash-cenc-prk-do" + - "hevc-hdr-main10-L41-dash-cenc" + - "hevc-hdr-main10-L41-dash-cenc-prk" + - "hevc-hdr-main10-L41-dash-cenc-prk-do" + - "hevc-hdr-main10-L50-dash-cenc" + - "hevc-hdr-main10-L50-dash-cenc-prk" + - "hevc-hdr-main10-L50-dash-cenc-prk-do" + - "hevc-hdr-main10-L51-dash-cenc" + - "hevc-hdr-main10-L51-dash-cenc-prk" + - "hevc-hdr-main10-L51-dash-cenc-prk-do" + - "hevc-hdr-main10-L30-dash-cenc-live" + - "hevc-hdr-main10-L31-dash-cenc-live" + - "hevc-hdr-main10-L40-dash-cenc-live" + - "hevc-hdr-main10-L41-dash-cenc-live" + - "hevc-hdr-main10-L50-dash-cenc-live" + - "hevc-hdr-main10-L51-dash-cenc-live" + + DV: + - "hevc-dv5-main10-L30-dash-cenc" + - "hevc-dv5-main10-L30-dash-cenc-prk" + - "hevc-dv5-main10-L30-dash-cenc-prk-do" + - "hevc-dv5-main10-L31-dash-cenc" + - "hevc-dv5-main10-L31-dash-cenc-prk" + - "hevc-dv5-main10-L31-dash-cenc-prk-do" + - "hevc-dv5-main10-L40-dash-cenc" + - "hevc-dv5-main10-L40-dash-cenc-prk" + - "hevc-dv5-main10-L40-dash-cenc-prk-do" + - "hevc-dv5-main10-L41-dash-cenc" + - "hevc-dv5-main10-L41-dash-cenc-prk" + - "hevc-dv5-main10-L41-dash-cenc-prk-do" + - "hevc-dv5-main10-L50-dash-cenc" + - "hevc-dv5-main10-L50-dash-cenc-prk" + - "hevc-dv5-main10-L50-dash-cenc-prk-do" + - "hevc-dv5-main10-L51-dash-cenc" + - "hevc-dv5-main10-L51-dash-cenc-prk" + - "hevc-dv5-main10-L51-dash-cenc-prk-do" + + AV1: + SDR: + - av1-main-L20-dash-cbcs-prk + - av1-main-L21-dash-cbcs-prk + - av1-main-L30-dash-cbcs + - av1-main-L30-dash-cbcs-prk + - av1-main-L31-dash-cbcs + - av1-main-L31-dash-cbcs-prk + - av1-main-L40-dash-cbcs + - av1-main-L40-dash-cbcs-prk + - av1-main-L41-dash-cbcs + - av1-main-L41-dash-cbcs-prk + - av1-main-L50-dash-cbcs + - av1-main-L50-dash-cbcs-prk + - av1-main-L51-dash-cbcs + - av1-main-L51-dash-cbcs-prk + HDR10: + - av1-hdr10plus-main-L30-dash-cbcs-prk + - av1-hdr10plus-main-L31-dash-cbcs-prk 
+ - av1-hdr10plus-main-L40-dash-cbcs-prk + - av1-hdr10plus-main-L41-dash-cbcs-prk + - av1-hdr10plus-main-L50-dash-cbcs-prk + - av1-hdr10plus-main-L51-dash-cbcs-prk + + VP9: + P0: + - "vp9-profile0-L21-dash-cenc" + - "vp9-profile0-L21-dash-cenc-prk" + - "vp9-profile0-L30-dash-cenc" + - "vp9-profile0-L30-dash-cenc-prk" + - "vp9-profile0-L31-dash-cenc" + - "vp9-profile0-L31-dash-cenc-prk" + - "vp9-profile0-L40-dash-cenc" + - "vp9-profile0-L40-dash-cenc-prk" + - "vp9-profile0-L41-dash-cenc" + - "vp9-profile0-L41-dash-cenc-prk" + + P1: + - "vp9-profile1-L30-dash-cenc" + - "vp9-profile1-L30-dash-cenc-prk" + - "vp9-profile1-L31-dash-cenc" + - "vp9-profile1-L31-dash-cenc-prk" + - "vp9-profile1-L40-dash-cenc" + - "vp9-profile1-L40-dash-cenc-prk" + - "vp9-profile1-L41-dash-cenc" + - "vp9-profile1-L41-dash-cenc-prk" + + P2: + - "vp9-profile2-L30-dash-cenc" + - "vp9-profile2-L30-dash-cenc-prk" + - "vp9-profile2-L31-dash-cenc" + - "vp9-profile2-L31-dash-cenc-prk" + - "vp9-profile2-L40-dash-cenc" + - "vp9-profile2-L40-dash-cenc-prk" + - "vp9-profile2-L41-dash-cenc" + - "vp9-profile2-L41-dash-cenc-prk" + + HDR10: + - "vp9-hdr-profile2-L30-dash-cenc-prk" + - "vp9-hdr-profile2-L31-dash-cenc-prk" + - "vp9-hdr-profile2-L40-dash-cenc-prk" + - "vp9-hdr-profile2-L41-dash-cenc-prk" + - "vp9-hdr-profile2-L50-dash-cenc-prk" + - "vp9-hdr-profile2-L51-dash-cenc-prk" + audio: + AAC: + - "heaac-2-dash" + - "heaac-2hq-dash" + - "heaac-5.1-dash" + + DD: + - "dd-5.1-dash" + + EC3: + - "ddplus-2.0-dash" + - "ddplus-5.1-dash" + - "ddplus-5.1hq-dash" + - "ddplus-atmos-dash" + + VORB: + - "playready-oggvorbis-2-dash" + - "playready-oggvorbis-5-dash" + + subtitles: + - "webvtt-lssdh-ios8" + # - "dfxp-ls-sdh" + # - "imsc1.1" + # - "simplesdh"