update
This commit is contained in:
parent
004199201f
commit
e6d2e8891e
419
ALL4/__init__.py
Normal file
419
ALL4/__init__.py
Normal file
@ -0,0 +1,419 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from datetime import datetime, timezone
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from Crypto.Util.Padding import unpad
|
||||
from Cryptodome.Cipher import AES
|
||||
from pywidevine.cdm import Cdm as WidevineCdm
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Subtitle, Tracks
|
||||
|
||||
|
||||
class ALL4(Service):
    """
    Service code for Channel 4's All4 streaming service (https://channel4.com).

    \b
    Version: 1.0.1
    Author: stabbedbybrick
    Authorization: Credentials
    Robustness:
      L3: 1080p, AAC2.0

    \b
    Tips:
        - Use complete title URL or slug as input:
          https://www.channel4.com/programmes/taskmaster OR taskmaster
        - Use on demand URL for directly downloading episodes:
          https://www.channel4.com/programmes/taskmaster/on-demand/75588-002
        - Both android and web/pc endpoints are checked for quality profiles.
          If android is missing 1080p, it automatically falls back to web.
    """

    GEOFENCE = ("gb", "ie")
    TITLE_RE = r"^(?:https?://(?:www\.)?channel4\.com/programmes/)?(?P<id>[a-z0-9-]+)(?:/on-demand/(?P<vid>[0-9-]+))?"

    @staticmethod
    @click.command(name="ALL4", short_help="https://channel4.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> ALL4:
        """Click entry point: constructs the service with the CLI context."""
        return ALL4(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        super().__init__(ctx)

        # Populated during authenticate()/get_tracks() and consumed by
        # get_widevine_license().
        self.authorization: str
        self.asset_id: int
        self.license_token: str
        self.manifest: str

        # Channel 4's API requires these device-identification headers on
        # every request.
        self.session.headers.update(
            {
                "X-C4-Platform-Name": self.config["device"]["platform_name"],
                "X-C4-Device-Type": self.config["device"]["device_type"],
                "X-C4-Device-Name": self.config["device"]["device_name"],
                "X-C4-App-Version": self.config["device"]["app_version"],
                "X-C4-Optimizely-Datafile": self.config["device"]["optimizely_datafile"],
            }
        )

    def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
        """
        Acquire (or refresh) OAuth tokens using username/password credentials.

        Tokens are cached per-credential; an expired cache triggers a
        refresh_token grant, otherwise a full password grant is performed.

        Raises:
            EnvironmentError: if no credential was supplied.
            ValueError: if the token endpoint returns non-JSON.
        """
        super().authenticate(cookies, credential)
        if not credential:
            raise EnvironmentError("Service requires Credentials for Authentication.")

        cache = self.cache.get(f"tokens_{credential.sha1}")

        if cache and not cache.expired:
            # cached
            self.log.info(" + Using cached Tokens...")
            tokens = cache.data
        elif cache and cache.expired:
            # expired, refresh
            self.log.info("Refreshing cached Tokens")
            r = self.session.post(
                self.config["endpoints"]["login"],
                headers={"authorization": f"Basic {self.config['android']['auth']}"},
                data={
                    "grant_type": "refresh_token",
                    "username": credential.username,
                    "password": credential.password,
                    "refresh_token": cache.data["refreshToken"],
                },
            )
            tokens = self._parse_token_response(r, action="refresh tokens")
            self.log.info(" + Refreshed")
        else:
            # new login via password grant
            r = self.session.post(
                self.config["endpoints"]["login"],
                headers={"authorization": f"Basic {self.config['android']['auth']}"},
                data={
                    "grant_type": "password",
                    "username": credential.username,
                    "password": credential.password,
                },
            )
            tokens = self._parse_token_response(r, action="log in")
            self.log.info(" + Acquired tokens...")

        # Re-caching on the cached path is harmless (same data, same expiry).
        cache.set(tokens, expiration=tokens["expiresIn"])

        self.authorization = f"Bearer {tokens['accessToken']}"

    def _parse_token_response(self, r, action: str) -> dict:
        """Decode a token-endpoint response, exiting on an API-level error."""
        try:
            res = r.json()
        except json.JSONDecodeError:
            raise ValueError(f"Failed to {action}: {r.text}")

        if "error" in res:
            self.log.error(f"Failed to {action}: {res['errorMessage']}")
            sys.exit(1)

        return res

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield search results for self.title from the All4 search API."""
        params = {
            "expand": "default",
            "q": self.title,
            "limit": "100",
            "offset": "0",
        }

        r = self.session.get(self.config["endpoints"]["search"], params=params)
        r.raise_for_status()

        results = r.json()
        if isinstance(results["results"], list):
            for result in results["results"]:
                yield SearchResult(
                    id_=result["brand"].get("websafeTitle"),
                    title=result["brand"].get("title"),
                    description=result["brand"].get("description"),
                    label=result.get("label"),
                    url=result["brand"].get("href"),
                )

    def get_titles(self) -> Union[Movies, Series]:
        """
        Resolve self.title (URL or slug) into Movies or Series.

        An "/on-demand/<id>" URL selects a single episode; otherwise all
        episodes (or the movie) of the brand are returned.
        """
        title, on_demand = (re.match(self.TITLE_RE, self.title).group(i) for i in ("id", "vid"))

        r = self.session.get(
            self.config["endpoints"]["title"].format(title=title),
            params={"client": "android-mod", "deviceGroup": "mobile", "include": "extended-restart"},
            headers={"Authorization": self.authorization},
        )
        if not r.ok:
            self.log.error(r.text)
            sys.exit(1)

        data = r.json()

        if on_demand is not None:
            episodes = [
                Episode(
                    id_=episode["programmeId"],
                    service=self.__class__,
                    title=data["brand"]["title"],
                    season=episode["seriesNumber"],
                    number=episode["episodeNumber"],
                    name=episode["originalTitle"],
                    language="en",
                    data=episode["assetInfo"].get("streaming") or episode["assetInfo"].get("download"),
                )
                for episode in data["brand"]["episodes"]
                if episode.get("assetInfo") and episode["programmeId"] == on_demand
            ]
            if not episodes:
                # Episode not in the API listing; fall back to parsing the
                # episode page HTML for its metadata.
                data = self.get_html(self.title)
                episodes = [
                    Episode(
                        id_=data["selectedEpisode"]["programmeId"],
                        service=self.__class__,
                        title=data["brand"]["title"],
                        season=data["selectedEpisode"]["seriesNumber"] or 0,
                        number=data["selectedEpisode"]["episodeNumber"] or 0,
                        name=data["selectedEpisode"]["originalTitle"],
                        language="en",
                        data=data["selectedEpisode"],
                    )
                ]

            return Series(episodes)

        elif data["brand"]["programmeType"] == "FM":
            # "FM" = feature movie; summary starts with "(<year>)".
            return Movies(
                [
                    Movie(
                        id_=movie["programmeId"],
                        service=self.__class__,
                        name=data["brand"]["title"],
                        year=int(data["brand"]["summary"].split(" ")[0].strip().strip("()")),
                        language="en",
                        data=movie["assetInfo"].get("streaming") or movie["assetInfo"].get("download"),
                    )
                    for movie in data["brand"]["episodes"]
                ]
            )
        else:
            return Series(
                [
                    Episode(
                        id_=episode["programmeId"],
                        service=self.__class__,
                        title=data["brand"]["title"],
                        season=episode["seriesNumber"],
                        number=episode["episodeNumber"],
                        name=episode["originalTitle"],
                        language="en",
                        data=episode["assetInfo"].get("streaming") or episode["assetInfo"].get("download"),
                    )
                    for episode in data["brand"]["episodes"]
                    if episode.get("assetInfo")
                ]
            )

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """
        Fetch tracks for a title, preferring whichever endpoint (android/web)
        offers the best quality; see sort_assets().
        """
        android_assets: Optional[tuple] = self.android_playlist(title.id)
        web_assets: Optional[tuple] = self.web_playlist(title.id)
        self.manifest, self.license_token, subtitle, data = self.sort_assets(title, android_assets, web_assets)
        self.asset_id = int(title.data["assetId"])

        tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)
        tracks.videos[0].data = data

        # Manifest subtitles are sometimes empty even if they exist,
        # so we clear them and add the playlist subtitle manually.
        tracks.subtitles.clear()
        if subtitle is not None:
            tracks.add(
                Subtitle(
                    id_=hashlib.md5(subtitle.encode()).hexdigest()[0:6],
                    url=subtitle,
                    codec=Subtitle.Codec.from_mime(subtitle[-3:]),
                    language=title.language,
                    is_original_lang=True,
                    forced=False,
                    sdh=False,
                )
            )
        else:
            self.log.warning("- Subtitles are either missing or empty")

        # Flag audio-description/alternate audio tracks from the DASH Role
        # descriptor so they are labelled correctly.
        for track in tracks.audio:
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
        """Build chapters from advert break offsets, plus an end-credits marker."""
        track = title.tracks.videos[0]

        chapters = [
            Chapter(
                name=f"Chapter {i + 1:02}",
                timestamp=datetime.fromtimestamp((ms / 1000), tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3],
            )
            for i, ms in enumerate(x["breakOffset"] for x in track.data["adverts"]["breaks"])
        ]

        if track.data.get("endCredits", {}).get("squeezeIn"):
            chapters.append(
                Chapter(
                    name="Credits",
                    timestamp=datetime.fromtimestamp(
                        (track.data["endCredits"]["squeezeIn"] / 1000), tz=timezone.utc
                    ).strftime("%H:%M:%S.%f")[:-3],
                )
            )

        return chapters

    def get_widevine_service_certificate(self, **_: Any) -> str:
        return WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
        """POST the Widevine challenge to the license proxy; returns b64 license."""
        payload = {
            "message": base64.b64encode(challenge).decode("utf8"),
            "token": self.license_token,
            "request_id": self.asset_id,
            "video": {"type": "ondemand", "url": self.manifest},
        }

        r = self.session.post(self.config["endpoints"]["license"], json=payload)
        if not r.ok:
            raise ConnectionError(f"License request failed: {r.json()['status']['type']}")

        return r.json()["license"]

    # Service specific functions

    def sort_assets(self, title: Union[Movie, Episode], android_assets: Optional[tuple], web_assets: Optional[tuple]) -> tuple:
        """
        Pick the best (manifest, license_token, subtitle, data) between the
        android and web endpoints.

        Android is preferred when it offers >=1080p; otherwise web is used
        when available. Fixes vs. previous revision: no longer crashes with
        UnboundLocalError when android is <1080p and the web endpoint failed,
        and the returned `data` now always matches the chosen endpoint.
        """
        android_heights = None
        web_heights = None
        a_manifest = a_token = a_subtitle = a_data = None
        b_manifest = b_token = b_subtitle = b_data = None

        if android_assets is not None:
            try:
                a_manifest, a_token, a_subtitle, a_data = android_assets
                android_tracks = DASH.from_url(a_manifest, self.session).to_tracks(title.language)
                android_heights = sorted([int(track.height) for track in android_tracks.videos], reverse=True)
            except Exception:
                android_heights = None

        if web_assets is not None:
            try:
                b_manifest, b_token, b_subtitle, b_data = web_assets
                session = self.session
                session.headers.update(self.config["headers"])
                web_tracks = DASH.from_url(b_manifest, session).to_tracks(title.language)
                web_heights = sorted([int(track.height) for track in web_tracks.videos], reverse=True)
            except Exception:
                web_heights = None

        if not android_heights and not web_heights:
            self.log.error("Failed to request manifest data. If you're behind a VPN/proxy, you might be blocked")
            sys.exit(1)

        if android_heights and android_heights[0] >= 1080:
            lic_token = self.decrypt_token(a_token, client="ANDROID")
            return a_manifest, lic_token, a_subtitle, a_data

        if web_heights:
            # Android missing or below 1080p; fall back to the web endpoint.
            lic_token = self.decrypt_token(b_token, client="WEB")
            return b_manifest, lic_token, b_subtitle, b_data

        # Android is below 1080p but the web endpoint is unavailable:
        # use android anyway rather than crashing.
        lic_token = self.decrypt_token(a_token, client="ANDROID")
        return a_manifest, lic_token, a_subtitle, a_data

    def android_playlist(self, video_id: str) -> Optional[tuple]:
        """
        Fetch the android VOD playlist.

        Returns (manifest_url, enc_token, subtitle_url_or_None, raw_data),
        or None if the request failed.
        """
        url = self.config["android"]["vod"].format(video_id=video_id)
        headers = {"authorization": self.authorization}

        r = self.session.get(url=url, headers=headers)
        if not r.ok:
            self.log.warning("Request for Android endpoint returned %s", r)
            return None

        data = json.loads(r.content)
        manifest = data["videoProfiles"][0]["streams"][0]["uri"]
        token = data["videoProfiles"][0]["streams"][0]["token"]
        subtitle = next(
            (x["url"] for x in data["subtitlesAssets"] if x["url"].endswith(".vtt")),
            None,
        )

        return manifest, token, subtitle, data

    def web_playlist(self, video_id: str) -> Optional[tuple]:
        """
        Fetch the web VOD playlist (DASH Widevine profile).

        Returns (manifest_url, enc_token, subtitle_url_or_None, raw_data),
        or None if the request failed or no matching profile exists
        (previously an UnboundLocalError).
        """
        url = self.config["web"]["vod"].format(programmeId=video_id)
        r = self.session.get(url, headers=self.config["headers"])
        if not r.ok:
            self.log.warning("Request for WEB endpoint returned %s", r)
            return None

        data = json.loads(r.content)

        manifest = token = None
        for item in data["videoProfiles"]:
            if item["name"] == "dashwv-dyn-stream-1":
                token = item["streams"][0]["token"]
                manifest = item["streams"][0]["uri"]

        if token is None:
            self.log.warning("No DASH Widevine profile found on WEB endpoint")
            return None

        subtitle = next(
            (x["url"] for x in data["subtitlesAssets"] if x["url"].endswith(".vtt")),
            None,
        )

        return manifest, token, subtitle, data

    def decrypt_token(self, token: str, client: str) -> str:
        """
        AES-CBC decrypt a playlist license token using the per-client key/iv
        from config, returning the second '|'-separated field, stripped.

        Raises:
            ValueError: if `client` is neither "ANDROID" nor "WEB" (previously
                this fell through to an UnboundLocalError on `key`).
        """
        # NOTE(review): file imports AES from Cryptodome but unpad from
        # Crypto — works only when both pycryptodome namespaces are
        # installed; consider unifying on one.
        if client == "ANDROID":
            key = self.config["android"]["key"]
            iv = self.config["android"]["iv"]
        elif client == "WEB":
            key = self.config["web"]["key"]
            iv = self.config["web"]["iv"]
        else:
            raise ValueError(f"Unknown client for token decryption: {client}")

        if isinstance(token, str):
            token = base64.b64decode(token)
        cipher = AES.new(
            key=base64.b64decode(key),
            iv=base64.b64decode(iv),
            mode=AES.MODE_CBC,
        )
        data = unpad(cipher.decrypt(token), AES.block_size)
        dec_token = data.decode().split("|")[1]
        return dec_token.strip()

    def get_html(self, url: str) -> dict:
        """
        Scrape window.__PARAMS__ JSON from an episode page and return its
        "initialData" payload; exits on any parse failure.
        """
        r = self.session.get(url=url, headers=self.config["headers"])
        r.raise_for_status()

        init_data = re.search(
            "<script>window.__PARAMS__ = (.*)</script>",
            "".join(r.content.decode().replace("\u200c", "").replace("\r\n", "").replace("undefined", "null")),
        )
        try:
            data = json.loads(init_data.group(1))
            return data["initialData"]
        except Exception:
            self.log.error(f"Failed to get episode for {url}")
            sys.exit(1)
|
||||
27
ALL4/config.yaml
Normal file
27
ALL4/config.yaml
Normal file
@ -0,0 +1,27 @@
|
||||
headers:
|
||||
Accept-Language: en-US,en;q=0.8
|
||||
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.75 Safari/537.36
|
||||
|
||||
endpoints:
|
||||
login: https://api.channel4.com/online/v2/auth/token
|
||||
title: https://api.channel4.com/online/v1/views/content-hubs/{title}.json
|
||||
license: https://c4.eme.lp.aws.redbeemedia.com/wvlicenceproxy-service/widevine/acquire
|
||||
search: https://all4nav.channel4.com/v1/api/search
|
||||
|
||||
android:
|
||||
key: QVlESUQ4U0RGQlA0TThESA==
|
||||
iv: MURDRDAzODNES0RGU0w4Mg==
|
||||
auth: MzZVVUN0OThWTVF2QkFnUTI3QXU4ekdIbDMxTjlMUTE6Sllzd3lIdkdlNjJWbGlrVw==
|
||||
vod: https://api.channel4.com/online/v1/vod/stream/{video_id}?client=android-mod
|
||||
|
||||
web:
|
||||
key: bjljTGllWWtxd3pOQ3F2aQ==
|
||||
iv: b2R6Y1UzV2RVaVhMdWNWZA==
|
||||
vod: https://www.channel4.com/vod/stream/{programmeId}
|
||||
|
||||
device:
|
||||
platform_name: android
|
||||
device_type: mobile
|
||||
device_name: "Sony C6903 (C6903)"
|
||||
app_version: "android_app:9.4.2"
|
||||
optimizely_datafile: "2908"
|
||||
1174
AMZN/__init__.py
Normal file
1174
AMZN/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
BIN
AMZN/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
AMZN/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
AMZN/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
AMZN/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
AMZN/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
AMZN/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
162
AMZN/config.yaml
Normal file
162
AMZN/config.yaml
Normal file
@ -0,0 +1,162 @@
|
||||
certificate: |
|
||||
CAUSwgUKvAIIAxIQCuQRtZRasVgFt7DIvVtVHBi17OSpBSKOAjCCAQoCggEBAKU2UrYVOSDlcXajWhpEgGhqGraJtFdUPgu6plJGy9ViaRn5mhyXON5PXm
|
||||
w1krQdi0SLxf00FfIgnYFLpDfvNeItGn9rcx0RNPwP39PW7aW0Fbqi6VCaKWlR24kRpd7NQ4woyMXr7xlBWPwPNxK4xmR/6UuvKyYWEkroyeIjWHAqgCjC
|
||||
mpfIpVcPsyrnMuPFGl82MMVnAhTweTKnEPOqJpxQ1bdQvVNCvkba5gjOTbEnJ7aXegwhmCdRQzXjTeEV2dO8oo5YfxW6pRBovzF6wYBMQYpSCJIA24ptAP
|
||||
/2TkneyJuqm4hJNFvtF8fsBgTQQ4TIhnX4bZ9imuhivYLa6HsCAwEAAToPYW1hem9uLmNvbS1wcm9kEoADETQD6R0H/h9fyg0Hw7mj0M7T4s0bcBf4fMhA
|
||||
Rpwk2X4HpvB49bJ5Yvc4t41mAnXGe/wiXbzsddKMiMffkSE1QWK1CFPBgziU23y1PjQToGiIv/sJIFRKRJ4qMBxIl95xlvSEzKdt68n7wqGa442+uAgk7C
|
||||
XU3uTfVofYY76CrPBnEKQfad/CVqTh48geNTb4qRH1TX30NzCsB9NWlcdvg10pCnWSm8cSHu1d9yH+2yQgsGe52QoHHCqHNzG/wAxMYWTevXQW7EPTBeFy
|
||||
SPY0xUN+2F2FhCf5/A7uFUHywd0zNTswh0QJc93LBTh46clRLO+d4RKBiBSj3rah6Y5iXMw9N9o58tCRc9gFHrjfMNubopWHjDOO3ATUgqXrTp+fKVCmsG
|
||||
uGl1ComHxXV9i1AqHwzzY2JY2vFqo73jR3IElr6oChPIwcNokmNc0D4TXtjE0BoYkbWKJfHvJJihzMOvDicWUsemVHvua9/FBtpbHgpbgwijFPjtQF9Ldb
|
||||
8Swf
|
||||
|
||||
device:
|
||||
|
||||
# old: # !<< take note that this is done per-profile
|
||||
# domain: Device
|
||||
# app_name: AIV
|
||||
# app_version: '3.12.0'
|
||||
# device_model: 'SHIELD Android TV'
|
||||
# os_version: '28'
|
||||
# device_type: A1KAXIG6VXSG8Y
|
||||
# device_serial: '13f5b56b4a17de5d136f0e4c28236109' # os.urandom(16).hex()
|
||||
# device_name: "Build/LMY47D Shield TV"
|
||||
# software_version: '248'
|
||||
|
||||
# old2:
|
||||
# 'domain': 'DeviceLegacy'
|
||||
# 'device_type': A1KAXIG6VXSG8Y,
|
||||
# 'device_serial': '870f53d1b509594c2f8cd5e340a7d374'
|
||||
# 'app_name': 'com.amazon.avod.thirdpartyclient'
|
||||
# 'app_version': '296016847'
|
||||
# 'device_model': 'mdarcy/nvidia/SHIELD Android TV'
|
||||
# 'os_version': 'NVIDIA/mdarcy/mdarcy:11/RQ1A.210105.003/7094531_2971.7725:user/release-keys'
|
||||
|
||||
# old3:
|
||||
# domain: Device
|
||||
# app_name: com.amazon.amazonvideo.livingroom
|
||||
# app_version: '1.4'
|
||||
# device_model: PadFone
|
||||
# os_version: '6.2.5'
|
||||
# device_type: 'A2SNKIF736WF4T'
|
||||
# device_name: 'T008 Build/JSS15Q PadFone' # "%FIRST_NAME%'s%DUPE_STRATEGY_1ST% PadFone"
|
||||
# device_serial: 'c1ebbb433da4afdf'
|
||||
|
||||
# old4:
|
||||
# domain: Device
|
||||
# app_name: com.amazon.amazonvideo.livingroom
|
||||
# app_version: '1.4'
|
||||
# device_model: 'Hisense TV'
|
||||
# os_version: '3.9.5'
|
||||
# device_type: 'A3T3XXY42KZQNP' # A2B5DGIWVDH8J3, A3GTP8TAF8V3YG, AFTHA001 # https://developer.amazon.com/docs/fire-tv/identify-amazon-fire-tv-devices.html, https://github.com/giofrida/Hisense-Amazon-Enabler
|
||||
# device_name: "%FIRST_NAME%'s%DUPE_STRATEGY_1ST% Hisense" # KS964, Build/RP1A.201005.001
|
||||
# device_serial: '8e3ddf49ee384247'
|
||||
|
||||
# old5:
|
||||
# domain: Device
|
||||
# app_name: com.amazon.amazonvideo.livingroom
|
||||
# app_version: '1.1'
|
||||
# device_model: Hisense
|
||||
# os_version: '6.0.1' #6.10.19
|
||||
# device_type: 'A3REWRVYBYPKUM'
|
||||
# device_name: '%FIRST_NAME%''s%DUPE_STRATEGY_1ST% Hisense'
|
||||
# device_serial: 'cd24294bffb75a46' # os.urandom(8).hex()
|
||||
|
||||
default:
|
||||
domain: Device
|
||||
app_name: com.amazon.amazonvideo.livingroom
|
||||
app_version: '1.4'
|
||||
device_model: 'MTC'
|
||||
os_version: '6.0.1' #6.10.19
|
||||
device_type: 'A2HYAJ0FEWP6N3'
|
||||
device_name: '%FIRST_NAME%''s%DUPE_STRATEGY_1ST% MTC'
|
||||
device_serial: 'e6eb1ecdc8e34320'
|
||||
|
||||
#Hisense_HU32E5600FHWV: A2RGJ95OVLR12U
|
||||
#Hisense_HU50A6100UW: AAJ692ZPT1X85
|
||||
#Hisense_HE55A700EUWTS: A3REWRVYBYPKUM
|
||||
#MTC_ATV: A2HYAJ0FEWP6N3
|
||||
|
||||
device_types:
|
||||
browser: 'AOAGZA014O5RE' # all browsers? all platforms?
|
||||
tv_generic: 'A2SNKIF736WF4T' # type is shared among various random smart tvs
|
||||
pc_app: 'A1RTAM01W29CUP'
|
||||
mobile_app: 'A43PXU4ZN2AL1'
|
||||
echo: 'A7WXQPH584YP' # echo Gen2
|
||||
echo_dot: 'A32DOYMUN6DTXA' # echo dot Gen3
|
||||
echo_studio: 'A3RBAYBE7VM004' # for audio stuff, this is probably the one to use
|
||||
fire_7: 'A2M4YX06LWP8WI'
|
||||
fire_7_again: 'A1Q7QCGNMXAKYW' # not sure the difference
|
||||
fire_hd_8: 'A1C66CX2XD756O'
|
||||
fire_hd_8_again: 'A38EHHIB10L47V' # not sure the difference
|
||||
fire_hd_8_plus_2020: 'AVU7CPPF2ZRAS'
|
||||
fire_hd_10: 'A1ZB65LA390I4K'
|
||||
fire_tv: 'A2E0SNTXJVT7WK' # this is not the stick, this is the older stick-like diamond shaped one
|
||||
fire_tv_gen2: 'A12GXV8XMS007S'
|
||||
fire_tv_cube: 'A2JKHJ0PX4J3L3' # this is the STB-style big bulky cube
|
||||
fire_tv_stick_gen1: 'ADVBD696BHNV5' # non-4k fire tv stick
|
||||
fire_tv_stick_gen2: 'A2LWARUGJLBYEW'
|
||||
fire_tv_stick_with_alexa: 'A265XOI9586NML'
|
||||
fire_tv_stick_4k: 'A2GFL5ZMWNE0PX' # 4k fire tv stick
|
||||
fire_tv_stick_4k_gen3: 'AKPGW064GI9HE'
|
||||
nvidia_shield: 'A1KAXIG6VXSG8Y' # nvidia shield, unknown which one or if all
|
||||
|
||||
endpoints:
|
||||
browse: '/cdp/catalog/Browse'
|
||||
details: '/gp/video/api/getDetailPage'
|
||||
getDetailWidgets: '/gp/video/api/getDetailWidgets'
|
||||
playback: '/cdp/catalog/GetPlaybackResources'
|
||||
licence: '/cdp/catalog/GetPlaybackResources'
|
||||
# chapters/scenes
|
||||
xray: '/swift/page/xray'
|
||||
# device registration
|
||||
ontv: '/region/eu/ontv/code?ref_=atv_auth_red_aft' #/gp/video/ontv/code
|
||||
ontvold: '/gp/video/ontv/code/ref=atv_device_code'
|
||||
mytv: '/mytv'
|
||||
devicelink: '/gp/video/api/codeBasedLinking'
|
||||
codepair: '/auth/create/codepair'
|
||||
register: '/auth/register'
|
||||
token: '/auth/token'
|
||||
#cookies: '/ap/exchangetoken/cookies'
|
||||
|
||||
regions:
|
||||
us:
|
||||
base: 'www.amazon.com'
|
||||
base_api: 'api.amazon.com'
|
||||
base_manifest: 'atv-ps.amazon.com'
|
||||
marketplace_id: 'ATVPDKIKX0DER'
|
||||
|
||||
gb:
|
||||
base: 'www.amazon.co.uk'
|
||||
base_api: 'api.amazon.co.uk'
|
||||
base_manifest: 'atv-ps-eu.amazon.co.uk'
|
||||
marketplace_id: 'A2IR4J4NTCP2M5' # A1F83G8C2ARO7P is also another marketplace_id
|
||||
|
||||
it:
|
||||
base: 'www.amazon.it'
|
||||
base_api: 'api.amazon.it'
|
||||
base_manifest: 'atv-ps-eu.primevideo.com'
|
||||
marketplace_id: 'A3K6Y4MI8GDYMT'
|
||||
|
||||
de:
|
||||
base: 'www.amazon.de'
|
||||
base_api: 'api.amazon.de'
|
||||
base_manifest: 'atv-ps-eu.amazon.de'
|
||||
marketplace_id: 'A1PA6795UKMFR9'
|
||||
|
||||
au:
|
||||
base: 'www.amazon.com.au'
|
||||
base_api: 'api.amazon.com.au'
|
||||
base_manifest: 'atv-ps-fe.amazon.com.au'
|
||||
marketplace_id: 'A3K6Y4MI8GDYMT'
|
||||
|
||||
jp:
|
||||
base: 'www.amazon.co.jp'
|
||||
base_api: 'api.amazon.co.jp'
|
||||
base_manifest: 'atv-ps-fe.amazon.co.jp'
|
||||
marketplace_id: 'A1VC38T7YXB528'
|
||||
|
||||
pl:
|
||||
base: 'www.amazon.com'
|
||||
base_api: 'api.amazon.com'
|
||||
base_manifest: 'atv-ps-eu.primevideo.com'
|
||||
marketplace_id: 'A3K6Y4MI8GDYMT'
|
||||
354
ATVP/__init__.py
Normal file
354
ATVP/__init__.py
Normal file
@ -0,0 +1,354 @@
|
||||
import base64
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
import m3u8
|
||||
import requests
|
||||
|
||||
from unshackle.core.downloaders import n_m3u8dl_re
|
||||
from unshackle.core.manifests import m3u8 as m3u8_parser
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Audio, Subtitle, Tracks, Video
|
||||
from unshackle.core.utils.collections import as_list
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
|
||||
|
||||
class ATVP(Service):
|
||||
"""
|
||||
Service code for Apple's TV Plus streaming service (https://tv.apple.com).
|
||||
|
||||
\b
|
||||
WIP: decrypt and removal of bumper/dub cards
|
||||
|
||||
\b
|
||||
Authorization: Cookies
|
||||
Security: UHD@L1 FHD@L1 HD@L3
|
||||
"""
|
||||
|
||||
ALIASES = ["ATVP", "appletvplus", "appletv+"]
|
||||
TITLE_RE = (
|
||||
r"^(?:https?://tv\.apple\.com(?:/[a-z]{2})?/(?:movie|show|episode)/[a-z0-9-]+/)?(?P<id>umc\.cmc\.[a-z0-9]+)" # noqa: E501
|
||||
)
|
||||
|
||||
VIDEO_CODEC_MAP = {"H264": ["avc"], "H265": ["hvc", "hev", "dvh"]}
|
||||
AUDIO_CODEC_MAP = {"AAC": ["HE", "stereo"], "AC3": ["ac3"], "EC3": ["ec3", "atmos"]}
|
||||
|
||||
@staticmethod
|
||||
@click.command(name="ATVP", short_help="https://tv.apple.com")
|
||||
@click.argument("title", type=str, required=False)
|
||||
@click.pass_context
|
||||
def cli(ctx, **kwargs):
|
||||
return ATVP(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title):
|
||||
super().__init__(ctx)
|
||||
self.title = title
|
||||
self.cdm = ctx.obj.cdm
|
||||
if not isinstance(self.cdm, PlayReadyCdm):
|
||||
self.log.warning("PlayReady CDM not provided, exiting")
|
||||
raise SystemExit(1)
|
||||
self.vcodec = ctx.parent.params["vcodec"]
|
||||
self.acodec = ctx.parent.params["acodec"]
|
||||
self.alang = ctx.parent.params["lang"]
|
||||
self.subs_only = ctx.parent.params["subs_only"]
|
||||
self.quality = ctx.parent.params["quality"]
|
||||
|
||||
self.extra_server_parameters = None
|
||||
# initialize storefront with a default value.
|
||||
self.storefront = 'us' # or any default value
|
||||
|
||||
def get_titles(self):
|
||||
self.configure()
|
||||
r = None
|
||||
for i in range(2):
|
||||
try:
|
||||
self.params = {
|
||||
"utsk": "6e3013c6d6fae3c2::::::9318c17fb39d6b9c",
|
||||
"caller": "web",
|
||||
"sf": self.storefront,
|
||||
"v": "46",
|
||||
"pfm": "appletv",
|
||||
"mfr": "Apple",
|
||||
"locale": "en-US",
|
||||
"l": "en",
|
||||
"ctx_brand": "tvs.sbd.4000",
|
||||
"count": "100",
|
||||
"skip": "0",
|
||||
}
|
||||
r = self.session.get(
|
||||
url=self.config["endpoints"]["title"].format(type={0: "shows", 1: "movies"}[i], id=self.title),
|
||||
params=self.params,
|
||||
)
|
||||
except requests.HTTPError as e:
|
||||
if e.response.status_code != 404:
|
||||
raise
|
||||
else:
|
||||
if r.ok:
|
||||
break
|
||||
if not r:
|
||||
raise self.log.exit(f" - Title ID {self.title!r} could not be found.")
|
||||
try:
|
||||
title_information = r.json()["data"]["content"]
|
||||
except json.JSONDecodeError:
|
||||
raise ValueError(f"Failed to load title manifest: {r.text}")
|
||||
|
||||
if title_information["type"] == "Movie":
|
||||
movie = Movie(
|
||||
id_=self.title,
|
||||
service=self.__class__,
|
||||
name=title_information["title"],
|
||||
year=datetime.fromtimestamp(title_information["releaseDate"] / 1000).year,
|
||||
language=title_information["originalSpokenLanguages"][0]["locale"],
|
||||
data=title_information,
|
||||
)
|
||||
return Movies([movie])
|
||||
else:
|
||||
r = self.session.get(
|
||||
url=self.config["endpoints"]["tv_episodes"].format(id=self.title),
|
||||
params=self.params,
|
||||
)
|
||||
try:
|
||||
episodes = r.json()["data"]["episodes"]
|
||||
except json.JSONDecodeError:
|
||||
raise ValueError(f"Failed to load episodes list: {r.text}")
|
||||
|
||||
episodes_list = [
|
||||
Episode(
|
||||
id_=episode["id"],
|
||||
service=self.__class__,
|
||||
title=episode["showTitle"],
|
||||
season=episode["seasonNumber"],
|
||||
number=episode["episodeNumber"],
|
||||
name=episode.get("title"),
|
||||
year=datetime.fromtimestamp(title_information["releaseDate"] / 1000).year,
|
||||
language=title_information["originalSpokenLanguages"][0]["locale"],
|
||||
data={**episode, "originalSpokenLanguages": title_information["originalSpokenLanguages"]},
|
||||
)
|
||||
for episode in episodes
|
||||
]
|
||||
return Series(episodes_list)
|
||||
|
||||
def get_tracks(self, title):
|
||||
# call configure() before using self.storefront
|
||||
self.configure()
|
||||
|
||||
self.params = {
|
||||
"utsk": "6e3013c6d6fae3c2::::::9318c17fb39d6b9c",
|
||||
"caller": "web",
|
||||
"sf": self.storefront,
|
||||
"v": "46",
|
||||
"pfm": "appletv",
|
||||
"mfr": "Apple",
|
||||
"locale": "en-US",
|
||||
"l": "en",
|
||||
"ctx_brand": "tvs.sbd.4000",
|
||||
"count": "100",
|
||||
"skip": "0",
|
||||
}
|
||||
r = self.session.get(
|
||||
url=self.config["endpoints"]["manifest"].format(id=title.data["id"]),
|
||||
params=self.params,
|
||||
)
|
||||
try:
|
||||
stream_data = r.json()
|
||||
except json.JSONDecodeError:
|
||||
raise ValueError(f"Failed to load stream data: {r.text}")
|
||||
stream_data = stream_data["data"]["content"]["playables"][0]
|
||||
|
||||
if not stream_data["isEntitledToPlay"]:
|
||||
raise self.log.exit(" - User is not entitled to play this title")
|
||||
|
||||
self.extra_server_parameters = stream_data["assets"]["fpsKeyServerQueryParameters"]
|
||||
r = requests.get(
|
||||
url=stream_data["assets"]["hlsUrl"],
|
||||
headers={"User-Agent": "AppleTV6,2/11.1"},
|
||||
)
|
||||
res = r.text
|
||||
|
||||
master = m3u8.loads(res, r.url)
|
||||
tracks = m3u8_parser.parse(
|
||||
master=master,
|
||||
language=title.data["originalSpokenLanguages"][0]["locale"] or "en",
|
||||
session=self.session,
|
||||
)
|
||||
|
||||
# Set track properties based on type
|
||||
for track in tracks:
|
||||
if isinstance(track, Video):
|
||||
# Convert codec string to proper Video.Codec enum if needed
|
||||
if isinstance(track.codec, str):
|
||||
codec_str = track.codec.lower()
|
||||
if codec_str in ["avc", "h264", "h.264"]:
|
||||
track.codec = Video.Codec.AVC
|
||||
elif codec_str in ["hvc", "hev", "hevc", "h265", "h.265", "dvh"]:
|
||||
track.codec = Video.Codec.HEVC
|
||||
else:
|
||||
print(f"Unknown video codec '{track.codec}', keeping as string")
|
||||
|
||||
# Set pr_pssh for PlayReady license requests
|
||||
if track.drm:
|
||||
for drm in track.drm:
|
||||
if hasattr(drm, 'data') and 'pssh_b64' in drm.data:
|
||||
track.pr_pssh = drm.data['pssh_b64']
|
||||
elif isinstance(track, Audio):
|
||||
# Extract bitrate from URL
|
||||
bitrate = re.search(r"&g=(\d+?)&", track.url)
|
||||
if not bitrate:
|
||||
bitrate = re.search(r"_gr(\d+)_", track.url) # alternative pattern
|
||||
if bitrate:
|
||||
track.bitrate = int(bitrate.group(1)[-3::]) * 1000 # e.g. 128->128,000, 2448->448,000
|
||||
else:
|
||||
raise ValueError(f"Unable to get a bitrate value for Track {track.id}")
|
||||
codec_str = track.codec.replace("_vod", "") if track.codec else ""
|
||||
if codec_str == "DD+":
|
||||
track.codec = Audio.Codec.EC3
|
||||
elif codec_str == "DD":
|
||||
track.codec = Audio.Codec.AC3
|
||||
elif codec_str in ["HE", "stereo", "AAC"]:
|
||||
track.codec = Audio.Codec.AAC
|
||||
elif codec_str == "atmos":
|
||||
track.codec = Audio.Codec.EC3
|
||||
else:
|
||||
if not hasattr(track.codec, "value"):
|
||||
print(f"Unknown audio codec '{codec_str}', defaulting to AAC")
|
||||
track.codec = Audio.Codec.AAC
|
||||
|
||||
# Set pr_pssh for PlayReady license requests
|
||||
if track.drm:
|
||||
for drm in track.drm:
|
||||
if hasattr(drm, 'data') and 'pssh_b64' in drm.data:
|
||||
track.pr_pssh = drm.data['pssh_b64']
|
||||
elif isinstance(track, Subtitle):
|
||||
codec_str = track.codec if track.codec else ""
|
||||
if codec_str.lower() in ["vtt", "webvtt"]:
|
||||
track.codec = Subtitle.Codec.WebVTT
|
||||
elif codec_str.lower() in ["srt", "subrip"]:
|
||||
track.codec = Subtitle.Codec.SubRip
|
||||
elif codec_str.lower() in ["ttml", "dfxp"]:
|
||||
track.codec = Subtitle.Codec.TimedTextMarkupLang
|
||||
elif codec_str.lower() in ["ass", "ssa"]:
|
||||
track.codec = Subtitle.Codec.SubStationAlphav4
|
||||
else:
|
||||
if not hasattr(track.codec, "value"):
|
||||
print(f"Unknown subtitle codec '{codec_str}', defaulting to WebVTT")
|
||||
track.codec = Subtitle.Codec.WebVTT
|
||||
|
||||
# Set pr_pssh for PlayReady license requests
|
||||
if track.drm:
|
||||
for drm in track.drm:
|
||||
if hasattr(drm, 'data') and 'pssh_b64' in drm.data:
|
||||
track.pr_pssh = drm.data['pssh_b64']
|
||||
|
||||
# Try to filter by CDN, but fallback to all tracks if filtering fails
|
||||
try:
|
||||
filtered_tracks = [
|
||||
x
|
||||
for x in tracks
|
||||
if any(
|
||||
param.startswith("cdn=vod-ap") or param == "cdn=ap"
|
||||
for param in as_list(x.url)[0].split("?")[1].split("&")
|
||||
)
|
||||
]
|
||||
|
||||
for track in tracks:
|
||||
if track not in tracks.attachments:
|
||||
track.downloader = n_m3u8dl_re
|
||||
if isinstance(track, (Video, Audio)):
|
||||
track.needs_repack = True
|
||||
|
||||
if filtered_tracks:
|
||||
return Tracks(filtered_tracks)
|
||||
else:
|
||||
return Tracks(tracks)
|
||||
|
||||
except Exception:
|
||||
return Tracks(tracks)
|
||||
|
||||
def get_chapters(self, title):
    """This service exposes no chapter data; always return an empty list."""
    return []
|
||||
|
||||
def certificate(self, **_):
    """Return no service certificate so the common privacy cert is used instead."""
    return None
|
||||
|
||||
def get_pssh(self, track) -> None:
    """Fetch the track's HLS playlist and stash its PlayReady PSSH on track.pr_pssh.

    Scans both session-level and media-level keys; leaves track.pr_pssh
    untouched when no PlayReady key entry is present.
    """
    response = self.session.get(as_list(track.url)[0])
    playlist = m3u8.loads(response.text, uri=response.url)
    candidates = (playlist.session_keys or []) + (playlist.keys or [])
    for key in candidates:
        if not key:
            # media playlists may carry None placeholders in .keys
            continue
        if key.keyformat and "playready" in key.keyformat.lower():
            # the PSSH is the base64 payload after the data-URI comma
            track.pr_pssh = key.uri.split(",")[-1]
            return
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title, track) -> str:
    """POST the PlayReady challenge to the license endpoint and return the license.

    Returns:
        The base64 license string from the first entry of the
        streaming-response's streaming-keys list.
    """
    # normalize: callers may hand the challenge over as str
    if isinstance(challenge, str):
        challenge = challenge.encode()

    # ensure track.pr_pssh is populated from the HLS playlist before building the request
    self.get_pssh(track)

    res = self.session.post(
        url=self.config["endpoints"]["license"],
        json={
            "streaming-request": {
                "version": 1,
                "streaming-keys": [
                    {
                        "challenge": base64.b64encode(challenge).decode("utf-8"),
                        "key-system": "com.microsoft.playready",
                        # PSSH is wrapped as a data: URI, as the endpoint expects
                        "uri": f"data:text/plain;charset=UTF-16;base64,{track.pr_pssh}",
                        "id": 0,
                        "lease-action": "start",
                        # NOTE(review): extra_server_parameters is set elsewhere
                        # (not visible here) — presumably from the playback manifest; verify
                        "adamId": self.extra_server_parameters["adamId"],
                        "isExternal": True,
                        "svcId": self.extra_server_parameters["svcId"],
                    },
                ],
            },
        },
    ).json()
    return res["streaming-response"]["streaming-keys"][0]["license"]
|
||||
|
||||
# Service specific functions
|
||||
|
||||
def configure(self):
    """Prepare the session for API calls.

    Resolves the Apple storefront ID from the user's `itua` country cookie,
    then pulls the web app's environment configuration for the developer
    token and installs the authenticated default headers.

    Raises:
        ValueError: when the environment configuration cannot be retrieved.
    """
    country_code = self.session.cookies.get_dict()["itua"]
    # Prefer the URL from the service config (storefront_mapping_url);
    # fall back to the previously hard-coded gist for older configs.
    mappings = self.session.get(
        self.config.get(
            "storefront_mapping_url",
            "https://gist.githubusercontent.com/BrychanOdlum/2208578ba151d1d7c4edeeda15b4e9b1/raw/8f01e4a4cb02cf97a48aba4665286b0e8de14b8e/storefrontmappings.json",
        )
    ).json()
    for mapping in mappings:
        if mapping["code"] == country_code:
            self.storefront = mapping["storefrontId"]
            break  # first match wins; no need to scan the rest

    environment = self.get_environment_config()
    if not environment:
        raise ValueError("Failed to get AppleTV+ WEB TV App Environment Configuration...")

    media_user_token = self.session.cookies.get_dict()["media-user-token"]
    self.session.headers.update(
        {
            "User-Agent": self.config["user_agent"],
            "Authorization": f"Bearer {environment['developerToken']}",
            "media-user-token": media_user_token,
            "x-apple-music-user-token": media_user_token,
        }
    )
|
||||
|
||||
def get_environment_config(self):
    """Load environment config data from the WEB App's serialized server data.

    Scrapes the tv.apple.com landing page for the
    <script id="serialized-server-data"> blob and extracts its
    configureParams object (which carries the developer token).

    Returns:
        The configureParams dict, or None when the page layout or JSON
        does not match expectations.
    """
    res = self.session.get("https://tv.apple.com").text

    # DOTALL so the blob may span multiple lines; quotes around the id vary
    script_match = re.search(
        r'<script[^>]*id=["\']serialized-server-data["\'][^>]*>(.*?)</script>',
        res,
        re.DOTALL,
    )
    if script_match:
        try:
            script_content = script_match.group(1).strip()
            data = json.loads(script_content)
            # expected shape: [{"data": {"configureParams": {...}}}, ...]
            if data and len(data) > 0 and "data" in data[0] and "configureParams" in data[0]["data"]:
                return data[0]["data"]["configureParams"]
        except (json.JSONDecodeError, KeyError, IndexError) as e:
            # best-effort scrape: log and fall through to None
            print(f"Failed to parse serialized server data: {e}")

    return None
|
||||
BIN
ATVP/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
ATVP/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
ATVP/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
ATVP/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
38
ATVP/config.yaml
Normal file
38
ATVP/config.yaml
Normal file
@ -0,0 +1,38 @@
|
||||
user_agent: 'ATVE/6.2.0 Android/10 build/6A226 maker/Google model/Chromecast FW/QTS2.200918.0337115981'
|
||||
storefront_mapping_url: 'https://gist.githubusercontent.com/BrychanOdlum/2208578ba151d1d7c4edeeda15b4e9b1/raw/8f01e4a4cb02cf97a48aba4665286b0e8de14b8e/storefrontmappings.json'
|
||||
|
||||
endpoints:
|
||||
title: 'https://tv.apple.com/api/uts/v3/{type}/{id}'
|
||||
tv_episodes: 'https://tv.apple.com/api/uts/v2/view/show/{id}/episodes'
|
||||
manifest: 'https://tv.apple.com/api/uts/v2/view/product/{id}/personalized'
|
||||
license: 'https://play.itunes.apple.com/WebObjects/MZPlay.woa/wa/fpsRequest'
|
||||
environment: 'https://tv.apple.com'
|
||||
|
||||
params:
|
||||
utsk: '6e3013c6d6fae3c2::::::9318c17fb39d6b9c'
|
||||
caller: 'web'
|
||||
v: '46'
|
||||
pfm: 'appletv'
|
||||
mfr: 'Apple'
|
||||
locale: 'en-US'
|
||||
l: 'en'
|
||||
ctx_brand: 'tvs.sbd.4000'
|
||||
count: '100'
|
||||
skip: '0'
|
||||
|
||||
headers:
|
||||
Accept: 'application/json'
|
||||
Accept-Language: 'en-US,en;q=0.9'
|
||||
Connection: 'keep-alive'
|
||||
DNT: '1'
|
||||
Origin: 'https://tv.apple.com'
|
||||
Referer: 'https://tv.apple.com/'
|
||||
Sec-Fetch-Dest: 'empty'
|
||||
Sec-Fetch-Mode: 'cors'
|
||||
Sec-Fetch-Site: 'same-origin'
|
||||
|
||||
quality_map:
|
||||
SD: 480
|
||||
HD720: 720
|
||||
HD: 1080
|
||||
UHD: 2160
|
||||
257
AUBC/__init__.py
Normal file
257
AUBC/__init__.py
Normal file
@ -0,0 +1,257 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from typing import Any, Optional, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from requests import Request
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Subtitle, Tracks
|
||||
|
||||
|
||||
class AUBC(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for ABC iView streaming service (https://iview.abc.net.au/).
|
||||
|
||||
\b
|
||||
Version: 1.0.3
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Robustness:
|
||||
L3: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input should be complete URL:
|
||||
SHOW: https://iview.abc.net.au/show/return-to-paradise
|
||||
EPISODE: https://iview.abc.net.au/video/DR2314H001S00
|
||||
MOVIE: https://iview.abc.net.au/show/way-back / https://iview.abc.net.au/show/way-back/video/ZW3981A001S00
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("au",)
|
||||
ALIASES = ("iview", "abciview", "iv",)
|
||||
|
||||
@staticmethod
@click.command(name="AUBC", short_help="https://iview.abc.net.au/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> AUBC:
    # Click entry point: forwards the parsed CLI arguments into the service.
    return AUBC(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Raw title input (full URL or slug); parsed later in get_titles().
    self.title = title
    super().__init__(ctx)

    # Default headers (User-Agent, Accept-Language) come from the service config.
    self.session.headers.update(self.config["headers"])
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search iView via its Algolia index and yield Program results.

    Yields:
        SearchResult: one per hit whose docType is "Program".
    """
    url = (
        "https://y63q32nvdl-1.algolianet.com/1/indexes/*/queries?x-algolia-agent=Algolia"
        "%20for%20JavaScript%20(4.9.1)%3B%20Browser%20(lite)%3B%20react%20(17.0.2)%3B%20"
        "react-instantsearch%20(6.30.2)%3B%20JS%20Helper%20(3.10.0)&x-"
        "algolia-api-key=bcdf11ba901b780dc3c0a3ca677fbefc&x-algolia-application-id=Y63Q32NVDL"
    )
    payload = {
        "requests": [
            {
                "indexName": "ABC_production_iview_web",
                "params": f"query={self.title}&tagFilters=&userToken=anonymous-74be3cf1-1dc7-4fa1-9cff-19592162db1c",
            }
        ],
    }

    # BUGFIX: requests.Request has no "payload" keyword — it raised TypeError.
    # The Algolia multi-query endpoint takes the payload as a JSON body.
    results = self._request("POST", url, json=payload)["results"]
    hits = [x for x in results[0]["hits"] if x["docType"] == "Program"]

    for result in hits:
        yield SearchResult(
            id_="https://iview.abc.net.au/show/{}".format(result.get("slug")),
            title=result.get("title"),
            description=result.get("synopsis"),
            label=result.get("subType"),
            url="https://iview.abc.net.au/show/{}".format(result.get("slug")),
        )
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input URL/slug into a Movies or Series listing.

    Raises:
        ValueError: when no title ID can be parsed from the input.
    """
    title_re = r"^(?:https?://(?:www.)?iview.abc.net.au/(?P<type>show|video)/)?(?P<id>[a-zA-Z0-9_-]+)"
    try:
        kind, title_id = (re.match(title_re, self.title).group(i) for i in ("type", "id"))
    except Exception:
        raise ValueError("- Could not parse ID from title")

    # ROBUSTNESS: a bare slug has no /show/ or /video/ path component, so the
    # "type" group is None; previously that fell through and returned None.
    if kind is None:
        kind = "show"

    if kind == "show":
        data = self._request("GET", "/v3/show/{}".format(title_id))
        label = data.get("type")

        if label.lower() in ("series", "program"):
            episodes = self._series(title_id)
            return Series(episodes)

        elif label.lower() in ("feature", "movie"):
            movie = self._movie(data)
            return Movies(movie)

    elif kind == "video":
        episode = self._episode(title_id)
        return Series([episode])
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch the DASH manifest for a title and return its tracks.

    Also records whether the stream is DRM-protected on title.data so
    get_widevine_license() can short-circuit unprotected titles.

    Raises:
        ConnectionError: when the title is not playable or has no playlist.
        ValueError: when no reachable manifest URL is found.
    """
    video = self._request("GET", "/v3/video/{}".format(title.id))
    if not video.get("playable"):
        raise ConnectionError(video.get("unavailableMessage"))

    playlist = video.get("_embedded", {}).get("playlist", {})
    if not playlist:
        raise ConnectionError("Could not find a playlist for this title")

    # the "program" entry carries the actual content streams (and captions)
    streams = next(x["streams"]["mpegdash"] for x in playlist if x["type"] == "program")
    captions = next((x.get("captions") for x in playlist if x["type"] == "program"), None)
    title.data["protected"] = streams.get("protected", False)

    # 1080p is not listed but is served at the analogous URL — derive it from 720
    if "720" in streams:
        streams["1080"] = streams["720"].replace("720", "1080")

    # highest-quality variant whose URL actually responds 200 to a HEAD probe
    manifest = next(
        (url for key in ["1080", "720", "sd", "sd-low"] if key in streams
         for url in [streams[key]]
         if self.session.head(url).status_code == 200),
        None
    )
    if not manifest:
        raise ValueError("Could not find a manifest for this title")

    tracks = DASH.from_url(manifest, self.session).to_tracks(title.language)

    # flag audio-description / alternate audio via the DASH Role element
    for track in tracks.audio:
        role = track.data["dash"]["adaptation_set"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    if captions:
        subtitles = captions.get("src-vtt")
        tracks.add(
            Subtitle(
                # short stable id derived from the subtitle URL
                id_=hashlib.md5(subtitles.encode()).hexdigest()[0:6],
                url=subtitles,
                # codec inferred from the URL's 3-char extension (e.g. "vtt")
                codec=Subtitle.Codec.from_mime(subtitles[-3:]),
                language=title.language,
                forced=False,
            )
        )

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Return a chapter list holding only an end-credits marker, when one exists."""
    cue_points = title.data.get("cuePoints")
    if not cue_points:
        return Chapters()

    for cue in cue_points:
        if cue["type"] == "end-credits":
            start = cue.get("start")
            if start:
                # cue point is in seconds; Chapter timestamps are milliseconds
                return Chapters([Chapter(name="Credits", timestamp=start * 1000)])
            break

    return Chapters()
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    """No service certificate is required; the CDM's common cert is used."""
    return None
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Union[Movies, Series], track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge to the keyserver and return the raw license.

    Returns None for unprotected titles (flag set by get_tracks()).

    Raises:
        requests.HTTPError: when the license server rejects the request.
    """
    if not title.data.get("protected"):
        return None

    # the keyserver authenticates via a per-video "customdata" header
    customdata = self._license(title.id)
    headers = {"customdata": customdata}

    r = self.session.post(self.config["endpoints"]["license"], headers=headers, data=challenge)
    r.raise_for_status()
    return r.content
|
||||
|
||||
# Service specific
|
||||
|
||||
def _series(self, title: str) -> list[Episode]:
    """Fetch every episode of a series across all of its seasons.

    The API returns either a single season object or a list of them; both
    shapes are normalized to a list before flattening the episode items.

    Returns a plain list of Episode objects. (Previously this returned a
    Series, which the caller then re-wrapped in Series(...); the annotation
    also wrongly claimed a single Episode.)
    """
    data = self._request("GET", "/v3/series/{}".format(title))

    # normalize: single-season payloads arrive as a dict, multi-season as a list
    seasons = data if isinstance(data, list) else [data]

    return [
        self.create_episode(episode)
        for season in seasons
        for episode in season.get("_embedded", {}).get("videoEpisodes", {}).get("items", [])
    ]
|
||||
|
||||
def _movie(self, data: dict) -> list[Movie]:
    """Wrap a show payload's highlight video as a single-item Movie list."""
    return [
        Movie(
            # the feature itself is exposed as the show's "highlightVideo"
            id_=data["_embedded"]["highlightVideo"]["id"],
            service=self.__class__,
            name=data.get("title"),
            year=data.get("productionYear"),
            data=data,
            # language comes from the analytics data layer; default to English
            language=data.get("analytics", {}).get("dataLayer", {}).get("d_language", "en"),
        )
    ]
|
||||
|
||||
def _episode(self, video_id: str) -> Episode:
    """Fetch a single video by ID and convert it into an Episode."""
    payload = self._request("GET", "/v3/video/{}".format(video_id))
    return self.create_episode(payload)
|
||||
|
||||
def _license(self, video_id: str):
    """Exchange a client JWT for a video's DRM customdata ("license") string."""
    jwt = self._request("POST", "/v3/token/jwt", data={"clientId": self.config["client"]})["token"]
    drm = self._request("GET", "/v3/token/drm/{}".format(video_id), headers={"bearer": jwt})
    return drm["license"]
|
||||
|
||||
def create_episode(self, episode: dict) -> Episode:
    """Convert an iView episode payload into an Episode object.

    Season/episode numbers are not first-class fields in the payload, so they
    are recovered from analytics metadata and display strings, with a final
    fallback parsed out of the episode ID itself.
    """
    title = episode["showTitle"]
    episode_id = episode.get("id", "")
    series_id = episode.get("analytics", {}).get("dataLayer", {}).get("d_series_id", "")
    episode_name = episode.get("analytics", {}).get("dataLayer", {}).get("d_episode_name", "")
    # e.g. displaySubtitle "Series 3 Episode 4" -> episode number
    episode_number = re.search(r"Episode (\d+)", episode.get("displaySubtitle", ""))
    # e.g. "S3 Episode 4 The Name" -> bare episode name
    name = re.search(r"S\d+\sEpisode\s\d+\s(.*)", episode_name)
    language = episode.get("analytics", {}).get("dataLayer", {}).get("d_language", "en")

    # series IDs look like "<slug>-<seasonNum>"; default to 0 when absent
    season = int(series_id.split("-")[-1]) if series_id else 0
    number = int(episode_number.group(1)) if episode_number else 0

    if not number:
        # fallback: IDs like "DR2314H001S00" embed the episode number before "S00"
        if match := re.search(r"[A-Z](\d{3})(?=S\d{2})", episode_id):
            number = int(match.group(1))

    return Episode(
        id_=episode["id"],
        service=self.__class__,
        title=title,
        season=season,
        number=number,
        name=name.group(1) if name else episode_name,
        data=episode,
        language=language,
    )
|
||||
|
||||
def _request(self, method: str, api: str, **kwargs: Any) -> Any:
    """Send a request to the iView API and return the parsed JSON body.

    Args:
        method: HTTP verb.
        api: path (or absolute URL) joined onto the configured base URL.
        **kwargs: forwarded to requests.Request (params, data, headers, json, ...).

    Raises:
        ConnectionError: on any non-200 response.
        ValueError: when the body is not valid JSON.
    """
    url = urljoin(self.config["endpoints"]["base_url"], api)

    # prepare through the session so its cookies/headers are applied
    prep = self.session.prepare_request(Request(method, url, **kwargs))

    response = self.session.send(prep)
    if response.status_code != 200:
        raise ConnectionError(f"{response.text}")

    try:
        return json.loads(response.content)

    except json.JSONDecodeError as e:
        raise ValueError(f"Failed to parse JSON: {response.text}") from e
|
||||
|
||||
9
AUBC/config.yaml
Normal file
9
AUBC/config.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
headers:
|
||||
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0
|
||||
accept-language: en-US,en;q=0.8
|
||||
|
||||
endpoints:
|
||||
base_url: https://api.iview.abc.net.au
|
||||
license: https://wv-keyos.licensekeyserver.com/
|
||||
|
||||
client: "1d4b5cba-42d2-403e-80e7-34565cdf772d"
|
||||
319
CBC/__init__.py
Normal file
319
CBC/__init__.py
Normal file
@ -0,0 +1,319 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any, Optional, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from requests import Request
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH, HLS
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class CBC(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for CBC Gem streaming service (https://gem.cbc.ca/).
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: Credentials
|
||||
Robustness:
|
||||
AES-128: 1080p, DDP5.1
|
||||
Widevine L3: 720p, DDP5.1
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input can be complete title URL or just the slug:
|
||||
SHOW: https://gem.cbc.ca/murdoch-mysteries OR murdoch-mysteries
|
||||
MOVIE: https://gem.cbc.ca/the-babadook OR the-babadook
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- DRM encrypted titles max out at 720p.
|
||||
- CCExtrator v0.94 will likely fail to extract subtitles. It's recommended to downgrade to v0.93.
|
||||
- Some audio tracks contain invalid data, causing warning messages from mkvmerge during muxing
|
||||
These can be ignored.
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("ca",)
|
||||
ALIASES = ("gem", "cbcgem",)
|
||||
|
||||
@staticmethod
@click.command(name="CBC", short_help="https://gem.cbc.ca/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> CBC:
    # Click entry point: forwards the parsed CLI arguments into the service.
    return CBC(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Raw title input (full URL or slug); parsed later in get_titles().
    self.title: str = title
    super().__init__(ctx)

    # Cached for _request(); all API paths are joined onto this.
    self.base_url: str = self.config["endpoints"]["base_url"]
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the Gem catalog search endpoint and yield a SearchResult per hit."""
    query = {
        "device": "web",
        "pageNumber": "1",
        "pageSize": "20",
        "term": self.title,
    }
    data: dict = self._request("GET", "/ott/catalog/v1/gem/search", params=query)

    for hit in data.get("result", []):
        link = "https://gem.cbc.ca/{}".format(hit.get("url"))
        yield SearchResult(
            id_=link,
            title=hit.get("title"),
            description=hit.get("synopsis"),
            label=hit.get("type"),
            url=link,
        )
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate against CBC Gem via ROPC and install the claims-token header.

    Uses a per-credential token cache: reuse while valid, refresh when
    expired, otherwise perform a fresh password grant.

    Raises:
        EnvironmentError: when no credential is provided.
    """
    super().__init__ if False else None  # no-op guard removed; see note below
    super().authenticate(cookies, credential)
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    # NOTE(review): assumed to return a cache object exposing .expired/.data/.set
    # even on a cold cache — the "new tokens" branch calls tokens.set(), which
    # would fail if this could ever be None. Confirm the cache contract.
    tokens: Optional[Any] = self.cache.get(f"tokens_{credential.sha1}")

    """
    All grant types for future reference:
    PASSWORD("password"),
    ACCESS_TOKEN("access_token"),
    REFRESH_TOKEN("refresh_token"),
    CLIENT_CREDENTIALS("client_credentials"),
    AUTHORIZATION_CODE("authorization_code"),
    CODE("code");
    """

    if tokens and not tokens.expired:
        # cached tokens still valid — reuse
        self.log.info(" + Using cached tokens")
        auth_token: str = tokens.data["access_token"]

    elif tokens and tokens.expired:
        # expired — use the refresh_token grant
        self.log.info("Refreshing cached tokens...")
        auth_url, scopes = self.settings()
        params = {
            "client_id": self.config["client"]["id"],
            "grant_type": "refresh_token",
            "refresh_token": tokens.data["refresh_token"],
            "scope": scopes,
        }

        access: dict = self._request("POST", auth_url, params=params)

        # Shorten expiration by one hour to account for clock skew
        tokens.set(access, expiration=int(access["expires_in"]) - 3600)
        auth_token: str = access["access_token"]

    else:
        # cold cache — full password (ROPC) grant
        self.log.info("Requesting new tokens...")
        auth_url, scopes = self.settings()
        params = {
            "client_id": self.config["client"]["id"],
            "grant_type": "password",
            "username": credential.username,
            "password": credential.password,
            "scope": scopes,
        }

        access: dict = self._request("POST", auth_url, params=params)

        # Shorten expiration by one hour to account for clock skew
        tokens.set(access, expiration=int(access["expires_in"]) - 3600)
        auth_token: str = access["access_token"]

    # all subsequent catalog/subscription calls require the claims token
    claims_token: str = self.claims_token(auth_token)
    self.session.headers.update({"x-claims-token": claims_token})
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input URL/slug into a Movies or Series listing."""
    pattern = r"^(?:https?://(?:www.)?gem.cbc.ca/)?(?P<id>[a-zA-Z0-9_-]+)"
    try:
        slug: str = re.match(pattern, self.title).group("id")
    except Exception:
        raise ValueError("- Could not parse ID from title")

    show: dict = self._request(
        "GET", "/ott/catalog/v2/gem/show/{}".format(slug), params={"device": "web"}
    )
    content_type = show.get("contentType", "").lower()

    # anything that isn't a standalone feature is treated as episodic
    if content_type in ("film", "movie", "standalone"):
        return Movies(self._movie(show))
    return Series(self._show(show))
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Validate a media ID and return its HLS or DASH tracks.

    Side effects: stashes chapter/credit metadata on title.data["extra"]
    (read by get_chapters) and records self.drm/self.license/self.token
    for the Widevine license request.
    """
    index: dict = self._request(
        "GET", "/media/meta/v1/index.ashx", params={"appCode": "gem", "idMedia": title.id, "output": "jsonObject"}
    )

    # keep chapter/credit markers for get_chapters() ("Chapitres" is the FR key)
    title.data["extra"] = {
        "chapters": index["Metas"].get("Chapitres"),
        "credits": index["Metas"].get("CreditStartTime"),
    }

    self.drm: bool = index["Metas"].get("isDrmActive") == "true"
    if self.drm:
        # pick the tech variant that carries Widevine
        tech: str = next(tech["name"] for tech in index["availableTechs"] if "widevine" in tech["drm"])
    else:
        tech: str = next(tech["name"] for tech in index["availableTechs"] if not tech["drm"])

    response: dict = self._request(
        "GET", self.config["endpoints"]["validation"].format("android", title.id, "smart-tv", tech)
    )

    manifest = response.get("url")
    # license endpoint + auth token for DRM titles (None when unprotected)
    self.license = next((x["value"] for x in response["params"] if "widevineLicenseUrl" in x["name"]), None)
    self.token = next((x["value"] for x in response["params"] if "widevineAuthToken" in x["name"]), None)

    stream_type: Union[HLS, DASH] = HLS if tech == "hls" else DASH
    tracks: Tracks = stream_type.from_url(manifest, self.session).to_tracks(language=title.language)

    if stream_type == DASH:
        # flag described-audio tracks via the DASH Label element
        for track in tracks.audio:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "descriptive" in label.text.lower():
                track.descriptive = True

    # manifest language metadata is unreliable; trust the catalog language
    for track in tracks:
        track.language = title.language

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Build chapters from the index metadata stashed by get_tracks().

    The chapter field is a comma-separated timestamp string; duplicates are
    dropped. An end-credits marker is appended when a start time exists.
    """
    meta: dict = title.data["extra"]
    marks = []

    raw = meta.get("chapters")
    if raw:
        # de-duplicate timestamps before constructing chapters
        marks = [Chapter(timestamp=stamp) for stamp in set(raw.split(","))]

    credit_start = meta.get("credits")
    if credit_start:
        marks.append(Chapter(name="Credits", timestamp=float(credit_start)))

    return Chapters(marks)
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    """No service certificate is required; the CDM's common cert is used."""
    return None
|
||||
|
||||
def get_widevine_license(
    self, *, challenge: bytes, title: Union[Movies, Series], track: AnyTrack
) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge and return the raw license bytes.

    Returns None when get_tracks() found no license endpoint/token
    (i.e. the title is unprotected).

    Raises:
        requests.HTTPError: when the license server rejects the request.
    """
    if not self.license or not self.token:
        return None

    # auth token captured from the validation response in get_tracks()
    headers = {"x-dt-auth-token": self.token}
    r = self.session.post(self.license, headers=headers, data=challenge)
    r.raise_for_status()
    return r.content
|
||||
|
||||
# Service specific
|
||||
|
||||
def _show(self, data: dict) -> list[Episode]:
    """Flatten a show payload's "Episodes" lineups into Episode objects.

    Returns an empty list when no episode lineups exist, so the caller can
    always wrap the result in Series(...). (Previously returned None here,
    which broke the Series construction in get_titles.)
    """
    lineups: list = next((x["lineups"] for x in data["content"] if x.get("title", "").lower() == "episodes"), None)
    if not lineups:
        self.log.warning("No episodes found for: {}".format(data.get("title")))
        return []

    titles = []
    for season in lineups:
        for episode in season["items"]:
            if episode.get("mediaType", "").lower() == "episode":
                # item titles look like "3. Episode name" — strip the leading number
                parts = episode.get("title", "").split(".", 1)
                episode_name = parts[1].strip() if len(parts) > 1 else parts[0].strip()
                titles.append(
                    Episode(
                        id_=episode["idMedia"],
                        service=self.__class__,
                        title=data.get("title"),
                        season=int(season.get("seasonNumber", 0)),
                        number=int(episode.get("episodeNumber", 0)),
                        name=episode_name,
                        year=episode.get("metadata", {}).get("productionYear"),
                        language=data["structuredMetadata"].get("inLanguage", "en-CA"),
                        data=episode,
                    )
                )

    return titles
|
||||
|
||||
def _movie(self, data: dict) -> list[Movie]:
    """Extract Movie objects from a film/standalone show payload.

    Returns an empty list when no usable lineup exists, so the caller can
    always wrap the result in Movies(...). (Previously returned None here,
    which broke the Movies construction in get_titles.)
    """
    unwanted: tuple = ("episodes", "trailers", "extras")
    lineups: list = next((x["lineups"] for x in data["content"] if x.get("title", "").lower() not in unwanted), None)
    if not lineups:
        self.log.warning("No movies found for: {}".format(data.get("title")))
        return []

    titles = []
    for season in lineups:
        for movie in season["items"]:
            # the feature itself is still typed "episode" in the lineup items
            if movie.get("mediaType", "").lower() == "episode":
                # item titles look like "1. Movie name" — strip the leading number
                parts = movie.get("title", "").split(".", 1)
                movie_name = parts[1].strip() if len(parts) > 1 else parts[0].strip()
                titles.append(
                    Movie(
                        id_=movie.get("idMedia"),
                        service=self.__class__,
                        name=movie_name,
                        year=movie.get("metadata", {}).get("productionYear"),
                        language=data["structuredMetadata"].get("inLanguage", "en-CA"),
                        data=movie,
                    )
                )

    return titles
|
||||
|
||||
def settings(self) -> tuple:
    """Return the (auth_url, scopes) pair from the Gem ROPC identity settings."""
    data = self._request("GET", "/ott/catalog/v1/gem/settings", params={"device": "web"})
    ropc = data["identityManagement"]["ropc"]
    return ropc["url"], ropc["scopes"]
|
||||
|
||||
def claims_token(self, token: str) -> str:
    """Trade an OAuth access token for the subscriber claims token."""
    profile: dict = self._request(
        "GET",
        "/ott/subscription/v2/gem/Subscriber/profile",
        headers={"Authorization": "Bearer " + token},
        params={"device": "web"},
    )
    return profile["claimsToken"]
|
||||
|
||||
def _request(self, method: str, api: str, **kwargs: Any) -> Any:
    """Send a request to the Radio-Canada services API and return parsed JSON.

    Status 426 is accepted alongside 200 (some endpoints use it while still
    returning a usable JSON body).

    Raises:
        ConnectionError: on any other status, or when the body is not JSON.
    Side effects:
        Logs and exits(1) when the JSON body carries a service error field.
    """
    url: str = urljoin(self.base_url, api)

    # NOTE(review): prepare_request actually returns a PreparedRequest,
    # not a Request — the annotation below is misleading.
    prep: Request = self.session.prepare_request(Request(method, url, **kwargs))
    response = self.session.send(prep)
    if response.status_code not in (200, 426):
        raise ConnectionError(f"{response.status_code} - {response.text}")

    try:
        data = json.loads(response.content)
        # the API reports errors in-band with HTTP 200 under varying keys
        error_keys = ["errorMessage", "ErrorMessage", "ErrorCode", "errorCode", "error"]
        error_message = next((data.get(key) for key in error_keys if key in data), None)
        if error_message:
            self.log.error(f"\n - Error: {error_message}\n")
            sys.exit(1)

        return data

    except json.JSONDecodeError:
        raise ConnectionError("Request for {} failed: {}".format(response.url, response.text))
|
||||
7
CBC/config.yaml
Normal file
7
CBC/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
endpoints:
|
||||
base_url: "https://services.radio-canada.ca"
|
||||
validation: "/media/validation/v2?appCode=gem&&deviceType={}&idMedia={}&manifestType={}&output=json&tech={}"
|
||||
api_key: "3f4beddd-2061-49b0-ae80-6f1f2ed65b37"
|
||||
|
||||
client:
|
||||
id: "fc05b0ee-3865-4400-a3cc-3da82c330c23"
|
||||
242
CBS/__init__.py
Normal file
242
CBS/__init__.py
Normal file
@ -0,0 +1,242 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from typing import Any, Optional, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import click
|
||||
from requests import Request
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
from unshackle.core.utils.sslciphers import SSLCiphers
|
||||
from unshackle.core.utils.xml import load_xml
|
||||
|
||||
|
||||
class CBS(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for CBS.com streaming service (https://cbs.com).
|
||||
Credit to @srpen6 for the tip on anonymous session
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Robustness:
|
||||
Widevine:
|
||||
L3: 2160p, DDP5.1
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input should be complete URLs:
|
||||
SERIES: https://www.cbs.com/shows/tracker/
|
||||
EPISODE: https://www.cbs.com/shows/video/E0wG_ovVMkLlHOzv7KDpUV9bjeKFFG2v/
|
||||
|
||||
\b
|
||||
Common VPN/proxy errors:
|
||||
- SSLError(SSLEOFError(8, '[SSL: UNEXPECTED_EOF_WHILE_READING]'))
|
||||
- ConnectionError: 406 Not Acceptable, 403 Forbidden
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("us",)
|
||||
|
||||
@staticmethod
@click.command(name="CBS", short_help="https://cbs.com", help=__doc__)
@click.argument("title", type=str, required=False)
@click.pass_context
def cli(ctx, **kwargs) -> CBS:
    # Click entry point: forwards the parsed CLI arguments into the service.
    return CBS(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title):
    # Raw title input (full show or episode URL); parsed in get_titles().
    self.title = title
    super().__init__(ctx)
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the CBS content search endpoint and yield a SearchResult per term."""
    query = {
        "term": self.title,
        "termCount": 50,
        "showCanVids": "true",
    }
    terms = self._request("GET", "/apps-api/v3.1/androidphone/contentsearch/search.json", params=query)["terms"]

    for item in terms:
        yield SearchResult(
            id_=item.get("path"),
            title=item.get("title"),
            description=None,
            label=item.get("term_type"),
            url=item.get("path"),
        )
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Parse a CBS URL into either a full show listing or a single episode."""
    pattern = r"https://www\.cbs\.com/shows/(?P<video>video/)?(?P<id>[a-zA-Z0-9_-]+)/?$"
    try:
        video, title_id = (re.match(pattern, self.title).group(g) for g in ("video", "id"))
    except Exception:
        raise ValueError("- Could not parse ID from title")

    # a /shows/video/<id> URL addresses one episode; otherwise it's a show slug
    episodes = self._episode(title_id) if video else self._show(title_id)
    return Series(episodes)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Return the DASH tracks for a title.

    Side effects: stores the bearer token and license URL (consumed by
    get_widevine_license) before fetching the manifest.
    """
    # NOTE(review): ls_session() and get_manifest() are defined outside this
    # view — presumably they create a playback session and resolve the DASH
    # manifest URL; confirm their contracts.
    self.token, self.license = self.ls_session(title.id)
    manifest = self.get_manifest(title)
    return DASH.from_url(url=manifest).to_tracks(language=title.language)
|
||||
|
||||
def get_chapters(self, title: Episode) -> Chapters:
    """Return an end-credits chapter when the playback metadata provides one."""
    end_credits = title.data.get("playbackEvents", {}).get("endCreditChapterTimeMs")
    if not end_credits:
        return Chapters()

    return Chapters([Chapter(name="Credits", timestamp=end_credits)])
|
||||
|
||||
def certificate(self, **_):
    """Return no service certificate so the common privacy cert is used instead."""
    return None
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge to the license server and return the license.

    Uses the bearer token and license URL captured by get_tracks().
    Logs the server response and exits(1) on any non-2xx status.
    """
    headers = {"Authorization": f"Bearer {self.token}"}
    r = self.session.post(self.license, headers=headers, data=challenge)
    if not r.ok:
        self.log.error(r.text)
        sys.exit(1)
    return r.content
|
||||
|
||||
# Service specific functions
|
||||
|
||||
def _show(self, title: str) -> list[Episode]:
    """Resolve a show slug into Episode objects for every full episode.

    Args:
        title: The show slug from the input URL.

    Returns:
        A list of Episode objects (the original annotation claimed a single
        Episode, but this method has always returned a list).
    """
    data = self._request("GET", "/apps-api/v3.0/androidphone/shows/slug/{}.json".format(title))

    # Walk the show menu to locate the "Episodes" section configuration name.
    links = next((x.get("links") for x in data["showMenu"] if x.get("device_app_id") == "all_platforms"), None)
    config = next((x.get("videoConfigUniqueName") for x in links if x.get("title").strip() == "Episodes"), None)
    show = next((x for x in data["show"]["results"] if x.get("type").strip() == "show"), None)
    seasons = [x.get("seasonNum") for x in data["available_video_seasons"].get("itemList", [])]
    locale = show.get("locale", "en-US")

    show_data = self._request(
        "GET", "/apps-api/v2.0/androidphone/shows/{}/videos/config/{}.json".format(show.get("show_id"), config),
        params={"platformType": "apps", "rows": "1", "begin": "0"},
    )

    # The "Full Episodes" section id is needed to page episodes per season.
    section = next(
        (x["sectionId"] for x in show_data["videoSectionMetadata"] if x["title"] == "Full Episodes"), None
    )

    episodes = []
    for season in seasons:
        res = self._request(
            "GET", "/apps-api/v2.0/androidphone/videos/section/{}.json".format(section),
            params={"begin": "0", "rows": "999", "params": f"seasonNum={season}", "seasonNum": season},
        )
        episodes.extend(res["sectionItems"].get("itemList", []))

    # Non-full episodes (clips, trailers) are filtered out entirely.
    return [
        Episode(
            id_=episode["contentId"],
            title=episode["seriesTitle"],
            season=episode["seasonNum"] if episode["fullEpisode"] else 0,
            number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
            name=episode["label"],
            language=locale,
            service=self.__class__,
            data=episode,
        )
        for episode in episodes
        if episode["fullEpisode"]
    ]
|
||||
|
||||
def _episode(self, title: str) -> list[Episode]:
    """Resolve a single on-demand content ID into Episode objects.

    Args:
        title: The content ID from the "/video/" URL.

    Returns:
        A list of Episode objects built from the API's itemList (the original
        annotation claimed a single Episode, but a list is returned).
    """
    data = self._request("GET", "/apps-api/v2.0/androidphone/video/cid/{}.json".format(title))

    return [
        Episode(
            id_=episode["contentId"],
            title=episode["seriesTitle"],
            # Clips/specials are treated as season 0 with their position number.
            season=episode["seasonNum"] if episode["fullEpisode"] else 0,
            number=episode["episodeNum"] if episode["fullEpisode"] else episode["positionNum"],
            name=episode["label"],
            language="en-US",
            service=self.__class__,
            data=episode,
        )
        for episode in data["itemList"]
    ]
|
||||
|
||||
def ls_session(self, content_id: str) -> tuple[str, str]:
    """Request an anonymous playback session for the given content.

    Args:
        content_id: The content ID to open a session for.

    Returns:
        A (session token, license server URL) pair — the original annotation
        said ``str`` but two values are returned.
    """
    res = self._request(
        "GET", "/apps-api/v3.0/androidphone/irdeto-control/anonymous-session-token.json",
        params={"contentId": content_id},
    )

    return res.get("ls_session"), res.get("url")
|
||||
|
||||
def get_manifest(self, title: Episode) -> str:
    """Return the best DASH manifest URL for an episode.

    Queries ThePlatform's SMIL feed first, preferring HDR then CENC variants
    of the highest-bitrate switch; on any failure it falls back to the
    episode's own ``streamingUrl``.

    Raises:
        ValueError: If neither source yields a usable stream URL.
    """
    try:
        res = self._request(
            "GET", "http://link.theplatform.com/s/{}/media/guid/2198311517/{}".format(
                title.data.get("cmsAccountId"), title.id
            ),
            params={
                "format": "SMIL",
                "assetTypes": "|".join(self.config["assets"]),
                "formats": "MPEG-DASH,MPEG4,M3U",
            },
        )

        # SMIL: pick the <switch> with the highest system-bitrate video.
        body = load_xml(res).find("body").find("seq").findall("switch")
        bitrate = max(body, key=lambda x: int(x.find("video").get("system-bitrate")))
        videos = [x.get("src") for x in bitrate.findall("video")]
        if not videos:
            raise ValueError("Could not find any streams - is the title still available?")

        # Preference order: HDR DASH > CENC DASH > first available source.
        manifest = next(
            (x for x in videos if "hdr_dash" in x.lower()),
            next((x for x in videos if "cenc_dash" in x.lower()), videos[0]),
        )

    except Exception as e:
        # Best-effort fallback: the playback metadata carries a plain manifest URL.
        self.log.warning("ThePlatform request failed: {}, falling back to standard manifest".format(e))
        if not title.data.get("streamingUrl"):
            raise ValueError("Could not find any streams - is the title still available?")

        manifest = title.data.get("streamingUrl")

    return manifest
|
||||
|
||||
def _request(self, method: str, api: str, params: Optional[dict] = None, headers: Optional[dict] = None) -> dict | str:
    """Send an API request and return the parsed JSON body, or raw text.

    Args:
        method: HTTP method, e.g. "GET".
        api: Path (or absolute URL) joined onto the configured base URL.
        params: Extra query parameters merged over the default "at" token.
        headers: Extra headers merged into the session headers.

    Returns:
        The decoded JSON dict, or the raw response text when the body is not
        JSON (e.g. SMIL/XML responses).

    Raises:
        ConnectionError: On any non-200 response.
        ValueError: When the JSON body reports success=False.
    """
    # NOTE: original annotation was ``Any[dict | str]`` — ``Any`` is not
    # subscriptable; ``dict | str`` is the intended type.
    url = urljoin(self.config["endpoints"]["base_url"], api)
    self.session.headers.update(self.config["headers"])
    self.session.params = {"at": self.config["endpoints"]["token"]}
    # Re-mount pinned TLS ciphers on every call so each request uses them.
    for prefix in ("https://", "http://"):
        self.session.mount(prefix, SSLCiphers(security_level=2))

    if params:
        self.session.params.update(params)
    if headers:
        self.session.headers.update(headers)

    prep = self.session.prepare_request(Request(method, url))

    response = self.session.send(prep)
    if response.status_code != 200:
        raise ConnectionError(f"{response.text}")

    try:
        data = json.loads(response.content)
        if not data.get("success"):
            raise ValueError(data.get("message"))
        return data

    except json.JSONDecodeError:
        # Non-JSON payloads (e.g. SMIL manifests) are returned verbatim.
        return response.text
|
||||
10
CBS/config.yaml
Normal file
10
CBS/config.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
headers:
|
||||
user-agent: Mozilla/5.0 (Linux; Android 13; SM-A536E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Mobile Safari/537.36
|
||||
|
||||
endpoints:
|
||||
base_url: https://cbsdigital.cbs.com
|
||||
token: ABBsaBMagMmYLUc9iXB0lXEKsUQ0/MwRn6z3Tg0KKQaH7Q6QGqJcABwlBP4XiMR1b0Q=
|
||||
|
||||
assets: [HLS_AES, DASH_LIVE, DASH_CENC, DASH_CENC_HDR10, DASH_TA, DASH_CENC_PS4]
|
||||
|
||||
|
||||
777
CR/__init__.py
Normal file
777
CR/__init__.py
Normal file
@ -0,0 +1,777 @@
|
||||
import re
|
||||
import time
|
||||
import uuid
|
||||
from threading import Lock
|
||||
from typing import Generator, Optional, Union
|
||||
|
||||
import click
|
||||
import jwt
|
||||
from langcodes import Language
|
||||
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.session import session
|
||||
from unshackle.core.titles import Episode, Series
|
||||
from unshackle.core.tracks import Attachment, Chapters, Tracks
|
||||
from unshackle.core.tracks.chapter import Chapter
|
||||
from unshackle.core.tracks.subtitle import Subtitle
|
||||
|
||||
|
||||
class CR(Service):
|
||||
"""
|
||||
Service code for Crunchyroll streaming service (https://www.crunchyroll.com).
|
||||
|
||||
\b
|
||||
Version: 2.0.0
|
||||
Author: sp4rk.y
|
||||
Date: 2025-11-01
|
||||
Authorization: Credentials
|
||||
Robustness:
|
||||
Widevine:
|
||||
L3: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input should be complete URL or series ID
|
||||
https://www.crunchyroll.com/series/GRMG8ZQZR/series-name OR GRMG8ZQZR
|
||||
- Supports multiple audio and subtitle languages
|
||||
- Device ID is cached for consistent authentication across runs
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- Uses password-based authentication with token caching
|
||||
- Manages concurrent stream limits automatically
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^(?:https?://(?:www\.)?crunchyroll\.com/(?:series|watch)/)?(?P<id>[A-Z0-9]+)"
|
||||
LICENSE_LOCK = Lock()
|
||||
MAX_CONCURRENT_STREAMS = 3
|
||||
ACTIVE_STREAMS: list[tuple[str, str]] = []
|
||||
|
||||
@staticmethod
def get_session():
    """Return a requests session using the "okhttp4" TLS fingerprint profile."""
    return session("okhttp4")
|
||||
|
||||
@staticmethod
@click.command(name="CR", short_help="https://crunchyroll.com")
@click.argument("title", type=str, required=True)
@click.pass_context
def cli(ctx, **kwargs) -> "CR":
    # Click entry point: build the CR service from the CLI context.
    # (Comment, not a docstring, so click's generated help text is unchanged.)
    return CR(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
    """Initialise the CR service for *title* (a Crunchyroll URL or bare series ID)."""
    self.title = title
    # Auth state is filled in by authenticate()/ensure_authenticated().
    self.account_id: Optional[str] = None
    self.access_token: Optional[str] = None
    self.token_expiration: Optional[int] = None
    # Fresh per-run anonymous id sent with every request.
    self.anonymous_id = str(uuid.uuid4())

    super().__init__(ctx)

    # Reuse one device id across runs so the service sees a stable device.
    device_cache_key = "cr_device_id"
    cached_device = self.cache.get(device_cache_key)

    if cached_device and not cached_device.expired:
        self.device_id = cached_device.data["device_id"]
    else:
        self.device_id = str(uuid.uuid4())
        cached_device.set(
            data={"device_id": self.device_id},
            expiration=60 * 60 * 24 * 365 * 10,  # ~10 years: effectively permanent
        )

    self.device_name = self.config.get("device", {}).get("name", "SHIELD Android TV")
    self.device_type = self.config.get("device", {}).get("type", "ANDROIDTV")

    self.session.headers.update(self.config.get("headers", {}))
    self.session.headers["etp-anonymous-id"] = self.anonymous_id
|
||||
|
||||
@property
def auth_header(self) -> dict:
    """Bearer authorization header built from the current access token."""
    return {"authorization": f"Bearer {self.access_token}"}
|
||||
|
||||
def ensure_authenticated(self) -> None:
    """Check if token is expired and re-authenticate if needed.

    First call: try to restore the token from the per-credential cache,
    otherwise run a full authenticate(). Subsequent calls: re-authenticate
    when the token is within 60 seconds of its expiry.
    """
    if not self.token_expiration:
        # No token loaded yet this run — check the cache first.
        cache_key = f"cr_auth_token_{self.credential.sha1 if self.credential else 'default'}"
        cached = self.cache.get(cache_key)

        if cached and not cached.expired:
            self.access_token = cached.data["access_token"]
            self.account_id = cached.data.get("account_id")
            self.token_expiration = cached.data.get("token_expiration")
            self.session.headers.update(self.auth_header)
            self.log.debug("Loaded authentication from cache")
        else:
            self.log.debug("No valid cached token, authenticating")
            self.authenticate(credential=self.credential)
        return

    current_time = int(time.time())
    # 60-second safety margin so the token cannot expire mid-request.
    if current_time >= (self.token_expiration - 60):
        self.log.debug("Authentication token expired or expiring soon, re-authenticating")
        self.authenticate(credential=self.credential)
|
||||
|
||||
def authenticate(self, cookies=None, credential=None) -> None:
    """Authenticate using username and password credentials.

    Reuses a cached token when available; otherwise performs an OAuth
    password grant against the token endpoint and caches the result until
    the JWT's own expiry. Finally, clears any leftover playback sessions.

    Args:
        cookies: Unused by this service; forwarded to the base class.
        credential: The account credential (username/password) to sign in with.

    Raises:
        ValueError: If no credential is provided.
        requests.HTTPError: If the token request fails.
    """
    super().authenticate(cookies, credential)

    cache_key = f"cr_auth_token_{credential.sha1 if credential else 'default'}"
    cached = self.cache.get(cache_key)

    if cached and not cached.expired:
        self.access_token = cached.data["access_token"]
        self.account_id = cached.data.get("account_id")
        self.token_expiration = cached.data.get("token_expiration")
    else:
        if not credential:
            # SECURITY: the previous revision fell back to a hardcoded
            # account embedded in source code. Never ship credentials in
            # code — require the caller to supply them instead.
            raise ValueError("CR requires an account credential (username and password)")

        response = self.session.post(
            url=self.config["endpoints"]["token"],
            headers={
                "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
                "request-type": "SignIn",
            },
            data={
                "grant_type": "password",
                "username": credential.username,
                "password": credential.password,
                "scope": "offline_access",
                "client_id": self.config["client"]["id"],
                "client_secret": self.config["client"]["secret"],
                "device_type": self.device_type,
                "device_id": self.device_id,
                "device_name": self.device_name,
            },
        )

        if response.status_code != 200:
            self.log.error(f"Login failed: {response.status_code}")
            try:
                error_data = response.json()
                error_msg = error_data.get("error", "Unknown error")
                error_code = error_data.get("code", "")
                self.log.error(f"Error: {error_msg} ({error_code})")
            except Exception:
                self.log.error(f"Response: {response.text}")
            response.raise_for_status()

        token_data = response.json()
        self.access_token = token_data["access_token"]
        self.account_id = self.get_account_id()

        # Prefer the JWT's own "exp" claim; fall back to expires_in.
        try:
            decoded_token = jwt.decode(self.access_token, options={"verify_signature": False})
            self.token_expiration = decoded_token.get("exp")
        except Exception:
            self.token_expiration = int(time.time()) + token_data.get("expires_in", 3600)

        # NOTE(review): "expiration" is passed an absolute epoch timestamp
        # here, while __init__ passes a relative duration — confirm which
        # the cache API expects.
        cached.set(
            data={
                "access_token": self.access_token,
                "account_id": self.account_id,
                "token_expiration": self.token_expiration,
            },
            expiration=self.token_expiration
            if isinstance(self.token_expiration, int) and self.token_expiration > int(time.time())
            else 3600,
        )

    self.session.headers.update(self.auth_header)

    # Start each authenticated run from a clean slate of playback sessions.
    if self.ACTIVE_STREAMS:
        self.ACTIVE_STREAMS.clear()

    try:
        self.clear_all_sessions()
    except Exception as e:
        self.log.warning(f"Failed to clear previous sessions: {e}")
|
||||
|
||||
def get_titles(self) -> Union[Series]:
    """Fetch series and episode information.

    Resolves the input to a series ID, pulls every season's episodes, and
    numbers them: regular episodes get sequential numbers per season, while
    specials (no/fractional episode_number) go to season 0 ordered by air date.
    """
    series_id = self.parse_series_id(self.title)

    series_response = self.session.get(
        url=self.config["endpoints"]["series"].format(series_id=series_id),
        params={"locale": self.config["params"]["locale"]},
    ).json()

    if "error" in series_response:
        raise ValueError(f"Series not found: {series_id}")

    # The series endpoint may wrap the record in a "data" list.
    series_data = (
        series_response.get("data", [{}])[0] if isinstance(series_response.get("data"), list) else series_response
    )
    series_title = series_data.get("title", "Unknown Series")

    seasons_response = self.session.get(
        url=self.config["endpoints"]["seasons"].format(series_id=series_id),
        params={"locale": self.config["params"]["locale"]},
    ).json()

    seasons_data = seasons_response.get("data", [])

    if not seasons_data:
        raise ValueError(f"No seasons found for series: {series_id}")

    all_episode_data = []
    special_episodes = []

    for season in seasons_data:
        season_id = season["id"]
        season_number = season.get("season_number", 0)

        episodes_response = self.session.get(
            url=self.config["endpoints"]["season_episodes"].format(season_id=season_id),
            params={"locale": self.config["params"]["locale"]},
        ).json()

        episodes_data = episodes_response.get("data", [])

        for episode_data in episodes_data:
            episode_number = episode_data.get("episode_number")

            # Missing or fractional episode numbers mark specials/recaps.
            if episode_number is None or isinstance(episode_number, float):
                special_episodes.append(episode_data)

            all_episode_data.append((episode_data, season_number))

    if not all_episode_data:
        raise ValueError(f"No episodes found for series: {series_id}")

    # Series year = year of the first listed episode's air date, if any.
    series_year = None
    if all_episode_data:
        first_episode_data = all_episode_data[0][0]
        first_air_date = first_episode_data.get("episode_air_date")
        if first_air_date:
            series_year = int(first_air_date[:4])

    # Specials are renumbered 1..N in air-date order under season 0.
    special_episodes.sort(key=lambda x: x.get("episode_air_date", ""))
    special_episode_numbers = {ep["id"]: idx + 1 for idx, ep in enumerate(special_episodes)}
    episodes = []
    season_episode_counts = {}

    for episode_data, season_number in all_episode_data:
        episode_number = episode_data.get("episode_number")

        if episode_number is None or isinstance(episode_number, float):
            final_season = 0
            final_number = special_episode_numbers[episode_data["id"]]
        else:
            final_season = season_number
            if final_season not in season_episode_counts:
                season_episode_counts[final_season] = 0

            # Sequential numbering per season, ignoring the API's own numbers.
            season_episode_counts[final_season] += 1
            final_number = season_episode_counts[final_season]

        # Original language = audio locale of the version flagged "main".
        original_language = None
        versions = episode_data.get("versions", [])
        for version in versions:
            if "main" in version.get("roles", []):
                original_language = version.get("audio_locale")
                break

        episode = Episode(
            id_=episode_data["id"],
            service=self.__class__,
            title=series_title,
            season=final_season,
            number=final_number,
            name=episode_data.get("title"),
            year=series_year,
            language=original_language,
            description=episode_data.get("description"),
            data=episode_data,
        )
        episodes.append(episode)

    return Series(episodes)
|
||||
|
||||
def set_track_metadata(self, tracks: Tracks, episode_id: str, is_original: bool) -> None:
    """Tag every video and audio track with its source episode and language origin."""
    for track in tracks.videos + tracks.audio:
        track.data["episode_id"] = episode_id
        track.is_original_lang = is_original
    # Only video streams need a remux pass after download.
    for video in tracks.videos:
        video.needs_repack = True
|
||||
|
||||
def get_tracks(self, title: Episode) -> Tracks:
    """Fetch video, audio, and subtitle tracks for an episode.

    Opens a temporary playback session per audio version, merges every
    version's video/audio tracks into one Tracks set, collects subtitles
    from the original-language version only, and closes each session as
    soon as its manifest has been parsed (the service enforces a
    concurrent-stream limit).
    """
    self.ensure_authenticated()

    episode_id = title.id

    # Start from a clean slate so we don't trip the stream limit.
    if self.ACTIVE_STREAMS:
        self.ACTIVE_STREAMS.clear()

    self.clear_all_sessions()

    initial_response = self.get_playback_data(episode_id, track_stream=False)
    versions = initial_response.get("versions", [])

    if not versions:
        self.log.warning("No versions found in playback response, using single version")
        versions = [{"audio_locale": initial_response.get("audioLocale", "ja-JP")}]

    tracks = None

    for idx, version in enumerate(versions):
        audio_locale = version.get("audio_locale")
        version_guid = version.get("guid")
        is_original = version.get("original", False)

        if not audio_locale:
            continue

        request_episode_id = version_guid if version_guid else episode_id

        if idx == 0 and not version_guid:
            # The initial playback response already covers this version.
            version_response = initial_response
            version_token = version_response.get("token")
        else:
            # Release the initial session before opening per-version ones.
            if idx == 1 and not versions[0].get("guid"):
                initial_token = initial_response.get("token")
                if initial_token:
                    self.close_stream(episode_id, initial_token)

            try:
                version_response = self.get_playback_data(request_episode_id, track_stream=False)
            except ValueError as e:
                self.log.warning(f"Could not get playback info for audio {audio_locale}: {e}")
                continue

            version_token = version_response.get("token")

        # Prefer the manifest without burned-in subtitles.
        hard_subs = version_response.get("hardSubs", {})
        dash_url = None

        if "none" in hard_subs:
            dash_url = hard_subs["none"].get("url")
        elif hard_subs:
            first_key = list(hard_subs.keys())[0]
            dash_url = hard_subs[first_key].get("url")

        if not dash_url:
            self.log.warning(f"No DASH manifest found for audio {audio_locale}, skipping")
            if version_token:
                self.close_stream(request_episode_id, version_token)
            continue

        try:
            version_tracks = DASH.from_url(
                url=dash_url,
                session=self.session,
            ).to_tracks(language=audio_locale)

            if tracks is None:
                tracks = version_tracks
                self.set_track_metadata(tracks, request_episode_id, is_original)
            else:
                # Merge this version's video/audio into the combined set.
                self.set_track_metadata(version_tracks, request_episode_id, is_original)
                for video in version_tracks.videos:
                    tracks.add(video)
                for audio in version_tracks.audio:
                    tracks.add(audio)

        except Exception as e:
            self.log.warning(f"Failed to parse DASH manifest for audio {audio_locale}: {e}")
            if version_token:
                self.close_stream(request_episode_id, version_token)
            continue

        # Subtitles come only from the original-language version.
        if is_original:
            captions = version_response.get("captions", {})
            subtitles_data = version_response.get("subtitles", {})
            all_subs = {**captions, **subtitles_data}

            for lang_code, sub_data in all_subs.items():
                if lang_code == "none":
                    continue

                if isinstance(sub_data, dict) and "url" in sub_data:
                    try:
                        lang = Language.get(lang_code)
                    except (ValueError, LookupError):
                        lang = Language.get("en")

                    subtitle_format = sub_data.get("format", "vtt").lower()
                    if subtitle_format == "ass" or subtitle_format == "ssa":
                        codec = Subtitle.Codec.SubStationAlphav4
                    else:
                        codec = Subtitle.Codec.WebVTT

                    tracks.add(
                        Subtitle(
                            id_=f"subtitle-{audio_locale}-{lang_code}",
                            url=sub_data["url"],
                            codec=codec,
                            language=lang,
                            forced=False,
                            sdh=False,
                        ),
                        warn_only=True,
                    )

        # Session no longer needed once the manifest has been parsed.
        if version_token:
            self.close_stream(request_episode_id, version_token)

    if versions and versions[0].get("guid"):
        initial_token = initial_response.get("token")
        if initial_token:
            self.close_stream(episode_id, initial_token)

    if tracks is None:
        raise ValueError(f"Failed to fetch any tracks for episode: {episode_id}")

    # Display names: base language name without region (e.g. "Japanese").
    for track in tracks.audio + tracks.subtitles:
        if track.language:
            try:
                lang_obj = Language.get(str(track.language))
                base_lang = Language.get(lang_obj.language)
                lang_display = base_lang.language_name()
                track.name = lang_display
            except (ValueError, LookupError):
                pass

    images = title.data.get("images", {})
    thumbnails = images.get("thumbnail", [])
    if thumbnails:
        thumb_variants = thumbnails[0] if isinstance(thumbnails[0], list) else [thumbnails[0]]
        if thumb_variants:
            thumb_index = min(7, len(thumb_variants) - 1)
            thumb = thumb_variants[thumb_index]
            if isinstance(thumb, dict) and "source" in thumb:
                # NOTE(review): thumbnail_name is computed but never used —
                # presumably an Attachment was meant to be added here; confirm.
                thumbnail_name = f"{title.name or title.title} - S{title.season:02d}E{title.number:02d}"

    return tracks
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, title: Episode, track) -> bytes:
    """
    Get Widevine license for decryption.

    Creates a fresh playback session for each track, gets the license, then immediately
    closes the stream. This prevents hitting the 3 concurrent stream limit.
    CDN authorization is embedded in the manifest URLs, not tied to active sessions.

    Raises:
        ValueError: If no playback token is returned or the license request fails.
    """
    self.ensure_authenticated()

    # Each track remembers which episode/version it came from.
    track_episode_id = track.data.get("episode_id", title.id)

    # Serialize license requests so sessions are opened/closed one at a time.
    with self.LICENSE_LOCK:
        playback_token = None
        try:
            playback_data = self.get_playback_data(track_episode_id, track_stream=True)
            playback_token = playback_data.get("token")

            if not playback_token:
                raise ValueError(f"No playback token in response for {track_episode_id}")

            track.data["playback_token"] = playback_token

            license_response = self.session.post(
                url=self.config["endpoints"]["license_widevine"],
                params={"specConform": "true"},
                data=challenge,
                headers={
                    **self.auth_header,
                    "content-type": "application/octet-stream",
                    "accept": "application/octet-stream",
                    "x-cr-content-id": track_episode_id,
                    "x-cr-video-token": playback_token,
                },
            )

            if license_response.status_code != 200:
                self.log.error(f"License request failed with status {license_response.status_code}")
                self.log.error(f"Response: {license_response.text[:500]}")
                self.close_stream(track_episode_id, playback_token)
                raise ValueError(f"License request failed: {license_response.status_code}")

            # Success: free the stream slot before returning the license.
            self.close_stream(track_episode_id, playback_token)
            return license_response.content

        except Exception:
            # Best-effort close so a failed request never leaks a session.
            if playback_token:
                try:
                    self.close_stream(track_episode_id, playback_token)
                except Exception:
                    pass
            raise
|
||||
|
||||
def cleanup_active_streams(self) -> None:
    """
    Close all remaining active streams.
    Called to ensure no streams are left open.
    """
    if self.ACTIVE_STREAMS:
        # Re-authenticate first: the token may have expired since the
        # streams were opened, and close_stream needs a valid one.
        try:
            self.authenticate()
        except Exception as e:
            self.log.warning(f"Failed to re-authenticate during cleanup: {e}")

        # Iterate a copy — close_stream mutates ACTIVE_STREAMS on success.
        for episode_id, token in list(self.ACTIVE_STREAMS):
            try:
                self.close_stream(episode_id, token)
            except Exception as e:
                self.log.warning(f"Failed to close stream {episode_id}: {e}")
                # Drop the entry anyway so cleanup cannot loop forever on it.
                if (episode_id, token) in self.ACTIVE_STREAMS:
                    self.ACTIVE_STREAMS.remove((episode_id, token))
|
||||
|
||||
def __del__(self) -> None:
    """Cleanup any remaining streams when service is destroyed."""
    # __del__ must never raise — swallow everything during interpreter teardown.
    try:
        self.cleanup_active_streams()
    except Exception:
        pass
|
||||
|
||||
def get_chapters(self, title: Episode) -> Chapters:
    """Get chapters/skip events for an episode.

    Queries the static skip-events JSON and maps intro/recap/credits/preview
    markers to chapters. Returns an empty Chapters on any failure.
    """
    chapters = Chapters()

    chapter_response = self.session.get(
        url=self.config["endpoints"]["skip_events"].format(episode_id=title.id),
    )

    if chapter_response.status_code == 200:
        try:
            chapter_data = chapter_response.json()
        except Exception as e:
            self.log.warning(f"Failed to parse chapter data: {e}")
            return chapters

        for chapter_type in ["intro", "recap", "credits", "preview"]:
            if chapter_info := chapter_data.get(chapter_type):
                try:
                    chapters.add(
                        Chapter(
                            # "start" is in seconds; Chapter wants milliseconds.
                            timestamp=int(chapter_info["start"] * 1000),
                            name=chapter_info["type"].capitalize(),
                        )
                    )
                except Exception as e:
                    self.log.debug(f"Failed to add {chapter_type} chapter: {e}")

    return chapters
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search for content on Crunchyroll.

    Yields up to 20 series results for the input term; any failure is
    logged and ends the generator without raising.
    """
    try:
        response = self.session.get(
            url=self.config["endpoints"]["search"],
            params={
                "q": self.title,
                "type": "series",
                "start": 0,
                "n": 20,
                "locale": self.config["params"]["locale"],
            },
        )

        if response.status_code != 200:
            self.log.error(f"Search request failed with status {response.status_code}")
            return

        search_data = response.json()
        # Results are grouped by type; each group carries its own items.
        for result_group in search_data.get("data", []):
            for series in result_group.get("items", []):
                series_id = series.get("id")

                if not series_id:
                    continue

                title = series.get("title", "Unknown")
                description = series.get("description", "")
                year = series.get("series_launch_year")
                # Keep listings readable by truncating long descriptions.
                if len(description) > 300:
                    description = description[:300] + "..."

                url = f"https://www.crunchyroll.com/series/{series_id}"
                label = f"SERIES ({year})" if year else "SERIES"

                yield SearchResult(
                    id_=series_id,
                    title=title,
                    label=label,
                    description=description,
                    url=url,
                )

    except Exception as e:
        self.log.error(f"Search failed: {e}")
        return
|
||||
|
||||
def get_account_id(self) -> str:
    """Fetch and return the account ID.

    Raises:
        requests.HTTPError: If the /me endpoint returns a non-200 status.
    """
    response = self.session.get(url=self.config["endpoints"]["account_me"], headers=self.auth_header)

    if response.status_code != 200:
        self.log.error(f"Failed to get account info: {response.status_code}")
        self.log.error(f"Response: {response.text}")
        response.raise_for_status()

    data = response.json()
    return data["account_id"]
|
||||
|
||||
def close_stream(self, episode_id: str, token: str) -> None:
    """Close an active playback stream to free up concurrent stream slots.

    On success (or a 403, meaning the session is already gone) the
    (episode_id, token) pair is removed from ACTIVE_STREAMS; failures are
    logged but never raised.
    """
    should_remove = False
    try:
        response = self.session.delete(
            url=self.config["endpoints"]["playback_delete"].format(episode_id=episode_id, token=token),
            headers=self.auth_header,
        )
        # 403 is treated as closed: the server no longer recognizes the token.
        if response.status_code in (200, 204, 403):
            should_remove = True
        else:
            self.log.error(
                f"Failed to close stream for {episode_id} (status {response.status_code}): {response.text[:200]}"
            )
    except Exception as e:
        self.log.error(f"Error closing stream for {episode_id}: {e}")
    finally:
        if should_remove and (episode_id, token) in self.ACTIVE_STREAMS:
            self.ACTIVE_STREAMS.remove((episode_id, token))
|
||||
|
||||
def get_active_sessions(self) -> list:
    """Get all active streaming sessions for the account.

    Returns:
        The session items reported by the streaming-sessions endpoint, or
        an empty list on any failure (never raises).
    """
    try:
        response = self.session.get(
            url=self.config["endpoints"]["playback_sessions"],
            headers=self.auth_header,
        )
        if response.status_code == 200:
            data = response.json()
            return data.get("items", [])
        else:
            self.log.warning(f"Failed to get active sessions (status {response.status_code})")
            return []
    except Exception as e:
        self.log.warning(f"Error getting active sessions: {e}")
        return []
|
||||
|
||||
def clear_all_sessions(self) -> int:
    """
    Clear all active streaming sessions created during this or previous runs.

    Tries multiple approaches to ensure all streams are closed:
    1. Clear tracked streams with known tokens
    2. Query active sessions API and close all found streams
    3. Try alternate token formats if needed

    Returns:
        The number of sessions successfully closed.
    """
    cleared = 0

    # Pass 1: close streams we opened ourselves and still hold tokens for.
    if self.ACTIVE_STREAMS:
        streams_to_close = self.ACTIVE_STREAMS[:]
        for episode_id, playback_token in streams_to_close:
            try:
                self.close_stream(episode_id, playback_token)
                cleared += 1
            except Exception:
                # Force-drop the entry so a broken token can't block cleanup.
                if (episode_id, playback_token) in self.ACTIVE_STREAMS:
                    self.ACTIVE_STREAMS.remove((episode_id, playback_token))

    # Pass 2: ask the server what it still considers active and close those.
    sessions = self.get_active_sessions()
    if sessions:
        for session_data in sessions:
            content_id = session_data.get("contentId")
            session_token = session_data.get("token")

            if content_id and session_token:
                # Tokens reported with an "08-" prefix sometimes only delete
                # under the "11-" form, so try that variant first.
                tokens_to_try = (
                    ["11-" + session_token[3:], session_token]
                    if session_token.startswith("08-")
                    else [session_token]
                )

                session_closed = False
                for token in tokens_to_try:
                    try:
                        response = self.session.delete(
                            url=self.config["endpoints"]["playback_delete"].format(
                                episode_id=content_id, token=token
                            ),
                            headers=self.auth_header,
                        )
                        if response.status_code in (200, 204):
                            cleared += 1
                            session_closed = True
                            break
                        elif response.status_code == 403:
                            # Session already invalid server-side; count as closed.
                            session_closed = True
                            break
                    except Exception:
                        pass

                if not session_closed:
                    self.log.warning(f"Unable to close session {content_id} with any token format")

    return cleared
|
||||
|
||||
def get_playback_data(self, episode_id: str, track_stream: bool = True) -> dict:
    """
    Get playback data for an episode with automatic retry on stream limits.

    Args:
        episode_id: The episode ID to get playback data for
        track_stream: Whether to track this stream in active_streams (False for temporary streams)

    Returns:
        dict: The playback response data

    Raises:
        ValueError: If playback request fails after retry
    """
    self.ensure_authenticated()

    max_retries = 2
    for attempt in range(max_retries + 1):
        response = self.session.get(
            url=self.config["endpoints"]["playback"].format(episode_id=episode_id),
            params={"queue": "false"},
        ).json()

        if "error" in response:
            error_code = response.get("code", "")
            error_msg = response.get("message", response.get("error", "Unknown error"))

            # Stream-limit errors are recoverable: clear sessions and retry.
            if error_code == "TOO_MANY_ACTIVE_STREAMS" and attempt < max_retries:
                self.log.warning(f"Hit stream limit: {error_msg}")
                cleared = self.clear_all_sessions()

                # Nothing closable found: sessions belong to a previous run,
                # so wait once for them to expire server-side.
                if cleared == 0 and attempt == 0:
                    wait_time = 30
                    self.log.warning(
                        f"Found orphaned sessions from previous run. Waiting {wait_time}s for them to expire..."
                    )
                    time.sleep(wait_time)

                continue

            self.log.error(f"Playback API error: {error_msg}")
            self.log.debug(f"Full response: {response}")
            raise ValueError(f"Could not get playback info for episode: {episode_id} - {error_msg}")

        # Track the opened session so cleanup can close it later.
        playback_token = response.get("token")
        if playback_token and track_stream:
            self.ACTIVE_STREAMS.append((episode_id, playback_token))

        return response

    raise ValueError(f"Failed to get playback data for episode: {episode_id}")
|
||||
|
||||
def parse_series_id(self, title_input: str) -> str:
|
||||
"""Parse series ID from URL or direct ID input."""
|
||||
match = re.match(self.TITLE_RE, title_input, re.IGNORECASE)
|
||||
if not match:
|
||||
raise ValueError(f"Could not parse series ID from: {title_input}")
|
||||
return match.group("id")
|
||||
47
CR/config.yaml
Normal file
47
CR/config.yaml
Normal file
@ -0,0 +1,47 @@
|
||||
# Crunchyroll API Configuration
|
||||
client:
|
||||
id: "lkesi7snsy9oojmi2r9h"
|
||||
secret: "-aGDXFFNTluZMLYXERngNYnEjvgH5odv"
|
||||
|
||||
# API Endpoints
|
||||
endpoints:
|
||||
# Authentication
|
||||
token: "https://www.crunchyroll.com/auth/v1/token"
|
||||
|
||||
# Account
|
||||
account_me: "https://www.crunchyroll.com/accounts/v1/me"
|
||||
multiprofile: "https://www.crunchyroll.com/accounts/v1/{account_id}/multiprofile"
|
||||
|
||||
# Content Metadata
|
||||
series: "https://www.crunchyroll.com/content/v2/cms/series/{series_id}"
|
||||
seasons: "https://www.crunchyroll.com/content/v2/cms/series/{series_id}/seasons"
|
||||
season_episodes: "https://www.crunchyroll.com/content/v2/cms/seasons/{season_id}/episodes"
|
||||
skip_events: "https://static.crunchyroll.com/skip-events/production/{episode_id}.json"
|
||||
|
||||
# Playback
|
||||
playback: "https://www.crunchyroll.com/playback/v2/{episode_id}/tv/android_tv/play"
|
||||
playback_delete: "https://www.crunchyroll.com/playback/v1/token/{episode_id}/{token}"
|
||||
playback_sessions: "https://www.crunchyroll.com/playback/v1/sessions/streaming"
|
||||
license_widevine: "https://cr-license-proxy.prd.crunchyrollsvc.com/v1/license/widevine"
|
||||
|
||||
# Discovery
|
||||
search: "https://www.crunchyroll.com/content/v2/discover/search"
|
||||
|
||||
# Headers for Android TV client
|
||||
headers:
|
||||
user-agent: "Crunchyroll/ANDROIDTV/3.49.1_22281 (Android 11; en-US; SHIELD Android TV)"
|
||||
accept: "application/json"
|
||||
accept-charset: "UTF-8"
|
||||
accept-encoding: "gzip"
|
||||
connection: "Keep-Alive"
|
||||
content-type: "application/x-www-form-urlencoded; charset=UTF-8"
|
||||
|
||||
# Query parameters
|
||||
params:
|
||||
locale: "en-US"
|
||||
|
||||
# Device parameters for authentication
|
||||
device:
|
||||
type: "ANDROIDTV"
|
||||
name: "SHIELD Android TV"
|
||||
model: "SHIELD Android TV"
|
||||
372
CTV/__init__.py
Normal file
372
CTV/__init__.py
Normal file
@ -0,0 +1,372 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any, Optional
|
||||
|
||||
import click
|
||||
from pywidevine.cdm import Cdm as WidevineCdm
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Audio, Chapter, Subtitle, Tracks, Video
|
||||
|
||||
|
||||
class CTV(Service):
|
||||
"""
|
||||
Service code for CTV.ca (https://www.ctv.ca)
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: Credentials for subscription, none for freely available titles
|
||||
Robustness:
|
||||
Widevine:
|
||||
L3: 1080p, DD5.1
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input can be either complete title/episode URL or just the path:
|
||||
/shows/young-sheldon
|
||||
/shows/young-sheldon/baptists-catholics-and-an-attempted-drowning-s7e6
|
||||
/movies/war-for-the-planet-of-the-apes
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^(?:https?://(?:www\.)?ctv\.ca(?:/[a-z]{2})?)?/(?P<type>movies|shows)/(?P<id>[a-z0-9-]+)(?:/(?P<episode>[a-z0-9-]+))?$"
|
||||
GEOFENCE = ("ca",)
|
||||
|
||||
    @staticmethod
    @click.command(name="CTV", short_help="https://www.ctv.ca", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        # Click entry point: constructs the service with the parsed CLI arguments.
        return CTV(ctx, **kwargs)
|
||||
|
||||
    def __init__(self, ctx, title):
        # Store the raw title/URL input before the base Service initializes the session.
        self.title = title
        super().__init__(ctx)

        # Bearer token set by authenticate(); None means unauthenticated (free titles only).
        self.authorization: Optional[str] = None

        self.api = self.config["endpoints"]["api"]
        self.license_url = self.config["endpoints"]["license"]
|
||||
|
||||
    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Log in (or refresh) with Bell Media credentials and set the Bearer token.

        Tokens are cached per credential; an expired cache is renewed via the
        refresh_token grant, otherwise a full password login is performed.
        """
        super().authenticate(cookies, credential)
        if credential:
            cache = self.cache.get(f"tokens_{credential.sha1}")

            if cache and not cache.expired:
                # cached
                self.log.info(" + Using cached Tokens...")
                tokens = cache.data
            elif cache and cache.expired:
                # expired, refresh
                self.log.info("Refreshing cached Tokens")
                r = self.session.post(
                    self.config["endpoints"]["login"],
                    headers={"authorization": f"Basic {self.config['endpoints']['auth']}"},
                    data={
                        "grant_type": "refresh_token",
                        "username": credential.username,
                        "password": credential.password,
                        "refresh_token": cache.data["refresh_token"],
                    },
                )
                try:
                    res = r.json()
                except json.JSONDecodeError:
                    raise ValueError(f"Failed to refresh tokens: {r.text}")

                tokens = res
                self.log.info(" + Refreshed")
            else:
                # new
                r = self.session.post(
                    self.config["endpoints"]["login"],
                    headers={"authorization": f"Basic {self.config['endpoints']['auth']}"},
                    data={
                        "grant_type": "password",
                        "username": credential.username,
                        "password": credential.password,
                    },
                )
                try:
                    res = r.json()
                except json.JSONDecodeError:
                    raise ValueError(f"Failed to log in: {r.text}")

                tokens = res
                self.log.info(" + Acquired tokens...")

            # NOTE(review): re-setting the cache on every call (including the cached
            # path) restarts the expiration window from `expires_in` — confirm intended.
            cache.set(tokens, expiration=tokens["expires_in"])

            self.authorization = f"Bearer {tokens['access_token']}"
|
||||
|
||||
    def search(self) -> Generator[SearchResult, None, None]:
        """Search CTV's GraphQL API for the input title and yield matches."""
        payload = {
            "operationName": "searchMedia",
            "variables": {"title": f"{self.title}"},
            "query": """
            query searchMedia($title: String!) {searchMedia(titleMatches: $title) {
                ... on Medias {page {items {title\npath}}}}}, """,
        }

        r = self.session.post(self.config["endpoints"]["search"], json=payload)
        if r.status_code != 200:
            # Log and yield nothing rather than raising — search is best-effort.
            self.log.error(r.text)
            return

        for result in r.json()["data"]["searchMedia"]["page"]["items"]:
            yield SearchResult(
                id_=result.get("path"),
                title=result.get("title"),
                description=result.get("description"),
                label=result["path"].split("/")[1],  # first path segment, e.g. "shows"/"movies"
                url="https://www.ctv.ca" + result.get("path"),
            )
|
||||
|
||||
    def get_titles(self) -> Titles_T:
        """Resolve the input URL/path to a Series or Movies listing.

        Three shapes are handled: a direct episode URL, a whole show, or a movie.
        """
        title, kind, episode = (re.match(self.TITLE_RE, self.title).group(i) for i in ("id", "type", "episode"))
        title_path = self.get_title_id(kind, title, episode)

        if episode is not None:
            # Direct episode URL: return just that single episode.
            data = self.get_episode_data(title_path)
            return Series(
                [
                    Episode(
                        id_=data["axisId"],
                        service=self.__class__,
                        title=data["axisMedia"]["title"],
                        season=int(data["seasonNumber"]),
                        number=int(data["episodeNumber"]),
                        name=data["title"],
                        year=data.get("firstAirYear"),
                        language=data["axisPlaybackLanguages"][0].get("language", "en"),
                        data=data["axisPlaybackLanguages"][0]["destinationCode"],
                    )
                ]
            )

        if kind == "shows":
            # Whole show: fetch every season's episodes concurrently.
            data = self.get_series_data(title_path)
            titles = self.fetch_episodes(data["contentData"]["seasons"])
            return Series(
                [
                    Episode(
                        id_=episode["axisId"],
                        service=self.__class__,
                        title=data["contentData"]["title"],
                        season=int(episode["seasonNumber"]),
                        number=int(episode["episodeNumber"]),
                        name=episode["title"],
                        year=data["contentData"]["firstAirYear"],
                        language=episode["axisPlaybackLanguages"][0].get("language", "en"),
                        data=episode["axisPlaybackLanguages"][0]["destinationCode"],
                    )
                    for episode in titles
                ]
            )

        if kind == "movies":
            data = self.get_movie_data(title_path)
            return Movies(
                [
                    Movie(
                        id_=data["contentData"]["firstPlayableContent"]["axisId"],
                        service=self.__class__,
                        name=data["contentData"]["title"],
                        year=data["contentData"]["firstAirYear"],
                        language=data["contentData"]["firstPlayableContent"]["axisPlaybackLanguages"][0].get(
                            "language", "en"
                        ),
                        data=data["contentData"]["firstPlayableContent"]["axisPlaybackLanguages"][0]["destinationCode"],
                    )
                ]
            )
|
||||
|
||||
    def get_tracks(self, title: Title_T) -> Tracks:
        """Collect DASH video/audio tracks plus the WebVTT subtitle for a title."""
        content = "https://capi.9c9media.com/destinations/{}/platforms/desktop/contents/{}/contentPackages".format(
            title.data, title.id
        )

        params = {
            "$include": "[Desc,Constraints,EndCreditOffset,Breaks,Stacks.ManifestHost.mpd]",
        }
        r = self.session.get(content, params=params)
        r.raise_for_status()

        pkg_id = r.json()["Items"][0]["Id"]
        manifest = f"{content}/{pkg_id}/manifest.mpd"
        subtitle = f"{content}/{pkg_id}/manifest.vtt"

        if self.authorization:
            self.session.headers.update({"authorization": self.authorization})

        tracks = Tracks()
        # Each filter value exposes a different manifest variant; merge them all.
        # NOTE(review): the semantics of filters 14/3/25/fe are assumed — confirm.
        for num in ["14", "3", "25", "fe&mca=true&mta=true"]:
            version = DASH.from_url(url=f"{manifest}?filter={num}", session=self.session).to_tracks(language=title.language)
            tracks.videos.extend(version.videos)
            tracks.audio.extend(version.audio)

        tracks.add(
            Subtitle(
                id_=hashlib.md5(subtitle.encode()).hexdigest()[0:6],  # stable short id derived from the URL
                url=subtitle,
                codec=Subtitle.Codec.from_mime(subtitle[-3:]),  # "vtt"
                language=title.language,
                is_original_lang=True,
                forced=False,
                sdh=True,
            )
        )
        return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
|
||||
return [] # Chapters not available
|
||||
|
||||
    def get_widevine_service_certificate(self, **_: Any) -> str:
        # Use pywidevine's bundled common privacy certificate.
        return WidevineCdm.common_privacy_cert
|
||||
|
||||
    def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
        """POST the license challenge to the 9c9media license server.

        Exits the process on a non-200 response.
        """
        r = self.session.post(url=self.license_url, data=challenge)
        if r.status_code != 200:
            self.log.error(r.text)
            sys.exit(1)
        return r.content
|
||||
|
||||
# service specific functions
|
||||
|
||||
    def get_title_id(self, kind: str, title: str, episode: str) -> str:
        """Resolve a shows/movies path (optionally with episode slug) to its content ID.

        Note: the `title` annotation was corrected from `tuple` — callers pass the
        slug string and it is extended in place with the episode segment.
        """
        if episode is not None:
            title += f"/{episode}"
        payload = {
            "operationName": "resolvePath",
            "variables": {"path": f"{kind}/{title}"},
            "query": """
            query resolvePath($path: String!) {
                resolvedPath(path: $path) {
                    lastSegment {
                        content {
                            id
                        }
                    }
                }
            }
            """,
        }
        r = self.session.post(self.api, json=payload).json()
        return r["data"]["resolvedPath"]["lastSegment"]["content"]["id"]
|
||||
|
||||
    def get_series_data(self, title_id: str) -> dict:
        """Fetch show-level metadata (title, year, seasons) via GraphQL."""
        payload = {
            "operationName": "axisMedia",
            "variables": {"axisMediaId": f"{title_id}"},
            "query": """
            query axisMedia($axisMediaId: ID!) {
                contentData: axisMedia(id: $axisMediaId) {
                    title
                    description
                    originalSpokenLanguage
                    mediaType
                    firstAirYear
                    seasons {
                        title
                        id
                        seasonNumber
                    }
                }
            }
            """,
        }

        return self.session.post(self.api, json=payload).json()["data"]
|
||||
|
||||
    def get_movie_data(self, title_id: str) -> dict:
        """Fetch movie-level metadata including the first playable content ID."""
        payload = {
            "operationName": "axisMedia",
            "variables": {"axisMediaId": f"{title_id}"},
            "query": """
            query axisMedia($axisMediaId: ID!) {
                contentData: axisMedia(id: $axisMediaId) {
                    title
                    description
                    firstAirYear
                    firstPlayableContent {
                        axisId
                        axisPlaybackLanguages {
                            destinationCode
                        }
                    }
                }
            }
            """,
        }

        return self.session.post(self.api, json=payload).json()["data"]
|
||||
|
||||
    def get_episode_data(self, title_path: str) -> dict:
        """Fetch metadata for a single episode via GraphQL."""
        payload = {
            "operationName": "axisContent",
            "variables": {"id": f"{title_path}"},
            "query": """
            query axisContent($id: ID!) {
                axisContent(id: $id) {
                    axisId
                    title
                    description
                    contentType
                    seasonNumber
                    episodeNumber
                    axisMedia {
                        title
                    }
                    axisPlaybackLanguages {
                        language
                        destinationCode
                    }
                }
            }
            """,
        }
        return self.session.post(self.api, json=payload).json()["data"]["axisContent"]
|
||||
|
||||
    def fetch_episode(self, episode: str) -> list:
        """Fetch the episode list for one season (by season ID) via GraphQL."""
        payload = {
            "operationName": "season",
            "variables": {"seasonId": f"{episode}"},
            "query": """
            query season($seasonId: ID!) {
                axisSeason(id: $seasonId) {
                    episodes {
                        axisId
                        title
                        description
                        contentType
                        seasonNumber
                        episodeNumber
                        axisPlaybackLanguages {
                            language
                            destinationCode
                        }
                    }
                }
            }
            """,
        }
        response = self.session.post(self.api, json=payload)
        return response.json()["data"]["axisSeason"]["episodes"]
|
||||
|
||||
def fetch_episodes(self, data: dict) -> list:
|
||||
"""TODO: Switch to async once https proxies are fully supported"""
|
||||
with ThreadPoolExecutor(max_workers=10) as executor:
|
||||
tasks = [executor.submit(self.fetch_episode, x["id"]) for x in data]
|
||||
titles = [future.result() for future in as_completed(tasks)]
|
||||
return [episode for episodes in titles for episode in episodes]
|
||||
6
CTV/config.yaml
Normal file
6
CTV/config.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
endpoints:
|
||||
login: https://account.bellmedia.ca/api/login/v2.1
|
||||
auth: Y3R2LXdlYjpkZWZhdWx0
|
||||
api: https://api.ctv.ca/space-graphql/graphql
|
||||
license: https://license.9c9media.ca/widevine
|
||||
search: https://www.ctv.ca/space-graphql/apq/graphql
|
||||
266
CWTV/__init__.py
Normal file
266
CWTV/__init__.py
Normal file
@ -0,0 +1,266 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
from urllib.parse import quote, urljoin
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from lxml import etree
|
||||
from requests import Request
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class CWTV(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for CWTV streaming service (https://www.cwtv.com/).
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Geofence: US (API and downloads)
|
||||
Robustness:
|
||||
L3: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input should be complete URL:
|
||||
SHOW: https://www.cwtv.com/shows/sullivans-crossing
|
||||
EPISODE: https://www.cwtv.com/series/sullivans-crossing/new-beginnings/?play=7778f443-c7cc-4843-8e3c-d97d53b813d2
|
||||
MOVIE: https://www.cwtv.com/movies/burnt/
|
||||
"""
|
||||
|
||||
GEOFENCE = ("us",)
|
||||
ALIASES = ("cw",)
|
||||
|
||||
    @staticmethod
    @click.command(name="CWTV", short_help="https://www.cwtv.com/", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> CWTV:
        # Click entry point: constructs the service with the parsed CLI arguments.
        return CWTV(ctx, **kwargs)
|
||||
|
||||
    def __init__(self, ctx: Context, title: str):
        # Store the raw title URL before the base Service initializes the session.
        self.title = title
        super().__init__(ctx)

        # Apply the configured client headers (Android TV user-agent) to all requests.
        self.session.headers.update(self.config["headers"])
|
||||
|
||||
    def search(self) -> Generator[SearchResult, None, None]:
        """Query CWTV's search endpoint and yield show/movie results."""
        results = self._request(
            "GET", "https://www.cwtv.com/search/",
            params={
                "q": quote(self.title),
                "format": "json2",
                "service": "t",
                # NOTE(review): fixed client identifier taken from the web client — confirm stable.
                "cwuid": "8195356001251527455",
            },
        )

        for result in results["items"]:
            # Only surface browsable content types; skip clips/news/etc.
            if result.get("type") not in ("shows", "series", "movies"):
                continue

            video_type = "shows" if result.get("type") in ("series", "shows") else "movies"

            yield SearchResult(
                id_=f"https://www.cwtv.com/{video_type}/{result.get('show_slug')}",
                title=result.get("title"),
                description=result.get("description_long"),
                label=result.get("type").capitalize(),
                url=f"https://www.cwtv.com/{video_type}/{result.get('show_slug')}",
            )
|
||||
|
||||
def get_titles(self) -> Movies | Series:
|
||||
url_pattern = re.compile(
|
||||
r"^https:\/\/www\.cwtv\.com\/"
|
||||
r"(?P<type>series|shows|movies)\/"
|
||||
r"(?P<id>[\w-]+(?:\/[\w-]+)?)"
|
||||
r"(?:\/?\?play=(?P<play_id>[\w-]+))?"
|
||||
)
|
||||
|
||||
match = url_pattern.match(self.title)
|
||||
if not match:
|
||||
raise ValueError(f"Could not parse ID from title: {self.title}")
|
||||
|
||||
kind, guid, play_id = (match.group(i) for i in ("type", "id", "play_id"))
|
||||
|
||||
if kind in ("series", "shows") and not play_id:
|
||||
episodes = self._series(guid)
|
||||
return Series(episodes)
|
||||
|
||||
elif kind == "movies" and not play_id:
|
||||
movie = self._movie(guid)
|
||||
return Movies(movie)
|
||||
|
||||
elif kind in ("series", "shows") and play_id:
|
||||
episode = self._episode(play_id)
|
||||
return Series(episode)
|
||||
|
||||
else:
|
||||
raise ValueError(f"Could not parse conent type from title: {self.title}")
|
||||
|
||||
    def get_tracks(self, title: Movie | Episode) -> Tracks:
        """Fetch the Brightcove playback data and return the title's tracks."""
        data = self._request(
            "GET", self.config["endpoints"]["playback"].format(title.id),
            headers={"accept": f'application/json;pk={self.config["policy_key"]}'},
        )
        has_drm = data.get("custom_fields", {}).get("is_drm") == "1"

        # Stash cue points for get_chapters() later.
        title.data["chapters"] = data.get("cue_points")

        source_manifest = next(
            (source.get("src") for source in data["sources"] if source.get("type") == "application/dash+xml"),
            None,
        )
        if not source_manifest:
            raise ValueError("Could not find DASH manifest")

        # Widevine license URL belonging to the chosen manifest source.
        license_url = next((
            source.get("key_systems", {}).get("com.widevine.alpha", {}).get("license_url")
            for source in data["sources"] if source.get("src") == source_manifest),
            None,
        )
        if has_drm and not license_url:
            raise ValueError("Could not find license URL")

        title.data["license_url"] = license_url

        # The final segment 404s, so trim the advertised duration first (see trim_duration).
        manifest = self.trim_duration(source_manifest)
        tracks = DASH.from_text(manifest, source_manifest).to_tracks(language="en")

        # Mark audio-description/alternate audio tracks as descriptive.
        for track in tracks.audio:
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks
|
||||
|
||||
def get_chapters(self, title: Movie | Episode) -> Chapters:
|
||||
if not title.data.get("chapters"):
|
||||
return Chapters()
|
||||
|
||||
chapters = []
|
||||
for cue in title.data["chapters"]:
|
||||
if cue["time"] > 0:
|
||||
chapters.append(Chapter(timestamp=cue["time"]))
|
||||
|
||||
return Chapters(chapters)
|
||||
|
||||
    def get_widevine_service_certificate(self, **_: Any) -> None:
        # CWTV's license server is used without a service certificate.
        # (Annotation corrected from `-> str`: this always returns None.)
        return None
|
||||
|
||||
    def get_widevine_license(self, *, challenge: bytes, title: Movie | Episode, track: Any) -> bytes | str | None:
        """POST the challenge to the title's license server; None for unencrypted titles.

        Raises:
            ConnectionError: On a non-200 response from the license server.
        """
        if license_url := title.data.get("license_url"):
            r = self.session.post(url=license_url, data=challenge)
            if r.status_code != 200:
                raise ConnectionError(r.text)
            return r.content

        return None
|
||||
|
||||
# Service specific
|
||||
|
||||
    def _series(self, guid: str) -> list[Episode]:
        """Return every full episode of a show from the Android TV feed."""
        series = self._request("GET", f"/feed/app-2/videos/show_{guid}/type_episodes/apiversion_24/device_androidtv")
        if not series.get("items"):
            raise ValueError(f"Could not find any episodes with ID {guid}")

        episodes = [
            Episode(
                id_=episode.get("bc_video_id"),
                service=self.__class__,
                name=episode.get("title"),
                season=int(episode.get("season") or 0),
                number=int(episode.get("episode_in_season") or 0),
                title=episode.get("series_name") or episode.get("show_title"),
                year=episode.get("release_year"),
                data=episode,
            )
            for episode in series.get("items")
            if episode.get("fullep", 0) == 1  # full episodes only; skip clips/previews
        ]

        return episodes
|
||||
|
||||
    def _movie(self, guid: str) -> list[Movie]:
        """Return the movie(s) for an ID from the Android TV feed.

        (Annotation corrected from `-> Movie`: a list is returned.)
        """
        data = self._request("GET", f"/feed/app-2/videos/show_{guid}/type_episodes/apiversion_24/device_androidtv")
        if not data.get("items"):
            raise ValueError(f"Could not find any data for ID {guid}")

        movies = [
            Movie(
                id_=movie.get("bc_video_id"),
                service=self.__class__,
                name=movie.get("series_name") or movie.get("show_title"),
                year=movie.get("release_year"),
                data=movie,
            )
            for movie in data.get("items")
            if movie.get("fullep", 0) == 1  # full features only
        ]

        return movies
|
||||
|
||||
    def _episode(self, guid: str) -> list[Episode]:
        """Return a single episode (as a one-element list) by its play GUID.

        (Annotation corrected from `-> Episode`: a list is returned.)
        """
        data = self._request("GET", f"/feed/app-2/video-meta/guid_{guid}/apiversion_24/device_androidtv")
        if not data.get("video"):
            raise ValueError(f"Could not find any data for ID {guid}")

        episodes = [
            Episode(
                id_=data.get("video", {}).get("bc_video_id"),
                service=self.__class__,
                name=data.get("video", {}).get("title"),
                season=int(data.get("video", {}).get("season") or 0),
                number=int(data.get("video", {}).get("episode_in_season") or 0),
                title=data.get("video", {}).get("series_name") or data.get("video", {}).get("show_title"),
                year=data.get("video", {}).get("release_year"),
                data=data.get("video"),
            )
        ]

        return episodes
|
||||
|
||||
    def _request(self, method: str, endpoint: str, **kwargs: Any) -> dict | str:
        """Send a request against the CWTV API and return parsed JSON, or raw text.

        (Annotation corrected from the invalid `Any[dict | str]`.)

        Raises:
            ConnectionError: On any non-200 response.
        """
        url = urljoin(self.config["endpoints"]["base_url"], endpoint)

        prep = self.session.prepare_request(Request(method, url, **kwargs))

        response = self.session.send(prep)
        if response.status_code != 200:
            raise ConnectionError(f"{response.text}")

        try:
            return json.loads(response.content)

        except json.JSONDecodeError:
            # Some endpoints return plain text; hand it back untouched.
            return response.text
|
||||
|
||||
    @staticmethod
    def trim_duration(source_manifest: str) -> str:
        """
        The last segment on all tracks return a 404 for some reason, causing a failed download.
        So we trim the duration by exactly one segment to account for that.

        TODO: Calculate the segment duration instead of assuming length.
        """
        manifest = DASH.from_url(source_manifest).manifest
        period_duration = manifest.get("mediaPresentationDuration")
        period_duration = DASH.pt_to_sec(period_duration)

        # Shave 6 seconds (the assumed segment length) off the advertised duration
        # and rewrite it back in ISO-8601 "PTxHxMxS" form.
        hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
        new_duration = f"PT{hours}H{minutes}M{seconds}S"
        manifest.set("mediaPresentationDuration", new_duration)

        return etree.tostring(manifest, encoding="unicode")
|
||||
|
||||
9
CWTV/config.yaml
Normal file
9
CWTV/config.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
headers:
|
||||
User-Agent: Mozilla/5.0 (Linux; Android 11; Smart TV Build/AR2101; wv)
|
||||
|
||||
endpoints:
|
||||
base_url: https://images.cwtv.com
|
||||
playback: https://edge.api.brightcove.com/playback/v1/accounts/6415823816001/videos/{}
|
||||
|
||||
policy_key: BCpkADawqM0t2qFXB_K2XdHv2JmeRgQjpP6De9_Fl7d4akhL5aeqYwErorzsAxa7dyOF2FdxuG5wWVOREHEwb0DI-M8CGBBDpqwvDBEPfDKQg7kYGnccdNDErkvEh2O28CrGR3sEG6MZBlZ03I0xH7EflYKooIhfwvNWWw
|
||||
|
||||
584
DSCP/__init__.py
Normal file
584
DSCP/__init__.py
Normal file
@ -0,0 +1,584 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from collections.abc import Generator
|
||||
from copy import deepcopy
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any
|
||||
from urllib.parse import urljoin
|
||||
from zlib import crc32
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from langcodes import Language
|
||||
from lxml import etree
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.session import session as CurlSession
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Audio, Chapter, Chapters, Subtitle, Track, Tracks
|
||||
from unshackle.core.utilities import is_close_match
|
||||
|
||||
|
||||
class DSCP(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for Discovery Plus streaming service (https://www.discoveryplus.com).
|
||||
Credit to @sp4rk.y for the subtitle fix.
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: Cookies for subscription, none for freely available titles
|
||||
Robustness:
|
||||
Widevine:
|
||||
L1: 2160p, 1080p
|
||||
L3: 720p
|
||||
PlayReady:
|
||||
SL3000: 2160p
|
||||
SL2000: 1080p, 720p
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input can be either complete title URL or just the path:
|
||||
SHOW: /show/eb26e00e-9582-4790-a61c-48d785926f58
|
||||
STANDALONE: /standalone/5012ae3f-d9bd-46ec-ad42-b8116b811441
|
||||
SPORT: /sport/9cc449de-2a64-524d-bcb6-cabd4ac70340
|
||||
EPISODE: /video/watch/8685efdd-a3c4-4892-b1d1-5f9f071cacf1/de67ea8e-a90f-4609-81af-4f09906f60b2
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- Language tags can be mislabelled or missing on some titles. List tracks with --list to verify.
|
||||
- All qualities, codecs, and ranges are included when available. Use -v H.265, -r HDR10, -q 1080p, etc. to select.
|
||||
|
||||
\b
|
||||
Bonus tip: With some minor adjustments to the code and config, you can convert this to an HMAX service.
|
||||
- Replace all instances of "DSCP" with "HMAX"
|
||||
- Replace all instances of "dplus" with "beam"
|
||||
- Replace all instances of "discoveryplus" with "hbomax"
|
||||
|
||||
"""
|
||||
|
||||
ALIASES = ("discoveryplus",)
|
||||
TITLE_RE = (
|
||||
r"^(?:https?://play.discoveryplus\.com?)?/(?P<type>show|mini-series|video|movie|topical|standalone|sport)/(?P<id>[a-z0-9-/]+)"
|
||||
)
|
||||
|
||||
    @staticmethod
    @click.command(name="DSCP", short_help="https://www.discoveryplus.com/", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> DSCP:
        # Click entry point: constructs the service with the parsed CLI arguments.
        return DSCP(ctx, **kwargs)
|
||||
|
||||
    def __init__(self, ctx: Context, title: str):
        super().__init__(ctx)
        self.title = title

        # Profile name scopes the token cache; falls back to "default".
        self.profile = ctx.parent.params.get("profile")
        if not self.profile:
            self.profile = "default"

        self.cdm = ctx.obj.cdm
        if self.cdm is not None:
            # Default to PlayReady at its highest level, then downgrade to Widevine
            # based on the loaded CDM's reported security level.
            # NOTE(review): `security_level <= 3` is assumed to identify a Widevine CDM — confirm.
            self.drm_system = "playready"
            self.security_level = "SL3000"

            if self.cdm.security_level <= 3:
                self.drm_system = "widevine"
                self.security_level = "L1"

        # Overwritten in authenticate() with the region-specific base URL.
        self.base_url = self.config["endpoints"]["default_url"]
|
||||
|
||||
    def get_session(self) -> CurlSession:
        # Impersonate okhttp4 (matches the Android TV client) and retry transient errors.
        return CurlSession("okhttp4", status_forcelist=[429, 502, 503, 504])
|
||||
|
||||
    def authenticate(self, cookies: CookieJar | None = None, credential: Credential | None = None) -> None:
        """Exchange the "st" cookie for a device token and bootstrap regional routing.

        Free titles work without cookies; subscriptions require the "st" cookie.
        """
        super().authenticate(cookies, credential)
        tokens = {}

        if cookies is not None:
            st_token = next((c.value for c in cookies if c.name == "st"), None)
            if not st_token:
                raise ValueError("- Unable to find token in cookies, try refreshing.")

            # Only use cache if cookies are present since it's not needed for free titles
            cache = self.cache.get(f"tokens_{self.profile}")
            if cache:
                self.log.info(" + Using cached Tokens...")
                tokens = cache.data
            else:
                self.log.info(" + Setting up new profile...")
                profile = {"token": st_token, "device_id": str(uuid.uuid1())}
                cache.set(profile)
                tokens = cache.data

        # Anonymous (free-title) flow gets a fresh device id each run.
        self.device_id = tokens.get("device_id") or str(uuid.uuid1())
        client_id = self.config["client_id"]

        # Impersonate the Android TV client; x-device-info ties requests to this device id.
        self.session.headers.update({
            "user-agent": "androidtv dplus/20.8.1.2 (android/9; en-US; SHIELD Android TV-NVIDIA; Build/1)",
            "x-disco-client": "ANDROIDTV:9:dplus:20.8.1.2",
            "x-disco-params": "realm=bolt,bid=dplus,features=ar",
            "x-device-info": f"dplus/20.8.1.2 (NVIDIA/SHIELD Android TV; android/9-mdarcy; {self.device_id}/{client_id})",
        })

        access = self._request("GET", "/token", params={"realm": "bolt", "deviceId": self.device_id})

        self.access_token = access["data"]["attributes"]["token"]

        # Bootstrap returns the tenant/home market used to build the per-region API base URL.
        config = self._request("POST", "/session-context/headwaiter/v1/bootstrap")
        self.base_url = self.config["endpoints"]["template"].format(config["routing"]["tenant"], config["routing"]["homeMarket"])
|
||||
|
||||
    def search(self) -> Generator[SearchResult, None, None]:
        """Search the CMS for the input title and yield matching shows."""
        params = {
            "include": "default",
            "decorators": "viewingHistory,isFavorite,playbackAllowed,contentAction,badges",
            "contentFilter[query]": self.title,
            "page[items.number]": "1",
            "page[items.size]": "8",
        }
        data = self._request("GET", "/cms/routes/search/result", params=params)

        # Only "show" entries from the included payload are surfaced.
        results = [x.get("attributes") for x in data["included"] if x.get("type") == "show"]

        for result in results:
            yield SearchResult(
                id_=f"/show/{result.get('alternateId')}",
                title=result.get("name"),
                description=result.get("description"),
                label="show",
                url=f"/show/{result.get('alternateId')}",
            )
|
||||
|
||||
def get_titles(self) -> Movies | Series:
|
||||
try:
|
||||
entity, content_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id"))
|
||||
except Exception:
|
||||
raise ValueError("Could not parse ID from title - is the URL correct?")
|
||||
|
||||
if entity in ("show", "mini-series", "topical"):
|
||||
episodes = self._show(content_id)
|
||||
return Series(episodes)
|
||||
|
||||
elif entity in ("movie", "standalone"):
|
||||
movie = self._movie(content_id, entity)
|
||||
return Movies(movie)
|
||||
|
||||
elif entity == "sport":
|
||||
sport = self._sport(content_id)
|
||||
return Movies(sport)
|
||||
|
||||
elif entity == "video":
|
||||
episodes = self._episode(content_id)
|
||||
return Series(episodes)
|
||||
|
||||
else:
|
||||
raise ValueError(f"Unknown content: {entity}")
|
||||
|
||||
|
||||
def get_tracks(self, title: Movie | Episode) -> Tracks:
    """Request playback info for the title and build the track listing.

    Posts a device-capability payload to the playback orchestrator, stores the
    license URL and chapter annotations on the title for later use, then loads
    the DASH manifest and post-processes its tracks.
    """
    # Capability payload advertising a 4K-capable device so the service
    # offers its best available profiles (AAC/EAC3, H264/H265, HDR formats).
    payload = {
        "appBundle": "com.wbd.stream",
        "applicationSessionId": self.device_id,
        "capabilities": {
            "codecs": {
                "audio": {
                    "decoders": [
                        {"codec": "aac", "profiles": ["lc", "he", "hev2", "xhe"]},
                        {"codec": "eac3", "profiles": ["atmos"]},
                    ]
                },
                "video": {
                    "decoders": [
                        {
                            "codec": "h264",
                            "levelConstraints": {
                                "framerate": {"max": 60, "min": 0},
                                "height": {"max": 2160, "min": 48},
                                "width": {"max": 3840, "min": 48},
                            },
                            "maxLevel": "5.2",
                            "profiles": ["baseline", "main", "high"],
                        },
                        {
                            "codec": "h265",
                            "levelConstraints": {
                                "framerate": {"max": 60, "min": 0},
                                "height": {"max": 2160, "min": 144},
                                "width": {"max": 3840, "min": 144},
                            },
                            "maxLevel": "5.1",
                            "profiles": ["main10", "main"],
                        },
                    ],
                    "hdrFormats": ["hdr10", "hdr10plus", "dolbyvision", "dolbyvision5", "dolbyvision8", "hlg"],
                },
            },
            "contentProtection": {
                "contentDecryptionModules": [
                    {"drmKeySystem": self.drm_system, "maxSecurityLevel": self.security_level}
                ]
            },
            "manifests": {"formats": {"dash": {}}},
        },
        "consumptionType": "streaming",
        "deviceInfo": {
            "player": {
                "mediaEngine": {"name": "", "version": ""},
                "playerView": {"height": 2160, "width": 3840},
                "sdk": {"name": "", "version": ""},
            }
        },
        "editId": title.id,
        "firstPlay": False,
        "gdpr": False,
        "playbackSessionId": str(uuid.uuid4()),
        "userPreferences": {
            #'uiLanguage': 'en'
        },
    }

    playback = self._request(
        "POST", "/playback-orchestrator/any/playback-orchestrator/v1/playbackInfo",
        headers={"Authorization": f"Bearer {self.access_token}"},
        json=payload,
    )

    # Language of the audio track labelled "Original", used to tag tracks below.
    original_language = next((
        x.get("language")
        for x in playback["videos"][0]["audioTracks"]
        if "Original" in x.get("displayName", "")
    ), "")

    # Prefer the fallback manifest (with "_fallback" stripped from its URL);
    # otherwise use the primary manifest URL.
    manifest = (
        playback.get("fallback", {}).get("manifest", {}).get("url", "").replace("_fallback", "")
        or playback.get("manifest", {}).get("url")
    )

    # License URL for the active DRM system, again preferring the fallback entry.
    license_url = (
        playback.get("fallback", {}).get("drm", {}).get("schemes", {}).get(self.drm_system, {}).get("licenseUrl")
        or playback.get("drm", {}).get("schemes", {}).get(self.drm_system, {}).get("licenseUrl")
    )

    # Stash per-title data for get_chapters() and the license callbacks.
    title.data["license_url"] = license_url
    title.data["chapters"] = next((x.get("annotations") for x in playback["videos"] if x["type"] == "main"), None)

    dash = DASH.from_url(url=manifest, session=self.session)
    tracks = dash.to_tracks(language="en", period_filter=self._period_filter)

    for track in tracks:
        track.is_original_lang = str(track.language) == original_language
        track.name = "Original" if track.is_original_lang else track.name

        # Mark audio-description / alternate audio tracks as descriptive.
        if isinstance(track, Audio):
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        # Drop the manifest's per-period subtitle tracks; they are rebuilt as
        # merged tracks by _process_subtitles() below.
        if isinstance(track, Subtitle):
            tracks.subtitles.remove(track)

    subtitles = self._process_subtitles(dash, original_language)
    tracks.add(subtitles)

    return tracks
|
||||
|
||||
def get_chapters(self, title: Movie | Episode) -> Chapters:
    """Build Recap/Intro/Credits chapter markers from the stored playback annotations."""
    annotations = title.data.get("chapters")
    if not annotations:
        return Chapters()

    markers = []

    def add_span(label, entry):
        # Named marker at the span start plus an unnamed marker at its end.
        markers.append(Chapter(name=label, timestamp=entry["start"]))
        if entry.get("end"):
            markers.append(Chapter(timestamp=entry.get("end")))

    for entry in annotations:
        secondary = entry.get("secondaryType", "").lower()
        if "recap" in secondary:
            add_span("Recap", entry)
        if "intro" in secondary:
            add_span("Intro", entry)
        elif "credits" in entry.get("type", "").lower():
            markers.append(Chapter(name="Credits", timestamp=entry["start"]))

    # Guarantee a marker at the very start of playback.
    if all(c.timestamp != "00:00:00.000" for c in markers):
        markers.append(Chapter(timestamp=0))

    return sorted(markers, key=lambda c: c.timestamp)
|
||||
|
||||
def get_widevine_service_certificate(self, challenge: bytes, title: Episode | Movie, **_: Any) -> bytes | None:
    """Fetch the Widevine service certificate from the title's license server.

    Returns None when no license URL was resolved for this title. The return
    annotation is corrected to bytes (the raw response body), which is what
    this method has always returned.
    """
    if not (license_url := title.data.get("license_url")):
        return None

    r = self.session.post(url=license_url, data=challenge)
    # Mirror the license endpoints: fail loudly on HTTP errors instead of
    # handing an error body to the certificate parser.
    if r.status_code != 200:
        raise ConnectionError(r.status_code, r.text)

    return r.content
|
||||
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> bytes | str | None:
    """POST the Widevine challenge to the title's license server and return the raw response."""
    license_url = title.data.get("license_url")
    if not license_url:
        return None

    response = self.session.post(url=license_url, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.status_code, response.text)

    return response.content
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> bytes | str | None:
    """POST the PlayReady challenge to the title's license server and return the raw response."""
    license_url = title.data.get("license_url")
    if not license_url:
        return None

    response = self.session.post(url=license_url, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.status_code, response.text)

    return response.content
|
||||
|
||||
# Service specific functions
|
||||
|
||||
@staticmethod
def _process_subtitles(dash: DASH, language: str) -> list[Subtitle]:
    """Merge per-period text AdaptationSets into single multi-period Subtitle tracks.

    The manifest splits each subtitle language/role across many Periods; this
    groups them by (lang, role, label), concatenates their SegmentTimelines in
    period order, and emits one Subtitle per group.

    :param dash: parsed DASH manifest wrapper.
    :param language: original-language code used to mark tracks as "Original".
    """
    subtitle_groups = defaultdict(list)
    manifest = dash.manifest

    # Group every text AdaptationSet by language + role + label.
    for period in manifest.findall("Period"):
        for adapt_set in period.findall("AdaptationSet"):
            if adapt_set.get("contentType") != "text" or not adapt_set.get("lang"):
                continue

            role = adapt_set.find("Role")
            label = adapt_set.find("Label")
            key = (
                adapt_set.get("lang"),
                role.get("value") if role is not None else "subtitle",
                label.text if label is not None else "",
            )
            subtitle_groups[key].append((period, adapt_set))

    final_tracks = []
    for (lang, role_value, label_text), adapt_set_group in subtitle_groups.items():
        first_period, first_adapt = adapt_set_group[0]
        if first_adapt.find("Representation") is None:
            continue

        # Collect every <S> timeline element across the group's periods.
        s_elements_with_context = []
        for _, adapt_set in adapt_set_group:
            rep = adapt_set.find("Representation")
            if rep is None:
                continue

            # BUG FIX: `rep.find(...) or adapt_set.find(...)` tested Element
            # truthiness — a childless Element is falsy, so a valid (but empty)
            # Representation-level template could be silently replaced.
            # Compare against None explicitly.
            template = rep.find("SegmentTemplate")
            if template is None:
                template = adapt_set.find("SegmentTemplate")
            timeline = template.find("SegmentTimeline") if template is not None else None

            if timeline is not None:
                start_num = int(template.get("startNumber", 1))
                s_elements_with_context.extend((start_num, s_elem) for s_elem in timeline.findall("S"))

        # Keep segments in manifest order by each period's startNumber.
        s_elements_with_context.sort(key=lambda x: x[0])

        # Clone the first AdaptationSet and splice in the merged timeline.
        combined_adapt = deepcopy(first_adapt)
        combined_rep = combined_adapt.find("Representation")

        seg_template = combined_rep.find("SegmentTemplate")
        if seg_template is None:
            # Template lives at the AdaptationSet level; move a copy down to
            # the Representation so downstream code finds it in one place.
            template_at_adapt = combined_adapt.find("SegmentTemplate")
            if template_at_adapt is not None:
                seg_template = deepcopy(template_at_adapt)
                combined_rep.append(seg_template)
                combined_adapt.remove(template_at_adapt)
            else:
                continue

        if seg_template.find("SegmentTimeline") is not None:
            seg_template.remove(seg_template.find("SegmentTimeline"))

        new_timeline = etree.Element("SegmentTimeline")
        new_timeline.extend(deepcopy(s) for _, s in s_elements_with_context)
        seg_template.append(new_timeline)

        # The merged timeline restarts numbering from 1.
        seg_template.set("startNumber", "1")
        if "endNumber" in seg_template.attrib:
            del seg_template.attrib["endNumber"]

        # Deterministic track id derived from the group key.
        track_id = hex(crc32(f"sub-{lang}-{role_value}-{label_text}".encode()) & 0xFFFFFFFF)[2:]
        lang_obj = Language.get(lang)
        track_name = "Original" if (language and is_close_match(lang_obj, [language])) else lang_obj.display_name()

        final_tracks.append(
            Subtitle(
                id_=track_id,
                url=dash.url,
                codec=Subtitle.Codec.WebVTT,
                language=lang_obj,
                is_original_lang=bool(language and is_close_match(lang_obj, [language])),
                descriptor=Track.Descriptor.DASH,
                sdh="sdh" in label_text.lower() or role_value == "caption",
                forced="forced" in label_text.lower() or "forced" in role_value.lower(),
                name=track_name,
                data={
                    "dash": {
                        "manifest": manifest,
                        "period": first_period,
                        "adaptation_set": combined_adapt,
                        "representation": combined_rep,
                    }
                },
            )
        )

    return final_tracks
|
||||
|
||||
@staticmethod
def _period_filter(period: Any) -> bool:
    """Shouldn't be needed for fallback manifest"""
    # Periods without a duration are kept; short periods (< 2 minutes,
    # typically ad/bumper periods) are filtered out.
    duration = period.get("duration")
    if not duration:
        return False

    return DASH.pt_to_sec(duration) < 120
|
||||
|
||||
def _show(self, title: str) -> list[Episode]:
    """Fetch a show page and expand every season into Episode objects.

    :param title: the show's alternateId slug.
    :raises ValueError: when the page has no episodes rail.
    """
    params = {
        "include": "default",
        "decorators": "viewingHistory,badges,isFavorite,contentAction",
    }
    data = self._request("GET", "/cms/routes/show/{}".format(title), params=params)

    # Entity whose alternateId matches the requested slug (show metadata).
    info = next(x for x in data["included"] if x.get("attributes", {}).get("alternateId", "") == title)
    # The tabbed episodes rail component that lists the season filters.
    content = next((x for x in data["included"] if "show-page-rail-episodes-tabbed-content" in x["attributes"].get("alias", "")), None)
    if not content:
        raise ValueError("Show not found")

    content_id = content.get("id")
    # mandatoryParams is a pre-built query-string fragment identifying the show.
    show_id = content["attributes"]["component"].get("mandatoryParams", "")
    season_params = [x.get("parameter") for x in content["attributes"]["component"]["filters"][0]["options"]]
    page = next(x for x in data["included"] if x.get("type", "") == "page")

    # One collection request per season filter option.
    seasons = [
        self._request(
            "GET", "/cms/collections/{}?{}&{}".format(content_id, season, show_id),
            params={"include": "default", "decorators": "viewingHistory,badges,isFavorite,contentAction"},
        )
        for season in season_params
    ]

    videos = [[x for x in season["included"] if x["type"] == "video"] for season in seasons]

    return [
        Episode(
            id_=ep["relationships"]["edit"]["data"]["id"],
            service=self.__class__,
            title=page["attributes"].get("title") or info["attributes"].get("originalName"),
            # NOTE(review): year stays a 4-char string here, while _episode/_sport
            # cast theirs to int — confirm Episode normalizes this.
            year=ep["attributes"]["airDate"][:4] if ep["attributes"].get("airDate") else None,
            season=ep["attributes"].get("seasonNumber"),
            number=ep["attributes"].get("episodeNumber"),
            name=ep["attributes"]["name"],
            data=ep,
        )
        for episodes in videos
        for ep in episodes
        if ep.get("attributes", {}).get("videoType", "") == "EPISODE"
    ]
|
||||
|
||||
def _episode(self, title: str) -> list[Episode]:
    """Fetch a single on-demand video and wrap it as a one-episode list.

    :param title: "<slug>/<video-id>" — only the video id after the slash is used.
    """
    video_id = title.split("/")[1]

    params = {"decorators": "isFavorite", "include": "show"}
    content = self._request("GET", "/content/videos/{}".format(video_id), params=params)

    episode = content.get("data", {}).get("attributes")
    video_type = episode.get("videoType")
    relationships = content.get("data", {}).get("relationships")
    show = next((x for x in content["included"] if x.get("type", "") == "show"), {})

    show_title = show.get("attributes", {}).get("name") or show.get("attributes", {}).get("originalName")
    episode_name = episode.get("originalName") or episode.get("secondaryTitle")
    # Clips and standalone events carry their own title rather than the show's.
    if video_type.lower() in ("clip", "standalone_event"):
        show_title = episode.get("originalName")
        episode_name = episode.get("secondaryTitle", "")

    return [
        Episode(
            id_=relationships.get("edit", {}).get("data", {}).get("id"),
            service=self.__class__,
            title=show_title,
            year=int(episode.get("airDate")[:4]) if episode.get("airDate") else None,
            season=episode.get("seasonNumber") or 0,
            number=episode.get("episodeNumber") or 0,
            name=episode_name,
            data=episode,
        )
    ]
|
||||
|
||||
def _sport(self, title: str) -> list[Movie]:
    """Fetch a sport event page and wrap it as a single Movie.

    :param title: the event's alternateId slug.
    :raises ValueError: when no entity on the page matches the slug.
    """
    params = {
        "include": "default",
        "decorators": "viewingHistory,badges,isFavorite,contentAction",
    }
    data = self._request("GET", "/cms/routes/sport/{}".format(title), params=params)

    content = next((x for x in data["included"] if x.get("attributes", {}).get("alternateId", "") == title), None)
    if not content:
        raise ValueError(f"Content not found for title: {title}")

    movie = content.get("attributes")
    relationships = content.get("relationships")

    name = movie.get("name") or movie.get("originalName")
    year = int(movie.get("firstAvailableDate")[:4]) if movie.get("firstAvailableDate") else None

    # Only append the secondary title when one exists; previously a missing
    # secondaryTitle produced names ending in a dangling " - ".
    secondary = movie.get("secondaryTitle", "")
    full_name = f"{name} - {secondary}" if secondary else name

    return [
        Movie(
            id_=relationships.get("edit", {}).get("data", {}).get("id"),
            service=self.__class__,
            name=full_name,
            year=year,
            data=movie,
        )
    ]
|
||||
|
||||
def _movie(self, title: str, entity: str) -> list[Movie]:
    """Fetch a movie page and wrap the matching video entity as a Movie.

    :param title: the movie's alternateId slug.
    :param entity: expected videoType ("movie" or "standalone") used to pick
        the right entity off the shared route.
    :raises ValueError: when no entity with that videoType is on the page.
    """
    params = {
        "include": "default",
        "decorators": "isFavorite,playbackAllowed,contentAction,badges",
    }
    data = self._request("GET", "/cms/routes/movie/{}".format(title), params=params)

    movie = next((
        x for x in data["included"] if x.get("attributes", {}).get("videoType", "").lower() == entity), None
    )
    if not movie:
        raise ValueError("Movie not found")

    return [
        Movie(
            id_=movie["relationships"]["edit"]["data"]["id"],
            service=self.__class__,
            name=movie["attributes"].get("name") or movie["attributes"].get("originalName"),
            year=int(movie["attributes"]["airDate"][:4]) if movie["attributes"].get("airDate") else None,
            data=movie,
        )
    ]
|
||||
|
||||
def _request(self, method: str, endpoint: str, **kwargs: Any) -> dict:
    """Send an API request relative to the service base URL and return parsed JSON.

    Exits the process with a friendly message when the API reports a
    missing-subscription error; raises ConnectionError on non-JSON responses.
    (Return annotation fixed: ``Any[dict | str]`` is not valid typing syntax.)
    """
    url = urljoin(self.base_url, endpoint)

    response = self.session.request(method, url, **kwargs)

    # Narrow try: only the JSON decode can legitimately fail here. The old
    # blanket except also masked unrelated bugs in the error handling below.
    try:
        data = json.loads(response.content)
    except ValueError as e:
        raise ConnectionError(f"Request failed for {url}: {e}") from e

    if errors := data.get("errors", []):
        code = next((x.get("code", "") for x in errors), "")
        if "missingpackage" in code.lower():
            self.log.error("\nError: Subscription is required for this title.")
            sys.exit(1)

    return data
|
||||
5
DSCP/config.yaml
Normal file
5
DSCP/config.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
endpoints:
|
||||
template: "https://default.{}-{}.prd.api.discoveryplus.com"
|
||||
default_url: "https://default.any-any.prd.api.discoveryplus.com"
|
||||
|
||||
client_id: "b6746ddc-7bc7-471f-a16c-f6aaf0c34d26" # androidtv
|
||||
972
DSNP/__init__.py
Normal file
972
DSNP/__init__.py
Normal file
@ -0,0 +1,972 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from collections.abc import Generator
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any, Optional, Union, List
|
||||
from langcodes import Language
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
from requests import Request
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.manifests import HLS
|
||||
from unshackle.core.titles import Title_T, Titles_T, Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks, Attachment, Video, Audio, Subtitle
|
||||
from unshackle.core.utils.collections import as_list
|
||||
from unshackle.core.utilities import get_ip_info
|
||||
|
||||
from . import queries
|
||||
|
||||
class DSNP(Service):
|
||||
"""
|
||||
Service code for Disney+ Streaming Service (https://disneyplus.com).
|
||||
|
||||
Author: Made by CodeName393 with Special Thanks to narakama\n
|
||||
Authorization: Credentials\n
|
||||
Security: UHD@L1/SL3000 FHD@L1/SL3000 HD@L3/SL2000
|
||||
"""
|
||||
|
||||
ALIASES = ("DSNP", "disneyplus", "disney+")
|
||||
TITLE_RE = (
|
||||
r"^(?:https?://(?:www\.)?disneyplus\.com(?:/[a-z0-9-]+)?(?:/[a-z0-9-]+)?/(browse)/(?P<id>entity-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}))(?:\?.*)?$",
|
||||
r"^(?:https?://(?:www\.)?disneyplus\.com(?:/[a-z0-9-]+)?(?:/[a-z0-9-]+)?/(movies|series)/[a-z0-9-]+/)?(?P<id>[a-zA-Z0-9-]+)(?:\?.*)?$",
|
||||
)
|
||||
|
||||
@staticmethod
@click.command(name="DisneyPlus", short_help="https://disneyplus.com", help=__doc__)
@click.argument("title", type=str)
@click.option("--imax", is_flag=True, default=False, help="Prefer IMAX Enhanced version if available.")
@click.option("--remastered-ar", is_flag=True, default=False, help="Prefer Remastered Aspect Ratio if available.")
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> DSNP:
    # CLI entry point: click parses the title/flags and we forward them
    # straight into the service constructor.
    return DSNP(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str, imax: bool, remastered_ar: bool):
    """Initialize the Disney+ service: parse the title id, read the CLI/CDM
    context, adjust quality constraints to the CDM's security level, detect
    the IP region, and prime the HTTP session with BAMSDK headers."""
    self.title = title
    super().__init__(ctx)

    # Extract the entity/content id from a URL; fall back to the raw input.
    self.title_id = self.title
    for pattern in self.TITLE_RE:
        match = re.match(pattern, self.title)
        if match:
            self.title_id = match.group("id")
            break

    self.prefer_imax = imax or False
    self.prefer_remastered_ar = remastered_ar or False

    # Download constraints inherited from the parent (dl) command.
    self.vcodec = ctx.parent.params.get("vcodec") or Video.Codec.AVC
    self.acodec : Audio.Codec = ctx.parent.params.get("acodec")
    self.range = ctx.parent.params.get("range_") or [Video.Range.SDR]
    self.quality: List[int] = ctx.parent.params.get("quality") or [1080]
    self.wanted = ctx.parent.params.get("wanted")
    self.audio_only = ctx.parent.params.get("audio_only")
    self.subs_only = ctx.parent.params.get("subs_only")
    self.chapters_only = ctx.parent.params.get("chapters_only")

    self.cdm = ctx.obj.cdm
    self.playready = isinstance(self.cdm, PlayReadyCdm)
    # PlayReady SL < 3000 and Widevine L3 are both treated as the low tier.
    self.is_l3 = (self.cdm.security_level < 3000) if self.playready else (self.cdm.security_level == 3)

    # Populated during authenticate()/playback.
    self.region = None
    self.prod_config = {}
    self.account_tokens = {}
    self.active_session = {}
    self.playback_data = {}

    self.log.info("Preparing...")

    if self.is_l3:
        # Low-tier CDMs are capped at HD SDR AVC.
        self.vcodec = Video.Codec.AVC
        self.range = [Video.Range.SDR]
        self.quality = [720]
        self.log.warning(" + Switched video to HD. This CDM only support HD.")
    else:
        # NOTE(review): `self.quality > [1080]` compares lists element-wise —
        # works for single-entry lists but is fragile; confirm quality is
        # always a one-element list here.
        if self.quality > [1080] and self.range == [Video.Range.SDR]:
            self.range = [Video.Range.HDR10]
            self.log.info(" + Switched range to HDR10. 4K resolution requires HDR.")

        if (self.range != [Video.Range.SDR] or self.quality > [1080]) and self.vcodec != Video.Codec.HEVC:
            self.vcodec = Video.Codec.HEVC
            self.log.info(f" + Switched video codec to H265 to be able to get {self.range} dynamic range.")

        # DTS is only offered on IMAX Enhanced versions.
        if self.acodec == Audio.Codec.DTS and not self.prefer_imax:
            self.prefer_imax = True
            self.log.info(" + Switched IMAX prefer. DTS audio can only be get from IMAX prefer.")

    self.session.headers.update({
        "User-Agent": self.config["bamsdk"]["user_agent"],
        "Accept-Encoding": "gzip",
        "Accept": "application/json",
        "Content-Type": "application/json"
    })

    # Determine the region from whichever country field the IP service returns.
    ip_info = get_ip_info(self.session)
    country_key = None
    possible_keys = ["countryCode", "country", "country_code", "country-code"]
    for key in possible_keys:
        if key in ip_info:
            country_key = key
            break
    if country_key:
        self.region = str(ip_info[country_key]).upper()
        self.log.info(f" + IP Region: {self.region}")
    else:
        self.log.warning(f" - The region could not be determined from IP information: {ip_info}")
        self.region = "US"
        self.log.info(f" + IP Region: {self.region} (By Default)")

    # Remote production config (service endpoints etc.).
    self.prod_config = self.session.get(self.config["endpoints"]["config"]).json()

    # BAMSDK identification headers required by the Disney+ API.
    self.session.headers.update({
        "X-Application-Version": self.config["bamsdk"]["application_version"],
        "X-BAMSDK-Client-ID": self.config["bamsdk"]["client"],
        "X-BAMSDK-Platform": self.config["device"]["platform"],
        "X-BAMSDK-Version": self.config["bamsdk"]["sdk_version"],
        "X-DSS-Edge-Accept": "vnd.dss.edge+json; version=2",
        "X-Request-Yp-Id": self.config["bamsdk"]["yp_service_id"]
    })
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in with credentials, optionally switch to a configured profile,
    validate the subscription/region, and sync profile playback preferences.

    :raises EnvironmentError: when no credential is provided (cookies alone
        are not supported by this service).
    """
    super().authenticate(cookies, credential)
    self.credentials = credential
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    self.log.info("Logging into Disney+...")
    self._login()

    # Optional explicit profile selection via config: profile.index.
    if self.config.get("profile") and "index" in self.config["profile"]:
        try:
            target_profile_index = int(self.config["profile"]["index"])
        except (ValueError, TypeError, KeyError):
            self.log.error(" - Profile index in configuration is invalid.", exc_info=False)
            sys.exit(1)

        profiles = self.active_session['account']['profiles']
        if not 0 <= target_profile_index < len(profiles):
            self.log.error(f" - Invalid profile index: {target_profile_index}. Please choose between 0 and {len(profiles) - 1}.", exc_info=False)
            sys.exit(1)

        target_profile = profiles[target_profile_index]
        active_profile_id = self.active_session['account']['activeProfile']['id']

        # Only switch (and re-fetch session data) when not already active.
        if target_profile['id'] != active_profile_id:
            self._perform_switch_profile(target_profile, self.session.headers)

            self.log.info(" + Refreshing session data after profile switch...")
            full_account_info = self._get_account_info_raw()
            self.active_session = full_account_info["activeSession"]
            self.active_session['account'] = full_account_info['account']
            self.log.info("Session data updated successfully.")

    self.log.debug(self.active_session)

    if not self.active_session['isSubscriber']:
        self.log.error(" - Cannot continue, account is not subscribed to Disney+", exc_info=False)
        sys.exit(1)
    if not self.active_session['inSupportedLocation']:
        self.log.error(" - Cannot continue, Not available in your Region.", exc_info=False)
        sys.exit(1)

    self.log.info(f" + Account ID: {self.active_session['account']['id']}")
    self.log.info(f" + Profile ID: {self.active_session['account']['activeProfile']['id']}")
    self.log.info(f" + Subscribed: {self.active_session['isSubscriber']}")
    self.log.debug(f" + Account Region: {self.active_session['homeLocation']['countryCode']}")
    self.log.debug(f" + Detected Location: {self.active_session['location']['countryCode']}")
    self.log.debug(f" + Supported Location: {self.active_session['inSupportedLocation']}")

    active_profile_id = self.active_session['account']['activeProfile']['id']
    full_profile_object = next(
        p for p in self.active_session['account']['profiles'] if p['id'] == active_profile_id
    )

    # Sync the profile's IMAX preference with the CLI flag.
    # NOTE(review): `is not` identity comparison on booleans works only because
    # True/False are singletons — `!=` would be the safer spelling.
    current_imax_setting = full_profile_object["attributes"]["playbackSettings"]["preferImaxEnhancedVersion"]
    self.log.info(f" + IMAX Enhanced: {current_imax_setting}")
    if current_imax_setting is not self.prefer_imax:
        self._set_imax_preference(self.prefer_imax)

    # prefer133 == original (1.33) aspect ratio; "remastered" is its negation,
    # hence the double negative in the comparison below.
    current_133_setting = full_profile_object["attributes"]["playbackSettings"]["prefer133"] # Original Aspect Ratio
    self.log.info(f" + Remastered Aspect Ratio: {not current_133_setting}")
    if not current_133_setting is not self.prefer_remastered_ar:
        self._set_remastered_ar_preference(self.prefer_remastered_ar)
|
||||
|
||||
def _login(self) -> None:
    """Obtain account tokens (from cache when possible, otherwise a full
    credential login) and populate self.active_session."""
    cache = self.cache.get(f"tokens_{self.region}_{self.credentials.sha1}")

    if cache:
        try:
            self.log.info(" + Using cached tokens...")
            self.account_tokens = cache.data

            bearer = self.account_tokens["token"]["accessToken"]
            if not bearer:
                raise ValueError("accessToken not found in cache")
            self.session.headers.update({'Authorization': f'Bearer {bearer}'})

        except (KeyError, ValueError, TypeError) as e:
            # Corrupt cache entry — fall back to a full login.
            self.log.warning(f" - Cached token data is invalid or corrupted ({e}). Getting new tokens...")
            self._perform_full_login()

        # Refresh the cached token if it has expired; on failure, re-login.
        # NOTE(review): this runs even right after _perform_full_login above —
        # harmless if the fresh token validates as unexpired, but worth confirming.
        try:
            self._refresh()
        except Exception as e:
            self.log.warning(f" - Failed to refresh token from cache ({e}). Getting new tokens...")
            self._perform_full_login()

        # No problem if don't use it
        # self._update_device()

    else:
        self.log.info(" + Getting new tokens...")
        self._perform_full_login()

    self.log.info(" + Fetching session data...")
    full_account_info = self._get_account_info_raw()
    self.active_session = full_account_info["activeSession"]
    self.active_session['account'] = full_account_info['account']
    self.log.info("Session data setup successfully.")
|
||||
|
||||
def _perform_full_login(self) -> None:
    """Run the full credential login flow: register a device, verify the
    email's account status, authenticate with password, then select and
    switch to a profile."""
    device_token = self._register_device()

    email_status = self._check_email(self.credentials.username, device_token)
    if email_status.lower() != "login":
        # BUG FIX: this compared a lower-cased string against "OTP", which
        # could never match, so 2FA accounts hit the generic error instead.
        if email_status.lower() == "otp":
            self.log.error(" - Account requires 2FA passcode.", exc_info=False)
            sys.exit(1)
        elif email_status.lower() == "register":
            self.log.error(" - Account is not registered. Please register first.", exc_info=False)
            sys.exit(1)
        else:
            self.log.error(f" - Email status is '{email_status}'. Account status verification required.", exc_info=False)
            sys.exit(1)

    login_tokens = self._login_with_password(self.credentials.username, self.credentials.password, device_token)

    # Use the fresh access token only for the profile-selection requests.
    temp_auth_header = {"Authorization": f'Bearer {login_tokens["accessToken"]}'}
    account_info = self._get_account_info_raw(temp_auth_header)
    profiles = account_info["account"]["profiles"]

    selected_profile = None
    if self.config.get("profile") and "index" in self.config["profile"]:
        # Explicit profile from config.
        try:
            profile_index = int(self.config["profile"]["index"])
            if not 0 <= profile_index < len(profiles):
                raise ValueError(f"Index out of range (0-{len(profiles)-1})")

            selected_profile = profiles[profile_index]
        except (ValueError, TypeError):
            self.log.error(" - Profile index in configuration is invalid.", exc_info=False)
            sys.exit(1)
    else:
        # Auto-select the first profile that is not kids-mode and not PIN protected.
        selected_profile = next(
            (p for p in profiles if not p["attributes"]["kidsModeEnabled"] and not p["attributes"]["parentalControls"]["isPinProtected"]),
            None
        )
        if not selected_profile:
            self.log.error(" - Auto-selection failed: No suitable profile found (non-kids, no PIN). Please configure a specific profile.", exc_info=False)
            sys.exit(1)

    if selected_profile:
        self._perform_switch_profile(selected_profile, temp_auth_header)
|
||||
|
||||
def _perform_switch_profile(self, target_profile: dict, auth_headers: dict) -> None:
    """Switch the session to the given profile, prompting for a PIN when the
    profile is protected, then apply the resulting tokens.

    :param target_profile: profile object from the account's profile list.
    :param auth_headers: headers carrying the bearer token to authorize the switch.
    """
    self.log.info(f" + Switching to profile: {target_profile['name']}({target_profile['id']})")

    if target_profile['attributes']['kidsModeEnabled']:
        self.log.error(" - Kids Profile and cannot be used.", exc_info=False)
        sys.exit(1)

    profile_pin = None
    if target_profile['attributes']['parentalControls']['isPinProtected']:
        self.log.warning(" - This profile is PIN protected.")
        # Interactive PIN prompt: must be exactly 4 digits; longer input is truncated.
        try:
            profile_pin = input("Enter a profile pin: ")
            if not profile_pin:
                self.log.error(" - PIN is required, but no value was entered.", exc_info=False)
                sys.exit(1)
            if not profile_pin.isdigit():
                self.log.error(" - Invalid PIN. Please enter only numbers.", exc_info=False)
                sys.exit(1)
            if len(profile_pin) < 4:
                self.log.error(" - PIN is too short. Please enter at least 4 digits.", exc_info=False)
                sys.exit(1)
            if len(profile_pin) > 4:
                self.log.warning(" - PIN is longer than 4 digits. Using the first 4 digits.")
                profile_pin = profile_pin[:4]
        except KeyboardInterrupt:
            self.log.error("\n - PIN input cancelled by user.", exc_info=False)
            sys.exit(1)


    # Switch profile, then exchange the returned refresh token for final
    # access tokens and install them on the session.
    switch_profile_data = self._switch_profile(target_profile['id'], auth_headers, profile_pin)
    final_token_data = self._refresh_token(switch_profile_data["token"]["refreshToken"])
    self._apply_new_tokens(final_token_data)
|
||||
|
||||
def _refresh(self) -> str:
    """Return a valid bearer token, refreshing via the refresh token when the
    cached access token has expired; exits if the refresh token is also dead."""
    cache = self.cache.get(f"tokens_{self.region}_{self.credentials.sha1}")
    if not cache.expired:
        self.log.debug(f" + Token is valid until: {datetime.fromtimestamp(cache.expiration.timestamp()).strftime('%Y-%m-%d %H:%M:%S')}")
        # Still valid: return the bearer currently installed on the session.
        return self.session.headers.get('Authorization', 'Bearer ').split(' ')[1]

    self.log.warning(" + Token expired. Refreshing...")
    try:
        refreshed_data = self._refresh_token(self.account_tokens["token"]["refreshToken"])
        bearer = self._apply_new_tokens(refreshed_data)
        return bearer
    except Exception as _:
        self.log.error("Refresh Token Expired", exc_info=False)
        sys.exit(1)
|
||||
|
||||
def _apply_new_tokens(self, token_data: dict) -> str:
    """Install new account tokens on the session and persist them to cache.

    :param token_data: token response containing token.accessToken/expiresIn.
    :return: the new bearer access token.
    """
    self.account_tokens = token_data

    bearer = self.account_tokens["token"]["accessToken"]
    if not bearer:
        self.log.error("Invalid token data: accessToken not found.", exc_info=False)
        sys.exit(1)
    self.session.headers.update({'Authorization': f'Bearer {bearer}'})

    # Cache the tokens, expiring 60s early to avoid using a just-expired token.
    expires_in = self.account_tokens["token"]["expiresIn"] or 3600
    cache = self.cache.get(f"tokens_{self.region}_{self.credentials.sha1}")
    cache.set(self.account_tokens, expires_in - 60)
    self.log.debug(f" + New Token is valid until: {datetime.fromtimestamp(cache.expiration.timestamp()).strftime('%Y-%m-%d %H:%M:%S')}")

    return bearer
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search the explore service and yield each hit as a SearchResult."""
    endpoint = self._href(self.prod_config["services"]["explore"]["client"]["endpoints"]["search"]["href"])
    page = self._request("GET", endpoint, params={"query": self.title})["data"]["page"]

    # No containers means no results for this query.
    containers = page.get("containers")
    if not containers:
        return

    for item in containers[0]["items"]:
        entity = f"entity-{item['id']}"
        visuals = item["visuals"]
        yield SearchResult(
            id_=entity,
            title=visuals["title"],
            description=visuals["description"]["brief"],
            label=visuals["metastringParts"]["releaseYearRange"]["startYear"],
            url=f"https://www.disneyplus.com/browse/{entity}",
        )
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Resolve the input title id into Movies or Series.

    Determines the content type via the content-service deeplink, falling
    back to the explore deeplink for non-standard ("browse") content, then
    loads the Explore page and builds the matching title objects.

    Fix: previously, if the fallback deeplink action was not "browse",
    ``content_type`` was left unbound and the debug line below raised a
    NameError; we now fail inside the fallback so the error path handles it.
    """
    try:
        content_info = self._get_deeplink(self.title_id)
        content_type = content_info["data"]["deeplink"]["actions"][0]["contentType"]
    except Exception:
        try:
            actions_info = self._get_deeplink_last(self.title_id)
            action_type = actions_info["data"]["deeplink"]["actions"][0]["type"]
            if action_type != "browse":
                raise ValueError(f"unexpected deeplink action type: {action_type}")
            content_type = "other"
            self.log.warning(" - The content is not standard. however, it tries to look up the data.")
        except Exception as e:
            self.log.error(f" - Failed to determine content type via deeplink ({e}).", exc_info=False)
            sys.exit(1)
    self.log.debug(f" + Content Type: {content_type.upper()}")

    page = self._get_page(self.title_id)

    # Default language when the player-experience lookup is unavailable.
    orig_lang = "en"
    if not content_type == "other":
        playback_action = next(x for x in page["actions"] if x["type"] == "playback")
        avail_id = playback_action["availId"]
        self.log.debug(f" + Avail ID: {avail_id}")
        lang_data = self._get_original_lang(avail_id)
        orig_lang = lang_data["data"]["playerExperience"]["originalLanguage"]
        self.log.debug(f' + Original Language: {orig_lang}')

    if content_type == "movie":
        return Movies(
            [
                Movie(
                    id_=page["id"],
                    service=self.__class__,
                    name=page["visuals"]["title"],
                    year=page["visuals"]["metastringParts"]["releaseYearRange"]["startYear"],
                    language=Language.get(orig_lang),
                    data=page
                )
            ]
        )
    elif content_type == "series":
        return Series(self._get_series(page, orig_lang))
    elif content_type == "other":
        # Non-standard content: treat as a movie-like title with minimal metadata.
        return Movies(
            [
                Movie(
                    id_=page["id"],
                    service=self.__class__,
                    name=page["visuals"]["title"],
                    data=page
                )
            ]
        )
    else:
        self.log.error(f" - Unsupported content type: {content_type}", exc_info=False)
        sys.exit(1)
|
||||
|
||||
def _get_series(self, page: dict, orig_lang: str) -> List[Episode]:
    """Build the Episode list for a series page.

    Iterates every season referenced by the page's "episodes" container and
    collects playable ("view") items. Returns a plain list; the caller wraps
    it in a Series (the previous ``-> Series`` annotation was inaccurate).
    """
    container = next(x for x in page["containers"] if x["type"] == "episodes")
    season_ids = [s["id"] for s in container["seasons"]]

    episodes : List[Episode] = []
    for season_id in season_ids:
        episodes_data = self._get_episodes_data(season_id)

        for ep in episodes_data:
            # Skip non-playable items (e.g. upsell/promo entries).
            if ep["type"] != "view":
                continue

            episodes.append(
                Episode(
                    id_=ep["id"],
                    service=self.__class__,
                    title=page["visuals"]["title"],
                    season=int(ep["visuals"]["seasonNumber"]),
                    number=int(ep["visuals"]["episodeNumber"]),
                    name=ep["visuals"]["episodeTitle"],
                    year=page["visuals"]["metastringParts"]["releaseYearRange"]["startYear"],
                    language=Language.get(orig_lang),
                    data=ep
                )
            )

    return episodes
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Fetch the manifest track listing for a title.

    Picks a DRM scenario based on CDM level, optionally fetches Dolby Vision
    and HDR10 manifests separately when a hybrid range is requested, attaches
    the title thumbnail, and post-processes track metadata.
    """
    playback = next(x for x in title.data["actions"] if x.get("type") == "playback")
    media_id = playback["resourceId"] or None
    if not media_id:
        self.log.error(" - Failed to get media ID for playback info", exc_info=False)
        sys.exit(1)

    # L3 devices only get the regular CTR scenario. # cbcs-high also exists.
    scenario = "ctr-regular" if self.is_l3 else "ctr-high"

    self.log.debug(f"Playback Scenario: {scenario}")
    self.log.debug(f"Media ID: {media_id}")

    self._refresh()  # Safe Access: make sure the bearer token is still valid

    if Video.Range.HYBRID in self.range and not self.is_l3:
        # DV+HDR hybrid: the API serves one range per request, so fetch both
        # manifests and merge (duplicates only warn, not fail).
        self.log.warning("DV+HDR Multi-range requested.")

        self.log.info(" + Fetching Dolby Vision tracks...")
        tracks = self._fetch_manifest_tracks(title, media_id, scenario, ["DOLBY_VISION"])

        self.log.info(" + Fetching HDR10 tracks...")
        hdr_tracks_temp = self._fetch_manifest_tracks(title, media_id, scenario, ["HDR10"])  # HDR10PLUS

        tracks.add(hdr_tracks_temp, warn_only=True)
    else:
        video_ranges = []
        if not self.is_l3:
            if Video.Range.DV in self.range:
                video_ranges = ["DOLBY_VISION"]
            elif Video.Range.HDR10 in self.range or Video.Range.HDR10P in self.range:
                video_ranges = ["HDR10"]  # HDR10PLUS

        # Empty list -> None: let the server pick the default (SDR) range.
        tracks = self._fetch_manifest_tracks(title, media_id, scenario, video_ranges or None)

    tracks.add(self._get_thumbnail(title))

    return self._post_process_tracks(tracks)
|
||||
|
||||
def _fetch_manifest_tracks(self, title: Title_T, media_id: str, scenario: str, video_ranges: List[str] = None) -> Tracks:
    """Request a playback manifest for ``media_id`` and parse it into Tracks.

    Builds the device-capability attributes (codecs, resolution cap, audio
    types, optional video ranges), stores the raw playback response in
    ``self.playback_data`` (used later for chapters), and parses the first
    source's HLS manifest.
    """
    attributes = {
        "codecs": {
            "supportsMultiCodecMaster": False,
            "video": ["h.264"]
        },
        "protocol": "HTTPS",
        "frameRates": [60],
        "assetInsertionStrategy": "SGAI",  # Server-Guided Ad Insertion
        "playbackInitiationContext": "ONLINE"
    }

    # Cap resolution by CDM level: 720p for L3, 4K otherwise.
    if self.is_l3:
        attributes["resolution"] = {"max": ["1280x720"]}
    else:
        attributes["resolution"] = {"max": ["3840x2160"]}

    if self.vcodec == Video.Codec.HEVC:
        attributes["codecs"]["video"] = ["h.264", "h.265"]

        # Lossless/object audio is only advertised alongside HEVC.
        attributes["audioTypes"] = ["ATMOS", "DTS_X"]

    if video_ranges:
        attributes["videoRanges"] = video_ranges

    payload = {
        "playbackId": media_id,
        "playback": {
            "attributes": attributes
        }
    }
    # Keep the full playback response around; get_chapters() reads its
    # "editorial" markers later.
    self.playback_data[title.id] = self._get_video(scenario, payload)

    manifest_url = self.playback_data[title.id]["sources"][0]['complete']['url']
    return HLS.from_url(url=manifest_url, session=self.session).to_tracks(title.language)
|
||||
|
||||
def _get_thumbnail(self, title: Title_T) -> Attachment:
    """Build a Ripcut image URL for the title's 16:9 artwork and return it
    as an Attachment.

    Movies use the "background" artwork, episodes the "thumbnail" artwork.

    Fix: previously used ``type(x) ==`` checks and silently fell through for
    any other title type, leaving ``thumbnail_id`` unbound (NameError); now
    raises a clear error instead.
    """
    if isinstance(title, Episode):
        # Check Episode first: isinstance would match a subclass relationship
        # either way, and episodes carry their own thumbnail artwork.
        thumbnail_id = title.data["visuals"]["artwork"]["standard"]["thumbnail"]["1.78"]["imageId"]
    elif isinstance(title, Movie):
        thumbnail_id = title.data["visuals"]["artwork"]["standard"]["background"]["1.78"]["imageId"]
    else:
        raise ValueError(f"Unsupported title type for thumbnail: {type(title).__name__}")
    thumbnail_url = self._href(
        self.prod_config["services"]["ripcut"]["client"]["endpoints"]["mainCompose"]["href"],
        version="v2",
        partnerId="disney",
        imageId=thumbnail_id
    )
    return Attachment.from_url(url=thumbnail_url, name=thumbnail_id, mime_type="image/png")
|
||||
|
||||
def _post_process_tracks(self, tracks: Tracks) -> Tracks:
    """Normalize track metadata after manifest parsing.

    Tags original-language audio/subtitles, recovers audio bitrates from the
    segment URL, corrects known-wrong values, and forces subtitle codec to
    WebVTT.
    """
    for track in tracks:
        if isinstance(track, (Audio, Subtitle)):
            track.name = "[Original]" if track.is_original_lang else None

    for audio in tracks.audio:
        # The bitrate is only exposed inside the URL path, in one of several
        # known patterns (composite_NNN, NNN_hdri/complete, -NNNK/).
        bitrate_match = re.search(r"(?<=composite_)\d+|\d+(?=_(?:hdri|complete))|(?<=-)\d+(?=K/)", as_list(audio.url)[0])
        if bitrate_match:
            audio.bitrate = int(bitrate_match.group()) * 1000
        if audio.bitrate == 1_000_000:
            audio.bitrate = 768_000  # DSNP lies about the Atmos bitrate
        if audio.channels == 6.0:
            audio.channels = 5.1

    for subtitle in tracks.subtitles:
        subtitle.codec = Subtitle.Codec.WebVTT

    return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
    """Derive chapters from the playback response's editorial markers.

    Pipeline: map raw marker labels to logical groups, resolve timestamp
    collisions per group, convert groups to named chapters, deduplicate and
    sort, then normalize the first chapter so playback starts at 0. Any
    failure degrades gracefully to an empty chapter list.
    """
    try:
        editorial = self.playback_data[title.id]["editorial"]

        if not editorial:
            return []

        # Raw marker labels -> logical groups. None means "ignore marker".
        label_to_group = {
            "intro_start": "intro_start",
            "FFEI": "intro_start",  # First Frame Episode Intro
            "intro_end": "intro_end",
            "LFEI": "intro_end",  # Last Frame Episode Intro
            "recap_start": "recap_start",
            "FFER": "recap_start",  # First Frame Episode Recap
            "recap_end": "recap_end",
            "LFER": "recap_end",  # Last Frame Episode Recap
            "FFEC": "credits_start",  # First Frame End Credits
            "LFEC": "lfec_marker",  # Last Frame End Credits
            "FFCB": None,  # First Frame Credits Bumper
            "LFCB": None,  # Last Frame Credits Bumper
            "up_next": None,
            "tag_start": None,
            "tag_end": None,
        }

        # Collision Correction: multiple labels can map to the same group,
        # so collect all candidate timestamps per group first.
        grouped_timestamps = {}
        for marker in editorial:
            label = marker.get("label")
            group = label_to_group.get(label)
            if group:
                timestamp = marker.get("offsetMillis")
                if timestamp is not None:
                    if group not in grouped_timestamps:
                        grouped_timestamps[group] = []
                    grouped_timestamps[group].append(timestamp)

        # Resolve each group to a single timestamp: earliest for *_start,
        # latest for *_end, first seen otherwise.
        resolved_markers = []
        for group, timestamps in grouped_timestamps.items():
            if not timestamps:
                continue

            final_timestamp = 0
            if "start" in group:
                final_timestamp = min(timestamps)
            elif "end" in group:
                final_timestamp = max(timestamps)
            else:
                final_timestamp = timestamps[0]

            resolved_markers.append({"group": group, "ms": final_timestamp})

        # Create Chapter Data
        raw_chapter_data = []
        group_to_name = {
            "recap_start": "Recap",
            "recap_end": "Scene",
            "intro_start": "Intro",
            "intro_end": "Scene",
            "credits_start": "Credits",
        }

        total_runtime_ms = 0
        if "visuals" in title.data and "metastringParts" in title.data["visuals"]:
            total_runtime_ms = title.data["visuals"]["metastringParts"]["runtime"]["runtimeMs"]

        for marker in resolved_markers:
            group = marker["group"]
            timestamp_ms = marker["ms"]
            name = None

            if group == "lfec_marker":
                # End-of-credits marker only yields a chapter when there is a
                # meaningful post-credits scene (more than 5s before the end).
                if total_runtime_ms and (total_runtime_ms - timestamp_ms) > 5000:  # 5 sec
                    name = "Scene"
            else:
                name = group_to_name.get(group)

            if name:
                raw_chapter_data.append({"ms": timestamp_ms, "name": name})

        # Sorting and deduplication in chronological order
        if not raw_chapter_data:
            return []

        unique_chapters_data = []
        seen_ms = set()
        for chap in sorted(raw_chapter_data, key=lambda x: x["ms"]):
            if chap["ms"] not in seen_ms:
                unique_chapters_data.append(chap)
                seen_ms.add(chap["ms"])

        # Process the First Chapter: guarantee a chapter at 0ms. A near-zero
        # Intro/Recap (<5s) is snapped to 0 instead of inserting a new one.
        if not unique_chapters_data:
            unique_chapters_data.append({"ms": 0, "name": "Scene"})
        else:
            first_chapter = unique_chapters_data[0]
            if first_chapter["ms"] > 0:
                if not (first_chapter["ms"] < 5000 and first_chapter["name"] in ["Intro", "Recap"]):
                    unique_chapters_data.insert(0, {"ms": 0, "name": "Scene"})

        if unique_chapters_data:
            first_chapter = unique_chapters_data[0]
            if first_chapter["name"] in ["Intro", "Recap"] and first_chapter["ms"] > 0:
                first_chapter["ms"] = 0

        # Create Final Chapter List ("Scene" chapters stay unnamed).
        final_chapters = []
        for i, chap_info in enumerate(unique_chapters_data):
            name = chap_info["name"]

            final_chapters.append(
                Chapter(
                    timestamp=chap_info["ms"] / 1000.000,
                    name=name if name != "Scene" else None
                )
            )

        return final_chapters

    except Exception as e:
        # Chapters are best-effort; never fail the download over them.
        self.log.warning(f"Failed to extract chapters: {e}")
        return []
|
||||
|
||||
def get_widevine_service_certificate(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Union[bytes, str]:
    """Return the static Widevine service certificate bundled in the config.

    The DRM service also exposes a certificate endpoint, but the bundled
    certificate is sufficient, so no request is made.
    """
    return self.config["certificate"]
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge to the license server and return the raw license.

    Refreshes the bearer token first; exits the process on any request failure.
    """
    self._refresh()  # Safe Access
    license_url = self.prod_config["services"]["drm"]["client"]["endpoints"]["widevineLicense"]["href"]

    try:
        response = self.session.post(
            license_url,
            headers={"Content-Type": "application/octet-stream"},
            data=challenge,
        )
        response.raise_for_status()
    except Exception as e:
        self.log.error(f" - License request failed: {e}", exc_info=False)
        sys.exit(1)

    return response.content
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
    """POST the PlayReady SOAP challenge to the license server and return the response.

    Refreshes the bearer token first; exits the process on any request failure.
    """
    self._refresh()  # Safe Access
    license_url = self.prod_config["services"]["drm"]["client"]["endpoints"]["playReadyLicense"]["href"]
    soap_headers = {
        "Accept": "application/xml, application/vnd.media-service+json; version=2",
        "Content-Type": "text/xml; charset=utf-8",
        "SOAPAction": "http://schemas.microsoft.com/DRM/2007/03/protocols/AcquireLicense"
    }

    try:
        response = self.session.post(license_url, headers=soap_headers, data=challenge)
        response.raise_for_status()
    except Exception as e:
        self.log.error(f" - License request failed: {e}", exc_info=False)
        sys.exit(1)

    return response.content
|
||||
|
||||
def _get_deeplink(self, ref_id: str) -> dict:
    """Resolve a deeplink id via the content service and return the raw response."""
    return self._request(
        "GET",
        self._href(
            self.prod_config["services"]["content"]["client"]["endpoints"]["getDeeplink"]["href"],
            refIdType="deeplinkId",
            refId=ref_id,
        ),
    )
|
||||
|
||||
def _get_deeplink_last(self, ref_id: str) -> dict:
    """Resolve a deeplink id via the explore service (fallback endpoint)."""
    endpoint = self._href(self.prod_config["services"]["explore"]["client"]["endpoints"]["getDeeplink"]["href"])
    return self._request("GET", endpoint, params={"refIdType": "deeplinkId", "refId": ref_id})
|
||||
|
||||
def _get_page(self, title_id: str) -> dict:
    """Fetch the full Explore page object for a title id."""
    page_href = self.prod_config["services"]["explore"]["client"]["endpoints"]["getPage"]["href"]
    response = self._request(
        "GET",
        self._href(page_href, pageId=title_id),
        params={"disableSmartFocus": "true", "limit": 999},
    )
    return response["data"]["page"]
|
||||
|
||||
def _get_original_lang(self, availId: str) -> dict:
    """Fetch the player-experience payload, which carries the original language."""
    experience_href = self.prod_config["services"]["explore"]["client"]["endpoints"]["getPlayerExperience"]["href"]
    return self._request("GET", self._href(experience_href, availId=availId))
|
||||
|
||||
def _get_episodes_data(self, season_id: str) -> List[dict]:
    """List every episode item of the given season."""
    season_href = self.prod_config["services"]["explore"]["client"]["endpoints"]["getSeason"]["href"]
    response = self._request("GET", self._href(season_href, seasonId=season_id), params={"limit": 999})
    return response["data"]["season"]["items"]
|
||||
|
||||
def _get_video(self, scenario: str, payload: dict) -> dict:
    """POST a playback request for the given DRM scenario and return the stream object."""
    media_href = self.prod_config["services"]["media"]["client"]["endpoints"]["mediaPayload"]["href"]
    response = self._request(
        "POST",
        self._href(media_href, scenario=scenario),
        headers={
            "Accept": "application/vnd.media-service+json",
            "X-DSS-Feature-Filtering": "true",
        },
        payload=payload,
    )
    return response["stream"]
|
||||
|
||||
def _register_device(self) -> str:
    """Register this (virtual) device with the orchestration API.

    Authenticates with the static API key and returns the initial device
    access token from the SDK extensions of the response.
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["registerDevice"]["href"]
    headers = {
        "Authorization": self.config["bamsdk"]["api_key"],
        "X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]
    }
    # Device identity fields come straight from the service config file.
    payload = {
        "variables": {
            "registerDevice": {
                "applicationRuntime": self.config["device"]["applicationRuntime"],
                "attributes": {
                    "operatingSystem": self.config["device"]["operatingSystem"],
                    "operatingSystemVersion": self.config["device"]["operatingSystemVersion"]
                },
                "deviceFamily": self.config["device"]["family"],
                "deviceLanguage": self.config["device"]["deviceLanguage"],
                "deviceProfile": self.config["device"]["profile"],
                "devicePlatformId": self.config["device"]["platform_id"],
            }
        },
        "query": queries.REGISTER_DEVICE
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["extensions"]["sdk"]["token"]["accessToken"]
|
||||
|
||||
def _check_email(self, email: str, token: str) -> str:
    """Run the "Check" GraphQL operation for an email address.

    Returns the first entry of the resulting operations list — presumably
    the login flow to use for this account (TODO confirm against caller).
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers = {
        "Authorization": token,
        "X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]
    }
    payload = {
        "operationName": "Check",
        "variables": {
            "email": email
        },
        "query": queries.CHECK_EMAIL
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["data"]["check"]["operations"][0]
|
||||
|
||||
def _login_with_password(self, email: str, password: str, token: str) -> dict:
    """Perform the "loginTv" GraphQL mutation with email/password.

    Returns the SDK token object from the response extensions (contains the
    access/refresh tokens; the previous ``-> str`` annotation was inaccurate).
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers = {
        "Authorization": token,
        "X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]
    }
    payload = {
        "operationName": "loginTv",
        "variables": {
            "input": {
                "email": email,
                "password": password
            }
        },
        "query": queries.LOGIN
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["extensions"]["sdk"]["token"]
|
||||
|
||||
def _get_account_info_raw(self, headers: dict = None) -> dict:
    """Query account/session info (EntitledGraphMeQuery) and return the ``me`` object.

    Fix: the previous mutable default argument (``headers: dict = {}``) was
    shared across calls and mutated below; use None and copy the caller's
    dict so it is never modified in place.
    """
    headers = dict(headers) if headers else {}
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers.update({"X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]})
    payload = {
        "operationName": "EntitledGraphMeQuery",
        "variables": {},
        "query": queries.ENTITLEMENTS
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["data"]["me"]
|
||||
|
||||
def _switch_profile(self, profile_id: str, headers: dict, pin: str = None) -> dict:
    """Switch the active account profile via the "switchProfile" mutation.

    ``pin`` is only sent when the target profile is PIN-protected. Returns
    the SDK extensions object (carries the new session tokens).
    """
    profile_input = {"profileId": profile_id}
    if pin:
        profile_input["entryPin"] = pin

    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers.update({"X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]})
    payload = {
        "operationName": "switchProfile",
        "variables": {
            "input": profile_input
        },
        "query": queries.SWITCH_PROFILE
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["extensions"]["sdk"]
|
||||
|
||||
def _refresh_token(self, refresh_token: str) -> dict:
    """Exchange a refresh token for new SDK tokens via the "refreshToken" mutation.

    Returns the SDK extensions object, which _apply_new_tokens() consumes.
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["refreshToken"]["href"]
    headers = {
        "Authorization": self.config["bamsdk"]["api_key"],
        "X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]
    }
    payload = {
        "operationName": "refreshToken",
        "variables": {
            "input": {
                "refreshToken": refresh_token
            }
        },
        "query": queries.REFRESH_TOKEN
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)
    return data["extensions"]["sdk"]
|
||||
|
||||
def _update_device(self) -> Optional[dict]:
    """Report the configured OS/version via "updateDeviceOperatingSystem".

    Returns the SDK extensions object when the update is accepted, otherwise
    logs a warning and returns None (the previous ``-> str`` annotation was
    inaccurate).
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers = {"X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]}
    payload = {
        "operationName": "updateDeviceOperatingSystem",
        "variables": {
            "updateDeviceOperatingSystem": {
                "operatingSystem": self.config["device"]["operatingSystem"],
                "operatingSystemVersion": self.config["device"]["operatingSystemVersion"]
            }
        },
        "query": queries.UPDATE_DEVICE
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)

    if data["data"]["updateDeviceOperatingSystem"]["accepted"]:
        return data["extensions"]["sdk"]
    else:
        self.log.warning(" - Failed to update Device Operating System.")
|
||||
|
||||
def _set_imax_preference(self, enabled: bool) -> Optional[dict]:
    """Toggle the profile's "IMAX Enhanced" playback preference.

    Returns the SDK extensions object when accepted, otherwise logs a warning
    and returns None (the previous ``-> str`` annotation was inaccurate).
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers = {"X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]}
    payload = {
        "operationName": "updateProfileImaxEnhancedVersion",
        "variables": {
            "input": {
                "imaxEnhancedVersion": enabled,
            },
            "includeProfile": True
        },
        "query": queries.SET_IMAX,
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)

    if data["data"]["updateProfileImaxEnhancedVersion"]["accepted"]:
        self.log.info(f" + Updated IMAX Enhanced preference: {enabled}")
        return data["extensions"]["sdk"]
    else:
        self.log.warning(" - Failed to set IMAX preference.")
|
||||
|
||||
def _set_remastered_ar_preference(self, enabled: bool) -> Optional[dict]:
    """Toggle the profile's "Remastered Aspect Ratio" playback preference.

    Returns the SDK extensions object when accepted, otherwise logs a warning
    and returns None (the previous ``-> str`` annotation was inaccurate).
    """
    endpoint = self.prod_config["services"]["orchestration"]["client"]["endpoints"]["query"]["href"]
    headers = {"X-BAMSDK-Platform-Id": self.config["device"]["platform_id"]}
    payload = {
        "operationName": "updateProfileRemasteredAspectRatio",
        "variables": {
            "input": {
                "remasteredAspectRatio": enabled,
            },
            "includeProfile": True
        },
        "query": queries.SET_REMASTERED_AR,
    }
    data = self._request("POST", endpoint, payload=payload, headers=headers)

    if data["data"]["updateProfileRemasteredAspectRatio"]["accepted"]:
        self.log.info(f" + Updated Remastered Aspect Ratio preference: {enabled}")
        return data["extensions"]["sdk"]
    else:
        self.log.warning(" - Failed to set Remastered Aspect Ratio preference.")
|
||||
|
||||
def _href(self, href: str, **kwargs: Any) -> str:
    """Fill an API href template; ``version`` defaults to the configured explore version."""
    fields: dict[str, Any] = {"version": self.config["bamsdk"]["explore_version"], **kwargs}
    return href.format(**fields)
|
||||
|
||||
def _request(self, method: str, endpoint: str, params: dict = None, headers: dict = None, payload: dict = None) -> dict:
    """Send an API request with per-call transaction IDs and unified error handling.

    Merges ``headers`` over the session defaults, sends ``payload`` as JSON,
    and returns the decoded JSON body. GraphQL-style ``errors`` entries are
    mapped to ConnectionError with a readable reason; token-expiry errors and
    deeplink failures propagate to the caller (which has fallbacks), every
    other failure logs and exits. (Return annotation was ``Any[dict | str]``,
    which is not a valid type; the method always returns the parsed dict.)
    """
    _headers = self.session.headers.copy()
    if headers:
        _headers.update(headers)
    # Fresh IDs per request so the backend can correlate/trace each call.
    _headers.update({
        "X-BAMSDK-Transaction-ID": str(uuid.uuid4()),
        "X-Request-ID": str(uuid.uuid4())
    })

    req = Request(method, endpoint, headers=_headers, params=params, json=payload)
    prepped = self.session.prepare_request(req)

    try:
        res = self.session.send(prepped)
        res.raise_for_status()
        data = res.json()
        # The API can return HTTP 200 with a GraphQL-style "errors" list.
        if data.get("errors"):
            error_code = data["errors"][0]["extensions"]["code"]
            if "token.service.invalid.grant" in error_code:
                raise ConnectionError(f"Refresh Token Expired: {error_code}")
            if "token.service.unauthorized.client" in error_code:
                raise ConnectionError(f"Unauthorized Client/IP: {error_code}")
            elif "idp.error.identity.bad-credentials" in error_code:
                raise ConnectionError(f"Bad Credentials: {error_code}")
            elif "account.profile.pin.invalid" in error_code:
                raise ConnectionError(f"Invalid PIN: {error_code}")
            raise ConnectionError(data["errors"])
        return data
    except Exception as e:
        # Let the caller handle expired refresh tokens (re-auth flow) and
        # deeplink lookups (which have a fallback endpoint); die otherwise.
        if "Refresh Token Expired" in str(e) or "/deeplink" in endpoint:
            raise e
        else:
            self.log.error(f"API Request failed: {e}", exc_info=False)
            sys.exit(1)
|
||||
52
DSNP/config.yaml
Normal file
52
DSNP/config.yaml
Normal file
@ -0,0 +1,52 @@
|
||||
certificate: |
|
||||
CAUSugUKtAIIAxIQbj3s4jO5oUyWjDWqjfr9WRjA2afZBSKOAjCCAQoCggEBALhKWfnyA+FGn5P3tl6ffDjoGq2Oq86hKGl6aZIaGaF7XHPO5mIk7Q35ml
|
||||
ZIgg1A458Udb4eXRws1n+kJFqtZXCY5S1yElLP0Om1WQsoEY2stpl+PZTGnVv/CsOJGKQ8K4KMr7rKjZem9lA9BrBoxgfXY3tbwlnSf3wTEohyANb5Qfpa
|
||||
xsU4v8tQDA8PcjzzV9ICodl6crcFZhAy4QMNXfbWOv/ZrGFx5blSXrzP1sMQ64IY8bjUYw4coZM34NDhu8aCA692g8k2mTz2494x7u3Is8v7RKC9ZNiETE
|
||||
K5/4oeVclXPpelNQokR4uvggnCD1L2EULG/pp6wnk1yWNNLxcCAwEAAToHYmFtdGVjaBKAA2FqHlqkE7EUmdOLiCi0hy5jRgBDJrU1CWNHfH6r2i6s5T5k
|
||||
6LK7ZfD65Tv6uyqq1k82PsDz4++kxbpfJDZaypFbae4XPc6lZxRCc5X0toX/x9TftOQQ4N82l5Hxoha569EPRkrnNy7rO7xrRILa3ZVj1alttEnEEjxEuw
|
||||
SV8usdlUg8/LvLA2C59T/HA2I77k7yVbTrVdy0f81r2l+E2SslivCy1JD3xKlgoaKl4xBnRxItWt8+DCw1Xm2lemYl2LGoh1Wk9gvlXQvr2Jv2+dFX3RNs
|
||||
i5sd00KS9sePszfjoTkQ6fmpRd7ZgFCGFWYB9JZ92aGUFQRE14OTST2uwSf32YCfsoATDNs4V6dB8YDoTGKFGrcoc4gtHPKySGNt7z/fOW4/01ZGzKqoVY
|
||||
Fp3jPq7R0qyt5P6fU5NshbLh5VKcnQvwg62BuKsdwV9u4NV36b2a546hGRl/GBneQ+QDA7NRrgITR33Sz02Oq8yJr3sy24GfZRTbtLJ4qiWkjtw==
|
||||
|
||||
## config ( {configVersion}/{clientId}/{deviceFamily}/{sdkVersion}/{applicationRuntime}/{deviceProfile}/{environment} ) ##
|
||||
# Browser (windows, chrome) : /browser/v34.2/windows/chrome/prod.json
|
||||
# Android Phone : /android/v12.0.0/google/handset/prod.json
|
||||
# Android TV : /android/v12.0.0/google/tv/prod.json
|
||||
# Amazon Fire TV : /android/v12.0.0/amazon/tv/prod.json
|
||||
|
||||
endpoints:
|
||||
config: "https://client-sdk-configs.bamgrid.com/bam-sdk/v7.0/disney-svod-3d9324fc/android/v13.0.0/google/tv/prod.json"
|
||||
|
||||
## user_agent ##
|
||||
# android-phone : BAMSDK/v13.3.0 (disney-svod-3d9324fc 4.18.1+rc6-2025.11.05.0; v7.0/v13.0.0; android; phone)
|
||||
# android-tv : BAMSDK/v13.3.0 (disney-svod-3d9324fc 4.18.1+rc6-2025.11.05.0; v7.0/v13.0.0; android; tv)
|
||||
|
||||
## api_key ##
|
||||
# browser : ZGlzbmV5JmJyb3dzZXImMS4wLjA.Cu56AgSfBTDag5NiRA81oLHkDZfu5L3CKadnefEAY84
|
||||
# android : ZGlzbmV5JmFuZHJvaWQmMS4wLjA.bkeb0m230uUhv8qrAXuNu39tbE_mD5EEhM_NAcohjyA
|
||||
|
||||
## yp_service_id ##
|
||||
# browser : 63626081279ebe65eb50fb54
|
||||
# android : 624b805dafc5c73635b1a216
|
||||
|
||||
bamsdk:
|
||||
sdk_version: "13.3.0"
|
||||
application_version: "4.18.1+rc6-2025.11.05.0"
|
||||
explore_version: "v1.11"
|
||||
client: "disney-svod-3d9324fc"
|
||||
user_agent: "BAMSDK/v13.3.0 (disney-svod-3d9324fc 4.18.1+rc6-2025.11.05.0; v7.0/v13.0.0; android; tv)"
|
||||
api_key: "ZGlzbmV5JmFuZHJvaWQmMS4wLjA.bkeb0m230uUhv8qrAXuNu39tbE_mD5EEhM_NAcohjyA"
|
||||
yp_service_id: "624b805dafc5c73635b1a216"
|
||||
|
||||
device:
|
||||
family: "android"
|
||||
profile: "tv"
|
||||
platform: "android/google/tv" # {deviceFamily}/{applicationRuntime}/{deviceProfile}
|
||||
platform_id: "android-tv"
|
||||
applicationRuntime: "android"
|
||||
operatingSystem: "Android"
|
||||
operatingSystemVersion: "16"
|
||||
deviceLanguage: "ko" # en
|
||||
|
||||
profile:
|
||||
index: 0
|
||||
13
DSNP/queries.py
Normal file
13
DSNP/queries.py
Normal file
@ -0,0 +1,13 @@
|
||||
SWITCH_PROFILE = """mutation switchProfile($input: SwitchProfileInput!) { switchProfile(switchProfile: $input) { __typename account { __typename ...accountGraphFragment } activeSession { __typename ...sessionGraphFragment } } } fragment accountGraphFragment on Account { __typename id activeProfile { __typename id } profiles { __typename ...profileGraphFragment } parentalControls { __typename isProfileCreationProtected } flows { __typename star { __typename isOnboarded } } attributes { __typename email emailVerified userVerified locations { __typename manual { __typename country } purchase { __typename country } registration { __typename geoIp { __typename country } } } } } fragment profileGraphFragment on Profile { __typename id name maturityRating { __typename ratingSystem ratingSystemValues contentMaturityRating maxRatingSystemValue isMaxContentMaturityRating } isAge21Verified flows { __typename star { __typename eligibleForOnboarding isOnboarded } } attributes { __typename isDefault kidsModeEnabled groupWatch { __typename enabled } languagePreferences { __typename appLanguage playbackLanguage preferAudioDescription preferSDH subtitleLanguage subtitlesEnabled } parentalControls { __typename isPinProtected kidProofExitEnabled liveAndUnratedContent { __typename enabled } } playbackSettings { __typename autoplay backgroundVideo prefer133 } avatar { __typename id userSelected } } } fragment sessionGraphFragment on Session { __typename sessionId device { __typename id } entitlements experiments { __typename featureId variantId version } homeLocation { __typename countryCode } inSupportedLocation isSubscriber location { __typename countryCode } portabilityLocation { __typename countryCode } preferredMaturityRating { __typename impliedMaturityRating ratingSystem } }"""
|
||||
ENTITLEMENTS = """query EntitledGraphMeQuery { me { __typename account { __typename ...accountGraphFragment } activeSession { __typename ...sessionGraphFragment } } } fragment accountGraphFragment on Account { __typename id activeProfile { __typename id } profiles { __typename ...profileGraphFragment } parentalControls { __typename isProfileCreationProtected } flows { __typename star { __typename isOnboarded } } attributes { __typename email emailVerified userVerified locations { __typename manual { __typename country } purchase { __typename country } registration { __typename geoIp { __typename country } } } } } fragment profileGraphFragment on Profile { __typename id name maturityRating { __typename ratingSystem ratingSystemValues contentMaturityRating maxRatingSystemValue isMaxContentMaturityRating } isAge21Verified flows { __typename star { __typename eligibleForOnboarding isOnboarded } } attributes { __typename isDefault kidsModeEnabled groupWatch { __typename enabled } languagePreferences { __typename appLanguage playbackLanguage preferAudioDescription preferSDH subtitleLanguage subtitlesEnabled } parentalControls { __typename isPinProtected kidProofExitEnabled liveAndUnratedContent { __typename enabled } } playbackSettings { __typename autoplay backgroundVideo prefer133 preferImaxEnhancedVersion} avatar { __typename id userSelected } } } fragment sessionGraphFragment on Session { __typename sessionId device { __typename id } entitlements experiments { __typename featureId variantId version } homeLocation { __typename countryCode } inSupportedLocation isSubscriber location { __typename countryCode } portabilityLocation { __typename countryCode } preferredMaturityRating { __typename impliedMaturityRating ratingSystem } }"""
|
||||
# GraphQL mutation: registers this device with the backend before any login can occur.
REGISTER_DEVICE = """mutation ($registerDevice: RegisterDeviceInput!) {registerDevice(registerDevice: $registerDevice) {__typename}}"""
|
||||
SET_IMAX = """mutation updateProfileImaxEnhancedVersion($input: UpdateProfileImaxEnhancedVersionInput!, $includeProfile: Boolean!) { updateProfileImaxEnhancedVersion(updateProfileImaxEnhancedVersion: $input) { accepted profile @include(if: $includeProfile) { __typename ...profileGraphFragment } } } fragment profileGraphFragment on Profile { id name personalInfo { dateOfBirth gender } maturityRating { ratingSystem ratingSystemValues contentMaturityRating maxRatingSystemValue isMaxContentMaturityRating suggestedMaturityRatings { minimumAge maximumAge ratingSystemValue } } isAge21Verified flows { star { eligibleForOnboarding isOnboarded } personalInfo { eligibleForCollection requiresCollection } } attributes { isDefault kidsModeEnabled languagePreferences { appLanguage playbackLanguage preferAudioDescription preferSDH subtitleLanguage subtitlesEnabled } parentalControls { isPinProtected kidProofExitEnabled liveAndUnratedContent { enabled available } } playbackSettings { autoplay backgroundVideo prefer133 preferImaxEnhancedVersion } avatar { id userSelected } privacySettings { consents { consentType value } } } }"""
|
||||
SET_REMASTERED_AR = """mutation updateProfileRemasteredAspectRatio($input: UpdateProfileRemasteredAspectRatioInput!, $includeProfile: Boolean!) { updateProfileRemasteredAspectRatio(updateProfileRemasteredAspectRatio: $input) { accepted profile @include(if: $includeProfile) { __typename ...profileGraphFragment } } } fragment profileGraphFragment on Profile { id name personalInfo { dateOfBirth gender } maturityRating { ratingSystem ratingSystemValues contentMaturityRating maxRatingSystemValue isMaxContentMaturityRating suggestedMaturityRatings { minimumAge maximumAge ratingSystemValue } } isAge21Verified flows { star { eligibleForOnboarding isOnboarded } personalInfo { eligibleForCollection requiresCollection } } attributes { isDefault kidsModeEnabled languagePreferences { appLanguage playbackLanguage preferAudioDescription preferSDH subtitleLanguage subtitlesEnabled } parentalControls { isPinProtected kidProofExitEnabled liveAndUnratedContent { enabled available } } playbackSettings { autoplay backgroundVideo prefer133 preferImaxEnhancedVersion } avatar { id userSelected } privacySettings { consents { consentType value } } } }"""
|
||||
# REQUEST_DEVICE_CODE = ("""mutation requestLicensePlate {requestLicensePlate {__typename licensePlate expirationTime expiresInSeconds}}""")
|
||||
# GraphQL query: checks an email address and returns which login operations are available for it.
CHECK_EMAIL = """query Check($email: String!) { check(email: $email) { operations nextOperation } }"""
|
||||
# REQUEST_OTP = """mutation requestOtp($input: RequestOtpInput!) { requestOtp(requestOtp: $input) { accepted } }"""
|
||||
LOGIN = """mutation loginTv($input: LoginInput!) { login(login: $input) { __typename account { __typename ...accountGraphFragment } actionGrant activeSession { __typename ...sessionGraphFragment } }} fragment accountGraphFragment on Account { __typename id activeProfile { __typename id } profiles { __typename ...profileGraphFragment } parentalControls { __typename isProfileCreationProtected } flows { __typename star { __typename isOnboarded } } attributes { __typename email emailVerified userVerified locations { __typename manual { __typename country } purchase { __typename country } registration { __typename geoIp { __typename country } } } }}\nfragment profileGraphFragment on Profile { __typename id name maturityRating { __typename ratingSystem ratingSystemValues contentMaturityRating maxRatingSystemValue isMaxContentMaturityRating } isAge21Verified flows { __typename star { __typename eligibleForOnboarding isOnboarded } } attributes { __typename isDefault kidsModeEnabled groupWatch { __typename enabled } languagePreferences { __typename appLanguage playbackLanguage preferAudioDescription preferSDH subtitleLanguage subtitlesEnabled } parentalControls { __typename isPinProtected kidProofExitEnabled liveAndUnratedContent { __typename enabled } } playbackSettings { __typename autoplay backgroundVideo prefer133 } avatar { __typename id userSelected } }}\nfragment sessionGraphFragment on Session { __typename sessionId device { __typename id } entitlements experiments { __typename featureId variantId version } homeLocation { __typename countryCode } inSupportedLocation isSubscriber location { __typename countryCode } portabilityLocation { __typename countryCode } preferredMaturityRating { __typename impliedMaturityRating ratingSystem }}"""
|
||||
# LOGIN_OTP = """mutation authenticateWithOtp($input: AuthenticateWithOtpInput!) { authenticateWithOtp(authenticateWithOtp: $input) { actionGrant securityAction } }"""
|
||||
# LOGIN_ACTION_GRANT = """\n mutation loginWithActionGrant($input: LoginWithActionGrantInput!) {\n loginWithActionGrant(login: $input) {\n account {\n ...account\n\n profiles {\n ...profile\n }\n }\n activeSession {\n ...session\n }\n identity {\n ...identity\n }\n }\n }\n\n \nfragment identity on Identity {\n attributes {\n securityFlagged\n createdAt\n passwordResetRequired\n }\n flows {\n marketingPreferences {\n eligibleForOnboarding\n isOnboarded\n }\n personalInfo {\n eligibleForCollection\n requiresCollection\n }\n }\n personalInfo {\n dateOfBirth\n gender\n }\n subscriber {\n subscriberStatus\n subscriptionAtRisk\n overlappingSubscription\n doubleBilled\n doubleBilledProviders\n subscriptions {\n id\n groupId\n state\n partner\n isEntitled\n source {\n sourceType\n sourceProvider\n sourceRef\n subType\n }\n paymentProvider\n product {\n id\n sku\n offerId\n promotionId\n name\n nextPhase {\n sku\n offerId\n campaignCode\n voucherCode\n }\n entitlements {\n id\n name\n desc\n partner\n }\n categoryCodes\n redeemed {\n campaignCode\n redemptionCode\n voucherCode\n }\n bundle\n bundleType\n subscriptionPeriod\n earlyAccess\n trial {\n duration\n }\n }\n term {\n purchaseDate\n startDate\n expiryDate\n nextRenewalDate\n pausedDate\n churnedDate\n isFreeTrial\n }\n externalSubscriptionId,\n cancellation {\n type\n restartEligible\n }\n stacking {\n status\n overlappingSubscriptionProviders\n previouslyStacked\n previouslyStackedByProvider\n }\n }\n }\n}\n\n \nfragment account on Account {\n id\n attributes {\n blocks {\n expiry\n reason\n }\n consentPreferences {\n dataElements {\n name\n value\n }\n purposes {\n consentDate\n firstTransactionDate\n id\n lastTransactionCollectionPointId\n lastTransactionCollectionPointVersion\n lastTransactionDate\n name\n status\n totalTransactionCount\n version\n }\n }\n dssIdentityCreatedAt\n email\n emailVerified\n lastSecurityFlaggedAt\n locations {\n manual {\n country\n }\n purchase {\n country\n source\n }\n registration 
{\n geoIp {\n country\n }\n }\n }\n securityFlagged\n tags\n taxId\n userVerified\n }\n parentalControls {\n isProfileCreationProtected\n }\n flows {\n star {\n isOnboarded\n }\n }\n}\n\n \nfragment profile on Profile {\n id\n name\n isAge21Verified\n attributes {\n avatar {\n id\n userSelected\n }\n isDefault\n kidsModeEnabled\n languagePreferences {\n appLanguage\n playbackLanguage\n preferAudioDescription\n preferSDH\n subtitleAppearance {\n backgroundColor\n backgroundOpacity\n description\n font\n size\n textColor\n }\n subtitleLanguage\n subtitlesEnabled\n }\n groupWatch {\n enabled\n }\n parentalControls {\n kidProofExitEnabled\n isPinProtected\n }\n playbackSettings {\n autoplay\n backgroundVideo\n prefer133\n preferImaxEnhancedVersion\n previewAudioOnHome\n previewVideoOnHome\n }\n }\n personalInfo {\n dateOfBirth\n gender\n age\n }\n maturityRating {\n ...maturityRating\n }\n personalInfo {\n dateOfBirth\n age\n gender\n }\n flows {\n personalInfo {\n eligibleForCollection\n requiresCollection\n }\n star {\n eligibleForOnboarding\n isOnboarded\n }\n }\n}\n\n\nfragment maturityRating on MaturityRating {\n ratingSystem\n ratingSystemValues\n contentMaturityRating\n maxRatingSystemValue\n isMaxContentMaturityRating\n}\n\n\n \nfragment session on Session {\n device {\n id\n platform\n }\n entitlements\n features {\n coPlay\n }\n inSupportedLocation\n isSubscriber\n location {\n type\n countryCode\n dma\n asn\n regionName\n connectionType\n zipCode\n }\n sessionId\n experiments {\n featureId\n variantId\n version\n }\n identity {\n id\n }\n account {\n id\n }\n profile {\n id\n parentalControls {\n liveAndUnratedContent {\n enabled\n }\n }\n }\n partnerName\n preferredMaturityRating {\n impliedMaturityRating\n ratingSystem\n }\n homeLocation {\n countryCode\n }\n portabilityLocation {\n countryCode\n type\n }\n}\n\n"""
|
||||
# GraphQL mutation: exchanges a refresh token for a new session (only sessionId is requested back).
REFRESH_TOKEN = """mutation refreshToken($input:RefreshTokenInput!) { refreshToken(refreshToken:$input) { activeSession{sessionId} } }"""
|
||||
# GraphQL mutation: reports the device's operating system details to the backend.
UPDATE_DEVICE = """mutation updateDeviceOperatingSystem($updateDeviceOperatingSystem: UpdateDeviceOperatingSystemInput!) {updateDeviceOperatingSystem(updateDeviceOperatingSystem: $updateDeviceOperatingSystem) {accepted}}"""
|
||||
334
HIDI/__init__.py
Normal file
334
HIDI/__init__.py
Normal file
@ -0,0 +1,334 @@
|
||||
import json
|
||||
import re
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional, Iterable
|
||||
from langcodes import Language
|
||||
import base64
|
||||
|
||||
import click
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Series, Movie, Movies, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle, Audio
|
||||
|
||||
|
||||
class HIDI(Service):
|
||||
"""
|
||||
Service code for HiDive (hidive.com)
|
||||
Version: 1.2.0
|
||||
Authorization: Email + password login, with automatic token refresh.
|
||||
Security: FHD@L3
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^https?://(?:www\.)?hidive\.com/(?:season/(?P<season_id>\d+)|playlist/(?P<playlist_id>\d+))$"
|
||||
GEOFENCE = ()
|
||||
NO_SUBTITLES = False
|
||||
|
||||
@staticmethod
@click.command(name="HIDI", short_help="https://hidive.com")
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: build the service instance from the CLI context and arguments.
    return HIDI(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
    """Parse the HiDive title URL and initialize per-title state.

    Accepts either a /season/<id> URL (series) or a /playlist/<id> URL (movie).

    Raises:
        ValueError: if the URL matches neither form.
        EnvironmentError: if the HIDI service config is missing.
    """
    super().__init__(ctx)
    m = re.match(self.TITLE_RE, title)
    if not m:
        raise ValueError("Unsupported HiDive URL. Use /season/<id> or /playlist/<id>")

    self.season_id = m.group("season_id")
    self.playlist_id = m.group("playlist_id")
    # "serie" marks series content; elsewhere only the "movie" value is compared.
    self.kind = "serie" if self.season_id else "movie"
    self.content_id = int(self.season_id or self.playlist_id)

    if not self.config:
        raise EnvironmentError("Missing HIDI service config.")
    self.cdm = ctx.obj.cdm  # CDM shared via the CLI context
    self._auth_token = None  # bearer token set by authenticate()
    self._refresh_token = None  # used by _refresh_auth() to renew the session
    self._drm_cache = {}  # title.id -> (jwt, license_url); populated by get_tracks()
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in to HiDive with email + password and store the auth/refresh tokens.

    Cookies are not used; the service relies on bearer-token auth only.

    Raises:
        ValueError: if no email/password credential is provided.
        PermissionError: if the credentials are rejected (HTTP 401).
    """
    # Static headers required by the DCE/imggaming front-office API.
    base_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "en-US",
        "Referer": "https://www.hidive.com/",
        "Origin": "https://www.hidive.com",
        "x-api-key": self.config["x_api_key"],
        "app": "dice",
        "Realm": "dce.hidive",
        "x-app-var": self.config["x_app_var"],
    }
    self.session.headers.update(base_headers)

    if not credential or not credential.username or not credential.password:
        raise ValueError("HiDive requires email + password")

    r_login = self.session.post(
        self.config["endpoints"]["login"],
        json={"id": credential.username, "secret": credential.password}
    )
    if r_login.status_code == 401:
        raise PermissionError("Invalid email or password.")
    r_login.raise_for_status()

    login_data = r_login.json()
    # Tokens used on every subsequent API call; renewal handled by _refresh_auth().
    self._auth_token = login_data["authorisationToken"]
    self._refresh_token = login_data["refreshToken"]

    self.session.headers["Authorization"] = f"Bearer {self._auth_token}"
    self.log.info("HiDive login successful.")
|
||||
|
||||
def _refresh_auth(self):
    """Renew the expired auth token using the stored refresh token.

    Raises:
        PermissionError: if no refresh token is held, or the server rejects it.
    """
    if not self._refresh_token:
        raise PermissionError("No refresh token available to renew session.")

    self.log.warning("Auth token expired, refreshing...")
    response = self.session.post(
        self.config["endpoints"]["refresh"],
        json={"refreshToken": self._refresh_token},
    )
    if response.status_code == 401:
        raise PermissionError("Refresh token is invalid. Please log in again.")
    response.raise_for_status()

    # Swap in the new bearer token for all subsequent requests.
    self._auth_token = response.json()["authorisationToken"]
    self.session.headers["Authorization"] = f"Bearer {self._auth_token}"
    self.log.info("Auth token refreshed successfully.")
|
||||
|
||||
def _api_get(self, url, **kwargs):
    """GET *url*, transparently refreshing the auth token on a 401 and retrying once."""
    response = self.session.get(url, **kwargs)
    if response.status_code == 401:
        self._refresh_auth()
        response = self.session.get(url, **kwargs)
    response.raise_for_status()
    return response
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Fetch metadata for the requested season (series) or playlist (movie).

    Uses the shared "view" endpoint for both kinds and builds either a
    Movies container holding a single Movie, or a Series of Episodes.

    Raises:
        ValueError: if no VOD / episodes can be found in the response.
    """
    # One endpoint for both season and playlist
    resp = self._api_get(
        self.config["endpoints"]["view"],
        params={
            "type": ("playlist" if self.kind == "movie" else "season"),
            "id": self.content_id,
            "timezone": "Europe/Amsterdam",
        },
    )
    data = resp.json()

    if self.kind == "movie":
        # Find the playlist bucket, then the single VOD
        vod_id = None
        movie_title = None
        description = ""
        for elem in data.get("elements", []):
            if elem.get("$type") == "hero":
                # The hero element carries the display title and synopsis.
                hdr = (elem.get("attributes", {}).get("header", {}) or {}).get("attributes", {})
                movie_title = hdr.get("text", movie_title)
                for c in elem.get("attributes", {}).get("content", []):
                    if c.get("$type") == "textblock":
                        description = c.get("attributes", {}).get("text", description)
            if elem.get("$type") == "bucket" and elem.get("attributes", {}).get("type") == "playlist":
                items = elem.get("attributes", {}).get("items", [])
                if items:
                    vod_id = items[0]["id"]
                    if not movie_title:
                        movie_title = items[0].get("title")
                    if not description:
                        description = items[0].get("description", "")
                    break

        if not vod_id:
            raise ValueError("No VOD found in playlist data.")

        return Movies([
            Movie(
                id_=vod_id,
                service=self.__class__,
                name=movie_title or "Unknown Title",
                description=description or "",
                year=None,
                language=Language.get("en"),
                data={"playlistId": self.content_id},
            )
        ])

    # Series
    episodes = []
    for elem in data.get("elements", []):
        if elem.get("$type") == "bucket" and elem["attributes"].get("type") == "season":
            for item in elem["attributes"].get("items", []):
                if item.get("type") != "SEASON_VOD":
                    continue
                # Episode numbers are only encoded in titles shaped like "E5 - Name".
                ep_title = item["title"]
                ep_num = 1
                if ep_title.startswith("E") and " - " in ep_title:
                    try:
                        ep_num = int(ep_title.split(" - ")[0][1:])
                    except ValueError:
                        # Title did not follow the "E<number> - ..." pattern; keep default 1.
                        pass
                episodes.append(Episode(
                    id_=item["id"],
                    service=self.__class__,
                    title=data.get("metadata", {}).get("series", {}).get("title", "") or "HiDive",
                    season=1,
                    number=ep_num,
                    name=item["title"],
                    description=item.get("description", ""),
                    language=Language.get("en"),
                    data=item,
                ))
            break

    if not episodes:
        raise ValueError("No episodes found in season data.")
    return Series(sorted(episodes, key=lambda x: x.number))
|
||||
|
||||
def _get_audio_for_langs(self, mpd_url: str, langs: Iterable[Language]) -> list[Audio]:
    """Parse the DASH manifest at *mpd_url* and return audio tracks matching *langs*.

    Tracks are matched by 2-letter language prefix and de-duplicated on
    (language, codec, bitrate). If nothing matches, all available audio
    tracks are returned instead of an empty list.
    """
    # Materialize up front: the parameter is only an Iterable, but the body
    # needs to index it (langs[0]) and iterate it more than once.
    langs = list(langs)
    merged: list[Audio] = []
    seen = set()

    # Use first available language as fallback, or "en" as ultimate fallback
    fallback_lang = langs[0] if langs else Language.get("en")

    dash = DASH.from_url(mpd_url, session=self.session)
    try:
        # Parse with a valid fallback language
        base_tracks = dash.to_tracks(language=fallback_lang)
    except Exception:
        # Try with English as ultimate fallback
        base_tracks = dash.to_tracks(language=Language.get("en"))

    all_audio = base_tracks.audio or []

    for lang in langs:
        # Match by language prefix (e.g. en, ja)
        for audio in all_audio:
            lang_code = getattr(audio.language, "language", "en")
            if lang_code.startswith(lang.language[:2]):
                key = (lang_code, getattr(audio, "codec", None), getattr(audio, "bitrate", None))
                if key in seen:
                    continue
                merged.append(audio)
                seen.add(key)

    # If nothing matched, just return all available audio tracks
    if not merged and all_audio:
        merged = all_audio

    return merged
|
||||
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Resolve playback for *title*: DASH video/audio, VTT subtitles, and DRM info.

    Side effect: stores (jwt, license_url) in self._drm_cache so
    get_widevine_license() can reach it via _hidive_get_drm_info().

    Raises:
        ValueError: if no playback URL or no DASH stream is available.
    """
    vod_resp = self._api_get(
        self.config["endpoints"]["vod"].format(vod_id=title.id),
        params={"includePlaybackDetails": "URL"},
    )
    vod = vod_resp.json()

    playback_url = vod.get("playerUrlCallback")
    if not playback_url:
        raise ValueError("No playback URL found.")

    stream_data = self._api_get(playback_url).json()
    dash_list = stream_data.get("dash", [])
    if not dash_list:
        raise ValueError("No DASH streams available.")

    # Only the first DASH entry is used.
    entry = dash_list[0]
    mpd_url = entry["url"]

    # Collect available HiDive metadata languages
    meta_audio_tracks = vod.get("onlinePlaybackMetadata", {}).get("audioTracks", [])
    available_langs = []
    for m in meta_audio_tracks:
        # Keep only the primary subtag, e.g. "en" from "en-US".
        lang_code = (m.get("languageCode") or "").split("-")[0]
        if not lang_code:
            continue
        try:
            available_langs.append(Language.get(lang_code))
        except Exception:
            continue

    # Use first available language as fallback, or English as ultimate fallback
    fallback_lang = available_langs[0] if available_langs else Language.get("en")

    # Parse DASH manifest with a valid fallback language
    base_tracks = DASH.from_url(mpd_url, session=self.session).to_tracks(language=fallback_lang)

    # NOTE(review): the helper fetches and parses the same MPD a second time —
    # confirm whether the parsed manifest could be reused instead.
    audio_tracks = self._get_audio_for_langs(mpd_url, available_langs)

    # Map metadata labels
    meta_audio_map = {m.get("languageCode", "").split("-")[0]: m.get("label") for m in meta_audio_tracks}
    for a in audio_tracks:
        lang_code = getattr(a.language, "language", "en")
        a.name = meta_audio_map.get(lang_code, lang_code)
        a.is_original_lang = (lang_code == title.language.language)

    base_tracks.audio = audio_tracks

    # Subtitles
    subtitles = []
    for sub in entry.get("subtitles", []):
        if sub.get("format", "").lower() != "vtt":
            continue
        lang_code = sub.get("language", "en").replace("-", "_")
        try:
            lang = Language.get(lang_code)
        except Exception:
            lang = Language.get("en")
        subtitles.append(Subtitle(
            id_=f"{lang_code}:vtt",
            url=sub.get("url"),
            language=lang,
            codec=Subtitle.Codec.WebVTT,
            name=lang.language_name(),
        ))
    base_tracks.subtitles = subtitles

    # DRM info
    drm = entry.get("drm", {}) or {}
    jwt = drm.get("jwtToken")
    lic_url = (drm.get("url") or "").strip()
    if jwt and lic_url:
        self._drm_cache[title.id] = (jwt, lic_url)

    return base_tracks
|
||||
|
||||
|
||||
def _hidive_get_drm_info(self, title: Title_T) -> tuple[str, str]:
    """Return (jwt, license_url) for *title*, running get_tracks() to fill the cache if needed."""
    cached = self._drm_cache.get(title.id)
    if cached is None:
        # get_tracks() populates self._drm_cache as a side effect.
        self.get_tracks(title)
        cached = self._drm_cache[title.id]
    return cached
|
||||
|
||||
def _decode_hidive_license_payload(self, payload: bytes) -> bytes:
    """Unwrap a base64 data-URI license response; return raw payloads unchanged."""
    prefix = "data:application/octet-stream;base64,"
    text = payload.decode("utf-8", errors="ignore")
    if not text.startswith(prefix):
        return payload
    _, b64 = text.split(",", 1)
    return base64.b64decode(b64)
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes | str | None:
    """POST the Widevine *challenge* to the title's license server and return the license.

    The JWT and license URL come from the DRM cache populated by get_tracks().
    """
    jwt_token, license_url = self._hidive_get_drm_info(title)
    headers = {
        "Authorization": f"Bearer {jwt_token}",
        "Content-Type": "application/octet-stream",
        "Accept": "*/*",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
        "Origin": "https://www.hidive.com",
        "Referer": "https://www.hidive.com/",
        # base64 of {"system":"com.widevine.alpha"} — selects the Widevine DRM system.
        "X-DRM-INFO": "eyJzeXN0ZW0iOiJjb20ud2lkZXZpbmUuYWxwaGEifQ==",
    }
    r = self.session.post(license_url, data=challenge, headers=headers, timeout=30)
    r.raise_for_status()
    # Some responses arrive as a base64 data-URI; unwrap to raw license bytes.
    return self._decode_hidive_license_payload(r.content)
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
    """HiDive exposes no chapter data; always return an empty list."""
    return []
|
||||
10
HIDI/config.yaml
Normal file
10
HIDI/config.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
x_api_key: "857a1e5d-e35e-4fdf-805b-a87b6f8364bf"
|
||||
x_app_var: "6.59.1.e16cdfd"
|
||||
|
||||
endpoints:
|
||||
init: "https://dce-frontoffice.imggaming.com/api/v1/init/"
|
||||
login: "https://dce-frontoffice.imggaming.com/api/v2/login"
|
||||
vod: "https://dce-frontoffice.imggaming.com/api/v4/vod/{vod_id}?includePlaybackDetails=URL" # NOTE: URL already carries includePlaybackDetails; get_tracks() also passes it as a param (harmless duplicate)
|
||||
adjacent: "https://dce-frontoffice.imggaming.com/api/v4/vod/{vod_id}/adjacent"
|
||||
view: "https://dce-frontoffice.imggaming.com/api/v1/view" # Changed from season_view
|
||||
refresh: "https://dce-frontoffice.imggaming.com/api/v2/token/refresh"
|
||||
361
ITV/__init__.py
Normal file
361
ITV/__init__.py
Normal file
@ -0,0 +1,361 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import click
|
||||
from bs4 import BeautifulSoup
|
||||
from click import Context
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Subtitle, Tracks
|
||||
|
||||
|
||||
class ITV(Service):
|
||||
"""
|
||||
Service code for ITVx streaming service (https://www.itv.com/).
|
||||
|
||||
\b
|
||||
Version: 1.0.2
|
||||
Author: stabbedbybrick
|
||||
Authorization: Cookies (Optional for free content | Required for premium content)
|
||||
Robustness:
|
||||
L3: 1080p
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Use complete title URL as input (pay attention to the URL format):
|
||||
SERIES: https://www.itv.com/watch/bay-of-fires/10a5270
|
||||
EPISODE: https://www.itv.com/watch/bay-of-fires/10a5270/10a5270a0001
|
||||
FILM: https://www.itv.com/watch/mad-max-beyond-thunderdome/2a7095
|
||||
- Some shows aren't listed as series, only as "Latest episodes"
|
||||
Download by SERIES URL for those titles, not by EPISODE URL
|
||||
|
||||
\b
|
||||
Examples:
|
||||
- SERIES: devine dl -w s01e01 itv https://www.itv.com/watch/bay-of-fires/10a5270
|
||||
- EPISODE: devine dl itv https://www.itv.com/watch/bay-of-fires/10a5270/10a5270a0001
|
||||
- FILM: devine dl itv https://www.itv.com/watch/mad-max-beyond-thunderdome/2a7095
|
||||
|
||||
\b
|
||||
Notes:
|
||||
ITV seem to detect and throttle multiple connections against the server.
|
||||
It's recommended to use requests as downloader, with few workers.
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("gb",)
|
||||
ALIASES = ("itvx",)
|
||||
|
||||
@staticmethod
@click.command(name="ITV", short_help="https://www.itv.com/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> ITV:
    # Click entry point: build the service instance from the CLI context and arguments.
    return ITV(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    """Store the title URL, initialize the base Service, and apply default headers."""
    self.title = title
    super().__init__(ctx)

    # Fall back to the "default" profile when none was given on the CLI.
    self.profile = ctx.parent.params.get("profile") or "default"

    self.session.headers.update(self.config["headers"])
|
||||
|
||||
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate against ITVx using browser cookies.

    Reads the "Itv.Session" cookie, extracts its refresh token, exchanges it
    for a fresh access token, and caches the token set per profile.
    Credentials alone are not supported and abort the run.

    Raises:
        ConnectionError: if the token refresh request does not return 200.
    """
    super().authenticate(cookies, credential)
    self.authorization = None

    if credential and not cookies:
        self.log.error(" - Error: This service requires cookies for authentication.")
        sys.exit(1)

    if cookies is not None:
        self.log.info(f"\n + Cookies for '{self.profile}' profile found, authenticating...")
        itv_session = next((cookie.value for cookie in cookies if cookie.name == "Itv.Session"), None)
        if not itv_session:
            self.log.error(" - Error: Session cookie not found. Cookies may be invalid.")
            sys.exit(1)

        # The cookie value is a JSON document carrying the token set.
        itv_session = json.loads(itv_session)
        refresh_token = itv_session["tokens"]["content"].get("refresh_token")
        if not refresh_token:
            self.log.error(" - Error: Access tokens not found. Try refreshing your cookies.")
            sys.exit(1)

        # A previously cached refresh token (if any) takes precedence over the cookie's.
        cache = self.cache.get(f"tokens_{self.profile}")

        headers = {
            "Host": "auth.prd.user.itv.com",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0",
            "Accept": "application/vnd.user.auth.v2+json",
            "Accept-Language": "en-US,en;q=0.8",
            "Origin": "https://www.itv.com",
            "Connection": "keep-alive",
            "Referer": "https://www.itv.com/",
        }

        params = {"refresh": cache.data["refresh_token"]} if cache else {"refresh": refresh_token}

        r = self.session.get(
            self.config["endpoints"]["refresh"],
            headers=headers,
            params=params,
        )
        if r.status_code != 200:
            raise ConnectionError(f"Failed to refresh tokens: {r.text}")

        tokens = r.json()
        # NOTE(review): cache.set() runs even when `cache` evaluated falsy above —
        # presumably self.cache.get() returns a usable (empty) cache object; confirm.
        cache.set(tokens)
        self.log.info(" + Tokens refreshed and placed in cache\n")

        self.authorization = tokens["access_token"]
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the ITV search API and yield a SearchResult per programme/special/film."""
    params = {
        "broadcaster": "itv",
        "featureSet": "clearkey,outband-webvtt,hls,aes,playready,widevine,fairplay,bbts,progressive,hd,rtmpe",
        "onlyFree": "false",
        "platform": "dotcom",
        "query": self.title,
    }

    r = self.session.get(self.config["endpoints"]["search"], params=params)
    r.raise_for_status()

    results = r.json()["results"]
    if not isinstance(results, list):
        return

    for result in results:
        info = result["data"]
        # A result carries exactly one of these title fields; take the first set one.
        title = info.get("specialTitle") or info.get("programmeTitle") or info.get("filmTitle")
        tier = info.get("tier")

        slug = self._sanitize(title)

        # Reduce the legacy API id to the short watch-page id form.
        legacy = info["legacyId"]["apiEncoded"]
        legacy = "_".join(legacy.split("_")[:2]).replace("_", "a")
        legacy = re.sub(r"a000\d+", "", legacy)

        watch_url = f"https://www.itv.com/watch/{slug}/{legacy}"
        yield SearchResult(
            id_=watch_url,
            title=title,
            description=info.get("synopsis"),
            label=result.get("entityType") + f" {tier}",
            url=watch_url,
        )
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Parse the watch-page data into a Series or Movies container.

    Chooses a handling path from the page's seriesList: a normal series, a
    film, a single on-demand episode, or a "Latest/Other episodes" rail for
    shows that carry no SERIES/FILM marker.
    """
    data = self.get_data(self.title)
    # The first seriesList entry flagged SERIES or FILM decides the title type.
    kind = next(
        (x.get("seriesType") for x in data.get("seriesList") if x.get("seriesType") in ["SERIES", "FILM"]), None
    )

    # Some shows are not listed as "SERIES" or "FILM", only as "Latest episodes"
    if not kind and next(
        (x for x in data.get("seriesList") if x.get("seriesLabel").lower() in ("latest episodes", "other episodes")), None
    ):
        titles = data["seriesList"][0]["titles"]
        episodes = [
            Episode(
                id_=episode["episodeId"],
                service=self.__class__,
                title=data["programme"]["title"],
                # Non-integer series/episode markers (e.g. specials) map to 0.
                season=episode.get("series") if isinstance(episode.get("series"), int) else 0,
                number=episode.get("episode") if isinstance(episode.get("episode"), int) else 0,
                name=episode["episodeTitle"],
                language="en",  # TODO: language detection
                data=episode,
            )
            for episode in titles
        ]
        # Assign episode numbers to special seasons
        counter = 1
        for episode in episodes:
            if episode.season == 0 and episode.number == 0:
                episode.number = counter
                counter += 1
        return Series(episodes)

    # Direct on-demand episode URL: the page carries a single "episode" object.
    if kind == "SERIES" and data.get("episode"):
        episode = data.get("episode")
        return Series(
            [
                Episode(
                    id_=episode["episodeId"],
                    service=self.__class__,
                    title=data["programme"]["title"],
                    season=episode.get("series") if isinstance(episode.get("series"), int) else 0,
                    number=episode.get("episode") if isinstance(episode.get("episode"), int) else 0,
                    name=episode["episodeTitle"],
                    language="en",  # TODO: language detection
                    data=episode,
                )
            ]
        )

    elif kind == "SERIES":
        # Full series: flatten every rail except the duplicate "Latest episodes" one.
        return Series(
            [
                Episode(
                    id_=episode["episodeId"],
                    service=self.__class__,
                    title=data["programme"]["title"],
                    season=episode.get("series") if isinstance(episode.get("series"), int) else 0,
                    number=episode.get("episode") if isinstance(episode.get("episode"), int) else 0,
                    name=episode["episodeTitle"],
                    language="en",  # TODO: language detection
                    data=episode,
                )
                for series in data["seriesList"]
                if "Latest episodes" not in series["seriesLabel"]
                for episode in series["titles"]
            ]
        )

    elif kind == "FILM":
        return Movies(
            [
                Movie(
                    id_=movie["episodeId"],
                    service=self.__class__,
                    name=data["programme"]["title"],
                    year=movie.get("productionYear"),
                    language="en",  # TODO: language detection
                    data=movie,
                )
                for movies in data["seriesList"]
                for movie in movies["titles"]
            ]
        )
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Request a playlist for the title and build the DASH track list.

    Side effects: stores the manifest URL in self.manifest and the Widevine
    key-service URL in self.license for get_widevine_license().
    """
    playlist = title.data.get("playlistUrl")

    headers = {
        "Accept": "application/vnd.itv.vod.playlist.v4+json",
        "Accept-Language": "en-US,en;q=0.9,da;q=0.8",
        "Connection": "keep-alive",
        "Content-Type": "application/json",
    }

    # Request as an LG "ctv" client, which exposes DASH + Widevine (L3, HD).
    payload = {
        "client": {
            "id": "lg",
        },
        "device": {
            "deviceGroup": "ctv",
        },
        "variantAvailability": {
            "player": "dash",
            "featureset": [
                "mpeg-dash",
                "widevine",
                "outband-webvtt",
                "hd",
                "single-track",
            ],
            "platformTag": "ctv",
            "drm": {
                "system": "widevine",
                "maxSupported": "L3",
            },
        },
    }
    # Attach the user token when available; some content requires it.
    if self.authorization:
        payload["user"] = {"token": self.authorization}

    r = self.session.post(playlist, headers=headers, json=payload)
    if r.status_code != 200:
        raise ConnectionError(r.text)

    data = r.json()
    video = data["Playlist"]["Video"]
    subtitles = video.get("Subtitles")
    self.manifest = video["MediaFiles"][0].get("Href")
    self.license = video["MediaFiles"][0].get("KeyServiceUrl")

    tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)
    # Keep the full playlist response on the video track for get_chapters().
    tracks.videos[0].data = data

    if subtitles is not None:
        for subtitle in subtitles:
            tracks.add(
                Subtitle(
                    # Stable short id derived from the subtitle URL.
                    id_=hashlib.md5(subtitle.get("Href", "").encode()).hexdigest()[0:6],
                    url=subtitle.get("Href", ""),
                    # Codec inferred from the URL's 3-character file extension.
                    codec=Subtitle.Codec.from_mime(subtitle.get("Href", "")[-3:]),
                    language=title.language,
                    forced=False,
                )
            )

    # Flag audio-description / alternate audio renditions as descriptive.
    for track in tracks.audio:
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Build chapters from the playlist's ad-break (ContentBreaks) positions.

    Relies on get_tracks() having stored the playlist response on the first
    video track. TimeCodes look like "HH:MM:SS:mmm"; the final ":" is turned
    into "." to form a chapter timestamp.
    """
    track = title.tracks.videos[0]
    if not track.data["Playlist"].get("ContentBreaks"):
        return Chapters()

    breaks = track.data["Playlist"]["ContentBreaks"]
    # Skip the zero break at the very start of the stream.
    timecodes = [".".join(x.get("TimeCode").rsplit(":", 1)) for x in breaks if x.get("TimeCode") != "00:00:00:000"]

    # End credits are sometimes listed before the last chapter, so we skip those for now
    return Chapters([Chapter(timecode) for timecode in timecodes])
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    # No service certificate is supplied for this service's license requests.
    # NOTE(review): returns None despite the `-> str` annotation; callers
    # presumably treat a falsy value as "no certificate" — confirm before tightening.
    return None
||||
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the Widevine challenge to the key-service URL and return the raw license."""
    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.text)
    return response.content
||||
# Service specific functions
|
||||
|
||||
def get_data(self, url: str) -> dict:
    """Fetch a watch page and return the Next.js page props embedded in it."""
    # TODO: Find a proper endpoint for this
    response = self.session.get(url)
    if response.status_code != 200:
        raise ConnectionError(response.text)

    page = BeautifulSoup(response.text, "html.parser")
    next_data = page.select_one("#__NEXT_DATA__").text

    try:
        parsed = json.loads(next_data)
    except Exception as e:
        raise ValueError(f"Failed to parse JSON: {e}")

    return parsed["props"]["pageProps"]
||||
@staticmethod
|
||||
def _sanitize(title: str) -> str:
|
||||
title = title.lower()
|
||||
title = title.replace("&", "and")
|
||||
title = re.sub(r"[:;/()]", "", title)
|
||||
title = re.sub(r"[ ]", "-", title)
|
||||
title = re.sub(r"[\\*!?¿,'\"<>|$#`’]", "", title)
|
||||
title = re.sub(rf"[{'.'}]{{2,}}", ".", title)
|
||||
title = re.sub(rf"[{'_'}]{{2,}}", "_", title)
|
||||
title = re.sub(rf"[{'-'}]{{2,}}", "-", title)
|
||||
title = re.sub(rf"[{' '}]{{2,}}", " ", title)
|
||||
return title
|
||||
7
ITV/config.yaml
Normal file
7
ITV/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
headers:
|
||||
User-Agent: okhttp/4.9.3
|
||||
|
||||
endpoints:
|
||||
login: https://auth.prd.user.itv.com/v2/auth
|
||||
refresh: https://auth.prd.user.itv.com/token
|
||||
search: https://textsearch.prd.oasvc.itv.com/search
|
||||
407
KNPY/__init__.py
Normal file
407
KNPY/__init__.py
Normal file
@ -0,0 +1,407 @@
|
||||
import base64
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import List, Optional
|
||||
|
||||
import click
|
||||
import jwt
|
||||
from langcodes import Language
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Subtitle, Tracks
|
||||
|
||||
|
||||
class KNPY(Service):
    """
    Service code for Kanopy (kanopy.com).
    Version: 1.0.0

    Auth: Credential (username + password)
    Security: FHD@L3

    Handles both Movies and Series (Playlists).
    Detects and stops for movies that require tickets.
    Caching included
    """

    # URL must end in a numeric id; any library/path prefix before it is accepted.
    TITLE_RE = r"^https?://(?:www\.)?kanopy\.com/.+/(?P<id>\d+)$"
    GEOFENCE = ()  # no geo restriction enforced by this service code
    NO_SUBTITLES = False  # subtitles are added from the play response in get_tracks()
||||
@staticmethod
@click.command(name="KNPY", short_help="https://kanopy.com")
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: constructs the KNPY service with the parsed TITLE argument.
    return KNPY(ctx, **kwargs)
||||
def __init__(self, ctx, title: str):
    """Initialize the service from a Kanopy URL (or a plain search query).

    Args:
        ctx: Click context carrying the shared session and CDM objects.
        title: Full kanopy.com watch URL; anything that does not match
            TITLE_RE is kept as a search query instead.
    """
    super().__init__(ctx)
    if not self.config:
        raise ValueError("KNPY configuration not found. Ensure config.yaml exists.")

    self.cdm = ctx.obj.cdm

    match = re.match(self.TITLE_RE, title)
    if match:
        self.content_id = match.group("id")
    else:
        # Not a recognised URL: treat the input as a search query.
        self.content_id = None
        self.search_query = title

    # Client identity strings come from config.yaml (Android app impersonation).
    self.API_VERSION = self.config["client"]["api_version"]
    self.USER_AGENT = self.config["client"]["user_agent"]
    self.WIDEVINE_UA = self.config["client"]["widevine_ua"]

    self.session.headers.update({
        "x-version": self.API_VERSION,
        "user-agent": self.USER_AGENT
    })

    # Populated later by authenticate() / get_tracks().
    self._jwt = None
    self._visitor_id = None
    self._user_id = None
    self._domain_id = None
    self.widevine_license_url = None
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in to Kanopy, reusing a cached JWT for the same user when possible.

    Flow: check the "auth_token" cache entry; if it is valid and was issued
    for this username, reuse it (extracting ids from the JWT payload).
    Otherwise perform the handshake (visitor token) followed by the
    email/password login, then cache the new JWT until its "exp" claim.

    Raises:
        ValueError: If no username/password credential is provided.
    """
    if not credential or not credential.username or not credential.password:
        raise ValueError("Kanopy requires email and password for authentication.")

    cache = self.cache.get("auth_token")

    if cache and not cache.expired:
        cached_data = cache.data
        valid_token = None

        if isinstance(cached_data, dict) and "token" in cached_data:
            # Only reuse a token that was issued for the same account.
            if cached_data.get("username") == credential.username:
                valid_token = cached_data["token"]
                self.log.info("Using cached authentication token")
            else:
                self.log.info(f"Cached token belongs to '{cached_data.get('username')}', but logging in as '{credential.username}'. Re-authenticating.")

        elif isinstance(cached_data, str):
            # Old cache format stored just the token string with no username.
            self.log.info("Found legacy cached token format. Re-authenticating to ensure correct user.")

        if valid_token:
            self._jwt = valid_token
            self.session.headers.update({"authorization": f"Bearer {self._jwt}"})

            if not self._user_id or not self._domain_id or not self._visitor_id:
                try:
                    # The JWT payload carries the ids we need; no signature check locally.
                    decoded_jwt = jwt.decode(self._jwt, options={"verify_signature": False})
                    self._user_id = decoded_jwt["data"]["uid"]
                    self._visitor_id = decoded_jwt["data"]["visitor_id"]
                    self.log.info(f"Extracted user_id and visitor_id from cached token.")
                    self._fetch_user_details()
                    return
                except (KeyError, jwt.DecodeError) as e:
                    # Fall through to a fresh login below.
                    self.log.error(f"Could not decode cached token: {e}. Re-authenticating.")

    self.log.info("Performing handshake to get visitor token...")
    r = self.session.get(self.config["endpoints"]["handshake"])
    r.raise_for_status()
    handshake_data = r.json()
    self._visitor_id = handshake_data["visitorId"]
    initial_jwt = handshake_data["jwt"]  # anonymous token, used only for the login call

    self.log.info(f"Logging in as {credential.username}...")
    login_payload = {
        "credentialType": "email",
        "emailUser": {
            "email": credential.username,
            "password": credential.password
        }
    }
    r = self.session.post(
        self.config["endpoints"]["login"],
        json=login_payload,
        headers={"authorization": f"Bearer {initial_jwt}"}
    )
    r.raise_for_status()
    login_data = r.json()
    self._jwt = login_data["jwt"]
    self._user_id = login_data["userId"]

    self.session.headers.update({"authorization": f"Bearer {self._jwt}"})
    self.log.info(f"Successfully authenticated as {credential.username}")

    self._fetch_user_details()

    try:
        decoded_jwt = jwt.decode(self._jwt, options={"verify_signature": False})
        exp_timestamp = decoded_jwt.get("exp")

        cache_payload = {
            "token": self._jwt,
            "username": credential.username
        }

        if exp_timestamp:
            # Cache until the token's own expiry time.
            expiration_in_seconds = int(exp_timestamp - datetime.now(timezone.utc).timestamp())
            self.log.info(f"Caching token for {expiration_in_seconds / 60:.2f} minutes.")
            cache.set(data=cache_payload, expiration=expiration_in_seconds)
        else:
            self.log.warning("JWT has no 'exp' claim, caching for 1 hour as a fallback.")
            cache.set(data=cache_payload, expiration=3600)
    except Exception as e:
        self.log.error(f"Failed to decode JWT for caching: {e}. Caching for 1 hour as a fallback.")
        cache.set(
            data={"token": self._jwt, "username": credential.username},
            expiration=3600
        )
||||
def _fetch_user_details(self):
    """Resolve the library (domain) this account should use.

    Prefers the user's default active membership, then any active
    membership, and finally the first membership in the list. Sets
    self._domain_id as a string.

    Raises:
        ValueError: If the account has no library memberships at all.
    """
    self.log.info("Fetching user library memberships...")
    r = self.session.get(self.config["endpoints"]["memberships"].format(user_id=self._user_id))
    r.raise_for_status()
    memberships = r.json().get("list", [])

    if not memberships:
        raise ValueError("No active library memberships found for this user.")

    for membership in memberships:
        if membership.get("status") == "active" and membership.get("isDefault", False):
            self._domain_id = str(membership["domainId"])
            self.log.info(f"Using default library domain: {membership.get('sitename', 'Unknown')} (ID: {self._domain_id})")
            return

    # No default flagged: fall back to the first *active* membership so the
    # warning below is accurate (the old code took the first entry regardless
    # of its status), then to the very first entry as a last resort.
    fallback = next((m for m in memberships if m.get("status") == "active"), memberships[0])
    self._domain_id = str(fallback["domainId"])
    self.log.warning(f"No default library found. Using first active domain: {self._domain_id}")
||||
def get_titles(self) -> Titles_T:
    """Fetch metadata for the configured content id and build title objects.

    Returns:
        Movies with a single Movie for "video" content, or a Series built
        from the playlist items for "playlist" content.

    Raises:
        ValueError: If no content id / domain id is set, or the content type
            is neither "video" nor "playlist".
    """
    if not self.content_id:
        raise ValueError("A content ID is required to get titles. Use a URL or run a search first.")
    if not self._domain_id:
        raise ValueError("Domain ID not set. Authentication may have failed.")

    r = self.session.get(self.config["endpoints"]["video_info"].format(video_id=self.content_id, domain_id=self._domain_id))
    r.raise_for_status()
    content_data = r.json()

    content_type = content_data.get("type")

    def parse_lang(data):
        # Best-effort: use the first listed language, defaulting to English.
        try:
            langs = data.get("languages", [])
            if langs and isinstance(langs, list) and len(langs) > 0:
                return Language.find(langs[0])
        except Exception:  # narrowed from a bare except; lookup stays best-effort
            pass
        return Language.get("en")

    if content_type == "video":
        video_data = content_data["video"]
        movie = Movie(
            id_=str(video_data["videoId"]),
            service=self.__class__,
            name=video_data["title"],
            year=video_data.get("productionYear"),
            description=video_data.get("descriptionHtml", ""),
            language=parse_lang(video_data),
            data=video_data,
        )
        return Movies([movie])

    elif content_type == "playlist":
        playlist_data = content_data["playlist"]
        series_title = playlist_data["title"]
        series_year = playlist_data.get("productionYear")

        # Season number is only available embedded in the playlist title.
        season_match = re.search(r'(?:Season|S)\s*(\d+)', series_title, re.IGNORECASE)
        season_num = int(season_match.group(1)) if season_match else 1

        r = self.session.get(self.config["endpoints"]["video_items"].format(video_id=self.content_id, domain_id=self._domain_id))
        r.raise_for_status()
        items_data = r.json()

        episodes = []
        for i, item in enumerate(items_data.get("list", [])):
            if item.get("type") != "video":
                continue

            video_data = item["video"]
            ep_num = i + 1  # positional fallback when the title has no episode marker

            ep_title = video_data.get("title", "")
            ep_match = re.search(r'Ep(?:isode)?\.?\s*(\d+)', ep_title, re.IGNORECASE)
            if ep_match:
                ep_num = int(ep_match.group(1))

            episodes.append(
                Episode(
                    id_=str(video_data["videoId"]),
                    service=self.__class__,
                    title=series_title,
                    season=season_num,
                    number=ep_num,
                    name=video_data["title"],
                    description=video_data.get("descriptionHtml", ""),
                    year=video_data.get("productionYear", series_year),
                    language=parse_lang(video_data),
                    data=video_data,
                )
            )

        series = Series(episodes)
        series.name = series_title
        series.description = playlist_data.get("descriptionHtml", "")
        series.year = series_year
        return series

    else:
        raise ValueError(f"Unsupported content type: {content_type}")
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Start a play session, resolve the DRM license URL, and build tracks.

    Side effect: sets self.widevine_license_url for get_widevine_license().
    """
    play_payload = {
        "videoId": int(title.id),
        "domainId": int(self._domain_id),
        "userId": int(self._user_id),
        "visitorId": self._visitor_id
    }

    # Re-assert auth headers in case an earlier call cleared them (see below).
    self.session.headers.setdefault("authorization", f"Bearer {self._jwt}")
    self.session.headers.setdefault("x-version", self.API_VERSION)
    self.session.headers.setdefault("user-agent", self.USER_AGENT)

    r = self.session.post(self.config["endpoints"]["plays"], json=play_payload)
    response_json = None
    try:
        response_json = r.json()
    except Exception:
        pass  # non-JSON body; fall through to status handling below

    # Handle known errors gracefully
    if r.status_code == 403:
        if response_json and response_json.get("errorSubcode") == "playRegionRestricted":
            self.log.error("Kanopy reports: This video is not available in your country.")
            raise PermissionError(
                "Playback blocked by region restriction. Try connecting through a supported country or verify your library’s access region."
            )
        else:
            self.log.error(f"Access forbidden (HTTP 403). Response: {response_json}")
            raise PermissionError("Kanopy denied access to this video. It may require a different library membership or authentication.")

    # Raise for any other HTTP errors
    r.raise_for_status()
    play_data = response_json or r.json()

    manifest_url = None
    for manifest in play_data.get("manifests", []):
        if manifest["manifestType"] == "dash":
            url = manifest["url"]
            manifest_url = f"https://kanopy.com{url}" if url.startswith("/") else url
            drm_type = manifest.get("drmType")
            if drm_type == "kanopyDrm":
                # Kanopy's own DRM: license id is "<playId>-0".
                play_id = play_data.get("playId")
                self.widevine_license_url = self.config["endpoints"]["widevine_license"].format(license_id=f"{play_id}-0")
            elif drm_type == "studioDrm":
                # Studio DRM: explicit license id, defaulting to "<playId>-1".
                license_id = manifest.get("drmLicenseID", f"{play_data.get('playId')}-1")
                self.widevine_license_url = self.config["endpoints"]["widevine_license"].format(license_id=license_id)
            else:
                self.log.warning(f"Unknown drmType: {drm_type}")
                self.widevine_license_url = None
            break

    if not manifest_url:
        raise ValueError("Could not find a DASH manifest for this title.")
    if not self.widevine_license_url:
        raise ValueError("Could not construct Widevine license URL.")

    self.log.info(f"Fetching DASH manifest from: {manifest_url}")
    r = self.session.get(manifest_url)
    r.raise_for_status()

    # Refresh headers for manifest parsing
    # NOTE(review): clear() also wipes the session's auth headers; subsequent
    # calls depend on the setdefault() calls at the top of this method to
    # restore them — confirm this is intended.
    self.session.headers.clear()
    self.session.headers.update({
        "User-Agent": self.WIDEVINE_UA,
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "keep-alive",
    })

    tracks = DASH.from_text(r.text, url=manifest_url).to_tracks(language=title.language)
    for caption_data in play_data.get("captions", []):
        lang = caption_data.get("language", "en")
        for file_info in caption_data.get("files", []):
            if file_info.get("type") == "webvtt":
                tracks.add(Subtitle(
                    id_=f"caption-{lang}",
                    url=file_info["url"],
                    codec=Subtitle.Codec.WebVTT,
                    language=Language.get(lang)
                ))
                break  # one WebVTT file per caption language is enough

    return tracks
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
    """Exchange a Widevine challenge for a license at the URL set by get_tracks()."""
    if not self.widevine_license_url:
        raise ValueError("Widevine license URL was not set. Call get_tracks first.")

    response = self.session.post(
        self.widevine_license_url,
        data=challenge,
        headers={
            "Content-Type": "application/octet-stream",
            "User-Agent": self.WIDEVINE_UA,
            "Authorization": f"Bearer {self._jwt}",
            "X-Version": self.API_VERSION,
        },
    )
    response.raise_for_status()
    return response.content
||||
# def search(self) -> List[SearchResult]:
|
||||
# if not hasattr(self, 'search_query'):
|
||||
# self.log.error("Search query not set. Cannot search.")
|
||||
# return []
|
||||
|
||||
# self.log.info(f"Searching for '{self.search_query}'...")
|
||||
# params = {
|
||||
# "query": self.search_query,
|
||||
# "sort": "relevance",
|
||||
# "domainId": self._domain_id,
|
||||
# "page": 0,
|
||||
# "perPage": 20
|
||||
# }
|
||||
# r = self.session.get(self.config["endpoints"]["search"], params=params)
|
||||
# r.raise_for_status()
|
||||
# search_data = r.json()
|
||||
|
||||
# results = []
|
||||
# for item in search_data.get("list", []):
|
||||
# item_type = item.get("type")
|
||||
# if item_type not in ["playlist", "video"]:
|
||||
# continue
|
||||
|
||||
# video_id = item.get("videoId")
|
||||
# title = item.get("title", "No Title")
|
||||
# label = "Series" if item_type == "playlist" else "Movie"
|
||||
|
||||
# results.append(
|
||||
# SearchResult(
|
||||
# id_=str(video_id),
|
||||
# title=title,
|
||||
# description="",
|
||||
# label=label,
|
||||
# url=f"https://www.kanopy.com/watch/{video_id}"
|
||||
# )
|
||||
# )
|
||||
# return results
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list:
    # Kanopy exposes no chapter metadata, so always return an empty list.
    return []
||||
15
KNPY/config.yaml
Normal file
15
KNPY/config.yaml
Normal file
@ -0,0 +1,15 @@
|
||||
client:
|
||||
api_version: "Android/com.kanopy/6.21.0/952 (SM-A525F; Android 15)"
|
||||
user_agent: "okhttp/5.2.1"
|
||||
widevine_ua: "KanopyApplication/6.21.0 (Linux;Android 15) AndroidXMedia3/1.8.0"
|
||||
|
||||
endpoints:
|
||||
handshake: "https://kanopy.com/kapi/handshake"
|
||||
login: "https://kanopy.com/kapi/login"
|
||||
memberships: "https://kanopy.com/kapi/memberships?userId={user_id}"
|
||||
video_info: "https://kanopy.com/kapi/videos/{video_id}?domainId={domain_id}"
|
||||
video_items: "https://kanopy.com/kapi/videos/{video_id}/items?domainId={domain_id}"
|
||||
search: "https://kanopy.com/kapi/search/videos"
|
||||
plays: "https://kanopy.com/kapi/plays"
|
||||
access_expires_in: "https://kanopy.com/kapi/users/{user_id}/history/videos/{video_id}/access_expires_in?domainId={domain_id}"
|
||||
widevine_license: "https://kanopy.com/kapi/licenses/widevine/{license_id}"
|
||||
297
KOWP/__init__.py
Normal file
297
KOWP/__init__.py
Normal file
@ -0,0 +1,297 @@
|
||||
import json
|
||||
import re
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
import click
|
||||
from langcodes import Language
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.titles import Episode, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Subtitle, Tracks
|
||||
from unshackle.core.utilities import is_close_match
|
||||
|
||||
class KOWP(Service):
    """
    Service code for Kocowa Plus (kocowa.com).
    Version: 1.0.0

    Auth: Credential (username + password)
    Security: FHD@L3
    """

    # Accepts a full season URL or a bare numeric season id.
    TITLE_RE = r"^(?:https?://(?:www\.)?kocowa\.com/[^/]+/season/)?(?P<title_id>\d+)"
    GEOFENCE = ()  # no geo restriction enforced by this service code
    NO_SUBTITLES = False  # WebVTT subtitles are taken from the Brightcove manifest
||||
@staticmethod
@click.command(name="kowp", short_help="https://www.kocowa.com")
@click.argument("title", type=str)
@click.option("--extras", is_flag=True, default=False, help="Include teasers/extras")
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: constructs the KOWP service from the parsed arguments.
    return KOWP(ctx, **kwargs)
||||
def __init__(self, ctx, title: str, extras: bool = False):
    """Initialize the service from a Kocowa season URL/id or a search term.

    Args:
        ctx: Click context carrying the shared session and CDM objects.
        title: Season URL, numeric season id, or free text used for search.
        extras: When True, include teaser/extra items alongside episodes.
    """
    super().__init__(ctx)
    match = re.match(self.TITLE_RE, title)
    if match:
        self.title_id = match.group("title_id")
    else:
        self.title_id = title  # fallback to use as search keyword
    self.include_extras = extras
    # Brightcove account/policy key are resolved after login.
    self.brightcove_account_id = None
    self.brightcove_pk = None
    self.cdm = ctx.obj.cdm
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Sign in to Kocowa and prepare Brightcove playback credentials.

    Three steps: password login (yields self.access_token), exchange for a
    middleware token used by the Brightcove proxy (self.middleware_token),
    and fetching the public Brightcove account id / policy key.

    Raises:
        ValueError: If no credential is provided.
        PermissionError: If Kocowa rejects the login.
    """
    if not credential:
        raise ValueError("KOWP requires username and password")

    # Impersonate the Firefox web client.
    payload = {
        "username": credential.username,
        "password": credential.password,
        "device_id": f"{credential.username}_browser",
        "device_type": "browser",
        "device_model": "Firefox",
        "device_version": "firefox/143.0",
        "push_token": None,
        "app_version": "v4.0.16",
    }
    r = self.session.post(
        self.config["endpoints"]["login"],
        json=payload,
        headers={"Authorization": "anonymous", "Origin": "https://www.kocowa.com"}
    )
    r.raise_for_status()
    res = r.json()
    # Kocowa signals success via its own "0000" code, not the HTTP status.
    if res.get("code") != "0000":
        raise PermissionError(f"Login failed: {res.get('message')}")

    self.access_token = res["object"]["access_token"]

    # Exchange the Kocowa token for a Brightcove middleware token.
    r = self.session.post(
        self.config["endpoints"]["middleware_auth"],
        json={"token": f"wA-Auth.{self.access_token}"},
        headers={"Origin": "https://www.kocowa.com"}
    )
    r.raise_for_status()
    self.middleware_token = r.json()["token"]

    self._fetch_brightcove_config()
||||
def _fetch_brightcove_config(self):
    """Fetch Brightcove account_id and policy_key from Kocowa's public config endpoint.

    Sets self.brightcove_account_id and self.brightcove_pk.

    Raises:
        RuntimeError: If the config cannot be fetched or lacks either key.
    """
    try:
        r = self.session.get(
            "https://middleware.bcmw.kocowa.com/api/config",
            headers={
                "Origin": "https://www.kocowa.com",
                "Referer": "https://www.kocowa.com/",
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36 Edg/142.0.0.0"
            }
        )
        r.raise_for_status()
        config = r.json()

        self.brightcove_account_id = config.get("VC_ACCOUNT_ID")
        self.brightcove_pk = config.get("BCOV_POLICY_KEY")

        if not self.brightcove_account_id:
            raise ValueError("VC_ACCOUNT_ID missing in /api/config response")
        if not self.brightcove_pk:
            raise ValueError("BCOV_POLICY_KEY missing in /api/config response")

        self.log.info(f"Brightcove config loaded: account_id={self.brightcove_account_id}")

    except Exception as e:
        # Chain the original exception so the root cause stays in the traceback
        # (the old code dropped it by omitting "from e").
        raise RuntimeError(f"Failed to fetch or parse Brightcove config: {e}") from e
||||
def get_titles(self) -> Titles_T:
    """Page through the season metadata and build a Series of episodes.

    Raises:
        ValueError: If the first page carries no series metadata.
    """
    all_episodes = []
    offset = 0
    limit = 20
    series_title = None  # Store the title from the first request

    while True:
        url = self.config["endpoints"]["metadata"].format(title_id=self.title_id)
        # The config URL may already carry a query string; pick the separator.
        sep = "&" if "?" in url else "?"
        url += f"{sep}offset={offset}&limit={limit}"

        r = self.session.get(
            url,
            headers={"Authorization": self.access_token, "Origin": "https://www.kocowa.com"}
        )
        r.raise_for_status()
        data = r.json()["object"]

        # Extract the series title only from the very first page
        if series_title is None and "meta" in data:
            series_title = data["meta"]["title"]["en"]

        page_objects = data.get("next_episodes", {}).get("objects", [])
        if not page_objects:
            break

        for ep in page_objects:
            is_episode = ep.get("detail_type") == "episode"
            is_extra = ep.get("detail_type") in ("teaser", "extra")
            # Teasers/extras are only kept when --extras was passed.
            if is_episode or (self.include_extras and is_extra):
                all_episodes.append(ep)

        offset += limit
        total = data.get("next_episodes", {}).get("total_count", 0)
        # Stop when we have everything or the server returned a short page.
        if len(all_episodes) >= total or len(page_objects) < limit:
            break

    # If we never got the series title, exit with an error
    if series_title is None:
        raise ValueError("Could not retrieve series metadata to get the title.")

    episodes = []
    for ep in all_episodes:
        meta = ep["meta"]
        ep_type = "Episode" if ep["detail_type"] == "episode" else ep["detail_type"].capitalize()
        ep_num = meta.get("episode_number", 0)
        # Fall back to a generated name when no English title exists.
        title = meta["title"].get("en") or f"{ep_type} {ep_num}"
        desc = meta["description"].get("en") or ""

        episodes.append(
            Episode(
                id_=str(ep["id"]),
                service=self.__class__,
                title=series_title,
                season=meta.get("season_number", 1),
                number=ep_num,
                name=title,
                description=desc,
                year=None,
                language=Language.get("en"),
                data=ep,
            )
        )

    return Series(episodes)
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Authorize playback and build tracks from the Brightcove manifest.

    Side effects: sets self.playback_token (needed for the license request)
    and self.widevine_license_url.
    """
    # Authorize playback
    r = self.session.post(
        self.config["endpoints"]["authorize"].format(episode_id=title.id),
        headers={"Authorization": f"Bearer {self.middleware_token}"}
    )
    r.raise_for_status()
    auth_data = r.json()
    if not auth_data.get("Success"):
        raise PermissionError("Playback authorization failed")
    self.playback_token = auth_data["token"]

    # Fetch Brightcove manifest (the episode id doubles as the Brightcove reference id)
    manifest_url = (
        f"https://edge.api.brightcove.com/playback/v1/accounts/{self.brightcove_account_id}/videos/ref:{title.id}"
    )
    r = self.session.get(
        manifest_url,
        headers={"Accept": f"application/json;pk={self.brightcove_pk}"}
    )
    r.raise_for_status()
    manifest = r.json()

    # Get DASH URL + Widevine license
    dash_url = widevine_url = None
    for src in manifest.get("sources", []):
        if src.get("type") == "application/dash+xml":
            dash_url = src["src"]
            widevine_url = (
                src.get("key_systems", {})
                .get("com.widevine.alpha", {})
                .get("license_url")
            )
            if dash_url and widevine_url:
                break

    if not dash_url or not widevine_url:
        raise ValueError("No Widevine DASH stream found")

    self.widevine_license_url = widevine_url
    tracks = DASH.from_url(dash_url, session=self.session).to_tracks(language=title.language)

    # "thumbnails" text tracks are sprite images, not subtitles — skip them.
    for sub in manifest.get("text_tracks", []):
        srclang = sub.get("srclang")
        if not srclang or srclang == "thumbnails":
            continue

        subtitle_track = Subtitle(
            id_=sub["id"],
            url=sub["src"],
            codec=Subtitle.Codec.WebVTT,
            language=Language.get(srclang),
            sdh=True,  # Kocowa subs are SDH - mark them as such
            forced=False,
        )
        tracks.add(subtitle_track)

    return tracks
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
    """Forward the Widevine challenge to the Brightcove license server.

    Uses the playback token obtained during get_tracks() as the BCOV-Auth
    header and returns the raw license response body.
    """
    license_headers = {
        "BCOV-Auth": self.playback_token,
        "Content-Type": "application/octet-stream",
        "Origin": "https://www.kocowa.com",
        "Referer": "https://www.kocowa.com/",
    }
    response = self.session.post(
        self.widevine_license_url,
        data=challenge,
        headers=license_headers,
    )
    response.raise_for_status()
    return response.content
|
||||
|
||||
def search(self) -> List[SearchResult]:
    """Query Kocowa's autocomplete endpoint for the user's search input and
    return one SearchResult per matching season.

    Only items whose ``detail_type`` is "season" are returned; other
    content kinds (webtoons, clips, etc.) are skipped.
    """
    url = "https://prod-fms.kocowa.com/api/v01/fe/gks/autocomplete"
    params = {
        "search_category": "All",
        "search_input": self.title_id,
        "include_webtoon": "true",
    }

    r = self.session.get(
        url,
        params=params,
        headers={
            "Authorization": self.access_token,
            # Fixed: these header values previously carried a trailing
            # space ("https://www.kocowa.com "), which is an invalid
            # Origin/Referer value and can be rejected by strict servers.
            "Origin": "https://www.kocowa.com",
            "Referer": "https://www.kocowa.com/",
        }
    )
    r.raise_for_status()
    response = r.json()
    contents = response.get("object", {}).get("contents", [])

    results = []
    for item in contents:
        if item.get("detail_type") != "season":
            continue

        meta = item["meta"]
        # Titles/descriptions are localized maps; fall back when the
        # English entry is missing or empty.
        title_en = meta["title"].get("en") or "[No Title]"
        description_en = meta["description"].get("en") or ""
        show_id = str(item["id"])

        results.append(
            SearchResult(
                id_=show_id,
                title=title_en,
                description=description_en,
                label="season",
                url=f"https://www.kocowa.com/en_us/season/{show_id}/"
            )
        )
    return results
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list:
    """Kocowa exposes no chapter metadata, so no chapters are returned."""
    return []
|
||||
|
||||
5
KOWP/config.yaml
Normal file
5
KOWP/config.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
endpoints:
|
||||
login: "https://prod-sgwv3.kocowa.com/api/v01/user/signin"
|
||||
middleware_auth: "https://middleware.bcmw.kocowa.com/authenticate-user"
|
||||
metadata: "https://prod-fms.kocowa.com/api/v01/fe/content/get?id={title_id}"
|
||||
authorize: "https://middleware.bcmw.kocowa.com/api/playback/authorize/{episode_id}"
|
||||
650
MAX/__init__.py
Normal file
650
MAX/__init__.py
Normal file
@ -0,0 +1,650 @@
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from hashlib import md5
|
||||
from typing import Optional, Union, Generator
|
||||
from http.cookiejar import CookieJar
|
||||
|
||||
import click
|
||||
import requests
|
||||
import xmltodict
|
||||
from langcodes import Language
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Chapters, Subtitle, Tracks, Video
|
||||
|
||||
|
||||
class MAX(Service):
    """
    Service code for MAX's streaming service (https://max.com).
    Version: 1.0.0

    Authorization: Cookies
    Security: UHD@L1 FHD@L1 HD@L3

    Use full URL or title ID with type.
    Examples:
    - https://play.hbomax.com/movie/urn:hbo:movie:GUID
    - https://play.hbomax.com/show/urn:hbo:series:GUID
    - movie/GUID
    - show/GUID

    Note: This service is designed for users who have legal access to MAX content.
    Ensure you have proper subscription and authentication before use.
    """

    # Alternate names accepted on the command line for this service.
    ALIASES = ("MAX", "max", "hbomax")
    # Service endpoints are only reachable from the US.
    GEOFENCE = ("US",)

    # Accepts a full play.hbomax.com URL or a bare "<type>/<id>" pair.
    TITLE_RE = r"^(?:https?://(?:www\.|play\.)?hbomax\.com/)?(?P<type>[^/]+)/(?P<id>[^/]+)"

    # CLI codec name -> accepted DASH fourcc prefixes for video tracks.
    VIDEO_CODEC_MAP = {
        "H264": ["avc1"],
        "H265": ["hvc1", "dvh1"]
    }

    # CLI codec name -> DASH fourcc prefix for audio tracks.
    AUDIO_CODEC_MAP = {
        "AAC": "mp4a",
        "AC3": "ac-3",
        "EC3": "ec-3"
    }
|
||||
|
||||
@staticmethod
@click.command(name="MAX", short_help="https://max.com")
@click.argument("title", type=str)
@click.option("-vcodec", "--video-codec", default=None, help="Video codec preference")
@click.option("-acodec", "--audio-codec", default=None, help="Audio codec preference")
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: construct the service with the parsed CLI options.
    return MAX(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title, video_codec, audio_codec):
    """Store CLI selections and derive the requested dynamic range.

    When HDR10 is requested, the video codec is forced to H265 since HDR10
    content is only delivered in HEVC.
    """
    super().__init__(ctx)

    self.title = title
    self.vcodec = video_codec
    self.acodec = audio_codec

    # Get range parameter for HDR support (set on the parent command).
    selected_range = ctx.parent.params.get("range_")
    if selected_range:
        self.range = selected_range[0].name
    else:
        self.range = "SDR"

    if self.range == "HDR10":
        self.vcodec = "H265"
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate against MAX using browser cookies.

    Reads the "st" and "session" cookies, installs the WBD client headers
    on the session, then bootstraps a device token and attaches it as
    x-wbd-session-state. Raises EnvironmentError when cookies are missing
    or malformed.
    """
    super().authenticate(cookies, credential)
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")

    # Extract authentication tokens from cookies
    try:
        # NOTE(review): `token` ("st" cookie) is extracted but never used
        # below — presumably kept for future use; confirm before removing.
        token = next(cookie.value for cookie in cookies if cookie.name == "st")
        session_data = next(cookie.value for cookie in cookies if cookie.name == "session")
        device_id = json.loads(session_data)
    except (StopIteration, json.JSONDecodeError):
        raise EnvironmentError("Required authentication cookies not found.")

    # Configure headers based on device type (poses as a Samsung TV client)
    self.session.headers.update({
        'User-Agent': 'BEAM-Android/1.0.0.104 (SONY/XR-75X95EL)',
        'Accept': 'application/json, text/plain, */*',
        'Content-Type': 'application/json',
        'x-disco-client': 'SAMSUNGTV:124.0.0.0:beam:4.0.0.118',
        'x-disco-params': 'realm=bolt,bid=beam,features=ar',
        'x-device-info': 'beam/4.0.0.118 (Samsung/Samsung-Unknown; Tizen/124.0.0.0; f198a6c1-c582-4725-9935-64eb6b17c3cd/87a996fa-4917-41ae-9b6d-c7f521f0cb78)',
        'traceparent': '00-315ac07a3de9ad1493956cf1dd5d1313-988e057938681391-01',
        'tracestate': f'wbd=session:{device_id}',
        'Origin': 'https://play.hbomax.com',
        'Referer': 'https://play.hbomax.com/',
    })

    # Get device token (must run after the headers above are installed)
    auth_token = self._get_device_token()
    self.session.headers.update({
        "x-wbd-session-state": auth_token
    })
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """
    Search for content on MAX platform.
    Note: This is a basic implementation - MAX's search API may require additional parameters.
    """
    # Basic search implementation - you may need to adjust based on actual API
    search_url = "https://default.prd.api.hbomax.com/search"

    try:
        response = self.session.get(search_url, params={"q": self.title})
        response.raise_for_status()

        search_data = response.json()

        # Parse search results - adjust based on actual API response structure
        for result in search_data.get("results", []):
            yield SearchResult(
                id_=result.get("id"),
                title=result.get("title", "Unknown"),
                label=result.get("type", "UNKNOWN").upper(),
                url=f"https://play.hbomax.com/{result.get('type', 'content')}/{result.get('id')}"
            )

    except Exception as e:
        # Best-effort: any failure degrades to an empty result set.
        self.log.warning(f"Search functionality not fully implemented: {e}")
        # Return empty generator if search fails
        return
        # NOTE(review): this yield is unreachable after `return`; the loop
        # above already makes this a generator. Assumed intentional
        # placement inside the except block — confirm original indentation.
        yield  # This makes it a generator function
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Resolve the CLI title argument into Movies or Series.

    Parses "<type>/<id>" (or a full play.hbomax.com URL), fetches the CMS
    content route, then branches on content type:
    sport/event and movie/standalone produce Movies; show/mini-series/
    topical walk every season and produce a Series of Episodes.
    Raises ValueError for unparseable input or missing seasons/episodes.
    """
    # Parse title input
    match = re.match(self.TITLE_RE, self.title)
    if not match:
        raise ValueError("Invalid title format. Expected format: type/id or full URL")

    content_type = match.group('type')
    external_id = match.group('id')

    response = self.session.get(
        self.config['endpoints']['contentRoutes'] % (content_type, external_id)
    )
    response.raise_for_status()

    # Primary lookup: the blueprint page entry named after the content
    # type; fall back to matching by alternateId + originalName.
    # (Was a bare `except:`; narrowed to what the lookup can raise —
    # IndexError when no entry matches, KeyError when "alias" is absent.)
    try:
        content_data = [x for x in response.json()["included"] if "attributes" in x and "title" in
                        x["attributes"] and x["attributes"]["alias"] == "generic-%s-blueprint-page" % (re.sub(r"-", "", content_type))][0]["attributes"]
        content_title = content_data["title"]
    except (KeyError, IndexError):
        content_data = [x for x in response.json()["included"] if "attributes" in x and "alternateId" in
                        x["attributes"] and x["attributes"]["alternateId"] == external_id and x["attributes"].get("originalName")][0]["attributes"]
        content_title = content_data["originalName"]

    if content_type == "sport" or content_type == "event":
        included_dt = response.json()["included"]

        # Pick the (last) included entry whose attributes contain "VOD".
        # NOTE(review): `event_data` is unbound if nothing matches — the
        # lookup below would then raise NameError; confirm API guarantees.
        for included in included_dt:
            for key, data in included.items():
                if key == "attributes":
                    for k, d in data.items():
                        if d == "VOD":
                            event_data = included

        release_date = event_data["attributes"].get("airDate") or event_data["attributes"].get("firstAvailableDate")
        year = datetime.strptime(release_date, '%Y-%m-%dT%H:%M:%SZ').year

        return Movies([
            Movie(
                id_=external_id,
                service=self.__class__,
                name=content_title.title(),
                year=year,
                data=event_data,
            )
        ])

    if content_type == "movie" or content_type == "standalone":
        metadata = self.session.get(
            url=self.config['endpoints']['moviePages'] % external_id
        ).json()['data']

        # Probe for an edit relationship; when absent, fall back to the
        # video entry attached to this show id.
        # (Was a bare `except:`; the probe can raise KeyError on a missing
        # key or TypeError when an intermediate value is None.)
        try:
            edit_id = metadata['relationships']['edit']['data']['id']
        except (KeyError, TypeError):
            for x in response.json()["included"]:
                if x.get("type") == "video" and x.get("relationships", {}).get("show", {}).get("data", {}).get("id") == external_id:
                    metadata = x

        release_date = metadata["attributes"].get("airDate") or metadata["attributes"].get("firstAvailableDate")
        year = datetime.strptime(release_date, '%Y-%m-%dT%H:%M:%SZ').year

        return Movies([
            Movie(
                id_=external_id,
                service=self.__class__,
                name=content_title,
                year=year,
                data=metadata,
            )
        ])

    if content_type in ["show", "mini-series", "topical"]:
        episodes = []
        # Each show flavor uses a differently-named episodes rail alias.
        if content_type == "mini-series":
            alias = "generic-miniseries-page-rail-episodes"
        elif content_type == "topical":
            alias = "generic-topical-show-page-rail-episodes"
        else:
            alias = "-%s-page-rail-episodes-tabbed-content" % (content_type)

        included_dt = response.json()["included"]

        # Find the component whose attributes mention the episodes rail.
        season_data = [data for included in included_dt for key, data in included.items()
                       if key == "attributes" for k, d in data.items() if alias in str(d).lower()][0]

        season_data = season_data["component"]["filters"][0]

        seasons = [int(season["value"]) for season in season_data["options"]]

        # (season number, query parameter) pairs for every listed season.
        season_parameters = [(int(season["value"]), season["parameter"]) for season in season_data["options"]
                             for season_number in seasons if int(season["value"]) == int(season_number)]

        if not season_parameters:
            raise ValueError("No seasons found")

        for (value, parameter) in season_parameters:
            data = self.session.get(
                url=self.config['endpoints']['showPages'] % (external_id, parameter)
            ).json()

            try:
                episodes_dt = sorted([dt for dt in data["included"] if "attributes" in dt and "videoType" in
                                      dt["attributes"] and dt["attributes"]["videoType"] == "EPISODE"
                                      and int(dt["attributes"]["seasonNumber"]) == int(parameter.split("=")[-1])],
                                     key=lambda x: x["attributes"]["episodeNumber"])
            except KeyError:
                raise ValueError("Season episodes were not found")

            episodes.extend(episodes_dt)

        episode_titles = []
        # Use the first episode's air date as the series year.
        release_date = episodes[0]["attributes"].get("airDate") or episodes[0]["attributes"].get("firstAvailableDate")
        year = datetime.strptime(release_date, '%Y-%m-%dT%H:%M:%SZ').year

        season_map = {int(item[1].split("=")[-1]): item[0] for item in season_parameters}

        for episode in episodes:
            episode_titles.append(
                Episode(
                    id_=episode['id'],
                    service=self.__class__,
                    title=content_title,
                    season=season_map.get(episode['attributes'].get('seasonNumber')),
                    number=episode['attributes']['episodeNumber'],
                    name=episode['attributes']['name'],
                    year=year,
                    data=episode
                )
            )

        return Series(episode_titles)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Request playback info for the title's edit and build its tracks.

    Posts a full device/capability payload (posing as an Xbox browser
    client) to the playbackInfo endpoint, extracts the DASH manifest and
    DRM license URLs, loads the manifest, dedupes tracks, swaps in VTT
    subtitles parsed from the MPD, and applies any CLI codec filters.
    Side effects: sets self.wv_license_url / self.pr_license_url and
    stashes the video info on title.data['info'] for get_chapters().
    """
    edit_id = title.data['relationships']['edit']['data']['id']

    # Capability payload: what formats, codecs, HDR modes and device
    # characteristics we claim to support. Values are fixed impersonation
    # constants, not probed from the real environment.
    response = self.session.post(
        url=self.config['endpoints']['playbackInfo'],
        json={
            'appBundle': 'beam',
            'consumptionType': 'streaming',
            'deviceInfo': {
                'deviceId': '2dec6cb0-eb34-45f9-bbc9-a0533597303c',
                'browser': {
                    'name': 'chrome',
                    'version': '113.0.0.0',
                },
                'make': 'Microsoft',
                'model': 'XBOX-Unknown',
                'os': {
                    'name': 'Windows',
                    'version': '113.0.0.0',
                },
                'platform': 'XBOX',
                'deviceType': 'xbox',
                'player': {
                    'sdk': {
                        'name': 'Beam Player Console',
                        'version': '1.0.2.4',
                    },
                    'mediaEngine': {
                        'name': 'GLUON_BROWSER',
                        'version': '1.20.1',
                    },
                    'playerView': {
                        'height': 1080,
                        'width': 1920,
                    },
                },
            },
            'editId': edit_id,
            'capabilities': {
                'manifests': {
                    'formats': {
                        'dash': {},
                    },
                },
                # Advertised decoder support: HEVC up to 4K/main10 and AVC.
                'codecs': {
                    'video': {
                        'hdrFormats': [
                            'hlg',
                            'hdr10',
                            'dolbyvision5',
                            'dolbyvision8',
                        ],
                        'decoders': [
                            {
                                'maxLevel': '6.2',
                                'codec': 'h265',
                                'levelConstraints': {
                                    'width': {
                                        'min': 1920,
                                        'max': 3840,
                                    },
                                    'height': {
                                        'min': 1080,
                                        'max': 2160,
                                    },
                                    'framerate': {
                                        'min': 15,
                                        'max': 60,
                                    },
                                },
                                'profiles': [
                                    'main',
                                    'main10',
                                ],
                            },
                            {
                                'maxLevel': '4.2',
                                'codec': 'h264',
                                'levelConstraints': {
                                    'width': {
                                        'min': 640,
                                        'max': 3840,
                                    },
                                    'height': {
                                        'min': 480,
                                        'max': 2160,
                                    },
                                    'framerate': {
                                        'min': 15,
                                        'max': 60,
                                    },
                                },
                                'profiles': [
                                    'high',
                                    'main',
                                    'baseline',
                                ],
                            },
                        ],
                    },
                    'audio': {
                        'decoders': [
                            {
                                'codec': 'aac',
                                'profiles': [
                                    'lc',
                                    'he',
                                    'hev2',
                                    'xhe',
                                ],
                            },
                        ],
                    },
                },
                'devicePlatform': {
                    'network': {
                        'lastKnownStatus': {
                            'networkTransportType': 'unknown',
                        },
                        'capabilities': {
                            'protocols': {
                                'http': {
                                    'byteRangeRequests': True,
                                },
                            },
                        },
                    },
                    'videoSink': {
                        'lastKnownStatus': {
                            'width': 1290,
                            'height': 2796,
                        },
                        'capabilities': {
                            'colorGamuts': [
                                'standard',
                                'wide',
                            ],
                            'hdrFormats': [
                                'dolbyvision',
                                'hdr10plus',
                                'hdr10',
                                'hlg',
                            ],
                        },
                    },
                },
            },
            'gdpr': False,
            'firstPlay': False,
            'playbackSessionId': str(uuid.uuid4()),
            'applicationSessionId': str(uuid.uuid4()),
            'userPreferences': {},
            'features': [],
        }
    )
    response.raise_for_status()

    playback_data = response.json()

    # Get video info for language
    video_info = next(x for x in playback_data['videos'] if x['type'] == 'main')
    title.language = Language.get(video_info['defaultAudioSelection']['language'])

    # Force the Akamai CDN hostname variant of the fallback manifest.
    fallback_url = playback_data["fallback"]["manifest"]["url"]
    fallback_url = fallback_url.replace('fly', 'akm').replace('gcp', 'akm')

    try:
        self.wv_license_url = playback_data["drm"]["schemes"]["widevine"]["licenseUrl"]
    except (KeyError, IndexError):
        self.wv_license_url = None

    try:
        self.pr_license_url = playback_data["drm"]["schemes"]["playready"]["licenseUrl"]
    except (KeyError, IndexError):
        self.pr_license_url = None

    # The non-"_fallback" URL is the full-quality manifest.
    manifest_url = fallback_url.replace('_fallback', '')
    self.log.debug(f"MPD URL: {manifest_url}")
    self.log.debug(f"Fallback URL: {fallback_url}")
    self.log.debug(f"Widevine License URL: {self.wv_license_url}")
    self.log.debug(f"PlayReady License URL: {self.pr_license_url}")

    tracks = DASH.from_url(url=manifest_url, session=self.session).to_tracks(language=title.language)

    self.log.debug(tracks)

    tracks.videos = self._dedupe(tracks.videos)
    tracks.audio = self._dedupe(tracks.audio)

    # Remove partial subs and get VTT subtitles
    tracks.subtitles.clear()

    subtitles = self._get_subtitles(manifest_url, fallback_url)

    for subtitle in subtitles:
        tracks.add(
            Subtitle(
                # Short stable id derived from the subtitle URL.
                id_=md5(subtitle["url"].encode()).hexdigest()[0:6],
                url=subtitle["url"],
                codec=Subtitle.Codec.from_mime(subtitle['format']),
                language=Language.get(subtitle["language"]),
                forced=subtitle['name'] == 'Forced',
                sdh=subtitle['name'] == 'SDH'
            )
        )

    # Apply codec filters (match on the fourcc prefix of the track codec)
    if self.vcodec:
        tracks.videos = [x for x in tracks.videos if (x.codec or "")[:4] in self.VIDEO_CODEC_MAP[self.vcodec]]

    if self.acodec:
        tracks.audio = [x for x in tracks.audio if (x.codec or "")[:4] == self.AUDIO_CODEC_MAP[self.acodec]]

    # Set track properties (HDR10 / Dolby Vision flags, default sub codec)
    for track in tracks:
        if isinstance(track, Video):
            codec = track.data.get("dash", {}).get("representation", {}).get("codecs", "")
            track.hdr10 = track.range == Video.Range.HDR10
            track.dv = codec[:4] in ("dvh1", "dvhe")
        if isinstance(track, Subtitle) and not track.codec:
            track.codec = Subtitle.Codec.WebVTT

    # Store video info for chapters
    title.data['info'] = video_info

    # Mark descriptive audio tracks (DASH Role of description/alternate)
    for track in tracks.audio:
        if hasattr(track, 'data') and track.data.get("dash", {}).get("adaptation_set"):
            role = track.data["dash"]["adaptation_set"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

    self.log.debug(tracks)

    return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
    """Build chapter markers from the title's credit annotations.

    Uses the video info stashed on title.data['info'] by get_tracks().
    Returns an empty Chapters container when no annotation data exists.
    """
    chapters = []
    video_info = title.data.get('info', {})
    annotations = video_info.get('annotations')
    # Truthiness guard: the previous `'annotations' in video_info` check
    # raised IndexError when the annotations list existed but was empty.
    if annotations:
        chapters.append(Chapter(timestamp=0.0, name='Chapter 1'))
        chapters.append(Chapter(timestamp=self._convert_timecode(annotations[0]['start']), name='Credits'))
        chapters.append(Chapter(timestamp=self._convert_timecode(annotations[0]['end']), name='Chapter 2'))

    return Chapters(chapters)
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
    """POST the Widevine challenge to the license server, or return None
    when this title exposed no Widevine license URL."""
    license_url = self.wv_license_url
    if not license_url:
        return None

    reply = self.session.post(url=license_url, data=challenge)
    reply.raise_for_status()
    return reply.content
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
    """POST the PlayReady SOAP challenge to the license server, or return
    None when no PlayReady license URL is available."""
    if not self.pr_license_url:
        return None

    # The SOAP endpoint expects text; accept both bytes and str challenges.
    if isinstance(challenge, bytes):
        body = challenge.decode('utf-8')
    else:
        body = str(challenge)

    soap_headers = {
        'Content-Type': 'text/xml; charset=utf-8',
        'SOAPAction': 'http://schemas.microsoft.com/DRM/2007/03/protocols/AcquireLicense'
    }
    reply = self.session.post(url=self.pr_license_url, data=body, headers=soap_headers)

    reply.raise_for_status()
    return reply.content
|
||||
|
||||
def _get_device_token(self):
    """Bootstrap a WBD session and return its x-wbd-session-state token."""
    reply = self.session.post(self.config['endpoints']['bootstrap'])
    reply.raise_for_status()
    return reply.headers.get('x-wbd-session-state')
|
||||
|
||||
@staticmethod
|
||||
def _convert_timecode(time_seconds):
|
||||
"""Convert seconds to timestamp."""
|
||||
return float(time_seconds)
|
||||
|
||||
def _get_subtitles(self, mpd_url, fallback_url):
    """Derive sidecar VTT subtitle URLs from the MPD's text adaptations.

    The VTT files live next to the fallback manifest; each text
    AdaptationSet's DASH Role decides the filename suffix
    (_cc/_sdh/_forced/_sub). Returns de-duplicated dicts with url,
    format, language and name keys.
    """
    base_url = "/".join(fallback_url.split("/")[:-1]) + "/"
    xml = xmltodict.parse(requests.get(mpd_url).text)

    # xmltodict yields a list for multiple Periods, a dict for one;
    # indexing a dict with 0 raises KeyError, caught below.
    try:
        tracks = xml["MPD"]["Period"][0]["AdaptationSet"]
    except KeyError:
        tracks = xml["MPD"]["Period"]["AdaptationSet"]

    subs_tracks_js = []
    for subs_tracks in tracks:
        if subs_tracks.get('@contentType') == 'text':
            for x in self._force_instance(subs_tracks, "Representation"):
                # Segment path prefix like "t/sub/"; default when the
                # media template is absent or doesn't match.
                try:
                    path = re.search(r'(t/\w+/)', x["SegmentTemplate"]["@media"])[1]
                except (AttributeError, KeyError):
                    path = 't/sub/'

                is_sdh = False
                is_forced = False

                role_value = subs_tracks.get("Role", {}).get("@value", "")

                if role_value == "caption":
                    # Captions are SDH when the Label says so, else CC.
                    url = base_url + path + subs_tracks['@lang'] + ('_sdh.vtt' if 'sdh' in subs_tracks.get("Label", "").lower() else '_cc.vtt')
                    is_sdh = True
                elif role_value == "forced-subtitle":
                    url = base_url + path + subs_tracks['@lang'] + '_forced.vtt'
                    is_forced = True
                elif role_value == "subtitle":
                    url = base_url + path + subs_tracks['@lang'] + '_sub.vtt'
                else:
                    # Unknown role: skip this representation entirely.
                    continue

                subs_tracks_js.append({
                    "url": url,
                    "format": "vtt",
                    "language": subs_tracks["@lang"],
                    "name": "SDH" if is_sdh else "Forced" if is_forced else "Full",
                })

    return self._remove_dupe(subs_tracks_js)
|
||||
|
||||
@staticmethod
|
||||
def _force_instance(data, variable):
|
||||
if isinstance(data[variable], list):
|
||||
return data[variable]
|
||||
else:
|
||||
return [data[variable]]
|
||||
|
||||
@staticmethod
|
||||
def _remove_dupe(items):
|
||||
seen = set()
|
||||
new_items = []
|
||||
for item in items:
|
||||
url = item['url']
|
||||
if url not in seen:
|
||||
new_items.append(item)
|
||||
seen.add(url)
|
||||
return new_items
|
||||
|
||||
@staticmethod
|
||||
def _dedupe(items: list) -> list:
|
||||
if not items:
|
||||
return items
|
||||
if isinstance(items[0].url, list):
|
||||
return items
|
||||
|
||||
# Create a more specific key for deduplication that includes resolution/bitrate
|
||||
seen = {}
|
||||
for item in items:
|
||||
# For video tracks, use codec + resolution + bitrate as key
|
||||
if hasattr(item, 'width') and hasattr(item, 'height'):
|
||||
key = f"{item.codec}_{item.width}x{item.height}_{item.bitrate}"
|
||||
# For audio tracks, use codec + language + bitrate + channels as key
|
||||
elif hasattr(item, 'channels'):
|
||||
key = f"{item.codec}_{item.language}_{item.bitrate}_{item.channels}"
|
||||
# Fallback to URL for other track types
|
||||
else:
|
||||
key = item.url
|
||||
|
||||
# Keep the item if we haven't seen this exact combination
|
||||
if key not in seen:
|
||||
seen[key] = item
|
||||
|
||||
return list(seen.values())
|
||||
BIN
MAX/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
MAX/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
6
MAX/config.yaml
Normal file
6
MAX/config.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
endpoints:
|
||||
contentRoutes: 'https://default.prd.api.hbomax.com/cms/routes/%s/%s?include=default'
|
||||
moviePages: 'https://default.prd.api.hbomax.com/content/videos/%s/activeVideoForShow?&include=edit'
|
||||
playbackInfo: 'https://default.prd.api.hbomax.com/any/playback/v1/playbackInfo'
|
||||
showPages: 'https://default.prd.api.hbomax.com/cms/collections/generic-show-page-rail-episodes-tabbed-content?include=default&pf[show.id]=%s&%s'
|
||||
bootstrap: 'https://default.prd.api.hbomax.com/session-context/headwaiter/v1/bootstrap'
|
||||
396
MUBI/__init__.py
Normal file
396
MUBI/__init__.py
Normal file
@ -0,0 +1,396 @@
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional, Generator
|
||||
from langcodes import Language
|
||||
import base64
|
||||
import click
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Title_T, Titles_T, Series
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle
|
||||
|
||||
|
||||
class MUBI(Service):
    """
    Service code for MUBI (mubi.com)
    Version: 1.2.0

    Authorization: Required cookies (lt token + session)
    Security: FHD @ L3 (Widevine)

    Supports:
    • Series ↦ https://mubi.com/en/nl/series/twin-peaks
    • Movies ↦ https://mubi.com/en/nl/films/the-substance

    """
    # Series URLs, optionally with a trailing /season/<slug> segment.
    SERIES_TITLE_RE = r"^https?://(?:www\.)?mubi\.com(?:/[^/]+)*?/series/(?P<series_slug>[^/]+)(?:/season/(?P<season_slug>[^/]+))?$"
    # Film URLs; the slug excludes query/fragment characters.
    TITLE_RE = r"^(?:https?://(?:www\.)?mubi\.com)(?:/[^/]+)*?/films/(?P<slug>[^/?#]+)$"
    # This service does provide subtitles.
    NO_SUBTITLES = False
|
||||
|
||||
@staticmethod
@click.command(name="MUBI", short_help="https://mubi.com")
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: build the service from the given MUBI URL.
    return MUBI(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
    """Parse the MUBI URL (film or series) and initialise session state.

    Raises ValueError for unrecognised URLs and EnvironmentError when the
    service config is missing.
    """
    super().__init__(ctx)

    film_match = re.match(self.TITLE_RE, title)
    series_match = re.match(self.SERIES_TITLE_RE, title)

    if not film_match and not series_match:
        raise ValueError(f"Invalid MUBI URL: {title}")

    self.is_series = bool(series_match)
    if film_match:
        self.slug = film_match.group("slug")
    else:
        self.slug = None
    if series_match:
        self.series_slug = series_match.group("series_slug")
        self.season_slug = series_match.group("season_slug")
    else:
        self.series_slug = None
        self.season_slug = None

    # Populated later by authenticate() / title lookup.
    self.film_id: Optional[int] = None
    self.lt_token: Optional[str] = None
    self.session_token: Optional[str] = None
    self.user_id: Optional[int] = None
    self.country_code: Optional[str] = None
    self.anonymous_user_id: Optional[str] = None
    self.default_country: Optional[str] = None
    self.reels_data: Optional[list] = None

    # Store CDM reference
    self.cdm = ctx.obj.cdm

    if self.config is None:
        raise EnvironmentError("Missing service config for MUBI.")
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate against MUBI using browser cookies.

    Flow: geolocate the caller's IP for an initial country, extract the
    "lt" bearer token, "_mubi_session" and snowplow anonymous-id cookies,
    install the web-client headers, then fetch the account to pin the
    real country and bind the anonymous id. Raises PermissionError when
    required cookies are missing and ValueError on geolocation failure.
    """
    super().authenticate(cookies, credential)

    # Initial country guess from IP; later overridden by the account's
    # own country code.
    try:
        r_ip = self.session.get(self.config["endpoints"]["ip_geolocation"], timeout=5)
        r_ip.raise_for_status()
        ip_data = r_ip.json()
        if ip_data.get("country"):
            self.default_country = ip_data["country"]
            self.log.debug(f"Detected country from IP: {self.default_country}")
        else:
            self.log.warning("IP geolocation response did not contain a country code.")
    except Exception as e:
        raise ValueError(f"Failed to fetch IP geolocation: {e}")

    if not cookies:
        raise PermissionError("MUBI requires login cookies.")

    # Extract essential tokens
    lt_cookie = next((c for c in cookies if c.name == "lt"), None)
    session_cookie = next((c for c in cookies if c.name == "_mubi_session"), None)
    snow_id_cookie = next((c for c in cookies if c.name == "_snow_id.c006"), None)

    if not lt_cookie:
        raise PermissionError("Missing 'lt' cookie (Bearer token).")
    if not session_cookie:
        raise PermissionError("Missing '_mubi_session' cookie.")

    self.lt_token = lt_cookie.value
    self.session_token = session_cookie.value

    # Extract anonymous_user_id from _snow_id.c006 (first dot-separated
    # field); generate a fresh UUID when the cookie is absent.
    if snow_id_cookie and "." in snow_id_cookie.value:
        self.anonymous_user_id = snow_id_cookie.value.split(".")[0]
    else:
        self.anonymous_user_id = str(uuid.uuid4())
        self.log.warning(f"No _snow_id.c006 cookie found — generated new anonymous_user_id: {self.anonymous_user_id}")

    # Headers mimicking MUBI's web player client.
    base_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) Firefox/143.0",
        "Origin": "https://mubi.com",
        "Referer": "https://mubi.com/",
        "CLIENT": "web",
        "Client-Accept-Video-Codecs": "h265,vp9,h264",
        "Client-Accept-Audio-Codecs": "aac",
        "Authorization": f"Bearer {self.lt_token}",
        "ANONYMOUS_USER_ID": self.anonymous_user_id,
        "Client-Country": self.default_country,
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-site",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
    }

    self.session.headers.update(base_headers)

    r_account = self.session.get(self.config["endpoints"]["account"])
    if not r_account.ok:
        raise PermissionError(f"Failed to fetch MUBI account: {r_account.status_code} {r_account.text}")

    account_data = r_account.json()
    self.user_id = account_data.get("id")
    self.country_code = (account_data.get("country") or {}).get("code", "NL")

    # The account country is authoritative; also constrain the geofence.
    self.session.headers["Client-Country"] = self.country_code
    self.GEOFENCE = (self.country_code,)

    self._bind_anonymous_user()

    self.log.info(
        f"Authenticated as user {self.user_id}, "
        f"country: {self.country_code}, "
        f"anonymous_id: {self.anonymous_user_id}"
    )
|
||||
|
||||
def _bind_anonymous_user(self):
    """Best-effort PUT linking the anonymous tracking UUID to the
    signed-in account; logs rather than raises on any failure."""
    try:
        response = self.session.put(
            self.config["endpoints"]["current_user"],
            json={"anonymous_user_uuid": self.anonymous_user_id},
            headers={"Content-Type": "application/json"}
        )
        if not response.ok:
            self.log.warning(f"Failed to bind anonymous_user_uuid: {response.status_code}")
        else:
            self.log.debug("Anonymous user ID successfully bound to account.")
    except Exception as e:
        self.log.warning(f"Exception while binding anonymous_user_uuid: {e}")
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Dispatch to the series or film title loader based on the URL kind."""
    loader = self._get_series_titles if self.is_series else self._get_film_title
    return loader()
|
||||
|
||||
def _get_film_title(self) -> Movies:
|
||||
url = self.config["endpoints"]["film_by_slug"].format(slug=self.slug)
|
||||
r = self.session.get(url)
|
||||
r.raise_for_status()
|
||||
data = r.json()
|
||||
|
||||
self.film_id = data["id"]
|
||||
|
||||
# Fetch reels to get definitive language code and cache the response
|
||||
url_reels = self.config["endpoints"]["reels"].format(film_id=self.film_id)
|
||||
r_reels = self.session.get(url_reels)
|
||||
r_reels.raise_for_status()
|
||||
self.reels_data = r_reels.json()
|
||||
|
||||
# Extract original language from the first audio track of the first reel
|
||||
original_language_code = "en" # Default fallback
|
||||
if self.reels_data and self.reels_data[0].get("audio_tracks"):
|
||||
first_audio_track = self.reels_data[0]["audio_tracks"][0]
|
||||
if "language_code" in first_audio_track:
|
||||
original_language_code = first_audio_track["language_code"]
|
||||
self.log.debug(f"Detected original language from reels: '{original_language_code}'")
|
||||
|
||||
genres = ", ".join(data.get("genres", [])) or "Unknown"
|
||||
description = (
|
||||
data.get("default_editorial_html", "")
|
||||
.replace("<p>", "").replace("</p>", "").replace("<em>", "").replace("</em>", "").strip()
|
||||
)
|
||||
year = data.get("year")
|
||||
name = data.get("title", "Unknown")
|
||||
|
||||
movie = Movie(
|
||||
id_=self.film_id,
|
||||
service=self.__class__,
|
||||
name=name,
|
||||
year=year,
|
||||
description=description,
|
||||
language=Language.get(original_language_code),
|
||||
data=data,
|
||||
)
|
||||
|
||||
return Movies([movie])
|
||||
|
||||
    def _get_series_titles(self) -> Titles_T:
        """Fetch episodes for one season (if a season slug was given) or all seasons.

        Raises ValueError when an explicitly requested season 404s, or when the
        series lists no seasons at all. When iterating all seasons, unavailable
        (404) or empty seasons are logged and skipped, not fatal.
        """
        # Fetch series metadata
        series_url = self.config["endpoints"]["series"].format(series_slug=self.series_slug)
        r_series = self.session.get(series_url)
        r_series.raise_for_status()
        series_data = r_series.json()

        episodes = []

        # If season is explicitly specified, only fetch that season
        if self.season_slug:
            eps_url = self.config["endpoints"]["season_episodes"].format(
                series_slug=self.series_slug,
                season_slug=self.season_slug
            )
            r_eps = self.session.get(eps_url)
            if r_eps.status_code == 404:
                raise ValueError(f"Season '{self.season_slug}' not found.")
            r_eps.raise_for_status()
            episodes_data = r_eps.json().get("episodes", [])
            self._add_episodes_to_list(episodes, episodes_data, series_data)
        else:
            # No season specified: fetch ALL seasons listed on the series object
            seasons = series_data.get("seasons", [])
            if not seasons:
                raise ValueError("No seasons found for this series.")

            for season in seasons:
                season_slug = season["slug"]
                eps_url = self.config["endpoints"]["season_episodes"].format(
                    series_slug=self.series_slug,
                    season_slug=season_slug
                )

                self.log.debug(f"Fetching episodes for season: {season_slug}")

                r_eps = self.session.get(eps_url)

                # Skip (not stop) when a season returns 404
                if r_eps.status_code == 404:
                    self.log.info(f"Season '{season_slug}' not available, skipping.")
                    continue

                r_eps.raise_for_status()
                episodes_data = r_eps.json().get("episodes", [])

                # Also skip seasons that come back with an empty episode list
                if not episodes_data:
                    self.log.info(f"No episodes found in season '{season_slug}'.")
                    continue

                self._add_episodes_to_list(episodes, episodes_data, series_data)

        # NOTE(review): local import — presumably shadows/mirrors a module-level
        # import; verify the top of the file before removing.
        from unshackle.core.titles import Series
        return Series(sorted(episodes, key=lambda x: (x.season, x.number)))
|
||||
|
||||
def _add_episodes_to_list(self, episodes_list: list, episodes_data: list, series_data: dict):
|
||||
"""Helper to avoid code duplication when adding episodes."""
|
||||
for ep in episodes_data:
|
||||
# Use episode's own language detection via its consumable.playback_languages
|
||||
playback_langs = ep.get("consumable", {}).get("playback_languages", {})
|
||||
audio_langs = playback_langs.get("audio_options", ["English"])
|
||||
lang_code = audio_langs[0].split()[0].lower() if audio_langs else "en"
|
||||
|
||||
try:
|
||||
detected_lang = Language.get(lang_code)
|
||||
except:
|
||||
detected_lang = Language.get("en")
|
||||
|
||||
episodes_list.append(Episode(
|
||||
id_=ep["id"],
|
||||
service=self.__class__,
|
||||
title=series_data["title"], # Series title
|
||||
season=ep["episode"]["season_number"],
|
||||
number=ep["episode"]["number"],
|
||||
name=ep["title"], # Episode title
|
||||
description=ep.get("short_synopsis", ""),
|
||||
language=detected_lang,
|
||||
data=ep, # Full episode data for later use in get_tracks
|
||||
))
|
||||
|
||||
    def get_tracks(self, title: Title_T) -> Tracks:
        """Initiate viewing, fetch the secure DASH manifest, and return tracks + subtitles.

        Works for both films and series episodes; reel data (cached by
        _get_film_title) is only consulted for films.

        :raises RuntimeError: when the title has no id, or film state is missing.
        :raises ValueError: when no DASH manifest URL is present in secure_url.
        """
        film_id = getattr(title, "id", None)
        if not film_id:
            raise RuntimeError("Title ID not found.")

        # Initiating a viewing session yields the reel chosen by the backend.
        url_view = self.config["endpoints"]["initiate_viewing"].format(film_id=film_id)
        r_view = self.session.post(url_view, json={}, headers={"Content-Type": "application/json"})
        r_view.raise_for_status()
        view_data = r_view.json()
        reel_id = view_data["reel_id"]

        # For films, use reels data for language/audio mapping
        if not self.is_series:
            if not self.film_id:
                raise RuntimeError("film_id not set. Call get_titles() first.")

            if not self.reels_data:
                self.log.warning("Reels data not cached, fetching now.")
                url_reels = self.config["endpoints"]["reels"].format(film_id=film_id)
                r_reels = self.session.get(url_reels)
                r_reels.raise_for_status()
                reels = r_reels.json()
            else:
                reels = self.reels_data

            # NOTE(review): `reel` is computed but never used below — either dead
            # code or a missing audio/language mapping step; confirm intent.
            reel = next((r for r in reels if r["id"] == reel_id), reels[0])
        else:
            # For episodes, we don't need reel-based logic - just proceed
            pass

        # Request secure streaming URL, works for both films and episodes
        url_secure = self.config["endpoints"]["secure_url"].format(film_id=film_id)
        r_secure = self.session.get(url_secure)
        r_secure.raise_for_status()
        secure_data = r_secure.json()

        # Pick the first DASH entry from the offered stream URLs.
        manifest_url = None
        for entry in secure_data.get("urls", []):
            if entry.get("content_type") == "application/dash+xml":
                manifest_url = entry["src"]
                break

        if not manifest_url:
            raise ValueError("No DASH manifest URL found.")

        # Parse DASH, use title.language as fallback
        tracks = DASH.from_url(manifest_url, session=self.session).to_tracks(language=title.language)

        # Build WebVTT subtitle tracks from text_track_urls; entries without a URL
        # are skipped.
        subtitles = []
        for sub in secure_data.get("text_track_urls", []):
            lang_code = sub.get("language_code", "und")
            vtt_url = sub.get("url")
            if not vtt_url:
                continue

            # Mark the subtitle matching the title's own language as original.
            is_original = lang_code == title.language.language

            subtitles.append(
                Subtitle(
                    id_=sub["id"],
                    url=vtt_url,
                    language=Language.get(lang_code),
                    is_original_lang=is_original,
                    codec=Subtitle.Codec.WebVTT,
                    name=sub.get("display_name", lang_code.upper()),
                    forced=False,
                    sdh=False,
                )
            )
        tracks.subtitles = subtitles

        return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
|
||||
return []
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
|
||||
if not self.user_id:
|
||||
raise RuntimeError("user_id not set — authenticate first.")
|
||||
|
||||
dt_custom_data = {
|
||||
"userId": self.user_id,
|
||||
"sessionId": self.lt_token,
|
||||
"merchant": "mubi"
|
||||
}
|
||||
|
||||
dt_custom_data_b64 = base64.b64encode(json.dumps(dt_custom_data).encode()).decode()
|
||||
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
|
||||
"Accept": "*/*",
|
||||
"Origin": "https://mubi.com",
|
||||
"Referer": "https://mubi.com/",
|
||||
"dt-custom-data": dt_custom_data_b64,
|
||||
}
|
||||
|
||||
r = self.session.post(
|
||||
self.config["endpoints"]["license"],
|
||||
data=challenge,
|
||||
headers=headers,
|
||||
)
|
||||
r.raise_for_status()
|
||||
license_data = r.json()
|
||||
if license_data.get("status") != "OK":
|
||||
raise PermissionError(f"DRM license error: {license_data}")
|
||||
return base64.b64decode(license_data["license"])
|
||||
|
||||
12
MUBI/config.yaml
Normal file
12
MUBI/config.yaml
Normal file
@ -0,0 +1,12 @@
|
||||
endpoints:
|
||||
account: "https://api.mubi.com/v4/account"
|
||||
current_user: "https://api.mubi.com/v4/current_user"
|
||||
film_by_slug: "https://api.mubi.com/v4/films/{slug}"
|
||||
playback_languages: "https://api.mubi.com/v4/films/{film_id}/playback_languages"
|
||||
initiate_viewing: "https://api.mubi.com/v4/films/{film_id}/viewing?parental_lock_enabled=true"
|
||||
reels: "https://api.mubi.com/v4/films/{film_id}/reels"
|
||||
secure_url: "https://api.mubi.com/v4/films/{film_id}/viewing/secure_url"
|
||||
license: "https://lic.drmtoday.com/license-proxy-widevine/cenc/"
|
||||
ip_geolocation: "https://directory.cookieyes.com/api/v1/ip"
|
||||
series: "https://api.mubi.com/v4/series/{series_slug}"
|
||||
season_episodes: "https://api.mubi.com/v4/series/{series_slug}/seasons/{season_slug}/episodes/available"
|
||||
208
MY5/__init__.py
Normal file
208
MY5/__init__.py
Normal file
@ -0,0 +1,208 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from collections.abc import Generator
|
||||
from typing import Any, Union
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import click
|
||||
import requests
|
||||
from click import Context
|
||||
from pywidevine.cdm import Cdm as WidevineCdm
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Tracks
|
||||
from unshackle.core.utils.sslciphers import SSLCiphers
|
||||
|
||||
|
||||
class MY5(Service):
    """
    \b
    Service code for Channel 5's My5 streaming service (https://channel5.com).

    \b
    Version: 1.0.1
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      L3: 1080p, AAC2.0

    \b
    Tips:
        - Input for series/films/episodes can be either complete URL or just the slug/path:
            https://www.channel5.com/the-cuckoo OR the-cuckoo OR the-cuckoo/season-1/episode-1

    \b
    Known bugs:
        - The progress bar is broken for certain DASH manifests
            See issue: https://github.com/devine-dl/devine/issues/106

    """

    ALIASES = ("channel5", "ch5", "c5")
    GEOFENCE = ("gb",)
    TITLE_RE = r"^(?:https?://(?:www\.)?channel5\.com(?:/show)?/)?(?P<id>[a-z0-9-]+)(?:/(?P<sea>[a-z0-9-]+))?(?:/(?P<ep>[a-z0-9-]+))?"

    @staticmethod
    @click.command(name="MY5", short_help="https://channel5.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> MY5:
        return MY5(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        super().__init__(ctx)

        # The corona API expects the Android client user-agent from config.
        self.session.headers.update({"user-agent": self.config["user_agent"]})

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield SearchResult objects for shows matching self.title."""
        params = {
            "platform": "my5desktop",
            "friendly": "1",
            "query": self.title,
        }

        r = self.session.get(self.config["endpoints"]["search"], params=params)
        r.raise_for_status()

        results = r.json()
        for result in results["shows"]:
            yield SearchResult(
                id_=result.get("f_name"),
                title=result.get("title"),
                description=result.get("s_desc"),
                label=result.get("genre"),
                url="https://www.channel5.com/show/" + result.get("f_name"),
            )

    def get_titles(self) -> Union[Movies, Series]:
        """Resolve self.title (URL or slug) into a Movies or Series container.

        A slug carrying season and episode parts fetches that single episode;
        otherwise all episodes of the show are listed, and shows whose first
        episode carries genre "Film" are returned as Movies.

        :raises ValueError: when no show id can be parsed from the input.
        """
        title, season, episode = (re.match(self.TITLE_RE, self.title).group(i) for i in ("id", "sea", "ep"))
        if not title:
            raise ValueError("Could not parse ID from title - is the URL correct?")

        if season and episode:
            r = self.session.get(
                self.config["endpoints"]["single"].format(
                    show=title,
                    season=season,
                    episode=episode,
                )
            )
            r.raise_for_status()
            episode = r.json()
            return Series(
                [
                    Episode(
                        id_=episode.get("id"),
                        service=self.__class__,
                        title=episode.get("sh_title"),
                        season=int(episode.get("sea_num")) if episode.get("sea_num") else 0,
                        number=int(episode.get("ep_num")) if episode.get("ep_num") else 0,
                        name=episode.get("sh_title"),
                        language="en",
                    )
                ]
            )

        r = self.session.get(self.config["endpoints"]["episodes"].format(show=title))
        r.raise_for_status()
        data = r.json()

        if data["episodes"][0]["genre"] == "Film":
            return Movies(
                [
                    Movie(
                        id_=movie.get("id"),
                        service=self.__class__,
                        year=None,
                        name=movie.get("sh_title"),
                        language="en",  # TODO: don't assume
                    )
                    for movie in data.get("episodes")
                ]
            )
        else:
            return Series(
                [
                    Episode(
                        id_=episode.get("id"),
                        service=self.__class__,
                        title=episode.get("sh_title"),
                        season=int(episode.get("sea_num")) if episode.get("sea_num") else 0,
                        # fix: was gated on "sea_num", which zeroed or crashed
                        # episode numbers whenever the two keys disagreed
                        number=int(episode.get("ep_num")) if episode.get("ep_num") else 0,
                        name=episode.get("title"),
                        language="en",  # TODO: don't assume
                    )
                    for episode in data["episodes"]
                ]
            )

    def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
        """Fetch the DASH manifest + license URL for the title and parse its tracks."""
        self.manifest, self.license = self.get_playlist(title.id)

        tracks = DASH.from_url(self.manifest, self.session).to_tracks(title.language)

        # Audio representations tagged with an accessibility Role are descriptive audio.
        for track in tracks.audio:
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Union[Movie, Episode]) -> list[Chapter]:
        # No chapter data is sourced for this service.
        return []

    def get_widevine_service_certificate(self, **_: Any) -> str:
        return WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
        # fix: annotation corrected from -> str; r.content is bytes.
        r = self.session.post(self.license, data=challenge)
        r.raise_for_status()

        return r.content

    # Service specific functions

    def get_playlist(self, asset_id: str) -> tuple:
        """Return (manifest_url, license_url) for an asset.

        Authenticates against the cassie-auth endpoint using the bundled client
        TLS certificate (written to a temp file for requests' ``cert=``), then
        rewrites the rendition URL's last path segment into a ``.mpd`` manifest.

        :raises ConnectionError: when the auth request fails (original exception chained).
        :raises ValueError: when the response has no assets or no widevine asset.
        """
        session = self.session
        for prefix in ("https://", "http://"):
            session.mount(prefix, SSLCiphers())

        cert_binary = base64.b64decode(self.config["certificate"])
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pem") as cert_file:
            cert_file.write(cert_binary)
            cert_path = cert_file.name
        try:
            r = session.get(url=self.config["endpoints"]["auth"].format(title_id=asset_id), cert=cert_path)
        except requests.RequestException as e:
            # fix: chain the original exception so TLS/proxy failures stay diagnosable
            if "Max retries exceeded" in str(e):
                raise ConnectionError(
                    "Permission denied. If you're behind a VPN/proxy, you might be blocked"
                ) from e
            else:
                raise ConnectionError(f"Failed to request assets: {str(e)}") from e
        finally:
            os.remove(cert_path)

        data = r.json()
        if not data.get("assets"):
            raise ValueError(f"Could not find asset: {data}")

        # fix: explicit error instead of a bare IndexError when no widevine asset exists
        asset = next((x for x in data["assets"] if x["drm"] == "widevine"), None)
        if asset is None:
            raise ValueError(f"No Widevine asset available: {data}")
        rendition = asset["renditions"][0]
        mpd_url = rendition["url"]
        lic_url = asset["keyserver"]

        # Strip the "-..."/"_..." suffix from the last path segment and force a
        # .mpd extension — presumably yields the unrestricted manifest; TODO confirm.
        parse = urlparse(mpd_url)
        path = parse.path.split("/")
        path[-1] = path[-1].split("-")[0].split("_")[0]
        manifest = urlunparse(parse._replace(path="/".join(path)))
        manifest += ".mpd" if not manifest.endswith("mpd") else ""

        return manifest, lic_url
|
||||
38
MY5/config.yaml
Normal file
38
MY5/config.yaml
Normal file
@ -0,0 +1,38 @@
|
||||
user_agent: Dalvik/2.1.0 (Linux; U; Android 14; SM-S901B Build/UP1A.231005.007)
|
||||
|
||||
endpoints:
|
||||
base: https://corona.channel5.com
|
||||
content: https://corona.channel5.com/shows/{show}.json?platform=my5android
|
||||
episodes: https://corona.channel5.com/shows/{show}/episodes.json?platform=my5android
|
||||
single: https://corona.channel5.com/shows/{show}/seasons/{season}/episodes/{episode}.json?platform=my5android
|
||||
auth: https://cassie-auth.channel5.com/api/v2/media/my5androidhydradash/{title_id}.json
|
||||
search: https://corona.channel5.com/shows/search.json
|
||||
|
||||
certificate: |
|
||||
LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tDQpNSUlDdXpDQ0FpU2dBd0lCQWdJRVhMU1BGVEFOQmdrcWhraUc5dzBCQVFVRkFE
|
||||
QmxNUXN3Q1FZRFZRUUdFd0pIDQpRakVWTUJNR0ExVUVCd3dNUkdWbVlYVnNkQ0JEYVhSNU1SSXdFQVlEVlFRS0RBbERhR0Z1Ym1W
|
||||
c0lEVXhEekFODQpCZ05WQkFzTUJrTmhjM05wWlRFYU1CZ0dBMVVFQXd3UlEyRnpjMmxsSUUxbFpHbGhJRUYxZEdnd0hoY05NVGt3
|
||||
DQpOREUxTVRRd016QXhXaGNOTWprd05ERTFNVFF3TXpBeFdqQ0JqakVMTUFrR0ExVUVCaE1DUjBJeEVqQVFCZ05WDQpCQW9NQ1VO
|
||||
b1lXNXVaV3dnTlRFWE1CVUdBMVVFQ3d3T1EyRnpjMmxsSUdOc2FXVnVkSE14VWpCUUJnTlZCQU1NDQpTVU5oYzNOcFpTQlRaV3ht
|
||||
TFhOcFoyNWxaQ0JEWlhKMGFXWnBZMkYwWlNCbWIzSWdUWGsxSUVGdVpISnZhV1FnDQpUbVY0ZENCSFpXNGdZMnhwWlc1MElERTFO
|
||||
VFV6TXpZNU9ERXdnWjh3RFFZSktvWklodmNOQVFFQkJRQURnWTBBDQpNSUdKQW9HQkFNbVVTSHFCZ3pwbThXelVHZ2VDSWZvSTI3
|
||||
QlovQmNmWktpbnl5dXFNVlpDNXRLaUtaRWpydFV4DQpoMXFVcDJSSkN3Ui9RcENPQ2RQdFhzMENzekZvd1ByTlY4RHFtUXZqbzY5
|
||||
dlhvTEM3c2RLUjQ1cEFUQU8vY3JLDQorTUFPUXo1VWEyQ1ZrYnY1SCtaMVhWWndqbm1qNGJHZEJHM005b0NzQlVqTEh0bm1nQSty
|
||||
QWdNQkFBR2pUakJNDQpNQjBHQTFVZERnUVdCQlNVVUhrY3JKNUVkVTVWM2ZJbXQra1ljdkdnZFRBTEJnTlZIUThFQkFNQ0E3Z3dD
|
||||
UVlEDQpWUjBUQkFJd0FEQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFOQmdrcWhraUc5dzBCQVFVRkFBT0JnUUFpDQpHNi84
|
||||
OUFEaDhEOUs0OXZjeklMQ2pqbGh6bG5US09GM2l1Um0vSjZYaWtxY3RxSDF0a01na0FXcHAwQldBRm9IDQpJbU5WSEtKdTRnZXgy
|
||||
cEtLejNqOVlRNG5EWENQVTdVb0N2aDl5TTNYT0RITWZRT01sZkRtMU9GZkh2QkJvSHNVDQpHSE9EQTkwQi8xcU0xSlFaZzBOVjZi
|
||||
UllrUytCOWdtSFI4dXhtZktrL0E9PQ0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQ0KLS0tLS1CRUdJTiBQUklWQVRFIEtFWS0t
|
||||
LS0tDQpNSUlDZHdJQkFEQU5CZ2txaGtpRzl3MEJBUUVGQUFTQ0FtRXdnZ0pkQWdFQUFvR0JBTW1VU0hxQmd6cG04V3pVDQpHZ2VD
|
||||
SWZvSTI3QlovQmNmWktpbnl5dXFNVlpDNXRLaUtaRWpydFV4aDFxVXAyUkpDd1IvUXBDT0NkUHRYczBDDQpzekZvd1ByTlY4RHFt
|
||||
UXZqbzY5dlhvTEM3c2RLUjQ1cEFUQU8vY3JLK01BT1F6NVVhMkNWa2J2NUgrWjFYVlp3DQpqbm1qNGJHZEJHM005b0NzQlVqTEh0
|
||||
bm1nQStyQWdNQkFBRUNnWUFjTVY4SnN6OTFWWnlDaWcreDZTTnpZdlhHDQo3bTd4bFBSeEdqYXlQclZ6eVJ1YmJnNitPKzFoNS9G
|
||||
MFc4SWxwb21oOFdLUDhTMnl0RXBFQmhLbDRHN001WXdqDQp0SCtCVXFNMTNjbFdiQkxuQTZMT2RVeEVDTVhIUktjdHk5UE52UlJQ
|
||||
cU9aV0YycDc5U1BFdFY5Q2o1SXNaVUdNDQpRcHYybk5oN1M2MUZGRVRuSVFKQkFPTXJNd2tnOGQzbksyS0lnVUNrcEtCRHlGTUJj
|
||||
UXN0NG82VkxvVjNjenBwDQpxMW5FWGx4WnduMFh6Ni9GVjRWdTZYTjJLLzQxL2pCeWdTUlFXa05YVThNQ1FRRGpLYXVpdE1UajBM
|
||||
ajU3QkJ3DQppNkNON0VFeUJSSkZaVGRSMDM4ZzkxSEFoUkVXVWpuQ0Vrc1UwcTl4TUNOdnM3OFN4RmQ1ODg5RUJQTnd1RDdvDQor
|
||||
NTM1QWtFQTNwVTNYbHh2WUhQZktKNkR0cWtidlFSdFJoZUZnZVNsdGZzcUtCQVFVVTIwWFRKeEdwL0FWdjE3DQp1OGZxcDQwekpM
|
||||
VEhDa0F4SFpzME9qYVpHcDU0TFFKQWJtM01iUjA1ZFpINnlpdlMxaE5hYW9QR01iMjdZeGJRDQpMS3dHNmd5d3BrbEp4RE1XdHR4
|
||||
VHVYeXVJdlVHMVA5cFRJTThEeUhSeVR3cTU4bjVjeU1XYVFKQkFMVFRwZkVtDQoxdWhCeUd0NEtab3dYM2dhREpVZGU0ZjBwN3Ry
|
||||
RFZGcExDNVJYcVVBQXNBQ2pzTHNYaEFadlovUEEwUDBiU2hmDQp4cUFRa2lnYmNKRXdxdjQ9DQotLS0tLUVORCBQUklWQVRFIEtF
|
||||
WS0tLS0t
|
||||
10
NF/MSL/MSLKeys.py
Normal file
10
NF/MSL/MSLKeys.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .MSLObject import MSLObject
|
||||
|
||||
|
||||
class MSLKeys(MSLObject):
    """Container for negotiated MSL crypto state (keys, master token, CDM session)."""

    def __init__(self, encryption=None, sign=None, rsa=None, mastertoken=None, cdm_session=None):
        # RSA keypair used for the AsymmetricWrapped exchange
        # (stored as exported PEM when round-tripped through the cache).
        self.rsa = rsa
        # AES encryption key and HMAC signing key from key exchange.
        self.encryption = encryption
        self.sign = sign
        # Master token dict returned by the key exchange.
        self.mastertoken = mastertoken
        # Optional CDM session handle (Widevine exchange, currently unused).
        self.cdm_session = cdm_session
|
||||
6
NF/MSL/MSLObject.py
Normal file
6
NF/MSL/MSLObject.py
Normal file
@ -0,0 +1,6 @@
|
||||
import jsonpickle
|
||||
|
||||
|
||||
class MSLObject:
    """Base class for MSL message objects; repr shows the JSON form for debugging."""

    def __repr__(self):
        body = jsonpickle.encode(self, unpicklable=False)
        return f"<{type(self).__name__} {body}>"
|
||||
408
NF/MSL/__init__.py
Normal file
408
NF/MSL/__init__.py
Normal file
@ -0,0 +1,408 @@
|
||||
import base64
|
||||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import zlib
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
|
||||
import jsonpickle
|
||||
import requests
|
||||
from Cryptodome.Cipher import AES, PKCS1_OAEP
|
||||
from Cryptodome.Hash import HMAC, SHA256
|
||||
from Cryptodome.PublicKey import RSA
|
||||
from Cryptodome.Random import get_random_bytes
|
||||
from Cryptodome.Util import Padding
|
||||
|
||||
from unshackle.core.cacher import Cacher
|
||||
|
||||
from .MSLKeys import MSLKeys
|
||||
from .schemes import EntityAuthenticationSchemes # noqa: F401
|
||||
from .schemes import KeyExchangeSchemes
|
||||
from .schemes.EntityAuthentication import EntityAuthentication
|
||||
from .schemes.KeyExchangeRequest import KeyExchangeRequest
|
||||
# from vinetrimmer.utils.widevine.device import RemoteDevice
|
||||
|
||||
class MSL:
|
||||
log = logging.getLogger("MSL")
|
||||
|
||||
def __init__(self, session, endpoint, sender, keys, message_id, user_auth=None):
|
||||
self.session = session
|
||||
self.endpoint = endpoint
|
||||
self.sender = sender
|
||||
self.keys = keys
|
||||
self.user_auth = user_auth
|
||||
self.message_id = message_id
|
||||
|
||||
    @classmethod
    def handshake(cls, scheme: KeyExchangeSchemes, session: requests.Session, endpoint: str, sender: str, cache: Cacher):
        """Perform the MSL key-exchange handshake, or reuse cached keys when valid.

        :param scheme: key-exchange scheme (only AsymmetricWrapped is implemented).
        :param session: requests session used for the exchange POST.
        :param endpoint: MSL endpoint URL.
        :param sender: ESN; also keys the cache entry.
        :param cache: Cacher used to persist/restore MSLKeys between runs.
        :return: an MSL instance bound to the negotiated keys.
        """
        cache = cache.get(sender)
        # Random 52-bit message id; payload chunks are bound to it (replay protection).
        message_id = random.randint(0, pow(2, 52))
        msl_keys = MSL.load_cache_data(cache)

        if msl_keys is not None:
            cls.log.info("Using cached MSL data")
        else:
            msl_keys = MSLKeys()
            if scheme != KeyExchangeSchemes.Widevine:
                msl_keys.rsa = RSA.generate(2048)

            # NOTE: the Widevine key-exchange path is currently not implemented;
            # the CDM-based flow exists only as commented-out history. Only the
            # AsymmetricWrapped (RSA) exchange below is live.
            keyrequestdata = KeyExchangeRequest.AsymmetricWrapped(
                keypairid="superKeyPair",
                mechanism="JWK_RSA",
                publickey=msl_keys.rsa.publickey().exportKey(format="DER")
            )

            # Handshake message: unauthenticated entity auth + handshake header,
            # followed by a single empty payload chunk.
            data = jsonpickle.encode({
                "entityauthdata": EntityAuthentication.Unauthenticated(sender),
                "headerdata": base64.b64encode(MSL.generate_msg_header(
                    message_id=message_id,
                    sender=sender,
                    is_handshake=True,
                    keyrequestdata=keyrequestdata
                ).encode("utf-8")).decode("utf-8"),
                "signature": ""
            }, unpicklable=False)
            data += json.dumps({
                "payload": base64.b64encode(json.dumps({
                    "messageid": message_id,
                    "data": "",
                    "sequencenumber": 1,
                    "endofmsg": True
                }).encode("utf-8")).decode("utf-8"),
                "signature": ""
            })

            try:
                r = session.post(
                    url=endpoint,
                    data=data
                )
            except requests.HTTPError as e:
                raise cls.log.exit(f"- Key exchange failed, response data is unexpected: {e.response.text}")

            key_exchange = r.json()  # expecting no payloads, so this is fine
            if "errordata" in key_exchange:
                # errordata is base64-encoded JSON with an errormsg field.
                raise cls.log.exit("- Key exchange failed: " + json.loads(base64.b64decode(
                    key_exchange["errordata"]
                ).decode())["errormsg"])

            # parse the crypto keys
            key_response_data = json.JSONDecoder().decode(base64.b64decode(
                key_exchange["headerdata"]
            ).decode("utf-8"))["keyresponsedata"]

            if key_response_data["scheme"] != str(scheme):
                raise cls.log.exit("- Key exchange scheme mismatch occurred")

            key_data = key_response_data["keydata"]
            # (Widevine variant of key unwrapping intentionally omitted — see note above.)
            # RSA-OAEP-unwrap the returned JWKs; "k" carries the base64url key material.
            cipher_rsa = PKCS1_OAEP.new(msl_keys.rsa)
            msl_keys.encryption = MSL.base64key_decode(
                json.JSONDecoder().decode(cipher_rsa.decrypt(
                    base64.b64decode(key_data["encryptionkey"])
                ).decode("utf-8"))["k"]
            )
            msl_keys.sign = MSL.base64key_decode(
                json.JSONDecoder().decode(cipher_rsa.decrypt(
                    base64.b64decode(key_data["hmackey"])
                ).decode("utf-8"))["k"]
            )
            msl_keys.mastertoken = key_response_data["mastertoken"]

            MSL.cache_keys(msl_keys, cache)
            cls.log.info("MSL handshake successful")
        return cls(
            session=session,
            endpoint=endpoint,
            sender=sender,
            keys=msl_keys,
            message_id=message_id
        )
|
||||
|
||||
@staticmethod
|
||||
def load_cache_data(cacher: Cacher):
|
||||
if not cacher or cacher == {}:
|
||||
return None
|
||||
# with open(msl_keys_path, encoding="utf-8") as fd:
|
||||
# msl_keys = jsonpickle.decode(fd.read())
|
||||
msl_keys = jsonpickle.decode(cacher.data)
|
||||
if msl_keys.rsa:
|
||||
# noinspection PyTypeChecker
|
||||
# expects RsaKey, but is a string, this is because jsonpickle can't pickle RsaKey object
|
||||
# so as a workaround it exports to PEM, and then when reading, it imports that PEM back
|
||||
# to an RsaKey :)
|
||||
msl_keys.rsa = RSA.importKey(msl_keys.rsa)
|
||||
# If it's expired or close to, return None as it's unusable
|
||||
if msl_keys.mastertoken and ((datetime.utcfromtimestamp(int(json.JSONDecoder().decode(
|
||||
base64.b64decode(msl_keys.mastertoken["tokendata"]).decode("utf-8")
|
||||
)["expiration"])) - datetime.now()).total_seconds() / 60 / 60) < 10:
|
||||
return None
|
||||
return msl_keys
|
||||
|
||||
@staticmethod
|
||||
def cache_keys(msl_keys, cache: Cacher):
|
||||
# os.makedirs(os.path.dirname(cache), exist_ok=True)
|
||||
if msl_keys.rsa:
|
||||
# jsonpickle can't pickle RsaKey objects :(
|
||||
msl_keys.rsa = msl_keys.rsa.export_key()
|
||||
# with open(cache, "w", encoding="utf-8") as fd:
|
||||
# fd.write()
|
||||
cache.set(jsonpickle.encode(msl_keys))
|
||||
if msl_keys.rsa:
|
||||
# re-import now
|
||||
msl_keys.rsa = RSA.importKey(msl_keys.rsa)
|
||||
|
||||
    @staticmethod
    def generate_msg_header(message_id: int, sender: str, is_handshake: bool, userauthdata=None, keyrequestdata=None,
                            compression: str = "GZIP") -> str:
        """
        The MSL header carries all MSL data used for entity and user authentication, message encryption
        and verification, and service tokens. Portions of the MSL header are encrypted.
        https://github.com/Netflix/msl/wiki/Messages#header-data

        :param message_id: number against which payload chunks are bound to protect against replay.
        :param sender: ESN
        :param is_handshake: This flag is set true if the message is a handshake message and will not include any
        payload chunks. It will include keyrequestdata.
        :param userauthdata: UserAuthData
        :param keyrequestdata: KeyRequestData
        :param compression: Supported compression algorithms.

        :return: The JSON string of the header (encoded via jsonpickle).
        """
        header_data = {
            "messageid": message_id,
            "renewable": True,  # MUST be True if is_handshake
            "handshake": is_handshake,
            "capabilities": {
                # empty list advertises "no compression" when compression is falsy
                "compressionalgos": [compression] if compression else [],
                "languages": ["en-US"],  # bcp-47
                "encoderformats": ["JSON"]
            },
            "timestamp": int(time.time()),
            # undocumented or unused:
            "sender": sender,
            "nonreplayable": False,
            "recipient": "Netflix",
        }
        if userauthdata:
            header_data["userauthdata"] = userauthdata
        if keyrequestdata:
            # protocol expects a list of key request objects
            header_data["keyrequestdata"] = [keyrequestdata]
        return jsonpickle.encode(header_data, unpicklable=False)
|
||||
|
||||
@classmethod
|
||||
def get_widevine_key(cls, kid, keys, permissions):
|
||||
for key in keys:
|
||||
if key.kid != kid:
|
||||
continue
|
||||
if key.type != "OPERATOR_SESSION":
|
||||
cls.log.warning(f"Widevine Key Exchange: Wrong key type (not operator session) key {key}")
|
||||
continue
|
||||
if not set(permissions) <= set(key.permissions):
|
||||
cls.log.warning(f"Widevine Key Exchange: Incorrect permissions, key {key}, needed perms {permissions}")
|
||||
continue
|
||||
return key.key
|
||||
return None
|
||||
|
||||
    def send_message(self, endpoint, params, application_data, userauthdata=None):
        """Encrypt + sign application_data, POST it, and return the parsed response.

        :param endpoint: URL the MSL message is POSTed to.
        :param params: query-string parameters for the POST.
        :param application_data: payload object, JSON-encoded into payload chunks.
        :param userauthdata: optional UserAuthData included in the message header.
        :return: (header, payload_data) tuple from parse_message().
        """
        message = self.create_message(application_data, userauthdata)
        res = self.session.post(url=endpoint, data=message, params=params)
        header, payload_data = self.parse_message(res.text)
        if "errordata" in header:
            # errordata is base64-encoded JSON describing the MSL-level failure.
            raise self.log.exit(
                "- MSL response message contains an error: {}".format(
                    json.loads(base64.b64decode(header["errordata"].encode("utf-8")).decode("utf-8"))
                )
            )
        return header, payload_data
|
||||
|
||||
    def create_message(self, application_data, userauthdata=None):
        """Build a complete MSL message: encrypted+signed header followed by payload chunks.

        :param application_data: object JSON-encoded, gzipped, and carried as the payload.
        :param userauthdata: optional UserAuthData for the header.
        :return: concatenated JSON objects (header then each payload chunk) as one string.
        """
        self.message_id += 1  # each new message must use a new message id
        headerdata = self.encrypt(self.generate_msg_header(
            message_id=self.message_id,
            sender=self.sender,
            is_handshake=False,
            userauthdata=userauthdata
        ))

        header = json.dumps({
            "headerdata": base64.b64encode(headerdata.encode("utf-8")).decode("utf-8"),
            "signature": self.sign(headerdata).decode("utf-8"),
            "mastertoken": self.keys.mastertoken
        })

        # Single gzipped payload chunk carrying the whole application payload.
        payload_chunks = [self.encrypt(json.dumps({
            "messageid": self.message_id,
            "data": self.gzip_compress(json.dumps(application_data).encode("utf-8")).decode("utf-8"),
            "compressionalgo": "GZIP",
            "sequencenumber": 1,  # todo ; use sequence_number from master token instead?
            "endofmsg": True
        }))]

        # The wire format is JSON objects concatenated back-to-back
        # (parse_message splits on the "}{" boundaries).
        message = header
        for payload_chunk in payload_chunks:
            message += json.dumps({
                "payload": base64.b64encode(payload_chunk.encode("utf-8")).decode("utf-8"),
                "signature": self.sign(payload_chunk).decode("utf-8")
            })

        return message
|
||||
|
||||
def decrypt_payload_chunks(self, payload_chunks):
    """
    Decrypt and extract data from payload chunks.

    :param payload_chunks: list of payload chunks
    :return: json object from the combined chunk data ("result" member)
    :raises ValueError: if the decrypted response carries an error object
    """
    raw_data = ""

    for payload_chunk in payload_chunks:
        # todo ; verify signature of payload_chunk["signature"] against payload_chunk["payload"]
        # expecting base64-encoded json string
        payload_chunk = json.loads(base64.b64decode(payload_chunk["payload"]).decode("utf-8"))
        # decrypt the payload with the session encryption key (AES-CBC, PKCS#7 padded)
        payload_decrypted = AES.new(
            key=self.keys.encryption,
            mode=AES.MODE_CBC,
            iv=base64.b64decode(payload_chunk["iv"])
        ).decrypt(base64.b64decode(payload_chunk["ciphertext"]))
        payload_decrypted = Padding.unpad(payload_decrypted, 16)
        payload_decrypted = json.loads(payload_decrypted.decode("utf-8"))
        # decode and uncompress data if compressed
        payload_data = base64.b64decode(payload_decrypted["data"])
        if payload_decrypted.get("compressionalgo") == "GZIP":
            # 16 + MAX_WBITS tells zlib to expect a gzip header
            payload_data = zlib.decompress(payload_data, 16 + zlib.MAX_WBITS)
        raw_data += payload_data.decode("utf-8")

    data = json.loads(raw_data)
    if "error" in data:
        error = data["error"]
        error_display = error.get("display")
        error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

        if error_display:
            self.log.critical(f"- {error_display}")
        if error_detail:
            self.log.critical(f"- {error_detail}")
        if not (error_display or error_detail):
            self.log.critical(f"- {error}")

        # Previously this fell through (sys.exit was commented out) and
        # crashed on data["result"] with a bare KeyError; raise explicitly
        # so callers see the actual MSL error instead.
        raise ValueError(f"MSL response contains an error: {error_display or error_detail or error}")

    return data["result"]
|
||||
|
||||
def parse_message(self, message):
    """
    Parse an MSL message into a header and list of payload chunks.

    :param message: MSL message
    :returns: a 2-item tuple containing message header and payload chunks
        (empty dict when the message carries no payload)
    """
    # An MSL message is a stream of concatenated JSON objects; join them
    # into a JSON array so the whole stream parses in one call.
    parts = json.loads("[{}]".format(message.replace("}{", "},{")))
    header, *encrypted_chunks = parts

    payload_chunks = self.decrypt_payload_chunks(encrypted_chunks) if encrypted_chunks else {}
    return header, payload_chunks
|
||||
|
||||
@staticmethod
def gzip_compress(data):
    """GZIP-compress *data* (bytes) and return the result base64-encoded."""
    buffer = BytesIO()
    with gzip.GzipFile(fileobj=buffer, mode="w") as gz:
        gz.write(data)
    return base64.b64encode(buffer.getvalue())
|
||||
|
||||
@staticmethod
def base64key_decode(payload):
    """
    URL-safe base64-decode *payload*, restoring any stripped '=' padding.

    :raises ValueError: if the payload length is not a valid base64 length
    """
    remainder = len(payload) % 4
    if remainder == 1:
        # A length of 1 mod 4 can never be valid base64.
        raise ValueError("Invalid base64 string")
    if remainder:
        payload += "=" * (4 - remainder)
    return base64.urlsafe_b64decode(payload.encode("utf-8"))
|
||||
|
||||
def encrypt(self, plaintext):
    """
    Encrypt the given plaintext with the session encryption key.

    :param plaintext: string to encrypt
    :return: serialized JSON string of the encryption envelope
    """
    iv = get_random_bytes(16)
    cipher = AES.new(self.keys.encryption, AES.MODE_CBC, iv)
    ciphertext = cipher.encrypt(Padding.pad(plaintext.encode("utf-8"), 16))

    # The key id combines the sender with the master token's sequence number.
    token_data = json.loads(base64.b64decode(self.keys.mastertoken["tokendata"]).decode("utf-8"))

    return json.dumps({
        "ciphertext": base64.b64encode(ciphertext).decode("utf-8"),
        "keyid": "{}_{}".format(self.sender, token_data["sequencenumber"]),
        "sha256": "AA==",
        "iv": base64.b64encode(iv).decode("utf-8")
    })
|
||||
|
||||
def sign(self, text):
    """
    Calculate the HMAC-SHA256 signature for *text* with the current sign key.

    :param text: string to sign
    :return: Base64 encoded signature
    """
    digest = HMAC.new(self.keys.sign, text.encode("utf-8"), SHA256).digest()
    return base64.b64encode(digest)
|
||||
BIN
NF/MSL/__pycache__/MSLKeys.cpython-310.pyc
Normal file
BIN
NF/MSL/__pycache__/MSLKeys.cpython-310.pyc
Normal file
Binary file not shown.
BIN
NF/MSL/__pycache__/MSLObject.cpython-310.pyc
Normal file
BIN
NF/MSL/__pycache__/MSLObject.cpython-310.pyc
Normal file
Binary file not shown.
BIN
NF/MSL/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
NF/MSL/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
59
NF/MSL/schemes/EntityAuthentication.py
Normal file
59
NF/MSL/schemes/EntityAuthentication.py
Normal file
@ -0,0 +1,59 @@
|
||||
from .. import EntityAuthenticationSchemes
|
||||
from ..MSLObject import MSLObject
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
class EntityAuthentication(MSLObject):
    """
    Data used to identify and authenticate the entity associated with a message.
    https://github.com/Netflix/msl/wiki/Entity-Authentication-%28Configuration%29
    """

    def __init__(self, scheme, authdata):
        """
        :param scheme: Entity Authentication Scheme identifier
        :param authdata: Entity Authentication data
        """
        self.scheme = str(scheme)
        self.authdata = authdata

    @classmethod
    def Unauthenticated(cls, identity):
        """
        Build unauthenticated entity authentication data.

        This scheme provides no encryption or authentication and only
        identifies the entity, so identities can be harvested and spoofed.
        Its benefit is that the entity controls its own identity, useful when
        the identity is derived from or related to other data, or must be
        retained across state resets or MSL errors requiring entity
        re-authentication.

        :param identity: entity identity
        """
        return cls(
            scheme=EntityAuthenticationSchemes.Unauthenticated,
            authdata={"identity": identity}
        )

    @classmethod
    def Widevine(cls, devtype, keyrequest):
        """
        Build Widevine entity authentication data.

        Used by devices with the Widevine CDM. Provides no encryption or
        authentication and only identifies the entity, so identities can be
        harvested and spoofed. The identity is composed from the device type
        and the Widevine key request data; the CDM properties can be extracted
        from the key request by the server. When coupled with the Widevine key
        exchange scheme, the identity can be cryptographically validated by
        comparing the entity authentication key request data against the key
        exchange key request data. Note that the local entity will not know
        its own identity when using this scheme.

        :param devtype: local entity device type — an arbitrary value; the key
            request data may optionally be used to validate the claimed type
        :param keyrequest: Widevine key request
        """
        return cls(
            scheme=EntityAuthenticationSchemes.Widevine,
            authdata={
                "devtype": devtype,
                "keyrequest": keyrequest
            }
        )
|
||||
80
NF/MSL/schemes/KeyExchangeRequest.py
Normal file
80
NF/MSL/schemes/KeyExchangeRequest.py
Normal file
@ -0,0 +1,80 @@
|
||||
import base64
|
||||
|
||||
from .. import KeyExchangeSchemes
|
||||
from ..MSLObject import MSLObject
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
class KeyExchangeRequest(MSLObject):
    """
    Session key exchange data from a requesting entity.
    https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29
    """

    def __init__(self, scheme, keydata):
        """
        :param scheme: Key Exchange Scheme identifier
        :param keydata: Key Request data
        """
        self.scheme = str(scheme)
        self.keydata = keydata

    @classmethod
    def AsymmetricWrapped(cls, keypairid, mechanism, publickey):
        """
        Build asymmetric wrapped key exchange data.

        Uses a generated ephemeral asymmetric key pair for key exchange,
        typically when there is no other data or keys from which to base
        secure key exchange. Provides perfect forward secrecy but does not
        guarantee session keys are only available to the requesting entity
        if the requesting MSL stack has been modified to act for a third
        party. The key pair ID is included as a sanity check.

        Supported mechanisms and public key formats:

            Field       Public Key Format   Description
            RSA         SPKI                RSA-OAEP encrypt/decrypt
            ECC         SPKI                ECIES encrypt/decrypt
            JWEJS_RSA   SPKI                RSA-OAEP JSON Web Encryption JSON Serialization
            JWE_RSA     SPKI                RSA-OAEP JSON Web Encryption Compact Serialization
            JWK_RSA     SPKI                RSA-OAEP JSON Web Key
            JWK_RSAES   SPKI                RSA PKCS#1 JSON Web Key

        :param keypairid: key pair ID
        :param mechanism: asymmetric key type
        :param publickey: public key
        """
        return cls(
            scheme=KeyExchangeSchemes.AsymmetricWrapped,
            keydata={
                "keypairid": keypairid,
                "mechanism": mechanism,
                "publickey": base64.b64encode(publickey).decode("utf-8")
            }
        )

    @classmethod
    def Widevine(cls, keyrequest):
        """
        Build Widevine key exchange data.

        The Widevine CDM issues a one-time-use key request; the Widevine
        server library authenticates the request and returns randomly
        generated symmetric keys in a protected key response bound to the
        request and client library. The response specifies the key
        identities, types, and permitted usage. The key request also carries
        a model identifier and a persistent unique device identifier, and the
        client library protects the returned keys from inspection or misuse.

        :param keyrequest: Base64-encoded Widevine CDM license challenge
            (PSSH: b'\x0A\x7A\x00\x6C\x38\x2B'); raw bytes are encoded here
        """
        if not isinstance(keyrequest, str):
            keyrequest = base64.b64encode(keyrequest).decode()
        return cls(
            scheme=KeyExchangeSchemes.Widevine,
            keydata={"keyrequest": keyrequest}
        )
|
||||
59
NF/MSL/schemes/UserAuthentication.py
Normal file
59
NF/MSL/schemes/UserAuthentication.py
Normal file
@ -0,0 +1,59 @@
|
||||
from ..MSLObject import MSLObject
|
||||
from . import UserAuthenticationSchemes
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
class UserAuthentication(MSLObject):
    """
    Data used to identify and authenticate the user associated with a message.
    https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29
    """

    def __init__(self, scheme, authdata):
        """
        :param scheme: User Authentication Scheme identifier
        :param authdata: User Authentication data
        """
        self.scheme = str(scheme)
        self.authdata = authdata

    @classmethod
    def EmailPassword(cls, email, password):
        """
        Build email/password user authentication data — a standard user
        authentication scheme in wide use.

        :param email: user email address
        :param password: user password
        """
        return cls(
            scheme=UserAuthenticationSchemes.EmailPassword,
            authdata={
                "email": email,
                "password": password
            }
        )

    @classmethod
    def NetflixIDCookies(cls, netflixid, securenetflixid):
        """
        Build Netflix ID cookie user authentication data.

        Used when the user has previously logged in to the web site;
        possession of the cookies serves as proof of user identity. The
        Netflix ID cookie is encrypted, identifies the subscriber, and is
        analogous to a username; the Secure Netflix ID cookie is tied to it,
        only sent over HTTPS, and analogous to a password. If either cookie
        is absent from this structure the HTTP cookie headers will be queried
        for it — acceptable only when HTTPS is the underlying transport.

        :param netflixid: Netflix ID cookie
        :param securenetflixid: Secure Netflix ID cookie
        """
        return cls(
            scheme=UserAuthenticationSchemes.NetflixIDCookies,
            authdata={
                "netflixid": netflixid,
                "securenetflixid": securenetflixid
            }
        )
|
||||
24
NF/MSL/schemes/__init__.py
Normal file
24
NF/MSL/schemes/__init__.py
Normal file
@ -0,0 +1,24 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class Scheme(Enum):
    """Base for MSL scheme enums; str() yields the raw scheme identifier."""

    def __str__(self):
        return f"{self.value}"


class EntityAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Entity-Authentication-%28Configuration%29"""
    Unauthenticated = "NONE"
    Widevine = "WIDEVINE"


class UserAuthenticationSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/User-Authentication-%28Configuration%29"""
    EmailPassword = "EMAIL_PASSWORD"
    NetflixIDCookies = "NETFLIXID"


class KeyExchangeSchemes(Scheme):
    """https://github.com/Netflix/msl/wiki/Key-Exchange-%28Configuration%29"""
    AsymmetricWrapped = "ASYMMETRIC_WRAPPED"
    Widevine = "WIDEVINE"
||||
BIN
NF/MSL/schemes/__pycache__/EntityAuthentication.cpython-310.pyc
Normal file
BIN
NF/MSL/schemes/__pycache__/EntityAuthentication.cpython-310.pyc
Normal file
Binary file not shown.
BIN
NF/MSL/schemes/__pycache__/KeyExchangeRequest.cpython-310.pyc
Normal file
BIN
NF/MSL/schemes/__pycache__/KeyExchangeRequest.cpython-310.pyc
Normal file
Binary file not shown.
BIN
NF/MSL/schemes/__pycache__/UserAuthentication.cpython-310.pyc
Normal file
BIN
NF/MSL/schemes/__pycache__/UserAuthentication.cpython-310.pyc
Normal file
Binary file not shown.
BIN
NF/MSL/schemes/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
NF/MSL/schemes/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
978
NF/__init__.py
Normal file
978
NF/__init__.py
Normal file
@ -0,0 +1,978 @@
|
||||
import base64
|
||||
from datetime import datetime
|
||||
import json
|
||||
from math import e
|
||||
|
||||
import random
|
||||
import sys
|
||||
import time
|
||||
import typing
|
||||
from uuid import UUID
|
||||
import click
|
||||
import re
|
||||
from typing import List, Literal, Optional, Set, Union, Tuple
|
||||
from http.cookiejar import CookieJar
|
||||
from itertools import zip_longest
|
||||
from Crypto.Random import get_random_bytes
|
||||
|
||||
import jsonpickle
|
||||
from pymp4.parser import Box
|
||||
from pywidevine import PSSH, Cdm
|
||||
import requests
|
||||
from langcodes import Language
|
||||
from pathlib import Path
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.drm.widevine import Widevine
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Titles_T, Title_T
|
||||
from unshackle.core.titles.episode import Episode, Series
|
||||
from unshackle.core.titles.movie import Movie, Movies
|
||||
from unshackle.core.titles.title import Title
|
||||
from unshackle.core.tracks import Tracks, Chapters, Hybrid
|
||||
from unshackle.core.tracks.audio import Audio
|
||||
from unshackle.core.tracks.chapter import Chapter
|
||||
from unshackle.core.tracks.subtitle import Subtitle
|
||||
from unshackle.core.tracks.track import Track
|
||||
from unshackle.core.tracks.video import Video
|
||||
from unshackle.core.utils.collections import flatten, as_list
|
||||
|
||||
from unshackle.core.tracks.attachment import Attachment
|
||||
from unshackle.core.drm.playready import PlayReady
|
||||
from unshackle.core.titles.song import Song
|
||||
from unshackle.utils.base62 import decode
|
||||
from .MSL import MSL, KeyExchangeSchemes
|
||||
from .MSL.schemes.UserAuthentication import UserAuthentication
|
||||
|
||||
class NF(Service):
|
||||
"""
|
||||
Service for https://netflix.com
|
||||
Version: 1.0.0
|
||||
|
||||
Authorization: Cookies
|
||||
Security: UHD@SL3000/L1 FHD@SL3000/L1
|
||||
"""
|
||||
TITLE_RE = [
|
||||
r"^(?:https?://(?:www\.)?netflix\.com(?:/[a-z0-9]{2})?/(?:title/|watch/|.+jbv=))?(?P<id>\d+)",
|
||||
r"^https?://(?:www\.)?unogs\.com/title/(?P<id>\d+)",
|
||||
]
|
||||
ALIASES= ("NF", "Netflix")
|
||||
NF_LANG_MAP = {
|
||||
"es": "es-419",
|
||||
"pt": "pt-PT",
|
||||
}
|
||||
|
||||
# CLI entry point for the Netflix service; options map 1:1 onto __init__ kwargs.
@staticmethod
@click.command(name="Netflix", short_help="https://netflix.com")
@click.argument("title", type=str)
@click.option("-drm", "--drm-system", type=click.Choice(["widevine", "playready"], case_sensitive=False),
              default="widevine",
              help="which drm system to use")
@click.option("-p", "--profile", type=click.Choice(["MPL", "HPL", "QC", "MPL+HPL", "MPL+HPL+QC", "MPL+QC"], case_sensitive=False),
              default=None,
              help="H.264 profile to use. Default is best available.")
@click.option("--meta-lang", type=str, help="Language to use for metadata")
@click.option("-ht","--hydrate-track", is_flag=True, default=False, help="Hydrate missing audio and subtitle.")
@click.option("-hb", "--high-bitrate", is_flag=True, default=False, help="Get more video bitrate")
@click.pass_context
def cli(ctx, **kwargs):
    # Forward the parsed CLI options straight into the service constructor.
    return NF(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: click.Context, title: str, drm_system: Literal["widevine", "playready"], profile: str, meta_lang: str, hydrate_track: bool, high_bitrate: bool):
    """
    Store the CLI options and pull shared download options from the parent
    click context.

    :param ctx: click context (parent carries the global download options)
    :param title: Netflix title id or URL as given on the command line
    :param drm_system: which DRM system to license with
    :param profile: optional '+'-joined H.264 profile selection (e.g. "MPL+HPL")
    :param meta_lang: language to use for metadata
    :param hydrate_track: whether to hydrate missing audio/subtitle tracks
    :param high_bitrate: request profiles in split groups for more bitrate
    """
    super().__init__(ctx)
    # General
    self.title = title
    self.profile = profile
    self.meta_lang = meta_lang
    self.hydrate_track = hydrate_track
    self.drm_system = drm_system
    self.profiles: List[str] = []  # resolved in configure()
    self.requested_profiles: List[str] = []  # resolved in configure()
    self.high_bitrate = high_bitrate

    # MSL
    self.esn = self.cache.get("ESN")  # cached ESN; presumably set by configure — TODO confirm
    self.msl: Optional[MSL] = None
    self.userauthdata = None

    # Download options (taken from the parent command's params)
    # NOTE(review): assumes ctx.parent is always present — verify against caller.
    self.range = ctx.parent.params.get("range_") or [Video.Range.SDR]
    self.vcodec = ctx.parent.params.get("vcodec") or Video.Codec.AVC  # Defaults to H264
    self.acodec : Audio.Codec = ctx.parent.params.get("acodec") or Audio.Codec.EC3
    self.quality: List[int] = ctx.parent.params.get("quality")
    self.audio_only = ctx.parent.params.get("audio_only")
    self.subs_only = ctx.parent.params.get("subs_only")
    self.chapters_only = ctx.parent.params.get("chapters_only")
|
||||
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """
    Authenticate via the base service (cookies required), then run configure().

    :raises EnvironmentError: when no cookies were provided
    """
    self.log.debug("Authenticating Netflix service")
    result = super().authenticate(cookies, credential)
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")
    self.configure()
    return result
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Fetch metadata for the requested title and map it to Movies or Series."""
    metadata = self.get_metadata(self.title)
    if "video" not in metadata:
        self.log.error(f"Failed to get metadata: {metadata}")
        sys.exit(1)

    video = metadata["video"]

    if video["type"] == "movie":
        return Movies([Movie(
            id_=self.title,
            name=video["title"],
            year=video["year"],
            service=self.__class__,
            data=video,
            description=video["synopsis"]
        )])

    # Anything that isn't a movie is treated as episodic content.
    episodes = [
        Episode(
            id_=self.title,
            title=video["title"],
            year=season["year"],
            service=self.__class__,
            season=season["seq"],
            number=episode["seq"],
            name=episode["title"],
            data=episode,
            description=episode["synopsis"],
        )
        for season in video["seasons"]
        for episode in season["episodes"]
    ]
    return Series(episodes)
|
||||
|
||||
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """
    Fetch manifests for the configured profiles and assemble the track list.

    For AVC, also fetches extra QC/MPL profile manifests when no explicit
    profile was requested. For HEVC/DV/HDR, optionally splits profiles into
    separate manifest requests (high-bitrate mode). Afterwards performs an
    optional HYBRID DV+HDR merge, disables proxying on all tracks, and
    attaches the title poster.

    :param title: the Movie/Episode to fetch tracks for
    :returns: assembled Tracks collection
    """
    tracks = Tracks()

    def mark_repack(track_group):
        # mark videos + audio
        for t in track_group.videos + track_group.audio:
            t.needs_repack = True

        # mark subtitles
        for t in getattr(track_group, "subtitles", []):
            t.needs_repack = True

    # -------------------------------
    # Parse manifests / fetch tracks
    # -------------------------------
    if self.vcodec == Video.Codec.AVC:
        try:
            manifest = self.get_manifest(title, self.profiles)
            movie_track = self.manifest_as_tracks(manifest, title, self.hydrate_track)
            mark_repack(movie_track)
            tracks.add(movie_track)

            if self.profile is not None:
                self.log.info(f"Requested profiles: {self.profile}")
            else:
                # NOTE(review): when 720 not in self.quality this list is
                # always empty due to the `and` inside the comprehension;
                # the fallback below then uses the full QC list.
                qc_720_profile = [
                    x for x in self.config["profiles"]["video"][self.vcodec.extension.upper()]["QC"]
                    if "l40" not in x and 720 in self.quality
                ]

                # QC profiles
                qc_manifest = self.get_manifest(
                    title,
                    qc_720_profile if 720 in self.quality
                    else self.config["profiles"]["video"][self.vcodec.extension.upper()]["QC"]
                )
                qc_tracks = self.manifest_as_tracks(qc_manifest, title, False)
                mark_repack(qc_tracks)
                tracks.add(qc_tracks.videos)

                # MPL Profiles
                mpl_manifest = self.get_manifest(
                    title,
                    [x for x in self.config["profiles"]["video"][self.vcodec.extension.upper()]["MPL"]
                     if "l40" not in x]
                )
                mpl_tracks = self.manifest_as_tracks(mpl_manifest, title, False)
                mark_repack(mpl_tracks)
                tracks.add(mpl_tracks.videos)

        except Exception as e:
            self.log.error(e)

    else:
        # HEVC / DV / HDR mode
        if self.high_bitrate:
            # Request each profile group separately; only the first group
            # contributes audio/subtitles, later groups add videos only.
            splitted_profiles = self.split_profiles(self.profiles)
            for index, profile_list in enumerate(splitted_profiles):
                try:
                    self.log.debug(f"Index: {index}. Getting profiles: {profile_list}")
                    manifest = self.get_manifest(title, profile_list)
                    manifest_tracks = self.manifest_as_tracks(
                        manifest,
                        title,
                        self.hydrate_track if index == 0 else False
                    )
                    mark_repack(manifest_tracks)
                    tracks.add(manifest_tracks if index == 0 else manifest_tracks.videos)

                except Exception:
                    self.log.error(f"Error getting profile: {profile_list}. Skipping")
                    continue
        else:
            try:
                manifest = self.get_manifest(title, self.profiles)
                manifest_tracks = self.manifest_as_tracks(manifest, title, self.hydrate_track)
                mark_repack(manifest_tracks)
                tracks.add(manifest_tracks)
            except Exception as e:
                self.log.error(e)

    # --------------------------------------------------------
    # 🧩 HYBRID DV+HDR Injection (copied from 1st script)
    # --------------------------------------------------------
    video_ranges = [v.range for v in tracks.videos]
    has_dv = Video.Range.DV in video_ranges
    has_hdr10 = Video.Range.HDR10 in video_ranges
    has_hdr10p = Video.Range.HDR10P in video_ranges

    if self.range[0] == Video.Range.HYBRID and has_hdr10 and (has_dv or has_hdr10p):
        try:
            self.log.info("Performing HYBRID DV+HDR injection...")

            hdr_video = next((v for v in tracks.videos if v.range == Video.Range.HDR10), None)
            dv_video = next((v for v in tracks.videos if v.range in (Video.Range.DV, Video.Range.HDR10P)), None)

            if not hdr_video or not dv_video:
                raise Exception("Missing HDR10 or DV video track for hybrid merge")

            # Ensure both files exist before injection
            # NOTE(review): `os` and `config` are not in this module's
            # visible imports — these names would raise NameError if this
            # branch runs; confirm the imports exist elsewhere in the file.
            def ensure_local_file(video):
                if not getattr(video, "path", None) or not os.path.exists(video.path):
                    temp_path = config.directories.temp / f"{video.id}.hevc"
                    self.log.info(f"Downloading temporary stream for {video.range} → {temp_path.name}")
                    with self.session.get(video.url, stream=True) as r:
                        r.raise_for_status()
                        with open(temp_path, "wb") as f:
                            for chunk in r.iter_content(chunk_size=1024 * 1024):
                                f.write(chunk)
                    video.path = temp_path
                return video.path

            ensure_local_file(hdr_video)
            ensure_local_file(dv_video)

            # Perform hybrid merge
            Hybrid([hdr_video, dv_video], self.__class__.__name__.lower())

            injected_path = config.directories.temp / "HDR10-DV.hevc"
            self.log.info(f"Hybrid file created → {injected_path}")

            # Replace HDR10 with merged track
            hdr_video.range = Video.Range.DV
            hdr_video.path = injected_path

        except Exception as e:
            self.log.warning(f"Hybrid injection failed: {e}")

    # --------------------------------------------------------
    # Disable proxy for all tracks
    # --------------------------------------------------------
    for track in tracks:
        track.needs_proxy = False

    # --------------------------------------------------------
    # Add Attachments + Save poster
    # --------------------------------------------------------
    try:
        if isinstance(title, Movie):
            poster_url = title.data["boxart"][0]["url"]
        else:
            poster_url = title.data["stills"][0]["url"]

        # Temp directory
        temp_dir = Path(self.config.get("directories", {}).get("Downloads", "./Downloads"))
        temp_dir.mkdir(parents=True, exist_ok=True)

        poster_path = temp_dir / "poster.jpg"

        # Save poster locally
        try:
            resp = requests.get(poster_url, timeout=15)
            if resp.status_code == 200:
                with open(poster_path, "wb") as f:
                    f.write(resp.content)
        except Exception as e:
            self.log.error(f"Failed to save poster.jpg: {e}")

        # Create attachment
        attachment = Attachment.from_url(url=poster_url)
        attachment.filename = str(poster_path)
        tracks.add(attachment)

    except Exception as e:
        self.log.error(f"Failed to add attachments: {e}")

    return tracks

    # NOTE(review): unreachable — duplicate of the return above; safe to delete.
    return tracks
|
||||
|
||||
def split_profiles(self, profiles: List[str]) -> List[List[str]]:
    """
    Split profiles into request groups: DV/HDR10/HDR10+ tag groups first when
    running in HYBRID mode, then codec-level groups, then any leftovers.

    NOTE(review): as in the original, the HYBRID tag groups do not exclude
    already-grouped profiles, and "HDR10" also substring-matches "HDR10P"
    names — so a profile can appear in more than one tag group.
    """
    level_patterns = (
        ["l30", "l31", "l40"]
        if self.vcodec == Video.Codec.AVC
        else ["L30", "L31", "L40", "L41", "L50", "L51"]
    )

    groups: List[List[str]] = []
    grouped = set()

    # HYBRID mode — DV / HDR10 / HDR10+ tag groups first.
    if self.range[0] == Video.Range.HYBRID:
        for tags in (["DV", "dv"], ["HDR10", "hdr10"], ["HDR10P", "hdr10p"]):
            matched = [p for p in profiles if any(tag in p for tag in tags)]
            if matched:
                groups.append(matched)
                grouped.update(matched)

    # Normal HEVC/H264 level splitting over what remains.
    for pattern in level_patterns:
        level_group = [p for p in profiles if pattern in p and p not in grouped]
        if level_group:
            groups.append(level_group)
            grouped.update(level_group)

    # Any remaining profiles go into one final group.
    remaining = [p for p in profiles if p not in grouped]
    if remaining:
        groups.append(remaining)

    return groups
|
||||
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
    """Build chapter markers from the title's skip markers and credits offset."""
    chapters = Chapters()

    credit_markers = title.data["skipMarkers"]["credit"]
    if credit_markers["start"] > 0 and credit_markers["end"] > 0:
        # Skip-marker values are in milliseconds.
        chapters.add(Chapter(timestamp=credit_markers["start"], name="Intro"))
        chapters.add(Chapter(timestamp=credit_markers["end"], name="Part 01"))

    # creditsOffset is in seconds and must be passed as a float.
    chapters.add(Chapter(timestamp=float(title.data["creditsOffset"]), name="Outro"))

    return chapters
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Movie | Episode | Song, track: AnyTrack) -> bytes | str | None:
    """
    Request a Widevine license for *challenge* over the MSL channel.

    :param challenge: raw Widevine CDM license challenge
    :param title: title being licensed
    :param track: track the challenge belongs to (provides the license URL)
    :returns: base64-encoded license response string
    """
    if not self.msl:
        # fixed typo ("intialized") and dropped the placeholder-free f-string
        self.log.error("MSL Client is not initialized!")
        sys.exit(1)
    application_data = {
        "version": 2,
        "url": track.data["license_url"],
        "id": int(time.time() * 10000),
        "esn": self.esn.data,
        "languages": ["en-US"],
        # "uiVersion": "shakti-v9dddfde5",
        "clientVersion": "6.0026.291.011",
        "params": [{
            "sessionId": base64.b64encode(get_random_bytes(16)).decode("utf-8"),
            "clientTime": int(time.time()),
            "challengeBase64": base64.b64encode(challenge).decode("utf-8"),
            "xid": str(int((int(time.time()) + 0.1612) * 1000)),
        }],
        "echo": "sessionId"
    }
    header, payload_data = self.msl.send_message(
        endpoint=self.config["endpoints"]["license"],
        params={
            "reqAttempt": 1,
            "reqName": "license",
        },
        application_data=application_data,
        userauthdata=self.userauthdata
    )
    if not payload_data:
        self.log.error(f" - Failed to get license: {header['message']} [{header['code']}]")
        sys.exit(1)
    if "error" in payload_data[0]:
        error = payload_data[0]["error"]
        error_display = error.get("display")
        # strip Netflix internal error codes like " (E3-xxx)" from the detail
        error_detail = re.sub(r" \(E3-[^)]+\)", "", error.get("detail", ""))

        if error_display:
            self.log.critical(f" - {error_display}")
        if error_detail:
            self.log.critical(f" - {error_detail}")
        if not (error_display or error_detail):
            self.log.critical(f" - {error}")

        sys.exit(1)
    return payload_data[0]["licenseResponseBase64"]
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Movie | Episode | Song, track: AnyTrack) -> bytes | str | None:
    """PlayReady licensing is not implemented for this service; always yields None."""
    # Intentionally a no-op: Widevine is the only supported DRM path here.
    # return super().get_widevine_license(challenge=challenge, title=title, track=track)
    return None
|
||||
|
||||
def configure(self):
    """
    Prepare the service for downloading: resolve the requested video profiles,
    validate codec/range combinations, perform the MSL handshake, and build
    the user authentication data from the Netflix cookies.
    """
    # -----------------------------
    # Profiles selection
    # -----------------------------
    # NOTE(review): this first assignment appears to be dead — self.profiles is
    # unconditionally overwritten by self.get_profiles() at the end of this
    # method. Confirm before removing.
    if self.profile is None:
        self.profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()]

    if self.profile is not None:
        # A user-supplied profile string like "A+B" selects multiple profile groups.
        self.requested_profiles = self.profile.split('+')
        self.log.info(f"Requested profile: {self.requested_profiles}")
    else:
        # Default: every configured profile group for the chosen codec.
        self.requested_profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()]

    # -----------------------------
    # Validate codec support
    # -----------------------------
    if self.vcodec.extension.upper() not in self.config["profiles"]["video"]:
        raise ValueError(f"Video Codec {self.vcodec} is not supported by Netflix")

    # -----------------------------
    # HYBRID MODE FIX
    # -----------------------------
    if self.range[0] == Video.Range.HYBRID:
        # Only allowed for HEVC
        if self.vcodec != Video.Codec.HEVC:
            self.log.error("HYBRID mode is only supported for HEVC codec.")
            sys.exit(1)

        self.log.info("HYBRID mode detected → Skipping standard range validation")
        # Skip all range validation completely
    else:
        # Normal validation path (non-HYBRID)
        # Reject ranges with no matching profile group, except for AVC/VP9
        # which are handled by their own branches in get_profiles().
        if self.range[0].name not in list(self.config["profiles"]["video"][self.vcodec.extension.upper()].keys()) \
                and self.vcodec not in (Video.Codec.AVC, Video.Codec.VP9):

            self.log.error(f"Video range {self.range[0].name} is not supported by Video Codec: {self.vcodec}")
            sys.exit(1)

        if len(self.range) > 1:
            self.log.error("Multiple video range is not supported right now.")
            sys.exit(1)

        if self.vcodec == Video.Codec.AVC and self.range[0] != Video.Range.SDR:
            self.log.error("H.264 Video Codec only supports SDR")
            sys.exit(1)

    # -----------------------------
    # Final profile resolution
    # -----------------------------
    self.profiles = self.get_profiles()

    self.log.info("Initializing a MSL client")
    self.get_esn()
    scheme = KeyExchangeSchemes.AsymmetricWrapped
    self.log.info(f"Scheme: {scheme}")

    # MSL handshake: negotiates session keys with the manifest endpoint,
    # re-using a cached session when available.
    self.msl = MSL.handshake(
        scheme=scheme,
        session=self.session,
        endpoint=self.config["endpoints"]["manifest"],
        sender=self.esn.data,
        cache=self.cache.get("MSL")
    )

    # Netflix cookie-based user authentication for subsequent MSL calls.
    # Raises KeyError if the NetflixId/SecureNetflixId cookies are missing.
    cookie = self.session.cookies.get_dict()
    self.userauthdata = UserAuthentication.NetflixIDCookies(
        netflixid=cookie["NetflixId"],
        securenetflixid=cookie["SecureNetflixId"]
    )
|
||||
|
||||
|
||||
def get_profiles(self):
    """
    Resolve the final list of Netflix content-profile strings for the
    configured codec and range, from the service config's
    profiles.video[CODEC][RANGE] mapping.

    :returns: Flat list of profile name strings.
    """
    result_profiles = []

    # -------------------------------
    # AVC logic (unchanged)
    # -------------------------------
    if self.vcodec == Video.Codec.AVC:
        if self.requested_profiles is not None:
            # Only the explicitly requested profile groups.
            for req in self.requested_profiles:
                result_profiles.extend(
                    flatten(list(self.config["profiles"]["video"][self.vcodec.extension.upper()][req]))
                )
            return result_profiles

        # No explicit request: every configured AVC profile group.
        result_profiles.extend(
            flatten(list(self.config["profiles"]["video"][self.vcodec.extension.upper()].values()))
        )
        return result_profiles

    # -------------------------------
    # VP9 logic (unchanged)
    # -------------------------------
    if self.vcodec == Video.Codec.VP9 and self.range[0] != Video.Range.HDR10:
        result_profiles.extend(
            self.config["profiles"]["video"][self.vcodec.extension.upper()].values()
        )
        return result_profiles

    # -------------------------------
    # HEVC Hybrid mode (FIXED)
    # -------------------------------
    if self.vcodec == Video.Codec.HEVC and self.range[0] == Video.Range.HYBRID:
        self.log.info("HYBRID mode detected → Using HDR10 + DV profiles")

        hevc_profiles = self.config["profiles"]["video"][self.vcodec.extension.upper()]

        result_profiles = []

        # 1. HDR10 FIRST
        if "HDR10" in hevc_profiles:
            result_profiles += hevc_profiles["HDR10"]

        # 2. HDR10P (some titles use this instead of HDR10)
        if "HDR10P" in hevc_profiles:
            result_profiles += hevc_profiles["HDR10P"]

        # 3. DV LAST (IMPORTANT!)
        if "DV" in hevc_profiles:
            result_profiles += hevc_profiles["DV"]

        return result_profiles

    # -------------------------------
    # Normal HEVC (non HYBRID)
    # -------------------------------
    # NOTE(review): iterating the mapping yields range-name KEYS (strings),
    # so `r in profiles` tests an enum member against a string key — this
    # presumably relies on Video.Range being a str-based enum; confirm, as
    # `r.name in profiles` may be what was intended.
    for profiles in self.config["profiles"]["video"][self.vcodec.extension.upper()]:
        for r in self.range:
            if r in profiles:
                result_profiles.extend(
                    self.config["profiles"]["video"][self.vcodec.extension.upper()][r.name]
                )

    self.log.debug(f"Result_profiles: {result_profiles}")
    return result_profiles
|
||||
|
||||
def get_esn(self):
    """
    Ensure a valid ESN (device serial) is cached, generating a fresh random
    one with a 1-hour expiration when the cache is empty or expired.
    """
    # Random 30-hex-digit suffix for the CDM/browser ESN prefix.
    ESN_GEN = "".join(random.choice("0123456789ABCDEF") for _ in range(30))
    esn_value = f"NFCDIE-03-{ESN_GEN}"
    # Check if ESN is expired or doesn't exist
    if self.esn.data is None or self.esn.data == {} or (hasattr(self.esn, 'expired') and self.esn.expired):
        # Set new ESN with 1-hour expiration
        # (comment previously said "6 hours", contradicting the code and the log line below)
        self.esn.set(esn_value, 1 * 60 * 60) # 1 hour in seconds
        self.log.info(f"Generated new ESN with 1-hour expiration")
    else:
        self.log.info(f"Using cached ESN.")
    self.log.info(f"ESN: {self.esn.data}")
|
||||
|
||||
|
||||
def get_metadata(self, title_id: str):
    """
    Obtain Metadata information about a title by its ID.
    :param title_id: Title's ID.
    :returns: Title Metadata dict from the metadata endpoint.

    Exits the process when the response is not JSON (likely geo-unavailable)
    or reports an error status (likely expired cookies).
    """

    try:
        metadata = self.session.get(
            self.config["endpoints"]["metadata"].format(build_id="release"),
            params={
                "movieid": title_id,
                "drmSystem": self.config["configuration"]["drm_system"],
                "isWatchlistEnabled": False,
                "isShortformEnabled": False,
                "languages": self.meta_lang
            }
        ).json()
    # NOTE(review): requests only raises HTTPError via raise_for_status();
    # this handler presumably relies on a session response hook doing that —
    # confirm, otherwise this branch is unreachable.
    except requests.HTTPError as e:
        if e.response.status_code == 500:
            self.log.warning(
                " - Recieved a HTTP 500 error while getting metadata, deleting cached reactContext data"
            )
            # self.cache.
            # os.unlink(self.get_cache("web_data.json"))
            # return self.get_metadata(self, title_id)
        raise Exception(f"Error getting metadata: {e}")
    except json.JSONDecodeError:
        # Non-JSON body usually means the title is not served in this region.
        self.log.error(" - Failed to get metadata, title might not be available in your region.")
        sys.exit(1)
    else:
        if "status" in metadata and metadata["status"] == "error":
            self.log.error(
                f" - Failed to get metadata, cookies might be expired. ({metadata['message']})"
            )
            sys.exit(1)
        return metadata
|
||||
|
||||
def get_manifest(self, title: Title_T, video_profiles: List[str], required_text_track_id: Optional[str] = None, required_audio_track_id: Optional[str] = None):
    """
    Request a playback manifest for a title over MSL.

    :param title: Title whose manifest to fetch; uses title.data episodeId/id.
    :param video_profiles: Video profile strings to request; audio, baseline
        H264 (for AVC) and subtitle profiles from config are merged in.
    :param required_text_track_id: When set, asks the server to hydrate this
        specific (previously unhydrated) subtitle track.
    :param required_audio_track_id: Same, for a specific audio track.
    :returns: Decoded MSL payload chunks (the manifest).
    :raises Exception: if MSL is not initialized or the server returns errorDetails.
    """
    audio_profiles = self.config["profiles"]["audio"].values()
    # Merge and de-duplicate every profile family into one sorted request list.
    video_profiles = sorted(set(flatten(as_list(
        video_profiles,
        audio_profiles,
        self.config["profiles"]["video"]["H264"]["BPL"] if self.vcodec == Video.Codec.AVC else [],
        self.config["profiles"]["subtitles"],
    ))))

    self.log.debug("Profiles:\n\t" + "\n\t".join(video_profiles))

    if not self.msl:
        raise Exception("MSL Client is not intialized.")

    params = {
        "reqAttempt": 1,
        "reqPriority": 10,
        "reqName": "manifest",
    }
    # The application_data below mirrors the web client's manifest request.
    # Field names (including the misspelled "challanges") are part of the
    # wire protocol and must not be changed.
    _, payload_chunks = self.msl.send_message(
        endpoint=self.config["endpoints"]["manifest"],
        params=params,
        application_data={
            "version": 2,
            "url": "manifest",
            "id": int(time.time()),
            "esn": self.esn.data,
            "languages": ["en-US"],
            "clientVersion": "6.0026.291.011",
            "params": {
                "clientVersion": "6.0051.090.911",
                # DRM challenge blob depends on the configured DRM system.
                "challenge": self.config["payload_challenge_pr"] if self.drm_system == 'playready' else self.config["payload_challenge"],
                "challanges": {
                    "default": self.config["payload_challenge_pr"] if self.drm_system == 'playready' else self.config["payload_challenge"]
                },
                "contentPlaygraph": ["v2"],
                "deviceSecurityLevel": "3000",
                "drmVersion": 25,
                "desiredVmaf": "plus_lts",
                "desiredSegmentVmaf": "plus_lts",
                "flavor": "STANDARD", # ? PRE_FETCH, SUPPLEMENTAL
                "drmType": self.drm_system,
                "imageSubtitleHeight": 1080,
                "isBranching": False,
                "isNonMember": False,
                "isUIAutoPlay": False,
                "licenseType": "standard",
                "liveAdsCapability": "replace",
                "liveMetadataFormat": "INDEXED_SEGMENT_TEMPLATE",
                "manifestVersion": "v2",
                "osName": "windows",
                "osVersion": "10.0",
                "platform": "138.0.0.0",
                "profilesGroups": [{
                    "name": "default",
                    "profiles": video_profiles
                }],
                "profiles": video_profiles,
                "preferAssistiveAudio": False,
                "requestSegmentVmaf": False,
                "requiredAudioTrackId": required_audio_track_id, # This is for getting missing audio tracks (value get from `new_track_id``)
                "requiredTextTrackId": required_text_track_id, # This is for getting missing subtitle. (value get from `new_track_id``)
                "supportsAdBreakHydration": False,
                "supportsNetflixMediaEvents": True,
                "supportsPartialHydration": True, # This is important if you want get available all tracks. but you must fetch each missing url tracks with "requiredAudioTracksId" or "requiredTextTrackId"
                "supportsPreReleasePin": True,
                "supportsUnequalizedDownloadables": True,
                "supportsWatermark": True,
                "titleSpecificData": {
                    title.data.get("episodeId", title.data["id"]): {"unletterboxed": False}
                },
                "type": "standard", # ? PREPARE
                "uiPlatform": "SHAKTI",
                "uiVersion": "shakti-v49577320",
                "useBetterTextUrls": True,
                "useHttpsStreams": True,
                "usePsshBox": True,
                "videoOutputInfo": [{
                    # todo ; make this return valid, but "secure" values, maybe it helps
                    "type": "DigitalVideoOutputDescriptor",
                    "outputType": "unknown",
                    "supportedHdcpVersions": self.config["configuration"]["supported_hdcp_versions"],
                    "isHdcpEngaged": self.config["configuration"]["is_hdcp_engaged"]
                }],
                "viewableId": title.data.get("episodeId", title.data["id"]),
                "xid": str(int((int(time.time()) + 0.1612) * 1000)),
                "showAllSubDubTracks": True,
            }
        },
        userauthdata=self.userauthdata
    )
    if "errorDetails" in payload_chunks:
        raise Exception(f"Manifest call failed: {payload_chunks['errorDetails']}")
    # with open(f"./manifest_{"+".join(video_profiles)}.json", mode='w') as r:
    #     r.write(jsonpickle.encode(payload_chunks, indent=4))
    return payload_chunks
|
||||
|
||||
@staticmethod
def get_original_language(manifest) -> Language:
    """
    Determine the title's original audio language from a manifest.

    Prefers the audio track whose description ends with " [Original]";
    otherwise falls back to parsing the default track-order mediaId.
    """
    flagged = next(
        (track for track in manifest["audio_tracks"]
         if track["languageDescription"].endswith(" [Original]")),
        None,
    )
    if flagged is not None:
        return Language.get(flagged["language"])
    # e.g. get `en` from "A:1:1;2;en;0;|V:2:1;[...]"
    media_id = manifest["defaultTrackOrderList"][0]["mediaId"]
    return Language.get(media_id.split(";")[2])
|
||||
|
||||
def get_widevine_service_certificate(self, *, challenge: bytes, title: Movie | Episode | Song, track: AnyTrack) -> bytes | str:
    """Return the service's Widevine certificate straight from the service config."""
    certificate = self.config["certificate"]
    return certificate
|
||||
|
||||
def manifest_as_tracks(self, manifest, title: Title_T, hydrate_tracks = False) -> Tracks:
    """
    Convert a Netflix manifest payload into a Tracks collection of
    Video/Audio/Subtitle tracks.

    :param manifest: Decoded manifest payload (from get_manifest).
    :param title: Title the manifest belongs to (used for hydration re-requests).
    :param hydrate_tracks: When True, re-request the manifest once per
        unhydrated audio/subtitle track to fetch their stream URLs too.
    :returns: Populated Tracks object.
    """

    tracks = Tracks()
    original_language = self.get_original_language(manifest)
    self.log.debug(f"Original language: {original_language}")
    # Single license URL shared by every video track in this manifest.
    license_url = manifest["links"]["license"]["href"]
    # self.log.info(f"Video: {jsonpickle.encode(manifest["video_tracks"], indent=2)}")
    # self.log.info()
    # Video: iterate streams in reverse (highest quality first in the output).
    for video in reversed(manifest["video_tracks"][0]["streams"]):
        # self.log.info(video)
        id = video["downloadable_id"]
        # self.log.info(f"Adding video {video["res_w"]}x{video["res_h"]}, bitrate: {(float(video["framerate_value"]) / video["framerate_scale"]) if "framerate_value" in video else None} with profile {video["content_profile"]}. kid: {video["drmHeaderId"]}")
        tracks.add(
            Video(
                id_=video["downloadable_id"],
                url=video["urls"][0]["url"],
                codec=Video.Codec.from_netflix_profile(video["content_profile"]),
                bitrate=video["bitrate"] * 1000,
                width=video["res_w"],
                height=video["res_h"],
                # framerate_value/scale are optional in the manifest.
                fps=(float(video["framerate_value"]) / video["framerate_scale"]) if "framerate_value" in video else None,
                language=Language.get(original_language),
                edition=video["content_profile"],
                range_=self.parse_video_range_from_profile(video["content_profile"]),
                drm=[Widevine(
                    pssh=PSSH(
                        # Box.parse(
                        #     Box.build(
                        #         dict(
                        #             type=b"pssh",
                        #             version=0,
                        #             flags=0,
                        #             system_ID=Cdm.uuid,
                        #             init_data=b"\x12\x10" + UUID(hex=video["drmHeaderId"]).bytes
                        #         )
                        #     )
                        # )
                        manifest["video_tracks"][0]["drmHeader"]["bytes"]
                    ),
                    kid=video["drmHeaderId"]
                )],
                data={
                    'license_url': license_url
                }
            )
        )
    # Audio

    # store unavailable tracks for hydrating later
    unavailable_audio_tracks: List[Tuple[str, str]] = []
    for index, audio in enumerate(manifest["audio_tracks"]):
        if len(audio["streams"]) < 1:
            # This
            # self.log.debug(f"Audio lang {audio["languageDescription"]} is available but no stream available.")
            unavailable_audio_tracks.append((audio["new_track_id"], audio["id"])) # Assign to `unavailable_subtitle` for request missing audio tracks later
            continue
        # self.log.debug(f"Adding audio lang: {audio["language"]} with profile: {audio["content_profile"]}")
        is_original_lang = audio["language"] == original_language.language
        # self.log.info(f"is audio {audio["languageDescription"]} original language: {is_original_lang}")
        for stream in audio["streams"]:
            tracks.add(
                Audio(
                    id_=stream["downloadable_id"],
                    url=stream["urls"][0]["url"],
                    codec=Audio.Codec.from_netflix_profile(stream["content_profile"]),
                    # NF_LANG_MAP presumably normalizes Netflix-specific language codes — confirm.
                    language=Language.get(self.NF_LANG_MAP.get(audio["language"]) or audio["language"]),
                    is_original_lang=is_original_lang,
                    bitrate=stream["bitrate"] * 1000,
                    channels=stream["channels"],
                    descriptive=audio.get("rawTrackType", "").lower() == "assistive",
                    name="[Original]" if Language.get(audio["language"]).language == original_language.language else None,
                    # joc=6 marks Atmos (Joint Object Coding) streams.
                    joc=6 if "atmos" in stream["content_profile"] else None
                )
            )

    # Subtitle
    unavailable_subtitle: List[Tuple[str, str]] = []
    for index, subtitle in enumerate(manifest["timedtexttracks"]):
        if "isNoneTrack" in subtitle and subtitle["isNoneTrack"] == True:
            continue
        if subtitle["hydrated"] == False:
            # This subtitles is there but has to request stream first
            unavailable_subtitle.append((subtitle["new_track_id"], subtitle["id"])) # Assign to `unavailable_subtitle` for request missing subtitles later
            # self.log.debug(f"Audio language: {subtitle["languageDescription"]} id: {subtitle["new_track_id"]} is not hydrated.")

            continue

        if subtitle["languageDescription"] == 'Off':
            # I don't why this subtitles is requested, i consider for skip these subtitles for now
            continue
        # pass

        id = list(subtitle["downloadableIds"].values())
        language = Language.get(subtitle["language"])
        # Take the first (and presumably only) downloadable profile entry.
        profile = next(iter(subtitle["ttDownloadables"].keys()))
        tt_downloadables = next(iter(subtitle["ttDownloadables"].values()))
        is_original_lang = subtitle["language"] == original_language.language
        # self.log.info(f"is subtitle {subtitle["languageDescription"]} original language {is_original_lang}")
        # self.log.info(f"ddd")
        tracks.add(
            Subtitle(
                id_=id[0],
                url=tt_downloadables["urls"][0]["url"],
                codec=Subtitle.Codec.from_netflix_profile(profile),
                language=language,
                forced=subtitle["isForcedNarrative"],
                cc=subtitle["rawTrackType"] == "closedcaptions",
                sdh=subtitle["trackVariant"] == 'STRIPPED_SDH' if "trackVariant" in subtitle else False,
                is_original_lang=is_original_lang,
                name=("[Original]" if language.language == original_language.language else None or "[Dubbing]" if "trackVariant" in subtitle and subtitle["trackVariant"] == "DUBTITLE" else None),
            )
        )
    if hydrate_tracks == False:
        return tracks
    # Hydrate missing tracks
    # One extra manifest request per pair of (audio, subtitle) ids; "N/A"
    # fill values mark exhausted sides of the zip.
    self.log.info(f"Getting all missing audio and subtitle tracks")
    for audio_hydration, subtitle_hydration in zip_longest(unavailable_audio_tracks, unavailable_subtitle, fillvalue=("N/A", "N/A")):
        # self.log.info(f"Audio hydration: {audio_hydration}")
        manifest = self.get_manifest(title, self.profiles, subtitle_hydration[0], audio_hydration[0])

        # NOTE(review): next() raises StopIteration if the requested id is not
        # present in the hydrated manifest — confirm the server always echoes it.
        audios = next(item for item in manifest["audio_tracks"] if 'id' in item and item["id"] == audio_hydration[1])
        subtitles = next(item for item in manifest["timedtexttracks"] if 'id' in item and item["id"] == subtitle_hydration[1])
        for stream in audios["streams"]:
            if audio_hydration[0] == 'N/A' and audio_hydration[1] == 'N/A':
                # self.log.info(f"Skipping not available hydrated audio tracks")
                continue
            tracks.add(
                Audio(
                    id_=stream["downloadable_id"],
                    url=stream["urls"][0]["url"],
                    codec=Audio.Codec.from_netflix_profile(stream["content_profile"]),
                    language=Language.get(self.NF_LANG_MAP.get(audios["language"]) or audios["language"]),
                    is_original_lang=stream["language"] == original_language.language,
                    bitrate=stream["bitrate"] * 1000,
                    channels=stream["channels"],
                    descriptive=audios.get("rawTrackType", "").lower() == "assistive",
                    name="[Original]" if Language.get(audios["language"]).language == original_language.language else None,
                    joc=6 if "atmos" in stream["content_profile"] else None
                )
            )

        # self.log.info(jsonpickle.encode(subtitles, indent=2))
        # sel

        if subtitle_hydration[0] == 'N/A':
            # self.log.info(f"Skipping not available hydrated subtitle tracks")
            continue
        id = list(subtitles["downloadableIds"].values())
        language = Language.get(subtitles["language"])
        profile = next(iter(subtitles["ttDownloadables"].keys()))
        tt_downloadables = next(iter(subtitles["ttDownloadables"].values()))
        tracks.add(
            Subtitle(
                id_=id[0],
                url=tt_downloadables["urls"][0]["url"],
                codec=Subtitle.Codec.from_netflix_profile(profile),
                language=language,
                forced=subtitles["isForcedNarrative"],
                cc=subtitles["rawTrackType"] == "closedcaptions",
                sdh=subtitles["trackVariant"] == 'STRIPPED_SDH' if "trackVariant" in subtitles else False,
                is_original_lang=subtitles["language"] == original_language.language,
                name=("[Original]" if language.language == original_language.language else None or "[Dubbing]" if "trackVariant" in subtitle and subtitle["trackVariant"] == "DUBTITLE" else None),
            )
        )

    return tracks
|
||||
|
||||
|
||||
def parse_video_range_from_profile(self, profile: str) -> Video.Range:
    """
    Parse the video range from a Netflix profile string.

    Looks the profile up in the service config's profiles.video mapping
    (codec -> range name -> profile list) and maps the matching range name
    onto the Video.Range enum, defaulting to SDR when the profile is
    unknown or the range name is not a valid enum value.

    Args:
        profile (str): The Netflix profile string (e.g., "hevc-main10-L30-dash-cenc")

    Returns:
        Video.Range: The corresponding Video.Range enum value

    Examples:
        >>> parse_video_range_from_profile("hevc-main10-L30-dash-cenc")
        <Video.Range.SDR: 'SDR'>
        >>> parse_video_range_from_profile("hevc-dv5-main10-L30-dash-cenc")
        <Video.Range.DV: 'DV'>
    """
    catalogue = self.config.get("profiles", {}).get("video", {})

    # Walk every codec's {range name -> profile list} mapping until the
    # profile is found; the first hit decides the result.
    for range_lists in catalogue.values():
        for range_name, profile_list in range_lists.items():
            if profile not in profile_list:
                continue
            try:
                return Video.Range(range_name)
            except ValueError:
                # Unknown range label in config: fall back to SDR.
                self.log.debug(f"Video range is not valid {range_name}")
                return Video.Range.SDR

    # Profile not present in any codec's lists: default to SDR.
    return Video.Range.SDR
|
||||
BIN
NF/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
NF/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
216
NF/config.yaml
Normal file
216
NF/config.yaml
Normal file
File diff suppressed because one or more lines are too long
311
NPO/__init__.py
Normal file
311
NPO/__init__.py
Normal file
@ -0,0 +1,311 @@
|
||||
import json
|
||||
import re
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional
|
||||
from langcodes import Language
|
||||
|
||||
import click
|
||||
from collections.abc import Generator
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle
|
||||
|
||||
|
||||
class NPO(Service):
|
||||
"""
|
||||
Service code for NPO Start (npo.nl)
|
||||
Version: 1.1.0
|
||||
|
||||
Authorization: optional cookies (free/paid content supported)
|
||||
Security: FHD @ L3
|
||||
FHD @ SL3000
|
||||
(Widevine and PlayReady support)
|
||||
|
||||
Supports:
|
||||
• Series ↦ https://npo.nl/start/serie/{slug}
|
||||
• Movies ↦ https://npo.nl/start/video/{slug}
|
||||
|
||||
Note: Movie inside a series can be downloaded as movie by converting URL to:
|
||||
https://npo.nl/start/video/slug
|
||||
|
||||
To change between Widevine and Playready, you need to change the DrmType in config.yaml to either widevine or playready
|
||||
"""
|
||||
|
||||
TITLE_RE = (
|
||||
r"^(?:https?://(?:www\.)?npo\.nl/start/)?"
|
||||
r"(?:(?P<type>video|serie)/(?P<slug>[^/]+)"
|
||||
r"(?:/afleveringen)?"
|
||||
r"(?:/seizoen-(?P<season>[^/]+)/(?P<episode>[^/]+)/afspelen)?)?$"
|
||||
)
|
||||
GEOFENCE = ("NL",)
|
||||
NO_SUBTITLES = False
|
||||
|
||||
@staticmethod
|
||||
@click.command(name="NPO", short_help="https://npo.nl")
|
||||
@click.argument("title", type=str)
|
||||
@click.pass_context
|
||||
def cli(ctx, **kwargs):
|
||||
return NPO(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
|
||||
super().__init__(ctx)
|
||||
|
||||
m = re.match(self.TITLE_RE, title)
|
||||
if not m:
|
||||
self.search_term = title
|
||||
return
|
||||
|
||||
self.slug = m.group("slug")
|
||||
self.kind = m.group("type") or "video"
|
||||
self.season_slug = m.group("season")
|
||||
self.episode_slug = m.group("episode")
|
||||
|
||||
if self.config is None:
|
||||
raise EnvironmentError("Missing service config.")
|
||||
|
||||
# Store CDM reference
|
||||
self.cdm = ctx.obj.cdm
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
|
||||
super().authenticate(cookies, credential)
|
||||
if not cookies:
|
||||
self.log.info("No cookies, proceeding anonymously.")
|
||||
return
|
||||
|
||||
token = next((c.value for c in cookies if c.name == "__Secure-next-auth.session-token"), None)
|
||||
if not token:
|
||||
self.log.info("No session token, proceeding unauthenticated.")
|
||||
return
|
||||
|
||||
self.session.headers.update({
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) Firefox/143.0",
|
||||
"Origin": "https://npo.nl",
|
||||
"Referer": "https://npo.nl/",
|
||||
})
|
||||
|
||||
r = self.session.get("https://npo.nl/start/api/domain/user-profiles", cookies=cookies)
|
||||
if r.ok and isinstance(r.json(), list) and r.json():
|
||||
self.log.info(f"NPO login OK, profiles: {[p['name'] for p in r.json()]}")
|
||||
else:
|
||||
self.log.warning("NPO auth check failed.")
|
||||
|
||||
def _fetch_next_data(self, slug: str) -> dict:
|
||||
"""Fetch and parse __NEXT_DATA__ from video/series page."""
|
||||
url = f"https://npo.nl/start/{'video' if self.kind == 'video' else 'serie'}/{slug}"
|
||||
r = self.session.get(url)
|
||||
r.raise_for_status()
|
||||
match = re.search(r'<script id="__NEXT_DATA__" type="application/json">({.*?})</script>', r.text, re.DOTALL)
|
||||
if not match:
|
||||
raise RuntimeError("Failed to extract __NEXT_DATA__")
|
||||
return json.loads(match.group(1))
|
||||
|
||||
def get_titles(self) -> Titles_T:
|
||||
next_data = self._fetch_next_data(self.slug)
|
||||
build_id = next_data["buildId"] # keep if needed elsewhere
|
||||
|
||||
page_props = next_data["props"]["pageProps"]
|
||||
queries = page_props["dehydratedState"]["queries"]
|
||||
|
||||
def get_data(fragment: str):
|
||||
return next((q["state"]["data"] for q in queries if fragment in str(q.get("queryKey", ""))), None)
|
||||
|
||||
if self.kind == "serie":
|
||||
series_data = get_data("series:detail-")
|
||||
if not series_data:
|
||||
raise ValueError("Series metadata not found")
|
||||
|
||||
episodes = []
|
||||
seasons = get_data("series:seasons-") or []
|
||||
for season in seasons:
|
||||
eps = get_data(f"programs:season-{season['guid']}") or []
|
||||
for e in eps:
|
||||
episodes.append(
|
||||
Episode(
|
||||
id_=e["guid"],
|
||||
service=self.__class__,
|
||||
title=series_data["title"],
|
||||
season=int(season["seasonKey"]),
|
||||
number=int(e["programKey"]),
|
||||
name=e["title"],
|
||||
description=(e.get("synopsis", {}) or {}).get("long", ""),
|
||||
language=Language.get("nl"),
|
||||
data=e,
|
||||
)
|
||||
)
|
||||
return Series(episodes)
|
||||
|
||||
# Movie
|
||||
item = get_data("program:detail-") or queries[0]["state"]["data"]
|
||||
synopsis = item.get("synopsis", {})
|
||||
desc = synopsis.get("long") or synopsis.get("short", "") if isinstance(synopsis, dict) else str(synopsis)
|
||||
year = (int(item["firstBroadcastDate"]) // 31536000 + 1970) if item.get("firstBroadcastDate") else None
|
||||
|
||||
return Movies([
|
||||
Movie(
|
||||
id_=item["guid"],
|
||||
service=self.__class__,
|
||||
name=item["title"],
|
||||
description=desc,
|
||||
year=year,
|
||||
language=Language.get("nl"),
|
||||
data=item,
|
||||
)
|
||||
])
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
|
||||
product_id = title.data.get("productId")
|
||||
if not product_id:
|
||||
raise ValueError("no productId detected.")
|
||||
|
||||
token_url = self.config["endpoints"]["player_token"].format(product_id=product_id)
|
||||
r_tok = self.session.get(token_url, headers={"Referer": f"https://npo.nl/start/video/{self.slug}"})
|
||||
r_tok.raise_for_status()
|
||||
jwt = r_tok.json()["jwt"]
|
||||
|
||||
# Request stream
|
||||
r_stream = self.session.post(
|
||||
self.config["endpoints"]["streams"],
|
||||
json={
|
||||
"profileName": "dash",
|
||||
"drmType": self.config["DrmType"],
|
||||
"referrerUrl": f"https://npo.nl/start/video/{self.slug}",
|
||||
"ster": {"identifier": "npo-app-desktop", "deviceType": 4, "player": "web"},
|
||||
},
|
||||
headers={
|
||||
"Authorization": jwt,
|
||||
"Content-Type": "application/json",
|
||||
"Origin": "https://npo.nl",
|
||||
"Referer": f"https://npo.nl/start/video/{self.slug}",
|
||||
},
|
||||
)
|
||||
r_stream.raise_for_status()
|
||||
data = r_stream.json()
|
||||
|
||||
if "error" in data:
|
||||
raise PermissionError(f"Stream error: {data['error']}")
|
||||
|
||||
stream = data["stream"]
|
||||
manifest_url = stream.get("streamURL") or stream.get("url")
|
||||
if not manifest_url:
|
||||
raise ValueError("No stream URL in response")
|
||||
|
||||
is_unencrypted = "unencrypted" in manifest_url.lower() or not any(k in stream for k in ["drmToken", "token"])
|
||||
|
||||
# Parse DASH
|
||||
tracks = DASH.from_url(manifest_url, session=self.session).to_tracks(language=title.language)
|
||||
|
||||
# Subtitles
|
||||
subtitles = []
|
||||
for sub in (data.get("assets", {}) or {}).get("subtitles", []) or []:
|
||||
if not isinstance(sub, dict):
|
||||
continue
|
||||
lang = sub.get("iso", "und")
|
||||
location = sub.get("location")
|
||||
if not location:
|
||||
continue # skip if no URL provided
|
||||
subtitles.append(
|
||||
Subtitle(
|
||||
id_=sub.get("name", lang),
|
||||
url=location.strip(),
|
||||
language=Language.get(lang),
|
||||
is_original_lang=lang == "nl",
|
||||
codec=Subtitle.Codec.WebVTT,
|
||||
name=sub.get("name", "Unknown"),
|
||||
forced=False,
|
||||
sdh=False,
|
||||
)
|
||||
)
|
||||
tracks.subtitles = subtitles
|
||||
|
||||
# DRM
|
||||
if is_unencrypted:
|
||||
for tr in tracks.videos + tracks.audio:
|
||||
if hasattr(tr, "drm") and tr.drm:
|
||||
tr.drm.clear()
|
||||
else:
|
||||
self.drm_token = stream.get("drmToken") or stream.get("token") or stream.get("drm_token")
|
||||
if not self.drm_token:
|
||||
raise ValueError(f"No DRM token found. Available keys: {list(stream.keys())}")
|
||||
|
||||
for tr in tracks.videos + tracks.audio:
|
||||
if getattr(tr, "drm", None):
|
||||
if drm_type == "playready":
|
||||
tr.drm.license = lambda challenge, **kw: self.get_playready_license(
|
||||
challenge=challenge, title=title, track=tr
|
||||
)
|
||||
else:
|
||||
tr.drm.license = lambda challenge, **kw: self.get_widevine_license(
|
||||
challenge=challenge, title=title, track=tr
|
||||
)
|
||||
|
||||
return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
|
||||
return []
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
|
||||
if not self.drm_token:
|
||||
raise ValueError("DRM token not set, login or paid content may be required.")
|
||||
r = self.session.post(
|
||||
self.config["endpoints"]["license"],
|
||||
params={"custom_data": self.drm_token},
|
||||
data=challenge,
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.content
|
||||
|
||||
def get_playready_license(self, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
|
||||
if not self.drm_token:
|
||||
raise ValueError("DRM token not set, login or paid content may be required.")
|
||||
headers = {
|
||||
"Content-Type": "text/xml; charset=utf-8",
|
||||
"SOAPAction": "http://schemas.microsoft.com/DRM/2007/03/protocols/AcquireLicense",
|
||||
"Origin": "https://npo.nl",
|
||||
"Referer": "https://npo.nl/",
|
||||
"User-Agent": (
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
|
||||
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||
"Chrome/141.0.0.0 Safari/537.36 Edg/141.0.0.0"
|
||||
),
|
||||
}
|
||||
r = self.session.post(
|
||||
self.config["endpoints"]["license"],
|
||||
params={"custom_data": self.drm_token},
|
||||
data=challenge,
|
||||
headers=headers,
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.content
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the NPO search API and yield each hit as a SearchResult."""
    term = getattr(self, "search_term", None) or getattr(self, "title", None)
    request_params = {
        "searchQuery": term,
        "searchType": "series",
        "subscriptionType": "premium",
        "includePremiumContent": "true",
    }
    request_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
        "Accept": "application/json, text/plain, */*",
        "Origin": "https://npo.nl",
        "Referer": f"https://npo.nl/start/zoeken?zoekTerm={term}",
    }
    payload = self.session.get(
        url=self.config["endpoints"]["search"],
        params=request_params,
        headers=request_headers,
    ).json()

    for item in payload.get("items", []):
        kind = item.get("type")
        # Series and single videos live under different start-page paths.
        if kind == "timeless_series":
            page_url = f"https://npo.nl/start/serie/{item.get('slug')}"
        else:
            page_url = f"https://npo.nl/start/video/{item.get('slug')}"
        yield SearchResult(
            id_=item.get("guid"),
            title=item.get("title"),
            label=kind.upper() if kind else "SERIES",
            url=page_url,
        )
|
||||
|
||||
|
||||
|
||||
10
NPO/config.yaml
Normal file
10
NPO/config.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
endpoints:
|
||||
metadata: "https://npo.nl/start/_next/data/{build_id}/video/{slug}.json"
|
||||
metadata_series: "https://npo.nl/start/_next/data/{build_id}/serie/{slug}/afleveringen.json"
|
||||
metadata_episode: "https://npo.nl/start/_next/data/{build_id}/serie/{series_slug}/seizoen-{season_slug}/{episode_slug}.json"
|
||||
streams: "https://prod.npoplayer.nl/stream-link"
|
||||
player_token: "https://npo.nl/start/api/domain/player-token?productId={product_id}"
|
||||
license: "https://npo-drm-gateway.samgcloud.nepworldwide.nl/authentication"
|
||||
homepage: "https://npo.nl/start"
|
||||
search: "https://npo.nl/start/api/domain/search-collection-items"
|
||||
DrmType: "widevine"
|
||||
454
PCOK/__init__.py
Normal file
454
PCOK/__init__.py
Normal file
@ -0,0 +1,454 @@
|
||||
import base64
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional
|
||||
|
||||
import click
|
||||
from langcodes import Language
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapters, Tracks, Video
|
||||
|
||||
|
||||
class PCOK(Service):
    """
    Service code for NBC's Peacock streaming service (https://peacocktv.com).
    Version: 1.0.0

    Authorization: Cookies
    Security: UHD@-- FHD@SL*

    Tips: - The library of contents can be viewed without logging in at https://www.peacocktv.com/stream/tv
        See the footer for links to movies, news, etc. A US IP is required to view.
    """

    # Alternate service names accepted on the command line.
    ALIASES = ("PCOK", "peacock")
    # Peacock is US-only; requests must originate from a US IP.
    GEOFENCE = ("US",)
    # Accepted inputs: full watch/asset URLs for movies/tv/news assets,
    # stream-tv landing-page URLs, or a bare asset path/slug (last pattern).
    TITLE_RE = [
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>movies/[a-z0-9/./-]+/[a-f0-9-]+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>tv/[a-z0-9/./-]+/[a-f0-9-]+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>tv/[a-z0-9-/.]+/\d+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>news/[a-z0-9/./-]+/[a-f0-9-]+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>news/[a-z0-9-/.]+/\d+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/watch/asset/|/?)(?P<id>-/[a-z0-9-/.]+/\d+)",
        r"(?:https?://(?:www\.)?peacocktv\.com/stream-tv/)?(?P<id>[a-z0-9-/.]+)",
    ]
|
||||
|
||||
@staticmethod
@click.command(name="PCOK", short_help="https://peacocktv.com")
@click.argument("title", type=str)
@click.option("-m", "--movie", is_flag=True, default=False, help="Title is a movie.")
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: forwards the parsed CLI options into the service.
    return PCOK(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title, movie):
    """Initialise the service from the CLI context; requires a PlayReady CDM."""
    super().__init__(ctx)

    self.title = title
    self.movie = movie

    # Playback is requested with PLAYREADY protection, so a
    # PlayReady-capable CDM is mandatory.
    self.cdm = ctx.obj.cdm
    if not isinstance(self.cdm, PlayReadyCdm):
        self.log.warning("PlayReady CDM not provided, exiting")
        raise SystemExit(1)

    range_opt = ctx.parent.params.get("range_")
    self.range = range_opt[0].name if range_opt else "SDR"

    self.vcodec = ctx.parent.params.get("vcodec") or "H264"

    if self.config is None:
        raise Exception("Config is missing!")

    profile = ctx.parent.params.get("profile")
    self.profile = "default" if profile is None else profile

    # Populated later by authenticate() / get_tracks().
    self.hmac_key = None
    self.tokens = None
    self.license_api = None
    self.license_bt = None
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Authenticate with cookies, fetch client config, and obtain OTT tokens."""
    super().authenticate(cookies, credential)
    if not cookies:
        raise EnvironmentError("Service requires Cookies for Authentication.")

    self.session.headers.update({"Origin": "https://www.peacocktv.com"})
    self.log.info("Getting Peacock Client configuration")

    # Non-PC platforms fetch a remote client configuration.
    # NOTE(review): self.service_config is stored but not read in the code
    # visible here — confirm whether any caller depends on it.
    if self.config["client"]["platform"] != "PC":
        self.service_config = self.session.get(
            url=self.config["endpoints"]["config"].format(
                territory=self.config["client"]["territory"],
                provider=self.config["client"]["provider"],
                proposition=self.config["client"]["proposition"],
                device=self.config["client"]["platform"],
                version=self.config["client"]["config_version"],
            )
        ).json()

    # HMAC key used for X-Sky-Signature request signing.
    self.hmac_key = bytes(self.config["security"]["signature_hmac_key_v4"], "utf-8")
    self.log.info("Getting Authorization Tokens")
    self.tokens = self.get_tokens()
    self.log.info("Verifying Authorization Tokens")
    if not self.verify_tokens():
        raise EnvironmentError("Failed! Cookies might be outdated.")
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Resolve the input into a Movies or Series container via the node API."""
    # Parse title from various URL formats
    import re

    title_id = self.title
    for pattern in self.TITLE_RE:
        match = re.search(pattern, self.title)
        if match:
            title_id = match.group("id")
            break

    # Handle stream-tv redirects: a bare slug (no slash) is a stream-tv
    # landing page whose HTML contains the real /watch/asset path.
    if "/" not in title_id:
        r = self.session.get(self.config["endpoints"]["stream_tv"].format(title_id=title_id))
        match = re.search(r"/watch/asset(/[^']+)", r.text)
        if match:
            title_id = match.group(1)
        else:
            raise ValueError("Title ID not found or invalid")

    if not title_id.startswith("/"):
        title_id = f"/{title_id}"

    # Movie paths imply movie mode even without the --movie flag.
    if title_id.startswith("/movies/"):
        self.movie = True

    # Fetch the node metadata; "represent" expands seasons and episodes.
    res = self.session.get(
        url=self.config["endpoints"]["node"],
        params={
            "slug": title_id,
            "represent": "(items(items))"
        },
        headers={
            "Accept": "*",
            "Referer": f"https://www.peacocktv.com/watch/asset{title_id}",
            "X-SkyOTT-Device": self.config["client"]["device"],
            "X-SkyOTT-Platform": self.config["client"]["platform"],
            "X-SkyOTT-Proposition": self.config["client"]["proposition"],
            "X-SkyOTT-Provider": self.config["client"]["provider"],
            "X-SkyOTT-Territory": self.config["client"]["territory"],
            "X-SkyOTT-Language": "en"
        }
    ).json()

    if self.movie:
        return Movies([
            Movie(
                id_=title_id,
                service=self.__class__,
                name=res["attributes"]["title"],
                year=res["attributes"]["year"],
                data=res,
            )
        ])
    else:
        # Flatten the season -> episode nesting into one episode list.
        episodes = []
        for season in res["relationships"]["items"]["data"]:
            for episode in season["relationships"]["items"]["data"]:
                episodes.append(episode)

        episode_titles = []
        for x in episodes:
            episode_titles.append(
                Episode(
                    id_=title_id,
                    service=self.__class__,
                    title=res["attributes"]["title"],
                    season=x["attributes"].get("seasonNumber"),
                    number=x["attributes"].get("episodeNumber"),
                    name=x["attributes"].get("title"),
                    year=x["attributes"].get("year"),
                    data=x
                )
            )
        return Series(episode_titles)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Request a VOD playout for the title and parse its DASH manifest."""
    supported_colour_spaces = ["SDR"]

    if self.range == "HDR10":
        self.log.info("Switched dynamic range to HDR10")
        supported_colour_spaces = ["HDR10"]
    elif self.range == "DV":
        self.log.info("Switched dynamic range to DV")
        supported_colour_spaces = ["DolbyVision"]

    content_id = title.data["attributes"]["formats"]["HD"]["contentId"]
    variant_id = title.data["attributes"]["providerVariantId"]

    sky_headers = {
        "X-SkyOTT-Agent": ".".join([
            self.config["client"]["proposition"].lower(),
            self.config["client"]["device"].lower(),
            self.config["client"]["platform"].lower()
        ]),
        "X-SkyOTT-PinOverride": "false",
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"],
        "X-SkyOTT-UserToken": self.tokens["userToken"]
    }

    # Compact separators: the body is HMAC-signed, so its exact byte
    # representation must match what is sent on the wire.
    # NOTE(review): both capability entries below are identical — possibly
    # one was meant to differ (e.g. another protection/codec); confirm.
    body = json.dumps({
        "device": {
            "capabilities": [
                {
                    "protection": "PLAYREADY",
                    "container": "ISOBMFF",
                    "transport": "DASH",
                    "acodec": "AAC",
                    "vcodec": "H265" if self.vcodec == "H265" else "H264",
                },
                {
                    "protection": "PLAYREADY",
                    "container": "ISOBMFF",
                    "transport": "DASH",
                    "acodec": "AAC",
                    "vcodec": "H265" if self.vcodec == "H265" else "H264",
                }
            ],
            "maxVideoFormat": "UHD" if self.vcodec == "H265" else "HD",
            "supportedColourSpaces": supported_colour_spaces,
            "model": self.config["client"]["platform"],
            "hdcpEnabled": "true"
        },
        "client": {
            "thirdParties": ["FREEWHEEL", "YOSPACE"]
        },
        "contentId": content_id,
        "providerVariantId": variant_id,
        "parentalControlPin": "null"
    }, separators=(",", ":"))

    manifest = self.session.post(
        url=self.config["endpoints"]["vod"],
        data=body,
        headers=dict(**sky_headers, **{
            "Accept": "application/vnd.playvod.v1+json",
            "Content-Type": "application/vnd.playvod.v1+json",
            "X-Sky-Signature": self.create_signature_header(
                method="POST",
                path="/video/playouts/vod",
                sky_headers=sky_headers,
                body=body,
                timestamp=int(time.time())
            )
        })
    ).json()

    if "errorCode" in manifest:
        raise ValueError(f"An error occurred: {manifest['description']} [{manifest['errorCode']}]")

    # License endpoint + token for the subsequent license request.
    self.license_api = manifest["protection"]["licenceAcquisitionUrl"]
    self.license_bt = manifest["protection"]["licenceToken"]

    tracks = DASH.from_url(
        url=manifest["asset"]["endpoints"][0]["url"],
        session=self.session
    ).to_tracks(language=Language.get("en"))

    # Set HDR attributes based on the colour space we requested.
    for video in tracks.videos:
        if supported_colour_spaces == ["HDR10"]:
            video.range = Video.Range.HDR10
        elif supported_colour_spaces == ["DolbyVision"]:
            video.range = Video.Range.DV
        else:
            video.range = Video.Range.SDR

    # Fix audio description language: the "AD" territory tag is not a
    # real region, so drop it.
    for track in tracks.audio:
        if track.language.territory == "AD":
            track.language.territory = None

    return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
    """Peacock doesn't typically provide chapter data, so return an empty container."""
    return Chapters([])
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
    """POST the PlayReady challenge to the playout's license server.

    Returns None when get_tracks() has not yet provided a license URL.
    """
    if not self.license_api:
        return None

    # Sign the request path (everything after the host), like other OTT calls.
    license_path = "/" + self.license_api.split("://", 2)[1].split("/", 1)[1]
    signature = self.create_signature_header(
        method="POST",
        path=license_path,
        sky_headers={},
        body="",
        timestamp=int(time.time())
    )
    response = self.session.post(
        url=self.license_api,
        headers={
            "Accept": "*",
            "X-Sky-Signature": signature
        },
        data=challenge
    )
    response.raise_for_status()
    return response.content
|
||||
|
||||
@staticmethod
def calculate_sky_header_md5(headers):
    """MD5 over the canonical lowercase "key: value" rendering of the headers.

    An empty header set hashes the literal string "{}" instead, matching
    the signature scheme the OTT endpoints expect.
    """
    if headers.items():
        canonical = "".join(f"{key.lower()}: {value}\n" for key, value in headers.items())
    else:
        canonical = "{}"
    return str(hashlib.md5(canonical.encode()).hexdigest())
|
||||
|
||||
@staticmethod
def calculate_body_md5(body):
    """Return the hex MD5 digest of the request body string."""
    digest = hashlib.md5(body.encode())
    return str(digest.hexdigest())
|
||||
|
||||
def calculate_signature(self, msg):
    """HMAC-SHA1 the message with the client key and return it base64-encoded."""
    mac = hmac.new(self.hmac_key, bytes(msg, "utf-8"), hashlib.sha1)
    return str(base64.b64encode(mac.digest()), "utf-8")
|
||||
|
||||
def create_signature_header(self, method, path, sky_headers, body, timestamp):
    """Build the X-Sky-Signature header value for a request.

    The signed message is the newline-joined tuple of HTTP method, path,
    an empty field, SDK identifier, protocol version, header digest,
    timestamp, and body digest — terminated by a trailing newline.
    """
    message = "\n".join([
        method.upper(),
        path,
        "",
        self.config["client"]["client_sdk"],
        "1.0",
        self.calculate_sky_header_md5(sky_headers),
        str(timestamp),
        self.calculate_body_md5(body),
    ]) + "\n"

    return self.config["security"]["signature_format"].format(
        client=self.config["client"]["client_sdk"],
        signature=self.calculate_signature(message),
        timestamp=timestamp,
    )
|
||||
|
||||
def get_tokens(self):
    """Return OTT auth tokens, reusing the cached set while still valid.

    Flow: check cache -> fetch the account's personaId -> POST a signed
    /auth/tokens request -> cache and return the fresh tokens.

    Raises:
        EnvironmentError: if the persona lookup fails (stale cookies).
    """
    # Try to get cached tokens
    cache = self.cache.get(f"tokens_{self.profile}_{self.config['client']['id']}")

    if cache and cache.data.get("tokenExpiryTime"):
        tokens_expiration = cache.data.get("tokenExpiryTime")
        # The expiry string ends in "Z", i.e. it is UTC. Compare against
        # UTC "now": the previous datetime.now() comparison was skewed by
        # the local UTC offset and could reuse expired tokens (or discard
        # valid ones) by up to several hours.
        if datetime.strptime(tokens_expiration, "%Y-%m-%dT%H:%M:%S.%fZ") > datetime.utcnow():
            return cache.data

    # Get all SkyOTT headers used both for the persona and token calls.
    sky_headers = {
        "X-SkyOTT-Agent": ".".join([
            self.config["client"]["proposition"],
            self.config["client"]["device"],
            self.config["client"]["platform"]
        ]).lower(),
        "X-SkyOTT-Device": self.config["client"]["device"],
        "X-SkyOTT-Platform": self.config["client"]["platform"],
        "X-SkyOTT-Proposition": self.config["client"]["proposition"],
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"]
    }

    try:
        # Call personas endpoint to get the account's personaId
        personas = self.session.get(
            url=self.config["endpoints"]["personas"],
            headers=dict(**sky_headers, **{
                "Accept": "application/vnd.persona.v1+json",
                "Content-Type": "application/vnd.persona.v1+json",
                "X-SkyOTT-TokenType": self.config["client"]["auth_scheme"]
            })
        ).json()
    except Exception as e:
        raise EnvironmentError(f"Unable to get persona ID: {e}")

    persona = personas["personas"][0]["personaId"]

    # Craft the body data; compact separators because the body is signed.
    body = json.dumps({
        "auth": {
            "authScheme": self.config["client"]["auth_scheme"],
            "authIssuer": self.config["client"]["auth_issuer"],
            "provider": self.config["client"]["provider"],
            "providerTerritory": self.config["client"]["territory"],
            "proposition": self.config["client"]["proposition"],
            "personaId": persona
        },
        "device": {
            "type": self.config["client"]["device"],
            "platform": self.config["client"]["platform"],
            "id": self.config["client"]["id"],
            "drmDeviceId": self.config["client"]["drm_device_id"]
        }
    }, separators=(",", ":"))

    # Get the tokens
    tokens = self.session.post(
        url=self.config["endpoints"]["tokens"],
        headers=dict(**sky_headers, **{
            "Accept": "application/vnd.tokens.v1+json",
            "Content-Type": "application/vnd.tokens.v1+json",
            "X-Sky-Signature": self.create_signature_header(
                method="POST",
                path="/auth/tokens",
                sky_headers=sky_headers,
                body=body,
                timestamp=int(time.time())
            )
        }),
        data=body
    ).json()

    # Cache the tokens for subsequent runs.
    if not cache:
        cache = self.cache.get(f"tokens_{self.profile}_{self.config['client']['id']}")
    cache.set(data=tokens)

    return tokens
|
||||
|
||||
def verify_tokens(self):
    """Verify the tokens by calling the /auth/users/me endpoint.

    Returns True when the signed request succeeds, False otherwise.
    """
    headers = {
        "X-SkyOTT-Device": self.config["client"]["device"],
        "X-SkyOTT-Platform": self.config["client"]["platform"],
        "X-SkyOTT-Proposition": self.config["client"]["proposition"],
        "X-SkyOTT-Provider": self.config["client"]["provider"],
        "X-SkyOTT-Territory": self.config["client"]["territory"],
        "X-SkyOTT-UserToken": self.tokens["userToken"],
    }

    try:
        self.session.get(
            url=self.config["endpoints"]["me"],
            headers=dict(**headers, **{
                "Accept": "application/vnd.userinfo.v2+json",
                "Content-Type": "application/vnd.userinfo.v2+json",
                "X-Sky-Signature": self.create_signature_header(
                    method="GET",
                    path="/auth/users/me",
                    sky_headers=headers,
                    body="",
                    timestamp=int(time.time()),
                ),
            }),
        )
    except Exception:
        # Deliberate best-effort check: any failure means the cookies/tokens
        # are no longer usable.
        return False
    return True
|
||||
BIN
PCOK/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
PCOK/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
27
PCOK/config.yaml
Normal file
27
PCOK/config.yaml
Normal file
@ -0,0 +1,27 @@
|
||||
endpoints:
|
||||
stream_tv: 'https://www.peacocktv.com/stream-tv/{title_id}'
|
||||
config: 'https://config.clients.peacocktv.com/{territory}/{provider}/{proposition}/{device}/PROD/{version}/config.json'
|
||||
login: 'https://rango.id.peacocktv.com/signin/service/international'
|
||||
personas: 'https://persona.id.peacocktv.com/persona-store/personas'
|
||||
tokens: 'https://ovp.peacocktv.com/auth/tokens'
|
||||
me: 'https://ovp.peacocktv.com/auth/users/me'
|
||||
node: 'https://atom.peacocktv.com/adapter-calypso/v3/query/node'
|
||||
vod: 'https://ovp.peacocktv.com/video/playouts/vod'
|
||||
|
||||
client:
|
||||
config_version: '1.0.8'
|
||||
territory: 'US'
|
||||
provider: 'NBCU'
|
||||
proposition: 'NBCUOTT'
|
||||
platform: 'ANDROID' # PC, ANDROID
|
||||
device: 'TABLET' # COMPUTER, TABLET
|
||||
id: 'Jcvf1y0whKOI29vRXcJy'
|
||||
drm_device_id: 'UNKNOWN'
|
||||
client_sdk: 'NBCU-WEB-v4' # NBCU-ANDROID-v3 NBCU-ANDRTV-v4
|
||||
auth_scheme: 'MESSO'
|
||||
auth_issuer: 'NOWTV'
|
||||
|
||||
security:
|
||||
signature_hmac_key_v4: 'FvT9VtwvhtSZvqnExMsvDDTEvBqR3HdsMcBFtWYV'
|
||||
signature_hmac_key_v6: 'izU6EJqqu6DOhOWSk5X4p9dod3fNqH7vzKtYDK8d'
|
||||
signature_format: 'SkyOTT client="{client}",signature="{signature}",timestamp="{timestamp}",version="1.0"'
|
||||
323
PLEX/__init__.py
Normal file
323
PLEX/__init__.py
Normal file
@ -0,0 +1,323 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional
|
||||
from urllib.parse import quote, urljoin, urlparse
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from requests import Request
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH, HLS
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class PLEX(Service):
    """
    \b
    Service code for Plex's free streaming service (https://watch.plex.tv/).

    \b
    Version: 1.0.4
    Author: stabbedbybrick
    Authorization: None
    Geofence: API and downloads are locked into whatever region the user is in
    Robustness:
        L3: 720p, AAC2.0

    \b
    Tips:
        - Input should be complete URL:
            SHOW: https://watch.plex.tv/show/taboo-2017
            EPISODE: https://watch.plex.tv/show/taboo-2017/season/1/episode/1
            MOVIE: https://watch.plex.tv/movie/the-longest-yard
    """

    # Alternate service name accepted on the command line.
    ALIASES = ("plextv",)
|
||||
|
||||
@staticmethod
@click.command(name="PLEX", short_help="https://watch.plex.tv/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> PLEX:
    # Click entry point: forwards the parsed CLI arguments into the service.
    return PLEX(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Keep the raw input URL before base-class setup runs.
    self.title = title
    super().__init__(ctx)
|
||||
|
||||
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Register an anonymous Plex user and attach its token to the session."""
    super().authenticate(cookies, credential)

    self.session.headers.update(
        {
            "accept": "application/json",
            "x-plex-client-identifier": str(uuid.uuid4()),
            "x-plex-language": "en",
            "x-plex-product": "Plex Mediaverse",
            "x-plex-provider-version": "6.5.0",
        }
    )

    user = self._request("POST", self.config["endpoints"]["user"])
    auth_token = user.get("authToken")
    if not auth_token:
        raise ValueError(f"PLEX authentication failed: {user}")

    self.auth_token = auth_token
    self.session.headers.update({"x-plex-token": self.auth_token})
|
||||
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search the Plex discover API, yielding free on-demand results only.

    Result groups whose title does not contain "free on demand" are
    skipped, since those are not playable through this service.
    """
    results = self._request(
        "GET", "https://discover.provider.plex.tv/library/search",
        params={
            "searchTypes": "movies,tv",
            "searchProviders": "discover,plexAVOD,plexFAST",
            "includeMetadata": 1,
            "filterPeople": 1,
            "limit": 10,
            # NOTE(review): requests also URL-encodes params, so quoting here
            # may double-encode special characters — confirm against the API.
            "query": quote(self.title),
        },
    )

    for group in results["MediaContainer"]["SearchResults"]:
        if "free on demand" not in group.get("title", "").lower():
            continue

        # The original code reused the outer loop variable name here;
        # a separate name avoids clobbering the group being iterated.
        for entry in group["SearchResult"]:
            kind = entry.get("Metadata", {}).get("type")
            slug = entry.get("Metadata", {}).get("slug")

            yield SearchResult(
                id_=f"https://watch.plex.tv/{kind}/{slug}",
                title=entry.get("Metadata", {}).get("title"),
                description=entry.get("Metadata", {}).get("description"),
                label=kind,
                url=f"https://watch.plex.tv/{kind}/{slug}",
            )
|
||||
|
||||
def get_titles(self) -> Movies | Series:
    """Resolve the input URL into a Movies or Series container."""
    pattern = re.compile(
        r"^https://watch.plex.tv/"
        r"(?:[a-z]{2}(?:-[A-Z]{2})?/)??"
        r"(?P<type>movie|show)/"
        r"(?P<id>[\w-]+)"
        r"(?P<url_path>(/season/\d+/episode/\d+))?"
    )
    matched = pattern.match(self.title)
    if not matched:
        raise ValueError(f"Could not parse ID from title: {self.title}")

    kind = matched.group("type")
    guid = matched.group("id")
    episode_path = matched.group("url_path")

    if kind == "show":
        if episode_path is not None:
            # Single-episode URL: strip any locale prefix from the path and
            # fetch only that episode.
            path = urlparse(self.title).path
            clean_path = re.sub(r"/[a-z]{2}(?:-[A-Z]{2})?/", "/", path)
            return Series(self._episode(clean_path))
        return Series(self._series(guid))

    if kind == "movie":
        return Movies(self._movie(guid))

    raise ValueError(f"Could not parse content type from title: {self.title}")
|
||||
|
||||
def get_tracks(self, title: Movie | Episode) -> Tracks:
    """Pick the best manifest (DASH preferred, HLS fallback) and parse tracks.

    Also stashes the license URL (or None for clear content) on the title's
    data dict for get_widevine_license() to use later.
    """
    def _find(protocol: str):
        # First media entry matching the given streaming protocol, or None.
        for candidate in title.data.get("Media", []):
            if candidate.get("protocol", "").lower() == protocol:
                return candidate
        return None

    media = _find("dash")
    use_dash = media is not None
    if media is None:
        media = _find("hls")
    if not media:
        raise ValueError("Failed to find either DASH or HLS media")

    manifest = DASH if use_dash else HLS
    media_key = media.get("id")

    if media.get("drm"):
        manifest_url = (
            self.config["endpoints"]["base_url"]
            + self.config["endpoints"]["manifest_drm"].format(media_key, self.auth_token)
        )
        title.data["license_url"] = (
            self.config["endpoints"]["base_url"]
            + self.config["endpoints"]["license"].format(media_key, self.auth_token)
        )
    else:
        manifest_url = (
            self.config["endpoints"]["base_url"]
            + self.config["endpoints"]["manifest_clear"].format(media_key, self.auth_token)
        )
        title.data["license_url"] = None

    return manifest.from_url(manifest_url, self.session).to_tracks(language="en")
|
||||
|
||||
def get_chapters(self, title: Movie | Episode) -> Chapters:
    """Build chapters from the title's markers, fetching them on demand.

    Markers may already be present on the title data; otherwise they are
    looked up via the playQueues endpoint. Any lookup failure degrades to
    an empty chapter list rather than aborting the download.
    """
    if not (markers := title.data.get("Marker")):
        try:
            metadata = self._request(
                "POST", "/playQueues",
                params={
                    "uri": self.config["endpoints"]["provider"] + title.data.get("key"),
                    "type": "video",
                    "continuous": "1",
                },
            )
            # Find the queue entry for this exact title and take its markers.
            markers = next((
                x.get("Marker") for x in metadata.get("MediaContainer", {}).get("Metadata", [])
                if x.get("key") == title.data.get("key")), [])

        except Exception as e:
            # Chapters are optional; log and continue without them.
            self.log.debug("Failed to fetch markers: %s", e)
            return Chapters()

    if not markers:
        return Chapters()

    chapters = []
    for cue in markers:
        # Skip zero-offset cues: a chapter at 00:00 adds no information.
        if cue.get("startTimeOffset", 0) > 0:
            chapters.append(Chapter(name=cue.get("type", "").title(), timestamp=cue.get("startTimeOffset")))

    return Chapters(chapters)
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
    """Plex provides no Widevine privacy/service certificate; return None.

    The previous annotation claimed ``str`` even though the method always
    returns None; the return type now reflects the actual contract.
    """
    return None
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Movie | Episode, track: Any) -> bytes | str | None:
    """POST the challenge to the license URL stashed by get_tracks(), if any."""
    license_url = title.data.get("license_url")
    if not license_url:
        # Clear (non-DRM) content needs no license.
        return None

    r = self.session.post(url=license_url, data=challenge)
    if r.status_code != 200:
        raise ConnectionError(r.text)
    return r.content
|
||||
|
||||
# Service specific
|
||||
|
||||
def _fetch_season(self, url: str) -> list:
    """Fetch one season's episode listing (worker for the thread pool)."""
    payload = self._request("GET", url)
    return payload.get("MediaContainer", {}).get("Metadata", [])
|
||||
|
||||
def _series(self, guid: str) -> list[Episode]:
    """Fetch all seasons of a show concurrently and build Episode titles."""
    data = self._request("GET", f"/library/metadata/show:{guid}")

    meta_key = data.get("MediaContainer", {}).get("Metadata", [])[0].get("key")
    if not meta_key:
        raise ValueError("Failed to find metadata for title")

    series = self._request("GET", f"{self.config['endpoints']['base_url']}/{meta_key}")

    season_urls = []
    for item in series.get("MediaContainer", {}).get("Metadata", []):
        if item.get("type") == "season":
            season_urls.append(self.config["endpoints"]["base_url"] + item.get("key"))
    if not season_urls:
        raise ValueError("Failed to find seasons for title")

    # Seasons are independent requests, so fetch them in parallel.
    with ThreadPoolExecutor(max_workers=10) as pool:
        season_listings = list(pool.map(self._fetch_season, season_urls))

    episodes = []
    for listing in season_listings:
        for entry in listing:
            if entry.get("type") != "episode":
                continue
            episodes.append(
                Episode(
                    id_=entry.get("ratingKey"),
                    service=self.__class__,
                    name=entry.get("title"),
                    season=int(entry.get("parentIndex", 0)),
                    number=int(entry.get("index", 0)),
                    # Strip a trailing "(YYYY)" year suffix from the show name.
                    title=re.sub(r"\s*\(\d{4}\)", "", entry.get("grandparentTitle", "")),
                    data=entry,
                )
            )
    return episodes
|
||||
|
||||
def _movie(self, guid: str) -> list[Movie]:
    """Fetch movie metadata by GUID and wrap it for the Movies() container.

    Returns a single-element list (the previous annotation claimed a bare
    Movie, but a list has always been returned).

    Raises:
        ValueError: when the API returns no metadata for the GUID.
    """
    data = self._request("GET", f"/library/metadata/movie:{guid}")
    metadata = data.get("MediaContainer", {}).get("Metadata", [])
    # Guard BEFORE indexing: the old code did Metadata[0] first, so an empty
    # list raised IndexError instead of the intended ValueError.
    if not metadata:
        raise ValueError(f"Could not find any data for ID {guid}")
    movie = metadata[0]

    return [
        Movie(
            id_=movie.get("ratingKey"),
            service=self.__class__,
            name=movie.get("title"),
            year=movie.get("year"),
            data=movie,
        )
    ]
|
||||
|
||||
def _episode(self, path: str) -> list[Episode]:
    """Fetch a single episode's metadata via the screen + playQueues APIs.

    Returns a single-element list for the Series() container (the previous
    annotation claimed a bare Episode, but a list has always been returned).

    Raises:
        ValueError: when no metadata can be resolved for the path.
    """
    data = self._request("GET", self.config["endpoints"]["screen"] + path)
    meta_key = data.get("actions", [])[0].get("data", {}).get("key")
    if not meta_key:
        raise ValueError("Failed to find metadata for title")

    metadata = self._request(
        "POST", "/playQueues",
        params={
            "uri": self.config["endpoints"]["provider"] + meta_key,
            "type": "video",
            "continuous": "1",
        },
    )

    # Locate the queue entry matching the requested episode key.
    episode = next((x for x in metadata.get("MediaContainer", {}).get("Metadata", []) if x.get("key") == meta_key), None)
    if not episode:
        raise ValueError("Failed to find metadata for title")

    return [
        Episode(
            id_=episode.get("ratingKey"),
            service=self.__class__,
            name=episode.get("title"),
            season=int(episode.get("parentIndex", 0)),
            number=int(episode.get("index", 0)),
            # Strip a trailing "(YYYY)" year suffix from the show name.
            title=re.sub(r"\s*\(\d{4}\)", "", episode.get("grandparentTitle", "")),
            data=episode,
        )
    ]
|
||||
|
||||
def _request(self, method: str, endpoint: str, **kwargs: Any) -> dict | str:
    """Send a request against the Plex VOD provider and parse the response.

    Relative endpoints are resolved against the configured base URL.
    Returns decoded JSON when the body parses as JSON, otherwise raw text.
    The previous return annotation ``Any[dict | str]`` was invalid typing
    syntax; ``dict | str`` expresses the actual contract.

    Raises:
        ConnectionError: for any status code other than 200/201/426
            (426 is tolerated — presumably a Plex upgrade-hint quirk;
            confirm against the API).
    """
    url = urljoin(self.config["endpoints"]["base_url"], endpoint)

    prep = self.session.prepare_request(Request(method, url, **kwargs))

    response = self.session.send(prep)
    if response.status_code not in (200, 201, 426):
        raise ConnectionError(f"{response.text}")

    try:
        return json.loads(response.content)

    except json.JSONDecodeError:
        return response.text
|
||||
|
||||
|
||||
12
PLEX/config.yaml
Normal file
12
PLEX/config.yaml
Normal file
@ -0,0 +1,12 @@
|
||||
headers:
|
||||
User-Agent: Mozilla/5.0 (Linux; Android 11; Smart TV Build/AR2101; wv)
|
||||
|
||||
endpoints:
|
||||
base_url: https://vod.provider.plex.tv
|
||||
user: https://plex.tv/api/v2/users/anonymous
|
||||
screen: https://luma.plex.tv/api/screen
|
||||
provider: provider://tv.plex.provider.vod
|
||||
manifest_clear: /library/parts/{}?includeAllStreams=1&X-Plex-Product=Plex+Mediaverse&X-Plex-Token={}
|
||||
manifest_drm: /library/parts/{}?includeAllStreams=1&X-Plex-Product=Plex+Mediaverse&X-Plex-Token={}&X-Plex-DRM=widevine
|
||||
license: /library/parts/{}/license?X-Plex-Token={}&X-Plex-DRM=widevine
|
||||
|
||||
294
PLUTO/__init__.py
Normal file
294
PLUTO/__init__.py
Normal file
@ -0,0 +1,294 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any, Optional
|
||||
|
||||
import click
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH, HLS
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapters, Tracks
|
||||
|
||||
|
||||
class PLUTO(Service):
    """
    \b
    Service code for Pluto TV on demand streaming service (https://pluto.tv/)
    Credit to @wks_uwu for providing an alternative API, making the codebase much cleaner

    \b
    Version: 1.0.2
    Author: stabbedbybrick
    Authorization: None
    Robustness:
      Widevine:
        L3: 1080p, AAC2.0

    \b
    Tips:
        - Input can be complete title URL or just the path:
            SERIES: /series/65ce4e5003fa740013793127/details
            EPISODE: /series/65ce4e5003fa740013793127/season/1/episode/662c2af0a9f2d200131ba731
            MOVIE: /movies/635c1e430888bc001ad01a9b/details
        - Use --lang LANG_RANGE option to request non-English tracks
        - Use --hls to request HLS instead of DASH:
            devine dl pluto URL --hls

    \b
    Notes:
        - Both DASH(widevine) and HLS(AES) are looked for in the API.
        - DASH is prioritized over HLS since the latter doesn't have 1080p. If DASH has audio/subtitle issues,
          you can try using HLS with the --hls flag.
        - Pluto use transport streams for HLS, meaning the video and audio are a part of the same stream
          As a result, only videos are listed as tracks. But the audio will be included as well.
        - With the variations in manifests, and the inconsistency in the API, the language is set as "en" by default
          for all tracks, no matter what region you're in.
          You can manually set the language in the get_titles() function if you want to change it.

    """

    ALIASES = ("plu", "plutotv")
    TITLE_RE = (
        r"^"
        r"(?:https?://(?:www\.)?pluto\.tv(?:/[a-z]{2})?)?"
        r"(?:/on-demand)?"
        r"/(?P<type>movies|series)"
        r"/(?P<id>[a-z0-9-]+)"
        r"(?:(?:/season/(\d+)/episode/(?P<episode>[a-z0-9-]+)))?"
    )

    @staticmethod
    @click.command(name="PLUTO", short_help="https://pluto.tv/", help=__doc__)
    @click.option("--hls", is_flag=True, help="Request HLS instead of DASH")
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        return PLUTO(ctx, **kwargs)

    def __init__(self, ctx, title, hls=False):
        super().__init__(ctx)
        self.title = title  # raw URL or path as given on the command line
        self.force_hls = hls  # when True, prefer HLS even if DASH is available

    def authenticate(
        self,
        cookies: Optional[CookieJar] = None,
        credential: Optional[Credential] = None,
    ) -> None:
        """Bootstrap an anonymous session and capture the session token and region.

        Pluto needs no credentials; the boot endpoint hands out a session token
        for an anonymous "web" client identified by the parameters below.
        """
        super().authenticate(cookies, credential)

        # These query parameters are attached to every request on this session,
        # mimicking an anonymous web player.
        self.session.params = {
            "appName": "web",
            "appVersion": "na",
            "clientID": str(uuid.uuid1()),
            "deviceDNT": 0,
            "deviceId": "unknown",
            "clientModelNumber": "na",
            "serverSideAds": "false",
            "deviceMake": "unknown",
            "deviceModel": "web",
            "deviceType": "web",
            "deviceVersion": "unknown",
            "sid": str(uuid.uuid1()),
            "drmCapabilities": "widevine:L3",
        }

        info = self.session.get(self.config["endpoints"]["auth"]).json()
        self.token = info["sessionToken"]
        self.region = info["session"].get("activeRegion", "").lower()

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield search results for self.title, skipping live/channel entries."""
        params = {
            "q": self.title,
            "limit": "100",
        }

        r = self.session.get(
            self.config["endpoints"]["search"].format(query=self.title),
            headers={"Authorization": f"Bearer {self.token}"},
            params=params,
        )
        r.raise_for_status()
        results = r.json()

        for result in results["data"]:
            # "timeline" and "channel" entries are live content, not VOD.
            if result.get("type") not in ["timeline", "channel"]:
                content = result.get("id")
                kind = result.get("type")
                kind = "movies" if kind == "movie" else "series"

                yield SearchResult(
                    id_=f"/{kind}/{content}/details",
                    title=result.get("name"),
                    description=result.get("synopsis"),
                    label=result.get("type"),
                    url=f"https://pluto.tv/{self.region}/on-demand/{kind}/{content}/details",
                )

    def get_titles(self) -> Titles_T:
        """Parse the input URL/path and return the matching Movie or Series titles.

        Raises:
            ValueError: If the title cannot be parsed from the input.
            ConnectionError: If the API rejects the content lookup.
        """
        try:
            kind, content_id, episode_id = (
                re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id", "episode")
            )
        except Exception:
            raise ValueError("Could not parse ID from title - is the URL correct?")

        if kind == "series" and episode_id:
            # A single episode was requested; fetch the show and keep only it.
            r = self.session.get(self.config["endpoints"]["series"].format(season_id=content_id))
            if not r.ok:
                raise ConnectionError(f"{r.json().get('message')}")

            data = r.json()
            return Series(
                [
                    Episode(
                        id_=episode.get("_id"),
                        service=self.__class__,
                        title=data.get("name"),
                        season=int(episode.get("season")),
                        number=int(episode.get("number")),
                        name=episode.get("name"),
                        year=None,
                        language="en",  # self.region,
                        data=episode,
                    )
                    for series in data["seasons"]
                    for episode in series["episodes"]
                    if episode.get("_id") == episode_id
                ]
            )

        elif kind == "series":
            r = self.session.get(self.config["endpoints"]["series"].format(season_id=content_id))
            if not r.ok:
                raise ConnectionError(f"{r.json().get('message')}")

            data = r.json()
            return Series(
                [
                    Episode(
                        id_=episode.get("_id"),
                        service=self.__class__,
                        title=data.get("name"),
                        season=int(episode.get("season")),
                        number=int(episode.get("number")),
                        name=episode.get("name"),
                        year=self.year(episode),
                        language="en",  # self.region,
                        data=episode,
                    )
                    for series in data["seasons"]
                    for episode in series["episodes"]
                ]
            )

        elif kind == "movies":
            url = self.config["endpoints"]["movie"].format(video_id=content_id)
            r = self.session.get(url, headers={"Authorization": f"Bearer {self.token}"})
            if not r.ok:
                raise ConnectionError(f"{r.json().get('message')}")

            data = r.json()
            return Movies(
                [
                    Movie(
                        id_=movie.get("_id"),
                        service=self.__class__,
                        name=movie.get("name"),
                        language="en",  # self.region,
                        data=movie,
                        year=self.year(movie),
                    )
                    for movie in data
                ]
            )

    def get_tracks(self, title: Title_T) -> Tracks:
        """Return the tracks for a title, preferring DASH unless --hls was set.

        Raises:
            ValueError: If no non-ad source or no usable manifest is found.
        """
        url = self.config["endpoints"]["episodes"].format(episode_id=title.id)
        episode = self.session.get(url).json()

        # Skip ad/bumper clips and take the first real content item's sources.
        sources = next((item.get("sources") for item in episode if not self.bumpers(item.get("name", ""))), None)

        if not sources:
            raise ValueError("Unable to find manifest for this title")

        hls = next((x.get("file") for x in sources if x.get("type").lower() == "hls"), None)
        dash = next((x.get("file") for x in sources if x.get("type").lower() == "dash"), None)

        if dash and not self.force_hls:
            self.license = self.config["endpoints"]["license"]
            # The S3 origin serves the same manifest without ad stitching.
            manifest = dash.replace("https://siloh.pluto.tv", "http://silo-hybrik.pluto.tv.s3.amazonaws.com")
            tracks = DASH.from_url(manifest, self.session).to_tracks(language=title.language)

            # Mark audio-description tracks based on the DASH Role element.
            for track in tracks.audio:
                role = track.data["dash"]["adaptation_set"].find("Role")
                if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                    track.descriptive = True

        else:
            if not hls:
                raise ValueError("Unable to find manifest for this title")

            self.license = None  # HLS uses AES, no Widevine license needed
            m3u8_url = hls.replace("https://siloh.pluto.tv", "http://silo-hybrik.pluto.tv.s3.amazonaws.com")
            manifest = self.clean_manifest(self.session.get(m3u8_url).text)
            tracks = HLS.from_text(manifest, m3u8_url).to_tracks(language=title.language)

            # HLS audio is muxed into the transport stream, so any separately
            # listed audio tracks are AD duplicates; drop them all.
            # BUG FIX: the original removed items while iterating the same
            # list, which skipped every other track. Iterate a copy instead.
            for track in list(tracks.audio):
                tracks.audio.remove(track)

        return tracks

    def get_chapters(self, title: Title_T) -> Chapters:
        # Pluto's API exposes no chapter data.
        return Chapters()

    def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
        # No service certificate is used for Pluto's license server.
        return None

    def get_widevine_license(self, challenge: bytes, **_: Any) -> Optional[bytes]:
        """POST the challenge to the license server; None when no DRM (HLS)."""
        if not self.license:
            return None

        r = self.session.post(url=self.license, data=challenge)
        if r.status_code != 200:
            raise ConnectionError(r.text)

        return r.content

    # service specific functions

    @staticmethod
    def clean_manifest(text: str) -> str:
        """Strip FairPlay DRM entries from an HLS playlist.

        Everything from the line preceding the first FairPlay marker onward is
        discarded, since those entries are unusable here.
        """
        # Remove fairplay entries
        index = text.find('#PLUTO-DRM:ID="fairplay')
        if index == -1:
            return text
        else:
            end_of_previous_line = text.rfind("\n", 0, index)
            if end_of_previous_line == -1:
                return ""
            else:
                return text[:end_of_previous_line]

    @staticmethod
    def bumpers(text: str) -> bool:
        """Return True if the clip name matches a known ad/bumper pattern."""
        ads = (
            "Pluto_TV_OandO",
            "_ad",
            "creative",
            "Bumper",
            "Promo",
            "WarningCard",
        )

        return any(ad in text for ad in ads)

    @staticmethod
    def year(data: dict) -> Optional[int]:
        """Best-effort release year from the slug.

        If the display name itself already embeds a "(YYYY)" year, None is
        returned — presumably so the year isn't duplicated downstream (TODO
        confirm); otherwise the first 4-digit number in the slug is used.
        """
        title_year = (int(match.group(1)) if (match := re.search(r"\((\d{4})\)", data.get("name", ""))) else None)
        slug_year = (int(match.group(1)) if (match := re.search(r"\b(\d{4})\b", data.get("slug", ""))) else None)
        return None if title_year else slug_year
|
||||
|
||||
7
PLUTO/config.yaml
Normal file
7
PLUTO/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
endpoints:
|
||||
auth: https://boot.pluto.tv/v4/start
|
||||
search: https://service-media-search.clusters.pluto.tv/v1/search
|
||||
series: https://service-vod.clusters.pluto.tv/v3/vod/series/{season_id}/seasons
|
||||
episodes: http://api.pluto.tv/v2/episodes/{episode_id}/clips.json
|
||||
movie: https://service-vod.clusters.pluto.tv/v4/vod/items?ids={video_id}
|
||||
license: https://service-concierge.clusters.pluto.tv/v1/wv/alt
|
||||
149
PTHS/__init__.py
Normal file
149
PTHS/__init__.py
Normal file
@ -0,0 +1,149 @@
|
||||
import json
|
||||
import re
|
||||
from typing import Optional
|
||||
from http.cookiejar import CookieJar
|
||||
from langcodes import Language
|
||||
import click
|
||||
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Movie, Movies, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Tracks
|
||||
|
||||
|
||||
class PTHS(Service):
    """
    Service code for Pathé Thuis (pathe-thuis.nl)
    Version: 1.0.0

    Security: SD @ L3 (Widevine)
              FHD @ L1
    Authorization: Cookies or authentication token

    Supported:
        • Movies → https://www.pathe-thuis.nl/film/{id}

    Note:
        Pathé Thuis does not have episodic content, only movies.
    """

    TITLE_RE = (
        r"^(?:https?://(?:www\.)?pathe-thuis\.nl/film/)?(?P<id>\d+)(?:/[^/]+)?$"
    )
    GEOFENCE = ("NL",)
    NO_SUBTITLES = True

    @staticmethod
    @click.command(name="PTHS", short_help="https://www.pathe-thuis.nl")
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        return PTHS(ctx, **kwargs)

    def __init__(self, ctx, title: str):
        super().__init__(ctx)

        m = re.match(self.TITLE_RE, title)
        if not m:
            raise ValueError(
                f"Unsupported Pathé Thuis URL or ID: {title}\n"
                "Use e.g. https://www.pathe-thuis.nl/film/30591"
            )

        self.movie_id = m.group("id")
        self.drm_token = None
        # BUG FIX: license_url was previously only ever assigned inside
        # get_tracks(), so get_widevine_license() raised AttributeError (not
        # the intended ValueError) if called before tracks were fetched.
        self.license_url = None

        if self.config is None:
            raise EnvironmentError("Missing service config for Pathé Thuis.")

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Attach the authenticationToken cookie (if any) as session headers.

        Without cookies or the token, the session continues unauthenticated.
        """
        super().authenticate(cookies, credential)

        if not cookies:
            self.log.warning("No cookies provided, proceeding unauthenticated.")
            return

        token = next((c.value for c in cookies if c.name == "authenticationToken"), None)
        if not token:
            self.log.info("No authenticationToken cookie found, unauthenticated mode.")
            return

        self.session.headers.update({
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
            "X-Pathe-Device-Identifier": "web-widevine-1",
            "X-Pathe-Auth-Session-Token": token,
        })
        self.log.info("Authentication token successfully attached to session.")

    def get_titles(self) -> Titles_T:
        """Fetch movie metadata and return it as a single-item Movies list."""
        url = self.config["endpoints"]["metadata"].format(movie_id=self.movie_id)
        r = self.session.get(url)
        r.raise_for_status()
        data = r.json()

        movie = Movie(
            id_=str(data["id"]),
            service=self.__class__,
            name=data["name"],
            description=data.get("intro", ""),
            year=data.get("year"),
            language=Language.get(data.get("language", "en")),
            data=data,
        )
        return Movies([movie])

    def get_tracks(self, title: Title_T) -> Tracks:
        """Resolve the user's ticket to a DASH manifest and return its tracks.

        Side effects: stores the DRM token and license server URL for use in
        get_widevine_license().

        Raises:
            ValueError: If no manifest URL is present in the ticket response.
        """
        ticket_id = self._get_ticket_id(title)
        url = self.config["endpoints"]["ticket"].format(ticket_id=ticket_id)

        r = self.session.get(url)
        r.raise_for_status()
        data = r.json()
        stream = data["stream"]

        manifest_url = stream.get("url") or stream.get("drmurl")
        if not manifest_url:
            raise ValueError("No stream manifest URL found.")

        self.drm_token = stream["token"]
        self.license_url = stream["rawData"]["licenseserver"]

        tracks = DASH.from_url(manifest_url, session=self.session).to_tracks(language=title.language)

        return tracks

    def _get_ticket_id(self, title: Title_T) -> str:
        """Fetch the user's owned ticket ID if present."""
        data = title.data
        for t in (data.get("tickets") or []):
            if t.get("playable") and str(t.get("movieId")) == str(self.movie_id):
                return str(t["id"])
        raise ValueError("No valid ticket found for this movie. Ensure purchase or login.")

    def get_chapters(self, title: Title_T):
        # Pathé Thuis exposes no chapter data.
        return []

    def get_widevine_license(self, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
        """POST the Widevine challenge to the license server set by get_tracks().

        Raises:
            ValueError: If the license URL/token is missing or the response is empty.
        """
        if not self.license_url or not self.drm_token:
            raise ValueError("Missing license URL or token.")

        headers = {
            "Content-Type": "application/octet-stream",
            "Authorization": f"Bearer {self.drm_token}",
        }

        # The server expects the token both as a bearer header and as
        # custom_data in the query string.
        params = {"custom_data": self.drm_token}

        r = self.session.post(self.license_url, params=params, data=challenge, headers=headers)
        r.raise_for_status()

        if not r.content:
            raise ValueError("Empty license response, likely invalid or expired token.")
        return r.content
|
||||
3
PTHS/config.yaml
Normal file
3
PTHS/config.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
endpoints:
|
||||
metadata: "https://www.pathe-thuis.nl/api/movies/{movie_id}?include=editions"
|
||||
ticket: "https://www.pathe-thuis.nl/api/tickets/{ticket_id}"
|
||||
22
README.md
22
README.md
@ -1,2 +1,22 @@
|
||||
# Unshackle-Services
|
||||
A collection of non-premium services for Unshackle.
|
||||
|
||||
## Usage:
|
||||
Clone repository:
|
||||
|
||||
Add folder to `unshackle.yaml`:
|
||||
|
||||
```
|
||||
directories:
|
||||
services: "path/to/services"
|
||||
```
|
||||
See help text for each service:
|
||||
|
||||
`unshackle dl SERVICE --help`
|
||||
|
||||
## Notes:
|
||||
Some versions of the dependencies work better than others. These are the recommended versions as of 2025-11-11:
|
||||
|
||||
- Shaka Packager: [v2.6.1](https://github.com/shaka-project/shaka-packager/releases/tag/v2.6.1)
|
||||
- CCExtractor: [v0.93](https://github.com/CCExtractor/ccextractor/releases/tag/v0.93)
|
||||
- MKVToolNix: [latest](https://mkvtoolnix.download/downloads.html)
|
||||
- FFmpeg: [latest](https://ffmpeg.org/download.html)
|
||||
|
||||
791
RKTN/__init__.py
Normal file
791
RKTN/__init__.py
Normal file
@ -0,0 +1,791 @@
|
||||
import base64
|
||||
from copy import copy
|
||||
import datetime
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional
|
||||
from aiohttp import CookieJar
|
||||
from pymediainfo import MediaInfo
|
||||
from langcodes import Language
|
||||
import click
|
||||
import urllib.parse
|
||||
from requests import HTTPError
|
||||
from unshackle.core.config import config
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Title_T
|
||||
from unshackle.core.titles.episode import Episode, Series
|
||||
from unshackle.core.titles.movie import Movie, Movies
|
||||
from unshackle.core.tracks.audio import Audio
|
||||
from unshackle.core.tracks.chapters import Chapters
|
||||
from unshackle.core.tracks.subtitle import Subtitle
|
||||
from unshackle.core.tracks.tracks import Tracks
|
||||
from unshackle.core.tracks.video import Video
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
|
||||
class RKTN(Service):
|
||||
"""
|
||||
Service code for Rakuten's Rakuten TV streaming service (https://rakuten.tv).
|
||||
|
||||
\b
|
||||
Authorization: Credentials
|
||||
Security: FHD-UHD@L1, SD-FHD@L3; with trick
|
||||
|
||||
\b
|
||||
Maximum of 3 audio tracks, otherwise will fail because Rakuten blocks more than 3 requests.
|
||||
Subtitles requests expires fast, so together with video and audio it will fail.
|
||||
If you want subs, use -S or -na -nv -nc, and download the rest separately.
|
||||
|
||||
\b
|
||||
Command for Titles with no SDR (if not set range to HDR10 it will fail):
|
||||
uv run unshackle dl -r HDR10 [OPTIONS] RKTN -m https://www.rakuten.tv/...
|
||||
|
||||
\b
|
||||
TODO: - TV Shows are not yet supported as there's 0 TV Shows to purchase, rent, or watch in my region
|
||||
|
||||
\b
|
||||
NOTES: - Only movies are supported as my region's Rakuten has no TV shows available to purchase at all
|
||||
"""
|
||||
|
||||
ALIASES = ["RakutenTV", "rakuten", "rakutentv"]
|
||||
TITLE_RE = r"^(?:https?://(?:www\.)?rakuten\.tv/([a-z]+/|)movies(?:/[a-z]{2})?/)(?P<id>[a-z0-9-]+)"
|
||||
LANG_MAP = {
|
||||
"es": "es-ES",
|
||||
"pt": "pt-PT",
|
||||
}
|
||||
@staticmethod
|
||||
@click.command(name="RakutenTV", short_help="https://rakuten.tv")
|
||||
@click.argument("title", type=str, required=False)
|
||||
@click.option(
|
||||
"-dev",
|
||||
"--device",
|
||||
default=None,
|
||||
type=click.Choice(
|
||||
[
|
||||
"web", # Device: Web Browser - Maximum Quality: 720p - DRM: Widevine
|
||||
"android", # Device: Android Phone - Maximum Quality: 720p - DRM: Widevine
|
||||
"atvui40", # Device: AndroidTV - Maximum Quality: 2160p - DRM: Widevine
|
||||
"lgui40", # Device: LG SMART TV - Maximum Quality: 2160p - DRM: Playready
|
||||
"smui40", # Device: Samsung SMART TV - Maximum Quality: 2160p - DRM: Playready
|
||||
],
|
||||
case_sensitive=True,
|
||||
),
|
||||
help="The device you want to make requests with.",
|
||||
)
|
||||
@click.option(
|
||||
"-m", "--movie", is_flag=True, default=False, help="Title is a movie."
|
||||
)
|
||||
@click.option(
|
||||
"-dal", "--desired-audio-language", type=str, default="SPA,ENG", help="Select desired audio language tracks for this title. Default SPA,ENG. Separate multiple languages with a comma."
|
||||
)
|
||||
@click.pass_context
|
||||
def cli(ctx, **kwargs):
|
||||
return RKTN(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title, device, movie, desired_audio_language):
|
||||
super().__init__(ctx)
|
||||
#self.parse_title(ctx, title)
|
||||
self.title = title
|
||||
self.cdm = ctx.obj.cdm
|
||||
self.playready = isinstance(self.cdm, PlayReadyCdm)
|
||||
self.desired_audio_language = desired_audio_language
|
||||
self.range = ctx.parent.params.get("range_")[0].name or "SDR"
|
||||
self.vcodec = ctx.parent.params.get("vcodec") or Video.Codec.AVC # Defaults to H264
|
||||
self.resolution = "UHD" if (self.vcodec.extension.lower() == "h265" or self.range in ['HYBRID', 'HDR10', 'HDR10P', 'DV']) else "FHD"
|
||||
self.device = "lgui40" if self.playready else "android"
|
||||
self.movie = movie or "movies" in title
|
||||
self.audio_languages = []
|
||||
|
||||
# set a custom device if provided
|
||||
if device is not None:
|
||||
self.device = device
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
|
||||
super().authenticate(cookies, credential)
|
||||
if not credential:
|
||||
raise EnvironmentError("Service requires Credentials for Authentication.")
|
||||
|
||||
self.session.headers.update(
|
||||
{
|
||||
"Origin": "https://rakuten.tv/",
|
||||
"User-Agent": "Mozilla/5.0 (Linux; Android 11; SHIELD Android TV Build/RQ1A.210105.003; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/99.0.4844.88 Mobile Safari/537.36",
|
||||
}
|
||||
)
|
||||
|
||||
def get_titles(self):
|
||||
self.pair_device()
|
||||
|
||||
if self.movie:
|
||||
endpoint = self.config["endpoints"]["title"]
|
||||
else:
|
||||
endpoint = self.config["endpoints"]["show"]
|
||||
|
||||
params = urllib.parse.urlencode(
|
||||
{
|
||||
"classification_id": self.classification_id,
|
||||
"device_identifier": self.config["clients"][self.device][
|
||||
"device_identifier"
|
||||
],
|
||||
"device_serial": self.config["clients"][self.device]["device_serial"],
|
||||
"locale": self.locale,
|
||||
"market_code": self.market_code,
|
||||
"session_uuid": self.session_uuid,
|
||||
"timestamp": f"{int(datetime.datetime.now().timestamp())}005",
|
||||
"support_closed_captions": "true",
|
||||
}
|
||||
)
|
||||
title_url = endpoint.format(
|
||||
title_id=self.title
|
||||
) + params
|
||||
|
||||
|
||||
title = self.session.get(url=title_url).json()
|
||||
|
||||
if "errors" in title:
|
||||
error = title["errors"][0]
|
||||
if error["code"] == "error.not_found":
|
||||
self.log.error(f"Title [{self.title}] was not found on this account.")
|
||||
else:
|
||||
self.log.error(
|
||||
f"Unable to get title info: {error['message']} [{error['code']}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
title = self.get_info(title["data"])
|
||||
|
||||
if self.movie:
|
||||
|
||||
return Movies(
|
||||
[
|
||||
Movie(
|
||||
id_=self.title,
|
||||
service=self.__class__,
|
||||
name=title["title"],
|
||||
year=title["year"],
|
||||
language="en",
|
||||
data=title,
|
||||
description=title["plot"],
|
||||
)
|
||||
]
|
||||
)
|
||||
else:
|
||||
episodes_list = []
|
||||
#title_ep = self.get_info(title["data"]['episodes'])
|
||||
for season in title["tv_show"]["seasons"]:
|
||||
data_season = endpoint.format(
|
||||
title_id=season["id"]
|
||||
) + params
|
||||
|
||||
data = self.session.get(url=data_season).json()
|
||||
|
||||
if "errors" in data:
|
||||
error = data["errors"][0]
|
||||
if error["code"] == "error.not_found":
|
||||
self.log.error(f"Season [{season['id']}] was not found on this account.")
|
||||
else:
|
||||
self.log.error(
|
||||
f"Unable to get title info: {error['message']} [{error['code']}]"
|
||||
)
|
||||
continue
|
||||
|
||||
for episode in data["data"]["episodes"]:
|
||||
episodes_list.append(
|
||||
Episode(
|
||||
id_=episode["id"],
|
||||
service=self.__class__,
|
||||
title=episode["tv_show_title"],
|
||||
season=episode["season_number"],
|
||||
number=episode["number"],
|
||||
name=episode["title"] or episode['display_name'],
|
||||
description=episode["short_plot"],
|
||||
year=episode["year"],
|
||||
language="en",
|
||||
data=episode,
|
||||
)
|
||||
)
|
||||
|
||||
return Series(episodes_list)
|
||||
|
||||
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
|
||||
# Obtener tracks para todos los idiomas de audio disponibles
|
||||
all_tracks = None
|
||||
|
||||
for audio_lang in self.audio_languages:
|
||||
self.log.info(f"Getting tracks for audio language: {audio_lang}")
|
||||
|
||||
# Obtener stream info para este idioma específico
|
||||
stream_info = self.get_avod(audio_lang, title) if self.kind == "avod" else self.get_me(audio_lang, title)
|
||||
|
||||
if "errors" in stream_info:
|
||||
error = stream_info["errors"][0]
|
||||
if "error.streaming.no_active_right" in stream_info["errors"][0]["code"]:
|
||||
self.log.error(
|
||||
" x You don't have the rights for this content\n You need to rent or buy it first"
|
||||
)
|
||||
else:
|
||||
self.log.error(
|
||||
f" - Failed to get track info: {error['message']} [{error['code']}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
stream_info = stream_info["data"]["stream_infos"][0]
|
||||
|
||||
if all_tracks is None:
|
||||
# Primera iteración: crear el objeto tracks principal
|
||||
self.license_url = stream_info["license_url"]
|
||||
|
||||
all_tracks = DASH.from_url(url=stream_info["url"], session=self.session).to_tracks(language=title.language)
|
||||
|
||||
# Procesar subtítulos (solo una vez)
|
||||
subtitle_tracks = []
|
||||
for subtitle in stream_info.get("all_subtitles", []):
|
||||
subtitle_tracks += [
|
||||
Subtitle(
|
||||
id_=hashlib.md5(subtitle["url"].encode()).hexdigest()[0:6],
|
||||
url=subtitle["url"],
|
||||
codec=Subtitle.Codec.from_mime(subtitle["format"]),
|
||||
forced=subtitle["forced"],
|
||||
language=subtitle["locale"],
|
||||
)
|
||||
]
|
||||
|
||||
all_tracks.add(subtitle_tracks)
|
||||
else:
|
||||
# Iteraciones adicionales: obtener tracks de audio adicionales
|
||||
temp_tracks = DASH.from_url(url=stream_info["url"], session=self.session).to_tracks(language=title.language)
|
||||
|
||||
# Agregar solo los tracks de audio nuevos
|
||||
for audio_track in temp_tracks.audio:
|
||||
# Verificar que no sea duplicado basado en el idioma y codec
|
||||
is_duplicate = False
|
||||
for existing_audio in all_tracks.audio:
|
||||
if (existing_audio.language == audio_track.language and
|
||||
existing_audio.codec == audio_track.codec):
|
||||
is_duplicate = True
|
||||
break
|
||||
|
||||
if not is_duplicate:
|
||||
all_tracks.audio.append(audio_track)
|
||||
|
||||
# Procesar HDR para videos
|
||||
for video in all_tracks.videos:
|
||||
if "HDR10" in video.url:
|
||||
video.range = Video.Range.HDR10
|
||||
|
||||
# Aplicar el método append_tracks mejorado
|
||||
self.append_tracks(all_tracks)
|
||||
|
||||
return all_tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
|
||||
|
||||
return Chapters([])
|
||||
|
||||
def get_me(self, audio_language=None, title: Title_T = None):
|
||||
# Si no se especifica idioma, usar el primero disponible
|
||||
if audio_language is None:
|
||||
audio_language = self.audio_languages[0]
|
||||
|
||||
stream_info_url = self.config["endpoints"]["manifest"].format(
|
||||
kind="me"
|
||||
) + urllib.parse.urlencode(
|
||||
{
|
||||
"audio_language": audio_language, # Usar el idioma especificado
|
||||
"audio_quality": "5.1", # Will get better audio in different request to make sure it wont error
|
||||
"classification_id": self.classification_id,
|
||||
"content_id": title.id,
|
||||
"content_type": "movies" if self.movie else "episodes",
|
||||
"device_identifier": self.config["clients"][self.device][
|
||||
"device_identifier"
|
||||
],
|
||||
"device_serial": "not_implemented",
|
||||
"device_stream_audio_quality": "5.1",
|
||||
"device_stream_hdr_type": self.hdr_type,
|
||||
"device_stream_video_quality": self.resolution,
|
||||
"device_uid": "affa434b-8b7c-4ff3-a15e-df1fe500e71e",
|
||||
"device_year": self.config["clients"][self.device]["device_year"],
|
||||
"disable_dash_legacy_packages": "false",
|
||||
"gdpr_consent": self.config["gdpr_consent"],
|
||||
"gdpr_consent_opt_out": 0,
|
||||
"hdr_type": self.hdr_type,
|
||||
"ifa_subscriber_id": self.ifa_subscriber_id,
|
||||
"locale": self.locale,
|
||||
"market_code": self.market_code,
|
||||
"player": self.config["clients"][self.device]["player"],
|
||||
"player_height": 1080,
|
||||
"player_width": 1920,
|
||||
"publisher_provided_id": "046f58b1-d89b-4fa4-979b-a9bcd6d78a76",
|
||||
"session_uuid": self.session_uuid,
|
||||
"strict_video_quality": "false",
|
||||
"subtitle_formats": ["vtt"],
|
||||
"subtitle_language": "MIS",
|
||||
"timestamp": f"{int(datetime.datetime.now().timestamp())}122",
|
||||
"video_type": "stream",
|
||||
}
|
||||
)
|
||||
stream_info_url += "&signature=" + self.generate_signature(stream_info_url)
|
||||
return self.session.post(
|
||||
url=stream_info_url,
|
||||
).json()
|
||||
|
||||
def get_avod(self, audio_language=None, title: Title_T = None):
|
||||
# Si no se especifica idioma, usar el primero disponible
|
||||
if audio_language is None:
|
||||
audio_language = self.audio_languages[0]
|
||||
|
||||
stream_info_url = self.config["endpoints"]["manifest"].format(
|
||||
kind="avod"
|
||||
) + urllib.parse.urlencode(
|
||||
{
|
||||
"device_stream_video_quality": self.resolution,
|
||||
"device_identifier": self.config["clients"][self.device][
|
||||
"device_identifier"
|
||||
],
|
||||
"market_code": self.market_code,
|
||||
"session_uuid": self.session_uuid,
|
||||
"timestamp": f"{int(datetime.datetime.now().timestamp())}122",
|
||||
}
|
||||
)
|
||||
stream_info_url += "&signature=" + self.generate_signature(stream_info_url)
|
||||
return self.session.post(
|
||||
url=stream_info_url,
|
||||
data={
|
||||
"hdr_type": self.hdr_type,
|
||||
"audio_quality": "5.1", # Will get better audio in different request to make sure it wont error
|
||||
"app_version": self.config["clients"][self.device]["app_version"],
|
||||
"content_id": title.id,
|
||||
"video_quality": self.resolution,
|
||||
"audio_language": audio_language, # Usar el idioma especificado
|
||||
"video_type": "stream",
|
||||
"device_serial": self.config["clients"][self.device]["device_serial"],
|
||||
"content_type": "movies" if self.movie else "episodes",
|
||||
"classification_id": self.classification_id,
|
||||
"subtitle_language": "MIS",
|
||||
"player": self.config["clients"][self.device]["player"],
|
||||
},
|
||||
).json()
|
||||
|
||||
def generate_signature(self, url):
|
||||
up = urllib.parse.urlparse(url)
|
||||
digester = hmac.new(
|
||||
self.access_token.encode(),
|
||||
f"POST{up.path}{up.query}".encode(),
|
||||
hashlib.sha1,
|
||||
)
|
||||
return (
|
||||
base64.b64encode(digester.digest())
|
||||
.decode("utf-8")
|
||||
.replace("+", "-")
|
||||
.replace("/", "_")
|
||||
)
|
||||
|
||||
|
||||
|
||||
def append_tracks(self, tracks):
|
||||
"""
|
||||
Busca y agrega tracks adicionales de video y audio que no están en el manifest.
|
||||
"""
|
||||
if not tracks.videos:
|
||||
self.log.warning("No video tracks found, skipping append_tracks")
|
||||
return
|
||||
|
||||
# Buscar tracks de video adicionales
|
||||
self._append_video_tracks(tracks)
|
||||
|
||||
# Buscar tracks de audio adicionales
|
||||
self._append_audio_tracks(tracks)
|
||||
|
||||
|
||||
def _append_video_tracks(self, tracks):
    """Probe for and append additional H.264 video renditions.

    The manifest only advertises one AVC rendition; sibling renditions
    appear to live at predictable URLs where the trailing ``avc1-N`` /
    ``h264-N`` index increments.  Each candidate URL is HEAD-checked,
    then a small byte range is downloaded so MediaInfo can fill in
    width/height/bitrate.  Mutates ``tracks.videos`` in place.
    """
    if not tracks.videos:
        return

    codec = tracks.videos[0].codec

    # Only H.264 streams use the numbered-rendition URL scheme.
    if codec != Video.Codec.AVC:
        self.log.debug(f"Skipping video track search (codec: {codec.name}, only works for AVC/H.264)")
        return

    # Extract the codec token ("avc1" or "h264") from the last rendition URL.
    url_pattern = tracks.videos[-1].url
    codec_match = re.search(r'(avc1|h264)-(\d+)', url_pattern, re.IGNORECASE)

    if not codec_match:
        self.log.debug("Could not find codec pattern in URL for video track search")
        return

    codec_prefix = codec_match.group(1)  # "avc1" or "h264"
    self.log.info(f"Searching for additional H.264 video tracks (pattern: {codec_prefix})...")

    # Scratch file inside Unshackle's temp directory for MediaInfo probing.
    temp_file = os.path.join(str(config.directories.temp), "video_test.mp4")

    tracks_found = 0

    for n in range(100):  # hard upper bound on probed renditions
        # Build the URL for the next rendition index.
        current_number = len(tracks.videos) + 1
        ismv = re.sub(
            rf"{codec_prefix}-\d+",
            rf"{codec_prefix}-{current_number}",
            tracks.videos[-1].url,
        )

        # Stop probing at the first missing or unreachable index.
        try:
            response = self.session.head(ismv, timeout=5)
            if response.status_code != 200:
                self.log.debug(f"Video track search ended at index {current_number}")
                break
        except Exception as e:
            self.log.debug(f"Video track search failed: {e}")
            break

        # Clone the last known rendition and point it at the new URL.
        video = copy(tracks.videos[-1])
        video.url = ismv
        video.id_ = hashlib.md5(ismv.encode()).hexdigest()[:16]

        # Download a small chunk so MediaInfo can read the track header.
        try:
            with open(temp_file, "wb") as chunkfile:
                data = self.session.get(
                    url=ismv,
                    headers={"Range": "bytes=0-50000"},
                    timeout=10
                )
                chunkfile.write(data.content)

            info = MediaInfo.parse(temp_file)

            if not info.video_tracks:
                self.log.debug(f"No video info found for track {current_number}")
                # NOTE(review): `continue` leaves len(tracks.videos) unchanged,
                # so the next iteration probes this same index again — confirm
                # whether this should be `break` instead.
                continue

            video_info = info.video_tracks[0]
            video.height = video_info.height
            video.width = video_info.width
            video.bitrate = video_info.maximum_bit_rate or video_info.bit_rate

            # Register the new rendition.
            tracks.videos.append(video)
            tracks_found += 1
            self.log.info(
                f" + Added video track #{current_number}: "
                f"{video.width}x{video.height} @ {video.bitrate} bps"
            )

        except Exception as e:
            self.log.warning(f"Failed to process video track {current_number}: {e}")
            break
        finally:
            # Always clean up the probe chunk.
            if os.path.exists(temp_file):
                os.remove(temp_file)

    if tracks_found > 0:
        self.log.info(f"Total additional video tracks found: {tracks_found}")
|
||||
|
||||
|
||||
def _append_audio_tracks(self, tracks):
    """Probe for and append additional audio tracks for all selected languages.

    For every language in ``self.audio_languages``, candidate URLs of the
    form ``audio-{lang}-{codec}-1`` are tried for each codec in order of
    preference.  Existing tracks are HEAD-checked and probed with MediaInfo
    the same way as video renditions.  Mutates ``tracks.audio`` in place.
    """
    if not tracks.audio:
        self.log.warning("No audio tracks found to use as base")
        return

    if not hasattr(self, 'audio_languages') or not self.audio_languages:
        self.log.debug("No audio languages configured")
        return

    self.log.info(f"Searching for additional audio tracks in languages: {self.audio_languages}")

    # Codecs to try, in order of preference.
    codecs_to_try = ["ec-3", "ac-3", "dts", "mp4a"]

    # Scratch file inside Unshackle's temp directory for MediaInfo probing.
    temp_file = os.path.join(str(config.directories.temp), "audio_test.mp4")

    base_audio = tracks.audio[0]
    base_url = base_audio.url

    tracks_found = 0

    for language in self.audio_languages:
        for codec_name in codecs_to_try:
            # Candidate URL pattern: audio-{LANG}-{CODEC}-{NUMBER}
            isma = re.sub(
                r"audio-[a-zA-Z]{2,3}-[a-z0-9\-]+-\d+",
                f"audio-{language.lower()}-{codec_name}-1",
                base_url,
            )

            # Skip candidates that do not exist on the CDN.
            try:
                response = self.session.head(isma, timeout=5)
                if response.status_code != 200:
                    continue
            except Exception:
                continue

            # Skip URLs that are already present (avoid duplicates).
            if any(audio.url == isma for audio in tracks.audio):
                self.log.debug(f"Audio track already exists: {language}-{codec_name}")
                continue

            # Clone the base audio track and point it at the new URL.
            audio = copy(base_audio)
            audio.url = isma
            audio.id_ = hashlib.md5(isma.encode()).hexdigest()[:16]

            # Map the service's language abbreviation to a tag Language accepts.
            mapped_lang = self.LANG_MAP.get(language, language)
            audio.language = Language.get(mapped_lang)

            # Mark the track as original-language when it matches the video's.
            if tracks.videos:
                audio.is_original_lang = (
                    audio.language.language == tracks.videos[0].language.language
                )

            # Download a small chunk so MediaInfo can read the track header.
            try:
                with open(temp_file, "wb") as bytetest:
                    data = self.session.get(
                        url=isma,
                        headers={"Range": "bytes=0-50000"},
                        timeout=10
                    )
                    bytetest.write(data.content)

                info = MediaInfo.parse(temp_file)

                if not info.audio_tracks:
                    self.log.debug(f"No audio info found for {language}-{codec_name}")
                    continue

                audio_info = info.audio_tracks[0]
                audio.bitrate = audio_info.bit_rate

                # Fallback channel counts by codec family when MediaInfo
                # does not report them.
                if codec_name in ["ec-3", "ac-3", "dts"]:
                    audio.channels = audio_info.channel_s or "5.1"
                else:  # mp4a (AAC)
                    audio.channels = audio_info.channel_s or "2.0"

                # Keep the codec field in the format Unshackle expects.
                audio.codec = Audio.Codec.from_codecs(codec_name)

                # Register the new track.
                tracks.audio.append(audio)
                tracks_found += 1

                self.log.info(
                    f" + Added audio track: {audio.language.display_name()} "
                    f"[{codec_name.upper()}] - {audio.channels}ch @ {audio.bitrate} bps"
                )

            except Exception as e:
                self.log.debug(f"Failed to process audio {language}-{codec_name}: {e}")
            finally:
                # Always clean up the probe chunk.
                if os.path.exists(temp_file):
                    os.remove(temp_file)

    if tracks_found > 0:
        self.log.info(f"Total additional audio tracks found: {tracks_found}")
|
||||
|
||||
def get_widevine_service_certificate(self, **kwargs):
    """Return the static Widevine service certificate from the service config."""
    certificate = self.config["certificate"]
    return certificate
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
    """POST the Widevine challenge to the license server and return the license.

    Exits the process when the server reports an error (CDM ineligible /
    blacklisted, or revoked).
    """
    res = self.session.post(
        url=self.license_url,
        data=challenge,
    )

    if "errors" in res.text:
        # Parse into a separate name so `res` keeps its Response type
        # (previously `res` was rebound to the parsed dict).
        error = res.json()["errors"][0]
        message = error.get("message")
        if message == "HttpException: Forbidden":
            self.log.error(
                " x This CDM is not eligible to decrypt this\n"
                " content or has been blacklisted by RakutenTV"
            )
        elif message == "HttpException: An error happened":
            self.log.error(
                " x This CDM seems to be revoked and\n"
                " therefore it can't decrypt this content",
            )
        else:
            # Fix: unknown errors previously exited with no log output at all.
            self.log.error(f" x License request failed: {message}")
        sys.exit(1)

    return res.content
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[bytes]:
    """POST the PlayReady challenge to the license server and return the license.

    Exits the process when the server reports an error (CDM ineligible /
    blacklisted, or revoked).  Mirrors get_widevine_license().
    """
    res = self.session.post(
        url=self.license_url,
        data=challenge,
    )

    if "errors" in res.text:
        # Parse into a separate name so `res` keeps its Response type
        # (previously `res` was rebound to the parsed dict).
        error = res.json()["errors"][0]
        message = error.get("message")
        if message == "HttpException: Forbidden":
            self.log.error(
                " x This CDM is not eligible to decrypt this\n"
                " content or has been blacklisted by RakutenTV"
            )
        elif message == "HttpException: An error happened":
            self.log.error(
                " x This CDM seems to be revoked and\n"
                " therefore it can't decrypt this content",
            )
        else:
            # Fix: unknown errors previously exited with no log output at all.
            self.log.error(f" x License request failed: {message}")
        sys.exit(1)

    return res.content
|
||||
|
||||
|
||||
def pair_device(self):
    """Log in to RakutenTV as the configured device and cache session state.

    On success, sets: access_token, ifa_subscriber_id, session_uuid,
    classification_id, locale, market_code.  Exits the process on missing
    credentials, VPN/proxy detection, or a failed login.
    """
    # TODO: Make this return the tokens, move print out of the func
    if not self.credential:
        self.log.error(" - No credentials provided, unable to log in.")
        sys.exit(1)

    # Hoist the repeated client-config lookups.
    client = self.config["clients"][self.device]
    try:
        res = self.session.post(
            url=self.config["endpoints"]["auth"],
            params={"device_identifier": client["device_identifier"]},
            data={
                "app_version": client["app_version"],
                "device_metadata[uid]": client["device_serial"],
                "device_metadata[os]": client["device_os"],
                "device_metadata[model]": client["device_model"],
                "device_metadata[year]": client["device_year"],
                "device_serial": client["device_serial"],
                "device_metadata[trusted_uid]": False,
                "device_metadata[brand]": client["device_brand"],
                "classification_id": 69,
                "user[password]": self.credential.password,
                "device_metadata[app_version]": client["app_version"],
                "user[username]": self.credential.username,
                "device_metadata[serial_number]": client["device_serial"],
            },
        ).json()
    except HTTPError as e:
        if e.response.status_code == 403:
            self.log.error(
                " - Rakuten returned a 403 (FORBIDDEN) error. "
                "This could be caused by your IP being detected as a proxy, or regional issues. Cannot continue."
            )
            # Fix: previously execution fell through and crashed on the
            # unbound `res` below.
            sys.exit(1)
        raise

    if "errors" in res:
        error = res["errors"][0]
        if "exception.forbidden_vpn" in error["code"]:
            self.log.error(" x RakutenTV is detecting this VPN or Proxy")
        else:
            self.log.error(f" - Login failed: {error['message']} [{error['code']}]")
        # Fix: previously execution continued into res["data"] (KeyError).
        sys.exit(1)

    user = res["data"]["user"]
    self.access_token = user["access_token"]
    self.ifa_subscriber_id = user["avod_profile"]["ifa_subscriber_id"]
    self.session_uuid = user["session_uuid"]
    self.classification_id = user["profile"]["classification"]["id"]
    self.locale = res["data"]["market"]["locale"]
    self.market_code = res["data"]["market"]["code"]
|
||||
|
||||
def get_info(self, title):
    """Cache playback metadata (kind, HDR type, audio languages) from *title*.

    Expects ``self.desired_audio_language`` (comma-separated, e.g. "ENG,SPA")
    to be set.  Exits the process when none of the desired languages are
    available.  Returns *title* unchanged for chaining.
    """
    self.kind = title["labels"]["purchase_types"][0]["kind"]

    self.available_hdr_types = [x for x in title["labels"]["hdr_types"]]

    # Pick the best HDR format both advertised by the title and supported
    # by its view options, in descending order of preference.
    supported_hdr = title["view_options"]["support"]["hdr_types"]
    self.hdr_type = "NONE"
    for candidate in ("HDR10_PLUS", "DOLBY_VISION", "HDR10"):
        if any(x["abbr"] == candidate for x in self.available_hdr_types) and any(
            x["abbr"] == candidate for x in supported_hdr
        ):
            self.hdr_type = candidate
            break

    # View options live on the first episode for shows, on the title for movies.
    view_options = title.get("episodes", [{}])[0].get("view_options") or title.get("view_options")

    # Collect every advertised audio language across the available streams.
    # NOTE(review): the single-stream branch keys off `offline_streams` but
    # reads `streams` — confirm those lists are always parallel in the API.
    if len(view_options["private"]["offline_streams"]) == 1:
        self.audio_languages = [
            x["abbr"]
            for x in view_options["private"]["streams"][0]["audio_languages"]
        ]
    else:
        all_audio_languages = []
        for stream in view_options["private"]["streams"]:
            for audio_lang in stream["audio_languages"]:
                if audio_lang["abbr"] not in all_audio_languages:
                    all_audio_languages.append(audio_lang["abbr"])
        self.audio_languages = all_audio_languages

    # Keep only the user's requested languages that are actually available.
    selected_langs = [
        lang.strip()
        for lang in self.desired_audio_language.split(",")
        if lang.strip() in self.audio_languages
    ]
    if not selected_langs:
        # Fix: previously this only logged and carried on with an empty
        # language list, breaking every downstream track request.
        self.log.error("No selected language. Exiting.")
        sys.exit(1)
    self.audio_languages = selected_langs

    self.log.info(f"Selected audio languages: {self.audio_languages}")

    return title
|
||||
BIN
RKTN/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
RKTN/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
RKTN/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
RKTN/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
71
RKTN/config.yaml
Normal file
71
RKTN/config.yaml
Normal file
@ -0,0 +1,71 @@
|
||||
certificate: |
|
||||
CAUSxwUKwQIIAxIQFwW5F8wSBIaLBjM6L3cqjBiCtIKSBSKOAjCCAQoCggEBAJntWzsyfateJO/DtiqVtZhSCtW8yzdQPgZFuBTYdrjfQFEEQa2M462xG
|
||||
7iMTnJaXkqeB5UpHVhYQCOn4a8OOKkSeTkwCGELbxWMh4x+Ib/7/up34QGeHleB6KRfRiY9FOYOgFioYHrc4E+shFexN6jWfM3rM3BdmDoh+07svUoQyk
|
||||
dJDKR+ql1DghjduvHK3jOS8T1v+2RC/THhv0CwxgTRxLpMlSCkv5fuvWCSmvzu9Vu69WTi0Ods18Vcc6CCuZYSC4NZ7c4kcHCCaA1vZ8bYLErF8xNEkKd
|
||||
O7DevSy8BDFnoKEPiWC8La59dsPxebt9k+9MItHEbzxJQAZyfWgkCAwEAAToUbGljZW5zZS53aWRldmluZS5jb20SgAOuNHMUtag1KX8nE4j7e7jLUnfS
|
||||
SYI83dHaMLkzOVEes8y96gS5RLknwSE0bv296snUE5F+bsF2oQQ4RgpQO8GVK5uk5M4PxL/CCpgIqq9L/NGcHc/N9XTMrCjRtBBBbPneiAQwHL2zNMr80
|
||||
NQJeEI6ZC5UYT3wr8+WykqSSdhV5Cs6cD7xdn9qm9Nta/gr52u/DLpP3lnSq8x2/rZCR7hcQx+8pSJmthn8NpeVQ/ypy727+voOGlXnVaPHvOZV+WRvWC
|
||||
q5z3CqCLl5+Gf2Ogsrf9s2LFvE7NVV2FvKqcWTw4PIV9Sdqrd+QLeFHd/SSZiAjjWyWOddeOrAyhb3BHMEwg2T7eTo/xxvF+YkPj89qPwXCYcOxF+6gjo
|
||||
mPwzvofcJOxkJkoMmMzcFBDopvab5tDQsyN9UPLGhGC98X/8z8QSQ+spbJTYLdgFenFoGq47gLwDS6NWYYQSqzE3Udf2W7pzk4ybyG4PHBYV3s4cyzdq8amvtE/sNSdOKReuHpfQ=
|
||||
|
||||
gdpr_consent: |
|
||||
CPGeIEAPV65UAADABBNLCGCsAP_AAH_AAAAAHrsXZCpcBSlgYCpoAIoAKIAUEAAAgyAAABAAAoABCAAAIAQAgAAgIAAAAAAAAAAAIAJAAQAAAAEAAAAAAA
|
||||
AAAAAIIACAAAAAIABAAAAAAAAACAAAAAAAAAAAAAAEAAAAgABAABAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAgZ8xdkKlwFKWBgKGgAigAogBQQAACDIAAA
|
||||
EAACAAAIAAAgBACAACAAAAAAAAAAAAAgAgABAAAAAQAAAAAAAAAAAAggAAAAAAAgAEAAAAAAAAAAAAAAAAAAAAAAAAQAAACAAEAAEAAAAAAQAA.YAAAAAAAA8DA
|
||||
|
||||
endpoints:
|
||||
title: https://gizmo.rakuten.tv/v3/movies/{title_id}?
|
||||
show: https://gizmo.rakuten.tv/v3/seasons/{title_id}?
|
||||
manifest: https://gizmo.rakuten.tv/v3/{kind}/streamings?
|
||||
auth: https://gizmo.rakuten.tv/v3/me/login_or_wuaki_link
|
||||
clients:
|
||||
web:
|
||||
app_version: v3.0.11
|
||||
device_identifier: web
|
||||
device_serial: 6cc3584a-c182-4cc1-9f8d-b90e4ed76de9
|
||||
player: web:DASH-CENC:WVM
|
||||
device_os: Windows 10
|
||||
device_model: GENERIC
|
||||
device_year: 2019
|
||||
device_brand: chrome
|
||||
device_sdk: 100.0.4896
|
||||
android:
|
||||
app_version: 3.22.0
|
||||
device_identifier: android
|
||||
device_serial: 3187ad6c-4d1c-4cbb-9c59-8396d054eb2a
|
||||
player: android:DASH-CENC
|
||||
device_os: Android
|
||||
device_model: SM-A105FN
|
||||
device_year: 2021
|
||||
device_brand: Samsung
|
||||
device_sdk: ""
|
||||
atvui40:
|
||||
app_version: v2.77.0
|
||||
device_identifier: atvui40
|
||||
device_serial: 0424814603535001d1b1
|
||||
player: atvui40:DASH-CENC:WVM
|
||||
device_os: Android TV UI 40
|
||||
device_model: SHIELD Android TV
|
||||
device_year: 1970
|
||||
device_brand: NVIDIA
|
||||
device_sdk: ""
|
||||
lgui40:
|
||||
app_version: v2.77.0
|
||||
device_identifier: lgui40
|
||||
device_serial: 203WRMD8U920
|
||||
player: lgui40:DASH-CENC:PR
|
||||
device_os: LG UI 40
|
||||
device_model: OLED65C11LB
|
||||
device_year: 2021
|
||||
device_brand: LG
|
||||
device_sdk: ""
|
||||
smui40:
|
||||
app_version: v2.77.0
|
||||
device_identifier: smui40
|
||||
device_serial: 6cc3584a-c182-4cc1-9f8d-b90e4ed76de9
|
||||
player: smtvui40:DASH-CENC:WVM
|
||||
device_os: Samsung UI 40
|
||||
device_model: QE43Q60RATXXH
|
||||
device_year: 2019
|
||||
device_brand: Samsung
|
||||
device_sdk: ""
|
||||
261
ROKU/__init__.py
Normal file
261
ROKU/__init__.py
Normal file
@ -0,0 +1,261 @@
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import datetime, timezone
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Any, Optional
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
import click
|
||||
import requests
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks
|
||||
|
||||
|
||||
class ROKU(Service):
    """
    Service code for The Roku Channel (https://therokuchannel.roku.com)

    \b
    Version: 1.0.2
    Author: stabbedbybrick
    Authorization: Cookies
    Robustness:
      Widevine:
        L3: 1080p, DD5.1

    \b
    Tips:
        - Use complete title/episode URL or id as input:
            https://therokuchannel.roku.com/details/e05fc677ab9c5d5e8332f123770697b9/paddington
            OR
            e05fc677ab9c5d5e8332f123770697b9
        - Supports movies, series, and single episodes
        - Search is geofenced
    """

    GEOFENCE = ("us",)
    TITLE_RE = r"^(?:https?://(?:www.)?therokuchannel.roku.com/(?:details|watch)/)?(?P<id>[a-z0-9-]+)"

    @staticmethod
    @click.command(name="ROKU", short_help="https://therokuchannel.roku.com", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        # Click entry point: construct the service from the CLI context.
        return ROKU(ctx, **kwargs)

    def __init__(self, ctx, title):
        # Accept either a full details/watch URL or a bare content id.
        self.title = re.match(self.TITLE_RE, title).group("id")
        super().__init__(ctx)

        # Widevine license server URL; assigned in get_tracks().
        self.license: str

    def authenticate(
        self,
        cookies: Optional[CookieJar] = None,
        credential: Optional[Credential] = None,
    ) -> None:
        """Authenticate with cookies (the only scheme this service supports)."""
        super().authenticate(cookies, credential)
        if cookies is not None:
            self.session.cookies.update(cookies)

    def search(self) -> Generator[SearchResult, None, None]:
        """Yield search results for ``self.title`` (geofenced endpoint)."""
        # The search API requires a CSRF token from a separate endpoint.
        token = self.session.get(self.config["endpoints"]["token"]).json()["csrf"]

        headers = {"csrf-token": token}
        payload = {"query": self.title}

        r = self.session.post(self.config["endpoints"]["search"], headers=headers, json=payload)
        r.raise_for_status()

        results = r.json()
        for result in results["view"]:
            # "zone" and "provider" rows are navigation entries, not titles.
            if result["content"]["type"] not in ["zone", "provider"]:
                _id = result["content"].get("meta", {}).get("id")
                _desc = result["content"].get("descriptions", {})

                label = f'{result["content"].get("type")} ({result["content"].get("releaseYear")})'
                if result["content"].get("viewOptions"):
                    label += f' ({result["content"]["viewOptions"][0].get("priceDisplay")})'

                # Slugify the display title for the canonical details URL.
                title = re.sub(r"^-|-$", "", re.sub(r"\W+", "-", result["content"].get("title").lower()))

                yield SearchResult(
                    id_=_id,
                    title=title,
                    description=_desc["250"]["text"] if _desc.get("250") else None,
                    label=label,
                    url=f"https://therokuchannel.roku.com/details/{_id}/{title}",
                )

    def get_titles(self) -> Titles_T:
        """Resolve ``self.title`` into Movies or Series metadata objects."""
        data = self.session.get(self.config["endpoints"]["content"] + self.title).json()
        if not data["isAvailable"]:
            self.log.error("This title is temporarily unavailable or expired")
            sys.exit(1)

        if data["type"] in ["movie", "tvspecial"]:
            return Movies(
                [
                    Movie(
                        id_=data["meta"]["id"],
                        service=self.__class__,
                        name=data["title"],
                        year=data["releaseYear"],
                        language=data["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=data,
                    )
                ]
            )

        elif data["type"] == "series":
            # Every episode needs its own metadata request; fetched concurrently.
            episodes = self.fetch_episodes(data)
            return Series(
                [
                    Episode(
                        id_=episode["meta"]["id"],
                        service=self.__class__,
                        title=data["title"],
                        season=int(episode["seasonNumber"]),
                        number=int(episode["episodeNumber"]),
                        name=episode["title"],
                        year=data["releaseYear"],
                        language=episode["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=data,
                    )
                    for episode in episodes
                ]
            )

        elif data["type"] == "episode":
            # A single episode is wrapped in a one-element Series.
            return Series(
                [
                    Episode(
                        id_=data["meta"]["id"],
                        service=self.__class__,
                        title=data["title"],
                        season=int(data["seasonNumber"]),
                        number=int(data["episodeNumber"]),
                        name=data["title"],
                        year=data["releaseYear"],
                        language=data["viewOptions"][0]["media"].get("originalAudioLanguage", "en"),
                        data=data,
                    )
                ]
            )
        # NOTE(review): any other content type falls through and returns None.

    def get_tracks(self, title: Title_T) -> Tracks:
        """Request playback info for *title* and return its DASH tracks."""
        token = self.session.get(self.config["endpoints"]["token"]).json()["csrf"]

        options = title.data["viewOptions"]
        subscription = options[0].get("license", "").lower()
        authenticated = next((x for x in options if x.get("isAuthenticated")), None)

        if subscription == "subscription" and not authenticated:
            self.log.error("This title is only available to subscribers")
            sys.exit(1)

        # Prefer the authenticated view option when the account has one.
        play_id = authenticated.get("playId") if authenticated else options[0].get("playId")
        provider_id = authenticated.get("providerId") if authenticated else options[0].get("providerId")

        headers = {
            "csrf-token": token,
        }
        payload = {
            "rokuId": title.id,
            "playId": play_id,
            "mediaFormat": "mpeg-dash",
            "drmType": "widevine",
            "quality": "fhd",
            "providerId": provider_id,
        }

        r = self.session.post(
            self.config["endpoints"]["vod"],
            headers=headers,
            json=payload,
        )
        r.raise_for_status()

        videos = r.json()["playbackMedia"]["videos"]
        # Remember the Widevine license server for get_widevine_license().
        self.license = next(
            (
                x["drmParams"]["licenseServerURL"]
                for x in videos
                if x.get("drmParams") and x["drmParams"]["keySystem"] == "Widevine"
            ),
            None,
        )

        # Some manifest URLs are wrapped in a redirector; unwrap the real URL
        # from the "origin" query parameter.
        url = next((x["url"] for x in videos if x["streamFormat"] == "dash"), None)
        if url and "origin" in urlparse(url).query:
            url = unquote(urlparse(url).query.split("=")[1]).split("?")[0]

        tracks = DASH.from_url(url=url).to_tracks(language=title.language)
        # Stash playback metadata on the first video track for get_chapters().
        tracks.videos[0].data["playbackMedia"] = r.json()["playbackMedia"]

        # Flag audio-description and closed-caption tracks from DASH labels.
        for track in tracks.audio:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "description" in label.text:
                track.descriptive = True

        for track in tracks.subtitles:
            label = track.data["dash"]["adaptation_set"].find("Label")
            if label is not None and "caption" in label.text:
                track.cc = True

        return tracks

    def get_chapters(self, title: Title_T) -> list[Chapter]:
        """Derive chapters from ad-break markers plus the credits cue point."""
        track = title.tracks.videos[0]

        chapters = []
        if track.data.get("playbackMedia", {}).get("adBreaks"):
            timestamps = sorted(track.data["playbackMedia"]["adBreaks"])
            chapters = [Chapter(name=f"Chapter {i + 1:02}", timestamp=ad.split(".")[0]) for i, ad in enumerate(timestamps)]

        if track.data.get("playbackMedia", {}).get("creditCuePoints"):
            # First non-zero cue point marks the start of the credits
            # (value appears to be milliseconds given the /1000 below).
            start = next((
                x.get("start") for x in track.data["playbackMedia"]["creditCuePoints"] if x.get("start") != 0), None)
            if start:
                chapters.append(
                    Chapter(
                        name="Credits",
                        timestamp=datetime.fromtimestamp((start / 1000), tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3],
                    )
                )

        return chapters

    def get_widevine_service_certificate(self, **_: Any) -> str:
        # No service certificate is enforced; the CDM's common privacy
        # certificate could be substituted if ever required.
        return  # WidevineCdm.common_privacy_cert

    def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
        """POST the CDM challenge to the license server and return the reply."""
        r = self.session.post(url=self.license, data=challenge)
        if r.status_code != 200:
            self.log.error(r.text)
            sys.exit(1)
        return r.content

    # service specific functions

    def fetch_episode(self, episode: dict) -> Optional[dict]:
        """Fetch metadata for one episode; return None on request failure."""
        try:
            r = self.session.get(self.config["endpoints"]["content"] + episode["meta"]["id"])
            r.raise_for_status()
            return r.json()
        except requests.exceptions.RequestException as e:
            self.log.error(f"An error occurred while fetching episode {episode['meta']['id']}: {e}")
            return None

    def fetch_episodes(self, data: dict) -> list:
        """TODO: Switch to async once https proxies are fully supported"""
        # Fan out the per-episode metadata requests over a small thread pool;
        # failed fetches (None) are dropped from the result.
        with ThreadPoolExecutor(max_workers=10) as executor:
            tasks = list(executor.map(self.fetch_episode, data["episodes"]))
        return [task for task in tasks if task is not None]
|
||||
5
ROKU/config.yaml
Normal file
5
ROKU/config.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
endpoints:
|
||||
content: https://therokuchannel.roku.com/api/v2/homescreen/content/https%3A%2F%2Fcontent.sr.roku.com%2Fcontent%2Fv1%2Froku-trc%2F
|
||||
vod: https://therokuchannel.roku.com/api/v3/playback
|
||||
token: https://therokuchannel.roku.com/api/v1/csrf
|
||||
search: https://therokuchannel.roku.com/api/v1/search
|
||||
285
RTE/__init__.py
Normal file
285
RTE/__init__.py
Normal file
@ -0,0 +1,285 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import json
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from typing import Any, Optional, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import click
|
||||
from requests import Request
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
from unshackle.core.utils.xml import load_xml
|
||||
|
||||
|
||||
class RTE(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for RTE Player streaming service (https://www.rte.ie/player/).
|
||||
|
||||
\b
|
||||
Version: 1.0.3
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Robustness:
|
||||
Widevine:
|
||||
L3: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input (pay attention to the URL format):
|
||||
SERIES: https://www.rte.ie/player/series/crossfire/10003928-00-0000
|
||||
EPISODE: https://www.rte.ie/player/series/crossfire/10003928-00-0000?epguid=AQ10003929-01-0001
|
||||
MOVIE: https://www.rte.ie/player/movie/glass/360230440380
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- Since some content is accessible worldwide, geofence is deactivated.
|
||||
- Using an IE IP-address is recommended to access everything.
|
||||
|
||||
"""
|
||||
|
||||
# GEOFENCE = ("ie",)
|
||||
|
||||
@staticmethod
|
||||
@click.command(name="RTE", short_help="https://www.rte.ie/player/", help=__doc__)
|
||||
@click.argument("title", type=str, required=False)
|
||||
@click.pass_context
|
||||
def cli(ctx, **kwargs) -> RTE:
|
||||
return RTE(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title):
|
||||
self.title = title
|
||||
super().__init__(ctx)
|
||||
|
||||
self.base_url = self.config["endpoints"]["base_url"]
|
||||
self.feed = self.config["endpoints"]["feed"]
|
||||
self.license = self.config["endpoints"]["license"]
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
|
||||
params = {
|
||||
"byProgramType": "Series|Movie",
|
||||
"q": f"title:({self.title})",
|
||||
"range": "0-40",
|
||||
"schema": "2.15",
|
||||
"sort": "rte$rank|desc",
|
||||
"gzip": "true",
|
||||
"omitInvalidFields": "true",
|
||||
}
|
||||
results = self._request(f"{self.feed}/f/1uC-gC/rte-prd-prd-search", params=params)["entries"]
|
||||
|
||||
for result in results:
|
||||
link = "https://www.rte.ie/player/{}/{}/{}"
|
||||
series = result.get("plprogram$programType").lower() == "series"
|
||||
_id = result.get("guid") if series else result.get("id").split("/")[-1]
|
||||
_title = result.get("title") if series else result.get("plprogram$longTitle")
|
||||
_type = result.get("plprogram$programType")
|
||||
|
||||
title = _title.format(_type, _title, _id).lower()
|
||||
title = re.sub(r"\W+", "-", title)
|
||||
title = re.sub(r"^-|-$", "", title)
|
||||
|
||||
yield SearchResult(
|
||||
id_=link.format(_type, title, _id),
|
||||
title=_title,
|
||||
description=result.get("plprogram$shortDescription"),
|
||||
label=_type,
|
||||
url=link.format(_type, title, _id),
|
||||
)
|
||||
|
||||
def get_titles(self) -> Titles_T:
|
||||
title_re = (
|
||||
r"https://www\.rte\.ie/player"
|
||||
r"/(?P<type>series|movie)"
|
||||
r"/(?P<slug>[a-zA-Z0-9%_.-]+)"
|
||||
r"/(?P<id>[a-zA-Z0-9_\-=?]+)/?$"
|
||||
)
|
||||
try:
|
||||
kind, _, title_id = (re.match(title_re, self.title).group(i) for i in ("type", "slug", "id"))
|
||||
except Exception:
|
||||
raise ValueError("- Could not parse ID from input")
|
||||
|
||||
episode = title_id.split("=")[1] if "epguid" in title_id else None
|
||||
|
||||
if episode:
|
||||
episode = self._episode(title_id, episode)
|
||||
return Series(episode)
|
||||
|
||||
elif kind == "movie":
|
||||
movie = self._movie(title_id)
|
||||
return Movies(movie)
|
||||
|
||||
elif kind == "series":
|
||||
episodes = self._show(title_id)
|
||||
return Series(episodes)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
|
||||
self.token, self.account = self.get_config()
|
||||
media = title.data["plprogramavailability$media"][0].get("plmedia$publicUrl")
|
||||
if not media:
|
||||
raise ValueError("Could not find any streams - is the title still available?")
|
||||
|
||||
manifest, self.pid = self.get_manifest(media)
|
||||
tracks = DASH.from_url(manifest, self.session).to_tracks(language=title.language)
|
||||
for track in tracks.audio:
|
||||
role = track.data["dash"]["adaptation_set"].find("Role")
|
||||
if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
|
||||
track.descriptive = True
|
||||
|
||||
return tracks
|
||||
|
||||
def get_chapters(self, title: Episode) -> Chapters:
|
||||
if not title.data.get("rte$chapters"):
|
||||
return Chapters()
|
||||
|
||||
timecodes = [x for x in title.data["rte$chapters"]]
|
||||
chapters = [Chapter(timestamp=float(x)) for x in timecodes]
|
||||
|
||||
if title.data.get("rte$creditStart"):
|
||||
chapters.append(Chapter(name="Credits", timestamp=float(title.data["rte$creditStart"])))
|
||||
|
||||
return chapters
|
||||
|
||||
    def certificate(self, **_):
        # No service-specific certificate is provided; returning None makes
        # the CDM fall back to the common Widevine privacy certificate.
        return None  # will use common privacy cert
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> Optional[Union[bytes, str]]:
|
||||
params = {
|
||||
"token": self.token,
|
||||
"account": self.account,
|
||||
"form": "json",
|
||||
"schema": "1.0",
|
||||
}
|
||||
payload = {
|
||||
"getWidevineLicense": {
|
||||
"releasePid": self.pid,
|
||||
"widevineChallenge": base64.b64encode(challenge).decode("utf-8"),
|
||||
}
|
||||
}
|
||||
r = self.session.post(url=self.license, params=params, json=payload)
|
||||
if not r.ok:
|
||||
raise ConnectionError(f"License request failed: {r.text}")
|
||||
|
||||
return r.json()["getWidevineLicenseResponse"]["license"]
|
||||
|
||||
# Service specific functions
|
||||
|
||||
    def _movie(self, title: str) -> list[Movie]:
        """Look up *title* (a programme GUID) in the all-programs feed.

        Returns a list of Movie objects (one per feed entry), matching what
        the feed's "entries" array contains.
        """
        params = {"count": "true", "entries": "true", "byId": title}
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs", params=params)["entries"]

        return [
            Movie(
                id_=movie["guid"],
                service=self.__class__,
                name=movie.get("plprogram$longTitle"),
                year=movie.get("plprogram$year"),
                # Fall back to English when the feed lists no languages.
                language=movie["plprogram$languages"][0] if movie.get("plprogram$languages") else "eng",
                data=movie,
            )
            for movie in data
        ]
    def _show(self, title: str) -> list[Episode]:
        """Resolve a series GUID to its feed id, then list all of its episodes.

        Only entries whose programType is "episode" are returned.
        """
        entry = self._request("/mpx/1uC-gC/rte-prd-prd-all-movies-series?byGuid={}".format(title))["entries"][0]["id"]
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs?bySeriesId={}".format(entry.split("/")[-1]))["entries"]

        return [
            Episode(
                id_=episode.get("guid"),
                title=episode.get("plprogram$longTitle"),
                season=episode.get("plprogram$tvSeasonNumber") or 0,
                number=episode.get("plprogram$tvSeasonEpisodeNumber") or 0,
                name=episode.get("description"),
                # Fall back to English when the feed lists no languages.
                language=episode["plprogram$languages"][0] if episode.get("plprogram$languages") else "eng",
                service=self.__class__,
                data=episode,
            )
            for episode in data
            if episode["plprogram$programType"] == "episode"
        ]
    def _episode(self, title: str, guid: str) -> list[Episode]:
        """Fetch the single episode identified by *guid* within series *title*.

        Mirrors _show() but filters the episode list down to the one
        matching GUID (still returned as a list for Series()).
        """
        # Drop any query string (e.g. the epguid parameter) from the id.
        title = title.split("?")[0]
        entry = self._request("/mpx/1uC-gC/rte-prd-prd-all-movies-series?byGuid={}".format(title))["entries"][0]["id"]
        data = self._request("/mpx/1uC-gC/rte-prd-prd-all-programs?bySeriesId={}".format(entry.split("/")[-1]))["entries"]

        return [
            Episode(
                id_=episode.get("guid"),
                title=episode.get("plprogram$longTitle"),
                season=episode.get("plprogram$tvSeasonNumber") or 0,
                number=episode.get("plprogram$tvSeasonEpisodeNumber") or 0,
                name=episode.get("description"),
                # Fall back to English when the feed lists no languages.
                language=episode["plprogram$languages"][0] if episode.get("plprogram$languages") else "eng",
                service=self.__class__,
                data=episode,
            )
            for episode in data
            if episode["plprogram$programType"] == "episode" and episode.get("guid") == guid
        ]
def get_config(self):
|
||||
token = self._request("/servicelayer/api/anonymouslogin")["mpx_token"]
|
||||
account = self._request("/wordpress/wp-content/uploads/standard/web/config.json")["mpx_config"]["account_id"]
|
||||
return token, account
|
||||
|
||||
def get_manifest(self, media_url: str) -> str:
|
||||
try:
|
||||
res = self._request(
|
||||
media_url,
|
||||
params={
|
||||
"formats": "MPEG-DASH",
|
||||
"auth": self.token,
|
||||
"assetTypes": "default:isl",
|
||||
"tracking": "true",
|
||||
"format": "SMIL",
|
||||
"iu": "/3014/RTE_Player_VOD/Android_Phone/NotRegistered",
|
||||
"policy": "168602703",
|
||||
},
|
||||
)
|
||||
|
||||
root = load_xml(res)
|
||||
video = root.xpath("//switch/video")
|
||||
manifest = video[0].get("src")
|
||||
|
||||
elem = root.xpath("//switch/ref")
|
||||
value = elem[0].find(".//param[@name='trackingData']").get("value")
|
||||
pid = re.search(r"pid=([^|]+)", value).group(1)
|
||||
|
||||
return manifest, pid
|
||||
|
||||
except Exception as e:
|
||||
raise ValueError(
|
||||
f"Request for manifest failed: {e}.\n"
|
||||
"Content may be geo-restricted to IE"
|
||||
)
|
||||
|
||||
def _request(self, api: str, params: dict = None, headers: dict = None) -> Any[dict | str]:
|
||||
url = urljoin(self.base_url, api)
|
||||
self.session.headers.update(self.config["headers"])
|
||||
|
||||
if params:
|
||||
self.session.params.update(params)
|
||||
if headers:
|
||||
self.session.headers.update(headers)
|
||||
|
||||
prep = self.session.prepare_request(Request("GET", url))
|
||||
|
||||
response = self.session.send(prep)
|
||||
if response.status_code != 200:
|
||||
raise ConnectionError(
|
||||
f"Status: {response.status_code} - {response.url}\n"
|
||||
"Content may be geo-restricted to IE"
|
||||
)
|
||||
|
||||
try:
|
||||
return json.loads(response.content)
|
||||
except json.JSONDecodeError:
|
||||
return response.text
|
||||
7
RTE/config.yaml
Normal file
7
RTE/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
# RTE Player service configuration.
headers:
  # User-agent sent with every API request (applied in _request()).
  user-agent: Dalvik/2.1.0 (Linux; U; Android 13; SM-A536E Build/RSR1.210722.013.A2)

endpoints:
  base_url: https://www.rte.ie
  feed: https://feed.entertainment.tv.theplatform.eu
  # thePlatform ModularDrm endpoint used for Widevine license requests.
  license: https://widevine.entitlement.eu.theplatform.com/wv/web/ModularDrm
||||
224
SBS/__init__.py
Normal file
224
SBS/__init__.py
Normal file
@ -0,0 +1,224 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from typing import Any
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from requests import Request
|
||||
from unshackle.core.manifests import HLS
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapters, Subtitle, Tracks
|
||||
from unshackle.core.utils.xml import load_xml
|
||||
|
||||
|
||||
class SBS(Service):
    """
    \b
    Service code for SBS ondemand streaming service (https://www.sbs.com.au/ondemand/).

    \b
    Version: 1.0.1
    Author: stabbedbybrick
    Authorization: None
    Geofence: AU (API and downloads)
    Robustness:
      AES: 720p, AAC2.0

    \b
    Tips:
    - Input should be complete URL:
      SERIES: https://www.sbs.com.au/ondemand/tv-series/reckless
      EPISODE: https://www.sbs.com.au/ondemand/tv-series/reckless/season-1/reckless-s1-ep1/2459384899653
      MOVIE: https://www.sbs.com.au/ondemand/movie/silence/1363535939614
      SPORT: https://www.sbs.com.au/ondemand/sports-series/australian-championship-2025/football-australian-championship-2025/australian-championship-2025-s2025-ep40/2457638979614

    \b
    Notes:
    - SBS uses transport streams for HLS, meaning the video and audio are a part of the same stream.
      As a result only videos are listed as tracks, but the audio will be included as well.

    """

    GEOFENCE = ("au",)

    @staticmethod
    @click.command(name="SBS", short_help="https://www.sbs.com.au/ondemand/", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> SBS:
        # Click entry point: forwards the CLI arguments into a service instance.
        return SBS(ctx, **kwargs)

    def __init__(self, ctx: Context, title: str):
        self.title = title
        super().__init__(ctx)

        self.session.headers.update(self.config["headers"])

    def search(self) -> Generator[SearchResult, None, None]:
        """Search the SBS catalogue and yield matches as on-demand URLs."""
        params = {
            "q": self.title.strip(),
        }

        results = self._request("GET", "https://content-search.pr.sbsod.com/catalogue", params=params)["items"]

        for result in results:
            # BUGFIX: the original `in ("PAGE")` compared against the *string*
            # "PAGE" (parentheses alone do not make a tuple), so it was a
            # substring test that also skipped e.g. entityType "P" or "AGE".
            if result.get("entityType") == "PAGE":
                continue

            label = result.get("entityType")
            slug = result.get("slug")
            title = result.get("title")
            description = result.get("description")
            yield SearchResult(
                id_=f"https://www.sbs.com.au/ondemand/{label}/{slug}",
                title=title,
                description=description,
                label=label,
                url=f"https://www.sbs.com.au/ondemand/{label}/{slug}",
            )

    def get_titles(self) -> Movies | Series:
        """Parse the input URL and return the matching Movie or Series titles.

        Raises:
            ValueError: If the URL does not match any supported format.
        """
        regex = re.compile(
            r"^https://www.sbs.com.au/ondemand/"
            r"(?P<entity>tv-series|tv-program|sports-series|movie|watch)"
            r"(?:/|/.*/)"
            r"(?P<id>[^/]+)/?$"
        )

        match = regex.search(self.title)
        if not match:
            raise ValueError(f"Invalid URL input: {self.title}")

        entity_type, entity_id = (match.group(i) for i in ("entity", "id"))

        if entity_type in ("movie", "tv-program") and entity_id.isdigit():
            movie = self._movie(entity_id)
            return Movies(movie)

        elif entity_id.isdigit():
            episode = self._episode(entity_id)
            return Series(episode)

        elif entity_type in ("tv-series", "sports-series"):
            episodes = self._series(urlparse(self.title).path)
            return Series(episodes)

        # Previously this fell through and implicitly returned None;
        # fail loudly instead so the caller gets a clear error.
        raise ValueError(f"Could not determine title type for: {self.title}")

    def get_tracks(self, title: Movie | Episode) -> Tracks:
        """Resolve the HLS manifest and sidecar subtitles for *title*."""
        smil = self._request("GET", f"/api/v3/video_smil?id={title.id}")

        body = load_xml(smil).find("body").find("seq")
        # NOTE: lxml elements are falsy when they have no children, so this
        # also falls back to <seq> when a <par> exists but is empty.
        section = body.find("par") or body

        manifest = next((x.get("src") for x in section.findall("video")), None)
        subtitles = [(x.get("src"), x.get("lang"), x.get("type")) for x in section.findall("textstream")]

        tracks = HLS.from_url(manifest, self.session).to_tracks(title.language)

        for url, lang, mime in subtitles:
            # Skip TTAF-flavoured streams; only the remaining mime types
            # are handed to Subtitle.Codec.from_mime().
            if "ttaf+xml" in mime:
                continue
            codec = mime.split("/")[-1]
            tracks.add(
                Subtitle(
                    id_=hashlib.md5(url.encode()).hexdigest()[0:6],
                    url=url,
                    codec=Subtitle.Codec.from_mime(codec),
                    language=lang,
                    sdh="_CC" in url,
                )
            )

        return tracks

    def get_chapters(self, title: Movie | Episode) -> Chapters:
        # The service exposes no chapter data.
        return Chapters()

    # Service specific

    def _series(self, path: str) -> list[Episode]:
        """Fetch every episode of every season for the series at *path*.

        Raises:
            ValueError: When the catalogue entry has no seasons.
        """
        if "ondemand" in path:
            path = path.split("ondemand")[1]

        metadata = self._request("GET", f"https://catalogue.pr.sbsod.com{path}")

        seasons = metadata.get("seasons")
        if not seasons:
            raise ValueError(f"Failed to find seasons for title: {path}")

        return [
            Episode(
                id_=episode.get("mpxMediaID"),
                service=self.__class__,
                title=episode.get("seriesTitle"),
                season=int(episode.get("seasonNumber", 0)),
                number=int(episode.get("episodeNumber", 0)),
                name=episode.get("title"),
                year=episode.get("releaseYear"),
                language=metadata.get("localeID") or "en",
                data=episode,
            )
            for season in seasons
            for episode in season.get("episodes")
        ]

    def _movie(self, entity_id: str) -> list[Movie]:
        """Fetch a single movie's metadata, wrapped in a one-item list."""
        metadata = self._request("GET", f"https://catalogue.pr.sbsod.com/mpx-media/{entity_id}")

        return [
            Movie(
                id_=metadata.get("mpxMediaID"),
                service=self.__class__,
                name=metadata.get("title") or metadata.get("cdpTitle"),
                year=metadata.get("releaseYear"),
                language=metadata.get("localeID") or "en",
                data=metadata,
            )
        ]

    def _episode(self, entity_id: str) -> list[Episode]:
        """Fetch a single episode's metadata, wrapped in a one-item list."""
        metadata = self._request("GET", f"https://catalogue.pr.sbsod.com/mpx-media/{entity_id}")

        return [
            Episode(
                id_=metadata.get("mpxMediaID"),
                service=self.__class__,
                title=metadata.get("seriesTitle"),
                season=int(metadata.get("seasonNumber", 0)),
                number=int(metadata.get("episodeNumber", 0)),
                name=metadata.get("title") or metadata.get("cdpTitle"),
                year=metadata.get("releaseYear"),
                language=metadata.get("localeID") or "en",
                data=metadata,
            )
        ]

    def _request(self, method: str, endpoint: str, **kwargs: Any) -> dict | str:
        """Send a request and return the decoded JSON body (or raw text).

        Raises:
            ConnectionError: On any non-200 response.
        """
        url = urljoin(self.config["endpoints"]["base_url"], endpoint)

        prep = self.session.prepare_request(Request(method, url, **kwargs))
        response = self.session.send(prep)

        if response.status_code != 200:
            raise ConnectionError(f"{response.text}")

        try:
            return json.loads(response.content)
        except json.JSONDecodeError:
            # JSONDecodeError is a subclass of ValueError and json.loads
            # raises nothing else, so the original extra `except ValueError`
            # re-raise branch was unreachable and has been removed.
            return response.text
7
SBS/config.yaml
Normal file
7
SBS/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
# SBS On Demand service configuration.
headers:
  # User-agent applied to the session in SBS.__init__().
  User-Agent: "AndroidTV/!/!"

endpoints:
  base_url: "https://www.sbs.com.au"
  # Google DAI stream endpoint template ({vid} is the video id).
  dai: "http://pubads.g.doubleclick.net/ondemand/hls/content/2488267/vid/{vid}/streams"
  catalogue: "https://catalogue.pr.sbsod.com"
||||
439
SEVEN/__init__.py
Normal file
439
SEVEN/__init__.py
Normal file
@ -0,0 +1,439 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, List, Optional, Union
|
||||
from uuid import uuid4
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class SEVEN(Service):
    """
    Service code for 7Plus streaming service (https://7plus.com.au/).

    \b
    Version: 1.0.1
    Author: stabbedbybrick
    Authorization: Cookies
    Geofence: AU (API and downloads)
    Robustness:
      Widevine:
        L3: 720p
      PlayReady:
        SL2000: 720p

    \b
    Tips:
    - Use complete title URL as input:
        SERIES: https://7plus.com.au/ncis-los-angeles
        EPISODE: https://7plus.com.au/ncis-los-angeles?episode-id=NCIL01-001
    - There's no way to distinguish between series and movies, so use `--movie` to download as movie

    \b
    Examples:
    - SERIES: unshackle dl -w s01e01 7plus https://7plus.com.au/ncis-los-angeles
    - EPISODE: unshackle dl 7plus https://7plus.com.au/ncis-los-angeles?episode-id=NCIL01-001
    - MOVIE: unshackle dl 7plus --movie https://7plus.com.au/puss-in-boots-the-last-wish

    """

    GEOFENCE = ("au",)
    ALIASES = ("7plus", "sevenplus",)

    @staticmethod
    @click.command(name="SEVEN", short_help="https://7plus.com.au/", help=__doc__)
    @click.option("-m", "--movie", is_flag=True, default=False, help="Download as Movie")
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> SEVEN:
        # Click entry point: forwards the CLI arguments into a service instance.
        return SEVEN(ctx, **kwargs)

    def __init__(self, ctx: Context, movie: bool, title: str):
        self.title = title
        self.movie = movie
        super().__init__(ctx)

        # Pick the DRM system/key-system identifiers from the active CDM type.
        self.cdm = ctx.obj.cdm
        self.drm_system = "playready" if isinstance(self.cdm, PlayReadyCdm) else "widevine"
        self.key_system = "com.microsoft.playready" if isinstance(self.cdm, PlayReadyCdm) else "com.widevine.alpha"

        # Profile name is used to key the cached auth tokens.
        self.profile = ctx.parent.params.get("profile")
        if not self.profile:
            self.profile = "default"

        self.session.headers.update(self.config["headers"])

    def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Authenticate against 7Plus using browser cookies.

        Flow: extract the Gigya API key and login token from the cookies,
        resolve the market id by IP, then either reuse/refresh cached tokens
        or run the full device-code + OTP pairing flow to mint new ones.

        Raises:
            EnvironmentError: If no cookies were provided.
            ValueError: If the cookies lack the required Gigya values, or a
                step of the pairing flow returns an unusable payload.
            ConnectionError: If any token/authorization request fails.
        """
        super().authenticate(cookies, credential)
        if cookies is None:
            raise EnvironmentError("Service requires Cookies for Authentication.")
        self.session.cookies.update(cookies)

        # Gigya bootstrap cookie name carries the API key; glt_* carries the login token.
        api_key = next((cookie.name.replace("gig_bootstrap_", "") for cookie in cookies if "login_ver" in cookie.value), None)
        login_token = next((cookie.value for cookie in cookies if "glt_" in cookie.name), None)
        if not api_key or not login_token:
            raise ValueError("Invalid cookies. Try refreshing.")

        market = self.session.get(
            "https://market-cdn.swm.digital/v1/market/ip/",
            params={"apikey": "web"}
        ).json()

        # Fall back to market id 4 when the lookup has no "_id".
        self.market_id = market.get("_id", 4)

        cache = self.cache.get(f"tokens_{self.profile}")

        if cache and not cache.expired:
            # cached
            self.log.info(" + Using cached tokens...")
            tokens = cache.data
        elif cache and cache.expired:
            # expired, refresh
            self.log.info("+ Refreshing tokens...")
            payload = {
                "platformId": self.config["PLATFORM_ID"],
                "regSource": "7plus",
                "refreshToken": cache.data.get("refresh_token"),
            }
            r = self.session.post("https://auth2.swm.digital/connect/token", data=payload)
            if r.status_code != 200:
                raise ConnectionError(f"Failed to refresh tokens: {r.text}")
            tokens = r.json()
            # Expire the cache a minute early to avoid using a stale token.
            cache.set(tokens, expiration=int(tokens["expires_in"]) - 60)

        else:
            # new
            self.log.info(" + Authenticating...")
            device_id = str(uuid4())
            payload = {
                "platformId": self.config["PLATFORM_ID"],
                "regSource": "7plus",
                "deviceId": device_id,
                "locationVerificationRequired": "false",
            }
            r = self.session.post("https://auth2.swm.digital/account/device/authorize", data=payload)
            if r.status_code != 200:
                raise ConnectionError(f"Failed to authenticate: {r.text}")
            auth = r.json()

            # Device-code flow: the user_code is paired below via the OTP
            # endpoint, then the device_code is exchanged for tokens.
            uri = auth.get("verification_uri_complete")
            user_code = auth.get("user_code")
            device_code = auth.get("device_code")
            if not uri or not user_code or not device_code:
                raise ValueError(f"Failed to authenticate device: {auth}")

            data = {
                "APIKey": api_key,
                "sdk": "js_next",
                "login_token": login_token,
                "authMode": "cookie",
                "pageURL": "https://7plus.com.au/connect",
                "sdkBuild": "18051",
                "format": "json",
            }

            # Trade the Gigya login token for a JWT used to authorize the pairing.
            response = self.session.post("https://login.7plus.com.au/accounts.getJWT", cookies=cookies, data=data)
            if response.status_code != 200:
                raise ConnectionError(f"Failed to fetch JWT: {response.text}")

            id_token = response.json().get("id_token")
            if not id_token:
                raise ValueError(f"Failed to fetch JWT: {response.text}")

            headers = {
                "accept": "application/json, text/plain, */*",
                "accept-language": "en-US,en;q=0.9",
                "authorization": f"Bearer {id_token}",
                "content-type": "application/json;charset=UTF-8",
                "origin": "https://7plus.com.au",
                "referer": "https://7plus.com.au/connect",
            }

            payload = {
                "platformId": "web",
                "regSource": "7plus",
                "code": user_code,
                "attemptLocationPairing": False,
            }
            r = self.session.post("https://7plus.com.au/auth/otp", headers=headers, json=payload)
            if r.status_code != 200:
                raise ConnectionError(f"Failed to verify OTP: {r.status_code}")

            payload = {
                "platformId": self.config["PLATFORM_ID"],
                "regSource": "7plus",
                "deviceCode": device_code,
            }
            r = self.session.post("https://auth2.swm.digital/connect/token", data=payload)
            if r.status_code != 200:
                raise ConnectionError(f"Failed to fetch device token: {r.text}")
            tokens = r.json()

            # Persist the device id alongside the tokens so get_tracks()
            # reuses the same device on later runs.
            tokens["device_id"] = device_id
            cache.set(tokens, expiration=int(tokens["expires_in"]) - 60)

        self.device_id = tokens.get("device_id") or str(uuid4())
        self.session.headers.update({"authorization": f"Bearer {tokens['access_token']}"})

    def search(self) -> Generator[SearchResult, None, None]:
        """Query the 7Plus search API and yield results as title URLs."""
        params = {
            "searchTerm": self.title,
            "market-id": self.market_id,
            "api-version": "4.4",
            "platform-id": self.config["PLATFORM_ID"],
            "platform-version": self.config["PLATFORM_VERSION"],
        }

        r = self.session.get("https://searchapi.swm.digital/3.0/api/Search", params=params)
        r.raise_for_status()

        results = r.json()
        if isinstance(results, list):
            for result in results:
                title = result.get("image", {}).get("altTag")
                slug = result.get("contentLink", {}).get("url")

                yield SearchResult(
                    id_=f"https://7plus.com.au{slug}",
                    title=title,
                    url=f"https://7plus.com.au{slug}",
                )

    def get_titles(self) -> Movies | Series:
        """Parse the input URL and return the matching Movie or Series titles.

        An `episode-id` query parameter narrows a series down to one episode;
        the `--movie` flag forces movie handling (the API does not distinguish).
        """
        if match := re.match(r"https:\/\/7plus\.com\.au\/([^?\/]+)(?:\?.*episode-id=([^&]+))?", self.title):
            slug, episode_id = match.groups()
        else:
            raise ValueError(f"Invalid title: {self.title}")

        params = {
            "platform-id": self.config["PLATFORM_ID"],
            "market-id": self.market_id,
            "platform-version": self.config["PLATFORM_VERSION"],
            "api-version": self.config["API_VERSION"],
        }

        r = self.session.get(f"https://component-cdn.swm.digital/content/{slug}", params=params)
        if r.status_code != 200:
            raise ConnectionError(f"Failed to fetch content: {r.text}")

        content = r.json()

        if episode_id:
            episodes = self._series(content, slug)
            # NOTE(review): if no episode matches, this wraps None in a
            # Series — confirm downstream handles that.
            episode = next((e for e in episodes if e.id == episode_id), None)
            return Series([episode])

        elif self.movie:
            movie = self._movie(content)
            return Movies([movie])

        else:
            episodes = self._series(content, slug)
            return Series(episodes)

    def get_tracks(self, title: Movie | Episode) -> Tracks:
        """Fetch playback data for *title* and return its DASH tracks.

        Also stashes the license URL and ad cue points on ``title.data`` for
        the license/chapters callbacks.
        """
        params = {
            "appId": "7plus",
            "deviceType": self.config["PLATFORM_ID"],
            "platformType": "tv",
            "deviceId": self.device_id,
            "pc": 3181,
            "advertid": "null",
            "accountId": "5303576322001",
            "referenceId": f"ref:{title.id}",
            "deliveryId": "csai",
            "marketId": self.market_id,
            "ozid": "dc6095c7-e895-41d3-6609-79f673fc7f63",
            "sdkverification": "true",
            "cp.encryptionType": "cenc",
            "cp.drmSystems": self.drm_system,
            "cp.containerFormat": "cmaf",
            "cp.supportedCodecs": "avc",
            "cp.drmAuth": "true",
        }
        resp = self.session.get("https://videoservice.swm.digital/playback", params=params)
        if resp.status_code != 200:
            raise ConnectionError(f"Failed to fetch playback data: {resp.text}")
        data = resp.json()

        drm = data.get("media", {}).get("stream_type_drm", False)
        if drm:
            # NOTE(review): the manifest lookup is hardcoded to
            # "com.widevine.alpha" while the license_url lookup below uses
            # self.key_system — these disagree for a PlayReady CDM; confirm
            # whether that is intentional.
            source_manifest = next((
                x["src"] for x in data["media"]["sources"]
                if x.get("key_systems").get("com.widevine.alpha")),
                None,
            )
            title.data["license_url"] = next((
                x["key_systems"][self.key_system]["license_url"]
                for x in data["media"]["sources"]
                if x.get("key_systems").get(self.key_system)),
                None,
            )
        else:
            source_manifest = next((
                x["src"] for x in data["media"]["sources"]
                if x.get("type") == "application/dash+xml"),
                None,
            )
        if not source_manifest:
            raise ValueError("Failed to get manifest")

        title.data["cue_points"] = data.get("media", {}).get("cue_points")

        tracks = DASH.from_url(source_manifest, self.session).to_tracks(title.language)

        # Mark audio-description / alternate renditions as descriptive.
        for track in tracks.audio:
            role = track.data["dash"]["representation"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

        return tracks

    def get_chapters(self, title: Movie | Episode) -> Chapters:
        """Convert the stream's ad cue points into chapter markers."""
        if not (cue_points := title.data.get("cue_points")):
            return Chapters()

        cue_points = sorted(cue_points, key=lambda x: x["time"])

        chapters = []
        for cue_point in cue_points:
            if cue_point.get("time", 0) > 0:
                name = "End Credits" if cue_point.get("name", "").lower() == "credits" else None
                # Cue times are in seconds; Chapter timestamps here are ms.
                chapters.append(Chapter(name=name, timestamp=cue_point["time"] * 1000))

        return Chapters(chapters)

    def get_widevine_service_certificate(self, **_: Any) -> Optional[str]:
        # No service certificate is used.
        return None

    def get_widevine_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> Optional[Union[bytes, str]]:
        """POST the Widevine challenge to the license URL stored by get_tracks()."""
        if license_url := title.data.get("license_url"):
            r = self.session.post(url=license_url, data=challenge)
            if r.status_code != 200:
                raise ConnectionError(r.text)
            return r.content

        return None

    def get_playready_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> Optional[Union[bytes, str]]:
        """POST the PlayReady challenge to the license URL stored by get_tracks()."""
        if license_url := title.data.get("license_url"):
            r = self.session.post(url=license_url, data=challenge)
            if r.status_code != 200:
                raise ConnectionError(r.text)
            return r.content

        return None

    # Service specific functions

    def _movie(self, content: dict) -> Movie:
        """Build a Movie from a content page's first item's video metadata."""
        title = content.get("title")
        metadata = content.get("items", [{}])[0].get("videoMetadata", {})
        if not metadata:
            raise ValueError("Failed to find metadata for this movie")

        return Movie(
            id_=metadata.get("videoBref"),
            service=self.__class__,
            name=title,
            year=metadata.get("productionYear"),
            language="en",
            data=content,
        )

    def _get_season_data(self, season_id: str, slug: str) -> List[Episode]:
        """Fetch one season component and parse its items into Episodes.

        Returns an empty list (and logs) on any fetch error, so one bad
        season does not abort the whole series listing.
        """
        params = {
            "component-id": season_id,
            "platform-id": self.config.get("PLATFORM_ID"),
            "market-id": self.market_id,
            "platform-version": self.config.get("PLATFORM_VERSION"),
            "api-version": self.config.get("API_VERSION"),
            "signedUp": "True",
        }

        try:
            r = self.session.get(f"https://component.swm.digital/component/{slug}", params=params)
            r.raise_for_status()
            comp = r.json()
        except ConnectionError as e:
            self.log.error(f"Error fetching season {season_id}: {e}")
            return []
        except Exception as e:
            self.log.error(f"An unexpected error occurred for season {season_id}: {e}")
            return []

        episodes = []
        for episode in comp.get("items", []):
            info_panel = episode.get("infoPanelData", {})
            player_data = episode.get("playerData", {})
            card_data = episode.get("cardData", {})
            catalogue_number = episode.get("catalogueNumber", "")

            title = info_panel.get("title")
            episode_name = card_data.get("image", {}).get("altTag")
            # Strip any leading "NN. " numbering and keep the last " - " part.
            card_name = card_data.get("title", "").lstrip("0123456789. ").split(" - ")[-1].strip()

            # First try "Season N Episode M" from the image alt tag...
            # NOTE(review): episode_name may be None here, which would make
            # re.search raise TypeError — confirm altTag is always present.
            season, number, name = 0, 0, card_name
            if match := re.search(r"(?:Season|Year)\s*(\d+)\s*E(?:pisode)?\s*(\d+)", episode_name, re.IGNORECASE):
                season = int(match.group(1))
                number = int(match.group(2))

            # ...then fall back to the catalogue number (e.g. "ABC01-002").
            if not season and not number:
                if match := re.compile(r"\w+(\d+)-(\d+)").search(catalogue_number):
                    season = int(match.group(1))
                    number = int(match.group(2))

            episodes.append(
                Episode(
                    id_=player_data.get("episodePlayerId"),
                    service=self.__class__,
                    title=title,
                    year=card_data.get("productionYear"),
                    season=season,
                    number=number,
                    name=name,
                    language="en",
                    data=episode,
                )
            )
        return episodes

    def _series(self, content: dict, slug: str) -> List[Episode]:
        """Collect all episodes of a series by fetching each season concurrently."""
        # Drill down: shelfContainer -> "Episodes" shelf -> season selector.
        items = next((x for x in content.get("items", []) if x.get("type") == "shelfContainer"), {})
        episodes_shelf = next((x for x in items.get("items", []) if x.get("title") == "Episodes"), {})
        seasons_container = next((x for x in episodes_shelf.get("items", []) if x.get("title") in ("Season", "Year", "Bulletin")), {})

        season_ids = [
            item.get("items", [{}])[0].get("id")
            for item in seasons_container.get("items", [])
            if item.get("items") and item.get("items")[0].get("id")
        ]

        if not season_ids:
            return []

        # One worker per season; failures are logged and skipped.
        all_episodes = []
        with ThreadPoolExecutor(max_workers=len(season_ids)) as executor:
            future_to_season = {
                executor.submit(self._get_season_data, season_id, slug): season_id for season_id in season_ids
            }
            for future in future_to_season:
                try:
                    episodes_of_season = future.result()
                    all_episodes.extend(episodes_of_season)
                except Exception as exc:
                    season_id = future_to_season[future]
                    self.log.error(f"{season_id} generated an exception: {exc}")

        return all_episodes
7
SEVEN/config.yaml
Normal file
7
SEVEN/config.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
# 7Plus service configuration.
headers:
  # ExoPlayer user-agent and API key applied to the session in SEVEN.__init__().
  user-agent: "7plus/5.25.1 (Linux;Android 8.1.0) ExoPlayerLib/2.11.7"
  x-swm-apikey: "kGcrNnuPClrkynfnKwG8IA/NhVG6ut5nPEdWF2jscvE="

# Platform identifiers sent as query parameters on most API calls.
PLATFORM_ID: "androidtv"
PLATFORM_VERSION: "5.25.0.0"
API_VERSION: "5.9.0.0"
||||
232
STV/__init__.py
Normal file
232
STV/__init__.py
Normal file
@ -0,0 +1,232 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from datetime import timedelta
|
||||
from typing import Any, Union
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from lxml import etree
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class STV(Service):
|
||||
"""
|
||||
Service code for STV Player streaming service (https://player.stv.tv/).
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Robustness:
|
||||
L3: 1080p
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Use complete title URL as input:
|
||||
SERIES: https://player.stv.tv/summary/rebus
|
||||
EPISODE: https://player.stv.tv/episode/2ro8/rebus
|
||||
- Use the episode URL for movies:
|
||||
MOVIE: https://player.stv.tv/episode/4lw7/wonder-woman-1984
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("gb",)
|
||||
ALIASES = ("stvplayer",)
|
||||
|
||||
    @staticmethod
    @click.command(name="STV", short_help="https://player.stv.tv/", help=__doc__)
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx: Context, **kwargs: Any) -> STV:
        # Click entry point: forwards the CLI arguments into a service instance.
        return STV(ctx, **kwargs)
def __init__(self, ctx: Context, title: str):
    # Store the raw title/URL before base Service init (which may use it).
    self.title = title
    super().__init__(ctx)

    # The STV API expects the Android okhttp client's user agent.
    self.session.headers.update({"user-agent": "okhttp/4.11.0"})
    # Base API URL from STV/config.yaml (https://player.api.stv.tv/v1/).
    self.base = self.config["endpoints"]["base"]
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the Swiftype search API and yield matching STV titles."""
    payload = {
        "engine_key": "S1jgssBHdk8ZtMWngK_y",
        "q": self.title,
    }
    response = self.session.post(self.config["endpoints"]["search"], data=payload)
    response.raise_for_status()

    for record in response.json()["records"]["page"]:
        category = record.get("category")
        # The API sometimes returns the category as a list; use its first entry.
        if category and isinstance(category, list):
            category = record["category"][0]

        yield SearchResult(
            id_=record.get("url"),
            title=record.get("title"),
            description=record.get("body"),
            label=category,
            url=record.get("url"),
        )
|
||||
|
||||
@staticmethod
def _season_number(episode: dict) -> int:
    """Parse the season number from playerSeries ("Series N"); 0 when absent."""
    series = episode.get("playerSeries")
    if series and re.match(r"Series \d+", series["name"]):
        return int(series["name"].split(" ")[1])
    return 0

def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input URL into a Movie or a Series of Episodes.

    Episode URLs are fetched directly; summary (show) URLs are expanded
    into every episode of every listed series.
    """
    kind, slug = self.parse_title(self.title)
    # The API only includes DRM playback metadata when this header is set.
    self.session.headers.update({"stv-drm": "true"})

    try:
        if kind == "episode":
            r = self.session.get(self.base + f"episodes/{slug}")
            r.raise_for_status()
            episode = r.json()["results"]

            # Fix: genre can be missing; guard before lowercasing to avoid
            # AttributeError on None.
            if (episode.get("genre") or "").lower() == "movie":
                return Movies(
                    [
                        Movie(
                            id_=episode["video"].get("id"),
                            service=self.__class__,
                            year=None,
                            name=episode.get("title"),
                            language="en",
                            data=episode,
                        )
                    ]
                )

            episodes = [
                Episode(
                    id_=episode["video"].get("id"),
                    service=self.__class__,
                    title=episode["programme"].get("name"),
                    season=self._season_number(episode),
                    number=int(episode.get("number", 0)),
                    name=episode.get("title", "").lstrip("0123456789. ").lstrip(),
                    language="en",
                    data=episode,
                )
            ]
        else:  # kind == "summary" (parse_title only allows these two kinds)
            r = self.session.get(self.base + f"programmes/{slug}")
            r.raise_for_status()
            data = r.json()

            series = [series.get("guid") for series in data["results"]["series"]]
            seasons = [self.session.get(self.base + f"episodes?series.guid={i}").json() for i in series]

            episodes = [
                Episode(
                    id_=episode["video"].get("id"),
                    service=self.__class__,
                    title=data["results"].get("name"),
                    season=self._season_number(episode),
                    number=int(episode.get("number", 0)),
                    name=episode.get("title", "").lstrip("0123456789. ").lstrip(),
                    language="en",
                    data=episode,
                )
                for season in seasons
                for episode in season["results"]
            ]
    finally:
        # Fix: the header previously leaked onto later requests when a Movie
        # returned early; always remove it, tolerating a missing key.
        self.session.headers.pop("stv-drm", None)

    return Series(episodes)
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch Brightcove playback data and parse the DASH manifest to tracks."""
    self.drm = title.data["programme"].get("drmEnabled")
    # Headers and Brightcove account differ between DRM and clear streams.
    profile = "drm" if self.drm else "clear"
    headers = self.config["headers"][profile]
    accounts = self.config["accounts"][profile]

    r = self.session.get(
        self.config["endpoints"]["playback"].format(accounts=accounts, id=title.id),
        headers=headers,
    )
    if not r.ok:
        raise ConnectionError(r.text)
    data = r.json()

    source_manifest = None
    for source in data["sources"]:
        if source.get("type") == "application/dash+xml":
            source_manifest = source["src"]
            break

    self.license = None
    if self.drm:
        for source in data["sources"]:
            if source.get("type") == "application/dash+xml" and source.get("key_systems").get("com.widevine.alpha"):
                self.license = source["key_systems"]["com.widevine.alpha"]["license_url"]
                break

    manifest = self.trim_duration(source_manifest)
    tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

    for track in tracks.audio:
        role = track.data["dash"]["representation"].find("Role")
        # Flag audio-description/alternate renditions so they are labelled.
        if role is not None and role.get("value") in ("description", "alternative", "alternate"):
            track.descriptive = True

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Build chapter markers from the title's cue points, if any exist."""
    cues = title.data.get("_cuePoints")
    if not cues:
        return Chapters()

    markers = []
    for cue in cues:
        markers.append(Chapter(timestamp=int(cue)))
    return Chapters(markers)
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    # STV does not expose a Widevine privacy/service certificate.
    return None
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the Widevine challenge to the license server; None for clear streams."""
    if not self.license:
        return None

    response = self.session.post(url=self.license, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.text)
    return response.content
|
||||
|
||||
# Service specific functions
|
||||
|
||||
@staticmethod
def parse_title(title: str) -> tuple[str, str]:
    """Split an STV player URL into (kind, slug).

    kind is "episode" or "summary"; slug is the path segment after it.

    Raises:
        ValueError: when the URL does not match either form.
    """
    parts = urlparse(title).path.split("/")
    # Fix: URLs with fewer than two path segments previously raised a bare
    # IndexError; fail with the intended, descriptive error instead.
    if len(parts) < 3:
        raise ValueError("Failed to parse title - is the URL correct?")

    kind, slug = parts[1], parts[2]
    if kind not in ("episode", "summary"):
        raise ValueError("Failed to parse title - is the URL correct?")

    return kind, slug
|
||||
|
||||
@staticmethod
def trim_duration(source_manifest: str) -> str:
    """Return the DASH manifest with its duration shortened by one segment.

    The last segment on all tracks returns a 404 for some reason, causing a
    failed download, so the presentation duration is trimmed by exactly one
    segment (assumed 6s) to compensate.

    TODO: Calculate the segment duration instead of assuming length.
    """
    manifest = DASH.from_url(source_manifest).manifest
    total_seconds = DASH.pt_to_sec(manifest.get("mediaPresentationDuration"))

    hours, minutes, seconds = str(timedelta(seconds=total_seconds - 6)).split(":")
    manifest.set("mediaPresentationDuration", f"PT{hours}H{minutes}M{seconds}S")

    return etree.tostring(manifest, encoding="unicode")
|
||||
20
STV/config.yaml
Normal file
20
STV/config.yaml
Normal file
@ -0,0 +1,20 @@
|
||||
accounts:
|
||||
drm: "6204867266001"
|
||||
clear: "1486976045"
|
||||
|
||||
headers:
|
||||
drm:
|
||||
BCOV-POLICY: BCpkADawqM32Q7lZg8ME0ydIOV8bD_9Ke2YD5wvY_T2Rq2TBtz6QQfpHtSAJTiDL-MiYAxyJVvScaKt82d1Q6b_wP6MG-O8SGQjRnwczfdsTesTZy-uj23uKv1vjHijtTeQC0DONN53zS38v
|
||||
User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
|
||||
Host: edge.api.brightcove.com
|
||||
Connection: keep-alive
|
||||
clear:
|
||||
BCOV-POLICY: BCpkADawqM2Dpx-ht5hP1rQqWFTcOTqTT5x5bSUlY8FaOO1_P8LcKxmL2wrFzTvRb3HzO2YTIzVDuoeLfqvFvp1dWRPnxKT8zt9ErkENYteaU9T6lz7OogjL8W8
|
||||
User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
|
||||
Host: edge.api.brightcove.com
|
||||
Connection: keep-alive
|
||||
|
||||
endpoints:
|
||||
base: https://player.api.stv.tv/v1/
|
||||
playback: https://edge.api.brightcove.com/playback/v1/accounts/{accounts}/videos/{id}
|
||||
search: https://api.swiftype.com/api/v1/public/engines/search.json
|
||||
565
TEN/__init__.py
Normal file
565
TEN/__init__.py
Normal file
@ -0,0 +1,565 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import concurrent.futures
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import datetime, timezone
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import click
|
||||
import m3u8
|
||||
from click import Context
|
||||
from langcodes import Language
|
||||
from requests import Request
|
||||
from unshackle.core.config import config
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.downloaders import requests
|
||||
from unshackle.core.manifests import HLS
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Subtitle, Tracks, Video
|
||||
|
||||
|
||||
class TEN(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for 10Play streaming service (https://10.com.au/).
|
||||
|
||||
\b
|
||||
Version: 1.0.2
|
||||
Author: stabbedbybrick
|
||||
Authorization: credentials
|
||||
Geofence: AU (API and downloads)
|
||||
Robustness:
|
||||
AES: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input should be complete URL:
|
||||
SHOW: https://10.com.au/australian-survivor
|
||||
EPISODE: https://10.com.au/australian-survivor/episodes/season-11-australia-v-the-world/episode-9/tpv250831fxatm
|
||||
MOVIE: https://10.com.au/a-quiet-place
|
||||
- Non-standard programmes (e.g. game shows/sports) have very inconsistent episode number labels. It's recommended to use episode URLs for those.
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- 10Play uses transport streams for HLS, meaning the video and audio are a part of the same stream.
|
||||
As a result, only videos are listed as tracks. But the audio will be included as well.
|
||||
- Since 1080p streams require some manipulation of the manifest, n_m3u8dl_re downloader is required.
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("au",)
|
||||
ALIASES = (
|
||||
"10play",
|
||||
"tenplay",
|
||||
)
|
||||
|
||||
@staticmethod
@click.command(name="TEN", short_help="https://10.com.au/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> TEN:
    """Click entry point: build the TEN service from the CLI context."""
    return TEN(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Raw input URL; parsed later in get_titles.
    self.title = title
    super().__init__(ctx)

    # 1080p support relies on rewriting the media playlist on disk
    # (see _check_and_add_track), which only works with n_m3u8dl_re.
    if config.downloader != "n_m3u8dl_re":
        self.log.error(" - Error: n_m3u8dl_re downloader is required for this service.")
        sys.exit(1)
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Query the 10play search API and yield results as show URLs."""
    results = self._request("GET", self.endpoints["searchApiEndpoint"] + self.title)

    for item in results:
        # Build the canonical show URL from the slugified title.
        slug = self._sanitize(item.get("title"))
        show_url = f"https://10.com.au/{slug}"

        yield SearchResult(
            id_=show_url,
            title=item.get("title"),
            description=item.get("abstractShowDescription"),
            label=item.get("subtitle", "").split("|")[-1].strip(),
            url=show_url,
        )
|
||||
|
||||
def authenticate(
    self,
    cookies: Optional[MozillaCookieJar] = None,
    credential: Optional[Credential] = None,
) -> None:
    """Log in to 10play and attach a bearer token to the session.

    Uses cached tokens when still valid, refreshes them when expired, and
    otherwise performs a full credential login plus TV-device activation.

    Raises:
        EnvironmentError: when no credential is provided.
        ValueError: on any failed login/code-generation/activation step.
    """
    super().authenticate(cookies, credential)
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    self.session.headers.update(self.config["headers"])
    # Endpoint map (auth, search, shows, videos, ...) comes from the config API.
    self.endpoints = self._request(
        "GET", self.config["endpoints"]["config"], params={"SystemName": "tvos"}
    )

    cache = self.cache.get(f"tokens_{credential.sha1}")

    if cache and not cache.expired:
        self.log.info(" + Using cached Tokens...")
        tokens = cache.data
    elif cache and cache.expired:
        self.log.info(" + Refreshing expired Tokens...")
        payload = {
            "alternativeToken": cache.data["alternativeToken"],
            "refreshToken": cache.data["refreshToken"],
        }
        tokens = self._request(
            "POST", self.endpoints["authConfig"]["refreshToken"], json=payload
        )
        cache.set(tokens, expiration=tokens["expiresIn"])
    else:
        self.log.info(" + Logging in...")
        headers = {
            "accept": "application/json, text/plain, */*",
            "content-type": "application/json",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36",
            "origin": "https://10.com.au",
            "referer": "https://10.com.au/",
        }
        login = self._request(
            "POST",
            self.config["endpoints"]["auth"],
            headers=headers,
            json={"email": credential.username, "password": credential.password},
        )
        access_token = login.get("jwt", {}).get("accessToken")
        if not access_token:
            # Fix: `login` is a parsed dict (not a Response), so the previous
            # `login.text` raised AttributeError instead of reporting the error.
            raise ValueError(f"Failed to authenticate with credentials: {login}")

        identifier = str(uuid.uuid4())

        payload = {
            "deviceIdentifier": identifier,
            "machine": "Hisense",
            "system": "vidaa",
            "systemVersion": "U6",
            "platform": "vidaa",
            "appVersion": "v1",
            "ipAddress": "string",
        }
        device = self._request(
            "POST", self.endpoints["authConfig"]["generateCode"], json=payload
        )

        code = device.get("code")
        expiry = device.get("expiry")
        if not code or not expiry:
            # Fix: same AttributeError as above — `device` is a dict.
            raise ValueError(f"Failed to generate device code: {device}")

        headers = {
            "accept": "application/json, text/plain, */*",
            "authorization": f"Bearer {access_token}",
            "content-type": "application/json",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36",
            "origin": "https://10.com.au",
            "referer": "https://10.com.au/activate",
        }
        activate = self._request(
            "POST",
            self.endpoints["activateApiEndpoint"],
            headers=headers,
            json={"code": code},
        )
        if not activate:
            raise ValueError("Failed to activate device")

        # Fix: removed a duplicate, unused payload dict that preceded this
        # request; the body is built inline exactly as before.
        auth = self._request(
            "POST",
            self.endpoints["authConfig"]["validateCode"],
            json={
                "code": code,
                "deviceIdentifier": identifier,
                "expiry": expiry,
            },
        )
        tokens = auth.get("jwt")
        tokens["identifier"] = identifier

        self.log.info(" + User successfully logged in, TV device activated")

        cache.set(tokens, expiration=tokens.get("expiresIn"))

    self.access_token = tokens.get("alternativeToken")
    self.session.headers.update({"authorization": f"Bearer {self.access_token}"})
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input URL into a Movie or a Series of Episodes."""
    url_pattern = re.compile(
        r"^https://10\.com\.au/(?:[a-z0-9-]+)"
        r"(?:/episodes/(?:season-)?(?P<season>[a-z0-9-]+)/(?:episode-)?(?P<episode>[a-z0-9-]+)/(?P<id>[a-z0-9]+))?$"
    )

    match = url_pattern.match(self.title)
    if not match:
        raise ValueError(f"Could not parse ID from title: {self.title}")

    video_id = match.group("id")
    if video_id:
        # Direct episode URL: fetch that single episode.
        return Series(self._episode(video_id))

    # Show URL: resolve the show id from the page HTML, then expand it.
    show_id = self._get_html(self.title)
    content = self._shows(show_id)

    if "movie" in content.get("subtitle", "").lower():
        return Movies(self._movie(content))
    return Series(self._series(content))
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Resolve the Google DAI stream for a title and parse its HLS tracks.

    Stores the DAI time-events URL in title.data["chapters"] for use by
    get_chapters, probes for higher-quality renditions via _add_tracks, and
    configures every track to skip DAI ad segments.

    Raises:
        ValueError: when the playback endpoint or video id cannot be resolved.
    """
    playback_url = title.data.get("playbackApiEndpoint")
    if not playback_url:
        raise ValueError("Could not find playback URL for this title")

    params = {
        "device": "Tv",
        "platform": "vidaa",
        "appVersion": "v1",
    }

    r = self.session.get(playback_url, params=params)
    if not r.ok:
        raise ValueError("Failed to get playback data: " + r.text)

    # Fix: `payload` was previously only assigned when the X-DAI-AUTH header
    # existed, raising UnboundLocalError for streams without it.
    dai_auth = r.headers.get("X-DAI-AUTH")
    payload = {"auth-token": dai_auth} if dai_auth is not None else {}

    playback_data = r.json()

    video_id = playback_data.get("dai", {}).get("videoId")
    source_id = playback_data.get("dai", {}).get("contentSourceId", "2690006")
    if not video_id or not source_id:
        raise ValueError("Failed to get video ID: " + r.text)

    dai_stream = f"https://dai.google.com/ondemand/v1/hls/content/{source_id}/vid/{video_id}/stream"

    stream_data = self._request("POST", dai_stream, data=payload)

    # Saved for get_chapters; may be None when the feed has no cue points.
    title.data["chapters"] = stream_data.get("time_events_url")

    manifest_url = stream_data.get("stream_manifest")
    tracks = HLS.from_url(manifest_url, self.session).to_tracks(language="en")

    tracks = self._add_tracks(tracks)

    for track in tracks:
        # DAI ad segments come from a separate redirector host; skip them.
        track.OnSegmentFilter = lambda x: re.search(r"redirector.googlevideo.com", x.uri)
        track.downloader_args = {"--ad-keyword": "redirector.googlevideo.com"}

        if isinstance(track, Subtitle):
            track.downloader = requests

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Build chapters from the DAI time-events feed captured in get_tracks."""
    events_url = title.data.get("chapters")
    if not events_url:
        return Chapters()

    events = self._request("GET", events_url)
    cue_points = events.get("cuepoints")
    if not cue_points:
        return Chapters()

    # One marker at the start and one at the end of every cue point.
    markers = [
        Chapter(timestamp=float(cue[key]))
        for cue in cue_points
        for key in ("start_float", "end_float")
    ]
    return Chapters(markers)
|
||||
|
||||
# Service specific
|
||||
|
||||
def _head_request(self, url: str) -> int:
    """Return the HTTP status of a HEAD probe, or 0 on any failure."""
    try:
        response = self.session.head(url, timeout=10)
    except Exception:  # best-effort probe: any failure means "unavailable"
        return 0
    return response.status_code
|
||||
|
||||
def _check_and_add_track(
    self, best_track: Video, quality_info: dict, source_bitrate: int
) -> Video | None:
    """Probe one candidate quality by rewriting the best track's media playlist.

    Swaps the source bitrate token for the candidate's in every line, drops
    ad segments, and HEAD-checks the first rewritten segment. Returns a new
    Video backed by the saved playlist when the probe succeeds, else None.
    """
    playlist_text = self.session.get(best_track.data["hls"]["playlist"].uri).text

    old_token = f"-{source_bitrate}"
    new_token = f"-{quality_info['bitrate']}"

    kept_lines = []
    for line in playlist_text.splitlines():
        # Ad segments are served from a separate redirector host; drop them.
        if "redirector.googlevideo.com" in line:
            continue
        if old_token in line:
            line = line.replace(old_token, new_token)
        kept_lines.append(line)

    candidate = m3u8.loads("\n".join(kept_lines))
    if not candidate.segments:
        return None

    if self._head_request(candidate.segments[0].uri) != 200:
        return None

    # The rewritten playlist must live on disk so n_m3u8dl_re can consume it.
    playlist_file = config.directories.cache / "TEN" / f"playlist_{quality_info['quality']}.m3u8"
    candidate.dump(playlist_file)

    return Video(
        id_=f"{best_track.id}-{quality_info['quality']}",
        url=best_track.url,
        height=quality_info["height"],
        width=quality_info["width"],
        bitrate=quality_info["bitrate"],
        language=best_track.language,
        codec=best_track.codec,
        range_=best_track.range,
        fps=best_track.fps,
        descriptor=best_track.descriptor,
        data=best_track.data.copy(),
        from_file=playlist_file,
    )
|
||||
|
||||
def _add_tracks(self, tracks: Tracks) -> Tracks:
    """Probe for higher-quality renditions than the manifest advertises.

    The DAI manifest often tops out below 1080p even when higher bitrates
    exist on the CDN; this submits one probe per missing quality tier and
    adds every rendition whose first segment responds 200.
    """
    if not tracks.videos:
        return tracks

    best_track = max(tracks.videos, key=lambda t: t.height or 0)
    # Fix: height may be None; normalise once so the bitrate lookup and the
    # quality comparison below cannot raise a TypeError.
    best_height = best_track.height or 0

    # Bitrate token embedded in the source playlist's segment URIs.
    source_bitrate = {
        1080: "5000000",
        720: "3000000",
        540: "1500000",
        360: "750000",
    }.get(best_height)

    all_qualities = [
        {"quality": "540p", "bitrate": 1500000, "height": 540, "width": 960},
        {"quality": "720p", "bitrate": 3000000, "height": 720, "width": 1280},
        {"quality": "1080p", "bitrate": 5000000, "height": 1080, "width": 1920},
    ]

    qualities_to_check = [q for q in all_qualities if q["height"] > best_height]
    if not qualities_to_check:
        return tracks

    with ThreadPoolExecutor(max_workers=len(qualities_to_check)) as executor:
        future_to_track = {
            executor.submit(self._check_and_add_track, best_track, quality, source_bitrate): quality
            for quality in qualities_to_check
        }

        for future in concurrent.futures.as_completed(future_to_track):
            new_track = future.result()
            if new_track:
                tracks.add(new_track)

    return tracks
|
||||
|
||||
|
||||
def _shows(self, show_id: str) -> dict:
    """Fetch a show's metadata, unwrapping single-element list responses."""
    payload = self._request("GET", f'{self.endpoints["showsApiEndpoint"]}/{show_id}')
    if isinstance(payload, list):
        return payload[0]
    return payload
|
||||
|
||||
def _fetch_episode(self, url: str) -> list:
    """Fetch one season's episode list (thread-pool worker for _series)."""
    return self._request("GET", url)
|
||||
|
||||
def _series(self, content: dict) -> list[Episode]:
    """Expand a show payload into Episode objects across all seasons.

    Fix: the return annotation previously claimed a single Episode although
    a list is always returned (Series() consumes it as an iterable).

    Raises:
        ValueError: when no "episodes" season endpoints can be found.
    """
    season_list = content.get("seasons")
    if not season_list:
        raise ValueError("Could not find a season list for this title")

    # Only "episodes" menus point at episode-list endpoints.
    seasons = [
        season.get("menuItems", [])[0].get("apiEndpoint")
        for season in season_list
        if season.get("menuItems", [])
        and season.get("menuItems", [])[0].get("menuTitle", "").lower()
        == "episodes"
    ]

    if not seasons:
        raise ValueError("Could not find a season list for this title")

    # Fetch every season's episode list concurrently.
    with ThreadPoolExecutor(max_workers=10) as executor:
        results = list(executor.map(self._fetch_episode, seasons))

    titles = []
    for result in results:
        for episode in result:
            ep_number = episode.get("episode")
            sea_number = episode.get("season")
            titles.append(
                Episode(
                    id_=episode.get("id"),
                    service=self.__class__,
                    name=episode.get("vodTitle", "").split(" - ")[-1],
                    season=int(sea_number) if sea_number and sea_number.isdigit() else 0,
                    number=int(ep_number) if ep_number and ep_number.isdigit() else 0,
                    title=episode.get("tvShow"),
                    data=episode,
                )
            )

    return titles
|
||||
|
||||
def _movie(self, data: dict) -> list[Movie]:
    """Build the single-element Movie list for a movie-type show payload.

    Fix: the return annotation previously claimed a single Movie although a
    list is always returned (Movies() consumes it as an iterable).

    Raises:
        ValueError: when no season endpoint can be found.
    """
    endpoint = next(
        (
            season.get("menuItems", [])[0].get("apiEndpoint")
            for season in data.get("seasons", [])
            if season.get("menuItems", [])
        ),
        None,
    )
    if not endpoint:
        raise ValueError("Could not find an endpoint for this title")

    movie = self._request("GET", endpoint)[0]

    return [
        Movie(
            id_=movie.get("id"),
            service=self.__class__,
            name=movie.get("title"),
            year=movie.get("season"),
            data=movie,
        )
    ]
|
||||
|
||||
def _episode(self, video_id: str) -> list[Episode]:
    """Fetch one episode by its video id and wrap it in a one-element list.

    Fix: the return annotation previously claimed a single Episode although
    a list is always returned (Series() consumes it as an iterable).
    """
    data = self._request("GET", f"{self.endpoints['videosApiEndpoint']}/{video_id}")

    ep_number = data.get("episode")
    sea_number = data.get("season")
    return [
        Episode(
            id_=data.get("id"),
            service=self.__class__,
            name=data.get("vodTitle", "").split(" - ")[-1],
            season=int(sea_number) if sea_number and sea_number.isdigit() else 0,
            number=int(ep_number) if ep_number and ep_number.isdigit() else 0,
            title=data.get("tvShow"),
            data=data,
        )
    ]
|
||||
|
||||
def _get_html(self, url: str) -> Optional[str]:
    """Scrape a show page and return its showUrlCode.

    Raises:
        ValueError: when the embedded page data, its JSON, or the show id
            cannot be parsed from the page source.
    """
    page = self.session.get(url).text
    pattern = re.compile(r"const showPageData = ({.*?});", re.DOTALL)

    match = pattern.search(page)
    if not match:
        raise ValueError(
            " - Failed to parse HTML. Page Data not found in the source code."
        )

    page_data = match.group(1)

    try:
        data = json.loads(page_data)
    except json.JSONDecodeError as e:
        # Fix: JSONDecodeError cannot be constructed with a single message
        # argument (it requires doc and pos), so re-raising it here crashed
        # with a TypeError. Raise a chained ValueError instead — still a
        # ValueError subclass relationship for any existing handlers.
        raise ValueError(f"Failed to parse JSON: {e}") from e

    show_id = data.get("video", {}).get("showUrlCode")
    if not show_id:
        raise ValueError(" - showUrlCode not found in the source code.")

    return show_id
|
||||
|
||||
def _signature_header(self, url: str) -> str:
    """Return the "<timestamp>_<hex digest>" HMAC-SHA256 request signature."""
    timestamp = int(time.time())
    secret = bytes.fromhex(self.config["api_key"])
    digest = hmac.new(secret, f"{timestamp}:{url}".encode("utf-8"), hashlib.sha256).hexdigest()
    return f"{timestamp}_{digest}"
|
||||
|
||||
def _auth_header(self) -> str:
    """Return the current UTC timestamp (YYYYmmddHHMMSS), base64-encoded."""
    stamp = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
    return base64.b64encode(stamp.encode("utf-8")).decode("ascii")
|
||||
|
||||
def _request(self, method: str, url: str, **kwargs: Any) -> Union[dict, list, bool]:
    """Send a signed API request and return the decoded JSON body.

    GET requests carry the HMAC signature header; POST requests carry the
    base64 timestamp auth header. Non-JSON responses are coerced to a
    boolean based on whether the body contains "true".

    Fix: the previous annotation `Any[dict | str]` was an invalid typing
    construct (typing.Any is not subscriptable).

    Raises:
        ConnectionError: on any status other than 200/201.
    """
    if method == "GET":
        self.session.headers.update(
            {
                "X-N10-SIG": self._signature_header(url),
                "tp-acceptfeature": "v1/fw;v1/drm;v2/live",
                "tp-platform": "UAP",
            }
        )
    elif method == "POST":
        self.session.headers.update({"X-Network-Ten-Auth": self._auth_header()})

    prep = self.session.prepare_request(Request(method, url, **kwargs))

    response = self.session.send(prep)
    if response.status_code not in (200, 201):
        raise ConnectionError(f"{response.text}")

    try:
        return json.loads(response.content)
    except json.JSONDecodeError:
        # Fix: simplified the redundant `True if ... else False` conditional.
        return "true" in response.text
|
||||
|
||||
@staticmethod
def _sanitize(title: str) -> str:
    """Slugify a show title for use in a 10play URL path."""
    slug = title.lower().replace("&", "and")
    # Drop separators outright, turn spaces into hyphens,
    # then strip remaining punctuation.
    slug = re.sub(r"[:;/()]", "", slug)
    slug = re.sub(r"[ ]", "-", slug)
    slug = re.sub(r"[\\*!?¿,'\"<>|$#`’]", "", slug)
    # Collapse runs of repeated separator characters.
    slug = re.sub(r"[.]{2,}", ".", slug)
    slug = re.sub(r"[_]{2,}", "_", slug)
    slug = re.sub(r"[-]{2,}", "-", slug)
    slug = re.sub(r"[ ]{2,}", " ", slug)
    return slug
|
||||
9
TEN/config.yaml
Normal file
9
TEN/config.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
headers:
|
||||
User-Agent: 10play/7.4.0.500325 Android UAP
|
||||
|
||||
endpoints:
|
||||
config: https://10.com.au/api/v1/config
|
||||
auth: https://10.com.au/api/user/auth
|
||||
query: https://vod.ten.com.au/api/videos/bcquery # androidapps-v2
|
||||
|
||||
api_key: "b918ff793563080c5821c89ee6c415c363cb36d369db1020369ac4b405a0211d"
|
||||
305
TUBI/__init__.py
Normal file
305
TUBI/__init__.py
Normal file
@ -0,0 +1,305 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional
|
||||
|
||||
import click
|
||||
from langcodes import Language
|
||||
from pyplayready.cdm import Cdm as PlayReadyCdm
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.downloaders import aria2c, requests
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Audio, Chapter, Chapters, Subtitle, Track, Tracks
|
||||
|
||||
|
||||
class TUBI(Service):
|
||||
"""
|
||||
Service code for TubiTV streaming service (https://tubitv.com/)
|
||||
|
||||
\b
|
||||
Version: 1.0.6
|
||||
Author: stabbedbybrick
|
||||
Authorization: Cookies (Optional)
|
||||
Geofence: Locked to whatever region the user is in (API only)
|
||||
Robustness:
|
||||
Widevine:
|
||||
L3: 1080p, AAC2.0
|
||||
PlayReady:
|
||||
SL2000: 1080p, AAC2.0
|
||||
Clear:
|
||||
1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input can be complete title URL or just the path:
|
||||
/series/300001423/gotham
|
||||
/tv-shows/200024793/s01-e01-pilot
|
||||
/movies/589279/the-outsiders
|
||||
- Use '-v H.265' to request HEVC tracks.
|
||||
|
||||
\b
|
||||
Notes:
|
||||
- Authentication is currently not required, but cookies are used if provided.
|
||||
- If 1080p exists, it's currently only available as H.265.
|
||||
- Unshackle fails to mux properly when n_m3u8dl_re is used, so aria2c is forced as downloader.
|
||||
- Search is currently disabled.
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^(?:https?://(?:www\.)?tubitv\.com?)?/(?:[a-z]{2}-[a-z]{2}/)?(?P<type>movies|series|tv-shows)/(?P<id>[a-z0-9-]+)"
|
||||
|
||||
@staticmethod
@click.command(name="TUBI", short_help="https://tubitv.com/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    """Click entry point: build the TUBI service from the CLI context."""
    return TUBI(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title):
    # Raw input URL or path; matched against TITLE_RE in get_titles.
    self.title = title
    super().__init__(ctx)

    # Select the DRM system from whichever CDM the user loaded.
    cdm = ctx.obj.cdm
    self.drm_system = "playready" if isinstance(cdm, PlayReadyCdm) else "widevine"

    # NOTE(review): any explicitly supplied vcodec (even "H.264") collapses
    # to "H265" here — confirm that -v is only ever used to request H.265,
    # as the class docstring suggests.
    vcodec = ctx.parent.params.get("vcodec")
    self.vcodec = "H264" if vcodec is None else "H265"
|
||||
|
||||
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Attach the "at" cookie's bearer token to the session, if present.

    Authentication is optional for Tubi; without cookies (or without an
    "at" cookie) the session simply stays anonymous.
    """
    super().authenticate(cookies, credential)
    self.auth_token = None
    if cookies is not None:
        self.auth_token = next((cookie.value for cookie in cookies if cookie.name == "at"), None)
        # Fix: only attach the header when a token was actually found;
        # previously a missing "at" cookie sent "Authorization: Bearer None"
        # on every request.
        if self.auth_token:
            self.session.headers.update({"Authorization": f"Bearer {self.auth_token}"})
|
||||
|
||||
# Disable search for now
|
||||
# def search(self) -> Generator[SearchResult, None, None]:
|
||||
# params = {
|
||||
# "search": self.title,
|
||||
# "include_linear": "true",
|
||||
# "include_channels": "false",
|
||||
# "is_kids_mode": "false",
|
||||
# }
|
||||
|
||||
# r = self.session.get(self.config["endpoints"]["search"], params=params)
|
||||
# r.raise_for_status()
|
||||
# results = r.json()
|
||||
# from devine.core.console import console
|
||||
# console.print(results)
|
||||
# exit()
|
||||
|
||||
# for result in results:
|
||||
# label = "series" if result["type"] == "s" else "movies" if result["type"] == "v" else result["type"]
|
||||
# title = (
|
||||
# result.get("title", "")
|
||||
# .lower()
|
||||
# .replace(" ", "-")
|
||||
# .replace(":", "")
|
||||
# .replace("(", "")
|
||||
# .replace(")", "")
|
||||
# .replace(".", "")
|
||||
# )
|
||||
# yield SearchResult(
|
||||
# id_=f"https://tubitv.com/{label}/{result.get('id')}/{title}",
|
||||
# title=result.get("title"),
|
||||
# description=result.get("description"),
|
||||
# label=label,
|
||||
# url=f"https://tubitv.com/{label}/{result.get('id')}/{title}",
|
||||
# )
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Resolve the input URL to Series or Movies metadata via the content API.

    URL kinds: "tv-shows" (single episode id), "series" (whole show),
    "movies" (single movie).
    """
    try:
        kind, content_id = (re.match(self.TITLE_RE, self.title).group(i) for i in ("type", "id"))
    except Exception:
        raise ValueError("Could not parse ID from title - is the URL correct?")

    params = {
        "app_id": "tubitv",
        "platform": "web",  # web, android, androidtv
        "device_id": str(uuid.uuid4()),
        "content_id": content_id,
        # Cap returned streams at 1080p for both codecs.
        "limit_resolutions[]": [
            "h264_1080p",
            "h265_1080p",
        ],
        # Preferred DRM/stream flavours, most specific first.
        "video_resources[]": [
            "dash_widevine_nonclearlead",
            "dash_playready_psshv0",
            "dash",
        ],
    }

    if kind == "tv-shows":
        # Episode URL: look up its parent series, then filter that series
        # down to the one requested episode id.
        content = self.session.get(self.config["endpoints"]["content"], params=params)
        content.raise_for_status()
        # NOTE(review): the "0" prefix is discarded by int() below — confirm
        # whether the API actually needs a zero-padded series id here.
        series_id = "0" + content.json().get("series_id")
        params.update({"content_id": int(series_id)})
        data = self.session.get(self.config["endpoints"]["content"], params=params).json()

        return Series(
            [
                Episode(
                    id_=episode["id"],
                    service=self.__class__,
                    title=data["title"],
                    season=int(season.get("id", 0)),
                    number=int(episode.get("episode_number", 0)),
                    # Episode titles look like "S01-E01 - Name"; keep the part
                    # after the first dash.
                    name=episode["title"].split("-")[1],
                    year=data.get("year"),
                    language=Language.find(episode.get("lang", "en")).to_alpha3(),
                    data=episode,
                )
                for season in data["children"]
                for episode in season["children"]
                if episode["id"] == content_id
            ]
        )

    if kind == "series":
        r = self.session.get(self.config["endpoints"]["content"], params=params)
        r.raise_for_status()
        data = r.json()

        return Series(
            [
                Episode(
                    id_=episode["id"],
                    service=self.__class__,
                    title=data["title"],
                    season=int(season.get("id", 0)),
                    number=int(episode.get("episode_number", 0)),
                    name=episode["title"].split("-")[1],
                    year=data.get("year"),
                    # `or "en"` also covers an explicit null/empty lang value.
                    language=Language.find(episode.get("lang") or "en").to_alpha3(),
                    data=episode,
                )
                for season in data["children"]
                for episode in season["children"]
            ]
        )

    if kind == "movies":
        r = self.session.get(self.config["endpoints"]["content"], params=params)
        r.raise_for_status()
        data = r.json()
        return Movies(
            [
                Movie(
                    id_=data["id"],
                    service=self.__class__,
                    year=data.get("year"),
                    name=data["title"],
                    language=Language.find(data.get("lang", "en")).to_alpha3(),
                    data=data,
                )
            ]
        )
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Pick the best matching DASH resource for the title and build the track list."""
    if not (resources := title.data.get("video_resources")):
        self.log.error(" - Failed to obtain video resources. Check geography settings.")
        self.log.info(f"Title is available in: {title.data.get('country')}")
        sys.exit(1)

    codecs = [x.get("codec") for x in resources]
    if not any(self.vcodec in x for x in codecs):
        raise ValueError(f"Could not find a {self.vcodec} video resource for this title")

    # Prefer a resource matching both our DRM system and codec; otherwise
    # fall back to any plain DASH resource with the right codec.
    resource = next((
        x for x in resources
        if self.drm_system in x.get("type", "") and self.vcodec in x.get("codec", "")
    ), None) or next((
        x for x in resources
        if self.drm_system not in x.get("type", "") and
        "dash" in x.get("type", "") and
        self.vcodec in x.get("codec", "")
    ), None)
    if not resource:
        raise ValueError("Could not find a video resource for this title")

    manifest = resource.get("manifest", {}).get("url")
    if not manifest:
        raise ValueError("Could not find a manifest for this title")

    # Stash the license server URL for the license callbacks.
    title.data["license_url"] = resource.get("license_server", {}).get("url")

    tracks = DASH.from_url(url=manifest, session=self.session).to_tracks(language=title.language)
    for track in tracks:
        # Representations carrying a BaseURL are single-file downloads rather
        # than segmented; rewrite the track URL and use a direct downloader.
        rep_base = track.data["dash"]["representation"].find("BaseURL")
        if rep_base is not None:
            base_url = os.path.dirname(track.url)
            track_base = rep_base.text
            track.url = f"{base_url}/{track_base}"
            track.descriptor = Track.Descriptor.URL
            track.downloader = aria2c

        if isinstance(track, Audio):
            # Flag audio-description / alternate audio renditions.
            role = track.data["dash"]["adaptation_set"].find("Role")
            if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
                track.descriptive = True

    if title.data.get("subtitles"):
        # Only the first subtitle entry is used; the codec is inferred from
        # the last three characters of the URL (its file extension).
        tracks.add(
            Subtitle(
                id_=hashlib.md5(title.data["subtitles"][0]["url"].encode()).hexdigest()[0:6],
                url=title.data["subtitles"][0]["url"],
                codec=Subtitle.Codec.from_mime(title.data["subtitles"][0]["url"][-3:]),
                language=title.data["subtitles"][0].get("lang_alpha3", title.language),
                downloader=requests,
                is_original_lang=True,
                forced=False,
                sdh=False,
            )
        )
    return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> Chapters:
    """Build chapter markers from the title's credit cue points, if any."""
    if not (cue_points := title.data.get("credit_cuepoints")):
        return Chapters()

    chapters = []
    if cue_points.get("recap_start"):
        chapters.append(Chapter(name="Recap", timestamp=float(cue_points["recap_start"])))
    if cue_points.get("intro_start") and cue_points.get("intro_end"):
        chapters.append(Chapter(name="Intro", timestamp=float(cue_points["intro_start"])))
        # Unnamed chapter marking the end of the intro.
        chapters.append(Chapter(timestamp=float(cue_points["intro_end"])))
    if cue_points.get("early_credits_start"):
        chapters.append(Chapter(name="Early Credits", timestamp=float(cue_points["early_credits_start"])))
    if cue_points.get("postlude"):
        chapters.append(Chapter(name="End Credits", timestamp=float(cue_points["postlude"])))

    # Ensure a chapter exists at the very start of playback.
    # NOTE(review): this assumes Chapter normalizes numeric timestamps to the
    # "HH:MM:SS.mmm" string form — confirm against the Chapter class.
    if not any(c.timestamp == "00:00:00.000" for c in chapters):
        chapters.append(Chapter(timestamp=0))

    return sorted(chapters, key=lambda x: x.timestamp)
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    # No service certificate is used for this service.
    return None
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> bytes | str | None:
    """POST the Widevine challenge to the title's license server and return the raw response."""
    license_url = title.data.get("license_url")
    if not license_url:
        return None

    response = self.session.post(url=license_url, data=challenge)
    if response.status_code != 200:
        raise ConnectionError(response.text)

    return response.content
|
||||
|
||||
def get_playready_license(self, *, challenge: bytes, title: Episode | Movie, track: Any) -> bytes | str | None:
    """POST the PlayReady challenge to the title's license server (mirrors the Widevine flow)."""
    if not (license_url := title.data.get("license_url")):
        return None

    r = self.session.post(url=license_url, data=challenge)
    if r.status_code != 200:
        raise ConnectionError(r.text)

    return r.content
|
||||
5
TUBI/config.yaml
Normal file
5
TUBI/config.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
endpoints:
|
||||
content: https://uapi.adrise.tv/cms/content # https://content-cdn.production-public.tubi.io/api/v2/content
|
||||
search: https://search.production-public.tubi.io/api/v1/search
|
||||
|
||||
|
||||
308
TVNZ/__init__.py
Normal file
308
TVNZ/__init__.py
Normal file
@ -0,0 +1,308 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from collections.abc import Generator
|
||||
from datetime import timedelta
|
||||
from http.cookiejar import MozillaCookieJar
|
||||
from typing import Any, Optional, Union
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from lxml import etree
|
||||
from pywidevine.cdm import Cdm as WidevineCdm
|
||||
from requests import Request
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapters, Tracks
|
||||
|
||||
|
||||
class TVNZ(Service):
|
||||
"""
|
||||
\b
|
||||
Service code for TVNZ streaming service (https://www.tvnz.co.nz).
|
||||
|
||||
\b
|
||||
Version: 1.0.2
|
||||
Author: stabbedbybrick
|
||||
Authorization: Credentials
|
||||
Robustness:
|
||||
L3: 1080p, AAC2.0
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Input can be complete URL or path:
|
||||
SHOW: /shows/tulsa-king
|
||||
EPISODE: /shows/tulsa-king/episodes/s1-e1
|
||||
MOVIE: /shows/the-revenant
|
||||
SPORT: /sport/tennis/wta-tour/guadalajara-open-final
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("nz",)
|
||||
|
||||
@staticmethod
@click.command(name="TVNZ", short_help="https://www.tvnz.co.nz", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> TVNZ:
    # Click entry point: forwards the parsed arguments to the service.
    return TVNZ(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Keep the raw input (URL or path); parsing happens in get_titles().
    self.title = title
    super().__init__(ctx)

    # Apply the Android TV client headers from config to every request.
    self.session.headers.update(self.config["headers"])
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search the catalogue for the input string and yield matching titles."""
    params = {
        "q": self.title.strip(),
        "includeTypes": "all",
    }

    results = self._request("GET", "/api/v1/android/play/search", params=params)["results"]

    for result in results:
        yield SearchResult(
            id_=result["page"].get("url"),
            title=result.get("title"),
            description=result.get("synopsis"),
            label=result.get("type"),
            url="https://www.tvnz.co.nz" + result["page"].get("url"),
        )
|
||||
|
||||
def authenticate(self, cookies: Optional[MozillaCookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in with email/password and attach the access token to the session.

    Tokens are cached per-credential and reused until they expire.
    """
    super().authenticate(cookies, credential)
    if not credential:
        raise EnvironmentError("Service requires Credentials for Authentication.")

    cache = self.cache.get(f"tokens_{credential.sha1}")

    if cache and not cache.expired:
        self.log.info(" + Using cached Tokens...")
        tokens = cache.data
    else:
        self.log.info(" + Logging in...")
        payload = {"email": credential.username, "password": credential.password, "keepMeLoggedIn": True}

        response = self.session.post(
            self.config["endpoints"]["base_api"] + "/api/v1/androidtv/consumer/login", json=payload
        )
        response.raise_for_status()
        # The access token is returned in the "aat" response header.
        if not response.headers.get("aat"):
            raise ValueError("Failed to authenticate: " + response.text)

        tokens = {
            "access_token": response.headers.get("aat"),
            "aft_token": response.headers.get("aft"),  # ?
        }

        # NOTE(review): "aat_expires_in" is a header *string* — confirm that
        # cache.set accepts it as-is or coerce it to int.
        cache.set(tokens, expiration=response.headers.get("aat_expires_in"))

    self.session.headers.update({"Authorization": "Bearer {}".format(tokens["access_token"])})

    # Disable SSL verification due to issues with newer versions of requests library.
    self.session.verify = False
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Resolve the input URL/path into titles via the page API."""
    try:
        path = urlparse(self.title).path
    except Exception as e:
        raise ValueError("Could not parse ID from title: {}".format(e))

    page = self._request("GET", "/api/v4/androidtv/play/page/{}".format(path))

    if page["layout"].get("video"):
        # Direct video page (single episode or sport event).
        title = page.get("title", "").replace("Episodes", "")
        video = self._request("GET", page["layout"]["video"].get("href"))
        episodes = self._episode(video, title)
        return Series(episodes)

    else:
        # Browse page: walk every season list, following pagination.
        module = page["layout"]["slots"]["main"]["modules"][0]
        label = module.get("label", "")
        lists = module.get("lists")
        title = page.get("title", "").replace(label, "")

        seasons = [x.get("href") for x in lists]

        episodes = []
        for season in seasons:
            data = self._request("GET", season)
            episodes.extend([x for x in data["_embedded"].values()])

            while data.get("nextPage"):
                data = self._request("GET", data["nextPage"])
                episodes.extend([x for x in data["_embedded"].values()])

        if label in ("Episodes", "Stream"):
            episodes = self._show(episodes, title)
            return Series(episodes)

        elif label in ("Movie", "Movies"):
            movie = self._movie(episodes, title)
            return Movies(movie)
        # NOTE(review): any other label falls through and implicitly returns
        # None — confirm whether this should raise instead.
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Fetch the DASH manifest (TVNZ playback API or Brightcove) and build tracks."""
    metadata = title.data.get("publisherMetadata") or title.data.get("media")
    if not metadata:
        self.log.error("Unable to find metadata for this episode")
        return

    source = metadata.get("type") or metadata.get("source")
    video_id = metadata.get("brightcoveVideoId") or metadata.get("id")
    account_id = metadata.get("brightcoveAccountId") or metadata.get("accountId")
    playback = title.data.get("playbackHref", "")

    self.drm_token = None
    if source != "brightcove":
        # TVNZ's own playback API: manifest URL plus optional DRM token.
        data = self._request("GET", playback)
        self.license = (
            data["encryption"]["licenseServers"]["widevine"]
            if data["encryption"].get("drmEnabled")
            else None
        )
        self.drm_token = data["encryption"].get("drmToken")
        source_manifest = data["streaming"]["dash"].get("url")

    else:
        # Brightcove playback API: pick the Widevine-protected source.
        data = self._request(
            "GET", self.config["endpoints"]["brightcove"].format(account_id, video_id),
            headers={"BCOV-POLICY": self.config["policy"]},
        )

        self.license = next((
            x["key_systems"]["com.widevine.alpha"]["license_url"]
            for x in data["sources"]
            if x.get("key_systems").get("com.widevine.alpha")),
            None,
        )
        source_manifest = next((
            x["src"] for x in data["sources"]
            if x.get("key_systems").get("com.widevine.alpha")),
            None,
        )

    # Trim the advertised duration; the final segment 404s (see trim_duration).
    manifest = self.trim_duration(source_manifest)
    tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

    for track in tracks.audio:
        # Flag audio-description / alternate audio renditions.
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    # No chapter data is available from this service.
    return Chapters()
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    # Use pywidevine's bundled common privacy certificate.
    return WidevineCdm.common_privacy_cert
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, **_: Any) -> str:
    """Send the license challenge to the license server captured in get_tracks().

    Uses the playback DRM token when one was issued; otherwise falls back
    to the session's normal (authenticated) headers.
    """
    if not self.license:
        return None

    if self.drm_token:
        headers = {"Authorization": f"Bearer {self.drm_token}"}
    else:
        headers = self.session.headers
    response = self.session.post(self.license, headers=headers, data=challenge)
    response.raise_for_status()

    return response.content
|
||||
|
||||
# Service specific
|
||||
|
||||
def _show(self, episodes: list, title: str) -> list[Episode]:
    """Map raw episode dicts from the season API into Episode objects."""
    return [
        Episode(
            id_=episode.get("videoId"),
            service=self.__class__,
            title=title,
            season=int(episode.get("seasonNumber")) if episode.get("seasonNumber") else 0,
            number=int(episode.get("episodeNumber")) if episode.get("episodeNumber") else 0,
            name=episode.get("title"),
            language="en",
            data=episode,
        )
        for episode in episodes
    ]
|
||||
|
||||
def _movie(self, movies: list, title: str) -> list[Movie]:
    """Map raw movie dicts into Movie objects (year is not exposed by the API)."""
    return [
        Movie(
            id_=movie.get("videoId"),
            service=self.__class__,
            name=title,
            year=None,
            language="en",
            data=movie,
        )
        for movie in movies
    ]
|
||||
|
||||
def _episode(self, video: dict, title: str) -> list[Episode]:
    """Wrap a single video payload in a one-element Episode list.

    Sport videos take their title from the embedded "competition" entry
    and append the event phase to the episode name.
    """
    kind = video.get("type")
    name = video.get("title")

    if kind == "sportVideo" and video.get("_embedded"):
        _type = next((x for x in video["_embedded"].values() if x.get("type") == "competition"), None)
        title = _type.get("title") if _type else title
        name = video.get("title", "") + " " + video.get("phase", "")

    return [
        Episode(
            id_=video.get("videoId"),
            service=self.__class__,
            title=title,
            season=int(video.get("seasonNumber")) if video.get("seasonNumber") else 0,
            number=int(video.get("episodeNumber")) if video.get("episodeNumber") else 0,
            name=name,
            language="en",
            data=video,
        )
    ]
|
||||
|
||||
def _request(
    self,
    method: str,
    api: str,
    params: dict = None,
    headers: dict = None,
    payload: dict = None,
) -> dict:
    """Send an API request and return the decoded JSON body.

    Raises ConnectionError with the API's own error message when the
    response body carries a "message" field, or with the raw body text
    when the response is not valid JSON.

    Fixes: the original's broad `except Exception` swallowed the specific
    API-message ConnectionError raised inside the try block and replaced
    it with a generic one; the return annotation `Any[dict | str]` was
    also not a valid type expression; the JSON decode error is now chained.
    """
    url = urljoin(self.config["endpoints"]["base_api"], api)
    if headers:
        self.session.headers.update(headers)

    prep = self.session.prepare_request(Request(method, url, params=params, json=payload))
    response = self.session.send(prep)

    try:
        data = json.loads(response.content)
    except ValueError as e:
        raise ConnectionError("Request failed: {} - {}".format(response.status_code, response.text)) from e

    # API-level errors come back as JSON bodies with a "message" field.
    if isinstance(data, dict) and data.get("message"):
        raise ConnectionError(f"{response.status_code} - {data.get('message')}")

    return data
|
||||
|
||||
def trim_duration(self, source_manifest: str) -> str:
    """
    The last segment on all tracks return a 404 for some reason, causing a failed download.
    So we trim the duration by exactly one segment to account for that.

    TODO: Calculate the segment duration instead of assuming length.
    """
    manifest = DASH.from_url(source_manifest, self.session).manifest
    period_duration = manifest.get("mediaPresentationDuration")
    period_duration = DASH.pt_to_sec(period_duration)

    # Assumes 6-second segments; rebuild the ISO-8601 duration attribute.
    hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
    new_duration = f"PT{hours}H{minutes}M{seconds}S"
    manifest.set("mediaPresentationDuration", new_duration)

    return etree.tostring(manifest, encoding="unicode")
|
||||
9
TVNZ/config.yaml
Normal file
9
TVNZ/config.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
headers:
|
||||
User-Agent: "AndroidTV/!/!"
|
||||
x-tvnz-api-client-id: "androidtv/!.!.!"
|
||||
|
||||
endpoints:
|
||||
base_api: "https://apis-public-prod.tech.tvnz.co.nz"
|
||||
brightcove: "https://edge.api.brightcove.com/playback/v1/accounts/{}/videos/{}"
|
||||
|
||||
policy: "BCpkADawqM0IurzupiJKMb49WkxM__ngDMJ3GOQBhN2ri2Ci_lHwDWIpf4sLFc8bANMc-AVGfGR8GJNgxGqXsbjP1gHsK2Fpkoj6BSpwjrKBnv1D5l5iGPvVYCo"
|
||||
194
UKTV/__init__.py
Normal file
194
UKTV/__init__.py
Normal file
@ -0,0 +1,194 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from collections.abc import Generator
|
||||
from datetime import timedelta
|
||||
from typing import Any, Union
|
||||
|
||||
import click
|
||||
from click import Context
|
||||
from lxml import etree
|
||||
from unshackle.core.manifests.dash import DASH
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series
|
||||
from unshackle.core.tracks import Chapter, Chapters, Tracks
|
||||
|
||||
|
||||
class UKTV(Service):
|
||||
"""
|
||||
Service code for 'U' (formerly UKTV Play) streaming service (https://u.co.uk/).
|
||||
|
||||
\b
|
||||
Version: 1.0.1
|
||||
Author: stabbedbybrick
|
||||
Authorization: None
|
||||
Robustness:
|
||||
L3: 1080p
|
||||
|
||||
\b
|
||||
Tips:
|
||||
- Use complete title URL as input:
|
||||
SERIES: https://u.co.uk/shows/love-me/watch-online
|
||||
EPISODE: https://u.co.uk/shows/love-me/series-1/episode-1/6355269425112
|
||||
|
||||
"""
|
||||
|
||||
GEOFENCE = ("gb",)
|
||||
ALIASES = ("uktvplay", "u",)
|
||||
|
||||
@staticmethod
@click.command(name="UKTV", short_help="https://u.co.uk/", help=__doc__)
@click.argument("title", type=str)
@click.pass_context
def cli(ctx: Context, **kwargs: Any) -> UKTV:
    # Click entry point: forwards the parsed arguments to the service.
    return UKTV(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx: Context, title: str):
    # Keep the raw input; slug/video id are parsed in get_titles().
    self.title = title
    super().__init__(ctx)

    # The VOD API expects the Android client's okhttp user agent.
    self.session.headers.update({"user-agent": "okhttp/4.7.2"})
    self.base = self.config["endpoints"]["base"]
|
||||
|
||||
def search(self) -> Generator[SearchResult, None, None]:
    """Search the catalogue for the input string and yield matching shows."""
    r = self.session.get(self.base + f"search/?q={self.title}")
    r.raise_for_status()
    results = r.json()

    for result in results:
        # Results link to the show's watch-online page by slug.
        link = "https://u.co.uk/shows/{}/watch-online"

        yield SearchResult(
            id_=link.format(result.get("slug")),
            title=result.get("name"),
            description=result.get("synopsis"),
            label=result.get("type"),
            url=link.format(result.get("slug")),
        )
|
||||
|
||||
def get_titles(self) -> Union[Movies, Series]:
    """Fetch every season of the show and return its episodes as a Series.

    When the input URL points at a single episode, the result is filtered
    down to that one video id.

    Refactored: the two near-identical Episode list comprehensions are
    merged into one; the video-id filter only applies when an id was
    supplied (so `int(...)` is never called on a missing id in the
    show-level path, matching the original behavior).
    """
    slug, video = self.parse_title(self.title)

    r = self.session.get(self.base + f"brand/?slug={slug}")
    r.raise_for_status()
    data = r.json()

    # One request per season to collect its episode listing.
    series = [series["id"] for series in data["series"]]
    seasons = [self.session.get(self.base + f"series/?id={i}").json() for i in series]

    episodes = [
        Episode(
            id_=episode.get("video_id"),
            service=self.__class__,
            title=episode.get("brand_name"),
            season=int(episode.get("series_number", 0)),
            number=int(episode.get("episode_number", 0)),
            name=episode.get("name"),
            language="en",
            data=episode,
        )
        for season in seasons
        for episode in season["episodes"]
        if not video or int(episode.get("video_id")) == int(video)
    ]

    return Series(episodes)
|
||||
|
||||
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
    """Query the Brightcove playback API and build the track list."""
    r = self.session.get(
        self.config["endpoints"]["playback"].format(id=title.id),
        headers=self.config["headers"],
    )
    r.raise_for_status()
    data = r.json()

    # Pick the Widevine-protected DASH source and its license server.
    self.license = next((
        x["key_systems"]["com.widevine.alpha"]["license_url"]
        for x in data["sources"]
        if x.get("key_systems").get("com.widevine.alpha")),
        None,
    )
    source_manifest = next((
        x["src"] for x in data["sources"]
        if x.get("key_systems").get("com.widevine.alpha")),
        None,
    )
    if not self.license or not source_manifest:
        raise ValueError("Failed to get license or manifest")

    # Trim the advertised duration; the final segment 404s (see trim_duration).
    manifest = self.trim_duration(source_manifest)
    tracks = DASH.from_text(manifest, source_manifest).to_tracks(title.language)

    for track in tracks.audio:
        # Flag audio-description / alternate audio renditions.
        role = track.data["dash"]["representation"].find("Role")
        if role is not None and role.get("value") in ["description", "alternative", "alternate"]:
            track.descriptive = True

    return tracks
|
||||
|
||||
def get_chapters(self, title: Union[Movie, Episode]) -> Chapters:
    """Return a single "Credits" chapter when the title carries a credits cue point."""
    cuepoint = title.data.get("credits_cuepoint")
    if cuepoint:
        return Chapters([Chapter(name="Credits", timestamp=cuepoint)])
    return Chapters([])
|
||||
|
||||
def get_widevine_service_certificate(self, **_: Any) -> str:
    # No service certificate is used for this service.
    return None
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, **_: Any) -> bytes:
    """POST the challenge to the license server captured in get_tracks()."""
    r = self.session.post(url=self.license, data=challenge)
    if r.status_code != 200:
        raise ConnectionError(r.text)
    return r.content
|
||||
|
||||
# Service specific functions
|
||||
|
||||
@staticmethod
def parse_title(title: str) -> tuple[str, str]:
    """Extract the show slug and optional episode video id from a U URL.

    Accepts a bare slug, a show URL, or a full episode URL. Returns
    (slug, video_id); video_id is None for show-level input.

    Fix: the domain's second dot was unescaped (``u\\.co.uk``), letting
    ``.`` match any character in the host check.
    """
    title_re = (
        r"^(?:https?://(?:www\.)?u\.co\.uk/shows/)?"
        r"(?P<slug>[a-z0-9-]+)(?:/[a-z0-9-]+/[a-z0-9-]+/(?P<vid>[0-9-]+))?"
    )

    try:
        slug, video = (re.match(title_re, title).group(i) for i in ("slug", "vid"))
    except Exception:
        raise ValueError("Could not parse ID from title - is the URL correct?")

    return slug, video
|
||||
|
||||
@staticmethod
def trim_duration(source_manifest: str) -> str:
    """
    The last segment on all tracks return a 404 for some reason, causing a failed download.
    So we trim the duration by exactly one segment to account for that.

    TODO: Calculate the segment duration instead of assuming length.
    """
    manifest = DASH.from_url(source_manifest).manifest
    period_duration = manifest.get("mediaPresentationDuration")
    period_duration = DASH.pt_to_sec(period_duration)

    # Assumes 6-second segments; rebuild the ISO-8601 duration attribute.
    hours, minutes, seconds = str(timedelta(seconds=period_duration - 6)).split(":")
    new_duration = f"PT{hours}H{minutes}M{seconds}S"
    manifest.set("mediaPresentationDuration", new_duration)

    return etree.tostring(manifest, encoding="unicode")
|
||||
9
UKTV/config.yaml
Normal file
9
UKTV/config.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
headers:
|
||||
BCOV-POLICY: BCpkADawqM2ZEz-kf0i2xEP9VuhJF_DB5boH7YAeSx5EHDSNFFl4QUoHZ3bKLQ9yWboSOBNyvZKm4HiZrqMNRxXm-laTAnmls1QOL7_kUM3Eij4KjQMz0epMs3WIedg64fnRxQTX6XubGE9p
|
||||
User-Agent: Dalvik/2.1.0 (Linux; U; Android 12; SM-A226B Build/SP1A.210812.016)
|
||||
Host: edge.api.brightcove.com
|
||||
Connection: keep-alive
|
||||
|
||||
endpoints:
|
||||
base: https://vschedules.uktv.co.uk/vod/
|
||||
playback: https://edge.api.brightcove.com/playback/v1/accounts/1242911124001/videos/{id}
|
||||
452
VIDO/__init__.py
Normal file
452
VIDO/__init__.py
Normal file
@ -0,0 +1,452 @@
|
||||
import re
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
from urllib.parse import urljoin
|
||||
from hashlib import md5
|
||||
from typing import Optional, Union
|
||||
from http.cookiejar import CookieJar
|
||||
from langcodes import Language
|
||||
|
||||
import click
|
||||
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import HLS, DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
class VIDO(Service):
|
||||
"""
|
||||
Vidio.com service, Series and Movies, login required.
|
||||
Version: 2.3.0
|
||||
|
||||
Supports URLs like:
|
||||
• https://www.vidio.com/premier/2978/giligilis (Series)
|
||||
• https://www.vidio.com/watch/7454613-marantau-short-movie (Movie)
|
||||
|
||||
Security: HD@L3 (Widevine DRM when available)
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^https?://(?:www\.)?vidio\.com/(?:premier|series|watch)/(?P<id>\d+)"
|
||||
GEOFENCE = ("ID",)
|
||||
|
||||
@staticmethod
@click.command(name="VIDO", short_help="https://vidio.com (login required)")
@click.argument("title", type=str)
@click.pass_context
def cli(ctx, **kwargs):
    # Click entry point: forwards the parsed arguments to the service.
    return VIDO(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
    """Parse the Vidio URL and set up app identifiers plus auth/DRM state.

    Fix: movie detection now checks for the "/watch/" path segment rather
    than the substring "watch" anywhere in the URL, which misclassified a
    premier/series URL whose slug contains the word "watch".
    """
    super().__init__(ctx)

    match = re.match(self.TITLE_RE, title)
    if not match:
        raise ValueError(f"Unsupported or invalid Vidio URL: {title}")
    self.content_id = match.group("id")

    # /watch/ URLs are single movies; /premier/ and /series/ are shows.
    self.is_movie = "/watch/" in title

    # Static app identifiers from Android traffic
    self.API_AUTH = "laZOmogezono5ogekaso5oz4Mezimew1"
    self.USER_AGENT = "vidioandroid/7.14.6-e4d1de87f2 (3191683)"
    self.API_APP_INFO = "android/15/7.14.6-e4d1de87f2-3191683"
    self.VISITOR_ID = str(uuid.uuid4())

    # Auth state
    self._email = None
    self._user_token = None
    self._access_token = None

    # DRM state
    self.license_url = None
    self.custom_data = None
    self.cdm = ctx.obj.cdm
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
    """Log in with email/password; tokens are cached per-email until expiry."""
    if not credential or not credential.username or not credential.password:
        raise ValueError("Vidio requires email and password login.")

    self._email = credential.username
    password = credential.password

    cache_key = f"auth_tokens_{self._email}"
    cache = self.cache.get(cache_key)

    # Check if valid tokens are already in the cache
    if cache and not cache.expired:
        self.log.info("Using cached authentication tokens")
        cached_data = cache.data
        self._user_token = cached_data.get("user_token")
        self._access_token = cached_data.get("access_token")
        if self._user_token and self._access_token:
            return

    # If no valid cache, proceed with login
    self.log.info("Authenticating with username and password")
    headers = {
        "referer": "android-app://com.vidio.android",
        "x-api-platform": "app-android",
        "x-api-auth": self.API_AUTH,
        "user-agent": self.USER_AGENT,
        "x-api-app-info": self.API_APP_INFO,
        "accept-language": "en",
        "content-type": "application/x-www-form-urlencoded",
        "x-visitor-id": self.VISITOR_ID,
    }

    # NOTE(review): credentials are sent unencoded in the form body — an
    # email/password containing '&' or '=' would break; consider urlencode.
    data = f"login={self._email}&password={password}"
    r = self.session.post("https://api.vidio.com/api/login", headers=headers, data=data)
    r.raise_for_status()

    auth_data = r.json()
    self._user_token = auth_data["auth"]["authentication_token"]
    self._access_token = auth_data["auth_tokens"]["access_token"]
    self.log.info(f"Authenticated as {self._email}")

    try:
        # NOTE(review): fromisoformat cannot parse a trailing 'Z' before
        # Python 3.11 (raises ValueError, caught below), and a naive
        # timestamp would make the subtraction raise TypeError, which is
        # NOT caught here — confirm the API's timestamp format.
        expires_at_str = auth_data["auth_tokens"]["access_token_expires_at"]
        expires_at_dt = datetime.fromisoformat(expires_at_str)
        now_utc = datetime.now(timezone.utc)
        expiration_in_seconds = max(0, int((expires_at_dt - now_utc).total_seconds()))
        self.log.info(f"Token expires in {expiration_in_seconds / 60:.2f} minutes.")
    except (KeyError, ValueError) as e:
        self.log.warning(f"Could not parse token expiration: {e}. Defaulting to 1 hour.")
        expiration_in_seconds = 3600

    cache.set({
        "user_token": self._user_token,
        "access_token": self._access_token
    }, expiration=expiration_in_seconds)
|
||||
|
||||
def _headers(self):
    """Return the authenticated header set for Vidio API calls.

    Raises RuntimeError when authenticate() has not populated the
    user/access tokens yet.
    """
    if not (self._user_token and self._access_token):
        raise RuntimeError("Not authenticated. Call authenticate() first.")

    headers = {
        "referer": "android-app://com.vidio.android",
        "x-api-platform": "app-android",
        "x-api-auth": self.API_AUTH,
        "user-agent": self.USER_AGENT,
        "x-api-app-info": self.API_APP_INFO,
        "x-visitor-id": self.VISITOR_ID,
        "x-user-email": self._email,
        "x-user-token": self._user_token,
        "x-authorization": self._access_token,
        "accept-language": "en",
        "accept": "application/json",
        "accept-charset": "UTF-8",
        "content-type": "application/vnd.api+json",
    }
    return headers
|
||||
|
||||
def _extract_subtitles_from_mpd(self, mpd_url: str) -> list[Subtitle]:
    """Manually parse an MPD manifest for subtitle tracks.

    Only plain text/vtt representations (used for free content) are
    handled. Best-effort: any failure is logged as a warning and
    whatever was collected so far is returned instead of raising.
    """
    found = []

    try:
        resp = self.session.get(mpd_url)
        resp.raise_for_status()

        # Relative segment paths resolve against the manifest's directory.
        manifest_base = mpd_url.rsplit('/', 1)[0] + '/'

        # Strip the default namespace so element lookups stay simple.
        stripped = re.sub(r'\sxmlns="[^"]+"', '', resp.text)
        root = ET.fromstring(stripped)

        for aset in root.findall('.//AdaptationSet'):
            # Only text adaptation sets can hold subtitles.
            if aset.get('contentType', '') != 'text':
                continue
            lang = aset.get('lang', 'und')

            for rep in aset.findall('Representation'):
                # Handle plain VTT (free content) only.
                if rep.get('mimeType', '') != 'text/vtt':
                    continue
                seg_list = rep.find('SegmentList')
                if seg_list is None:
                    continue

                for seg in seg_list.findall('SegmentURL'):
                    media = seg.get('media')
                    if not media:
                        continue
                    full_url = urljoin(manifest_base, media)

                    # A "-auto" suffix marks machine-generated captions;
                    # strip it so the language tag parses cleanly.
                    clean_lang = lang.replace('-auto', '')

                    found.append(
                        Subtitle(
                            id_=md5(full_url.encode()).hexdigest()[0:16],
                            url=full_url,
                            codec=Subtitle.Codec.WebVTT,
                            language=Language.get(clean_lang),
                            forced=False,
                            sdh=False,
                        )
                    )
                    self.log.debug(f"Found VTT subtitle: {lang} -> {full_url}")

    except Exception as e:
        # Deliberate best-effort: subtitles are optional extras here.
        self.log.warning(f"Failed to extract subtitles from MPD: {e}")

    return found
|
||||
|
||||
def get_titles(self) -> Titles_T:
    """Build title objects from the Vidio API.

    Movies come from the video detail endpoint; series are assembled by
    walking the content profile's season playlists and paginating each
    playlist's video list.
    """
    headers = self._headers()

    if self.is_movie:
        r = self.session.get(f"https://api.vidio.com/api/videos/{self.content_id}/detail", headers=headers)
        r.raise_for_status()
        video_data = r.json()["video"]
        # Release year is best-effort: first 4 chars of publish_date.
        year = None
        if video_data.get("publish_date"):
            try:
                year = int(video_data["publish_date"][:4])
            except (ValueError, TypeError):
                pass
        return Movies([
            Movie(
                id_=video_data["id"],
                service=self.__class__,
                name=video_data["title"],
                description=video_data.get("description", ""),
                year=year,
                language=Language.get("id"),  # Vidio content defaults to Indonesian
                data=video_data,
            )
        ])
    else:
        r = self.session.get(f"https://api.vidio.com/content_profiles/{self.content_id}", headers=headers)
        r.raise_for_status()
        root = r.json()["data"]
        series_title = root["attributes"]["title"]

        r_playlists = self.session.get(
            f"https://api.vidio.com/content_profiles/{self.content_id}/playlists",
            headers=headers
        )
        r_playlists.raise_for_status()
        playlists_data = r_playlists.json()

        # Prefer the API's own metadata to identify season playlists.
        season_playlist_ids = set()
        if "meta" in playlists_data and "playlist_group" in playlists_data["meta"]:
            for group in playlists_data["meta"]["playlist_group"]:
                if group.get("type") == "season":
                    season_playlist_ids.update(group.get("playlist_ids", []))

        season_playlists = []
        for pl in playlists_data["data"]:
            playlist_id = int(pl["id"])
            name = pl["attributes"]["name"].lower()

            if season_playlist_ids:
                # Trust the explicit season grouping when present.
                if playlist_id in season_playlist_ids:
                    season_playlists.append(pl)
            else:
                # Fallback heuristic: name-match seasons/episodes while
                # excluding trailer and extras playlists.
                if ("season" in name or name == "episode" or name == "episodes") and \
                        "trailer" not in name and "extra" not in name:
                    season_playlists.append(pl)

        if not season_playlists:
            raise ValueError("No season playlists found for this series.")

        def extract_season_number(pl):
            # "Season N" -> N; generic names ("season"/"episodes") -> 1;
            # anything else -> 0 (normalized to 1 later).
            name = pl["attributes"]["name"]
            match = re.search(r"season\s*(\d+)", name, re.IGNORECASE)
            if match:
                return int(match.group(1))
            elif name.lower() in ["season", "episodes", "episode"]:
                return 1
            else:
                return 0

        season_playlists.sort(key=extract_season_number)

        all_episodes = []

        for playlist in season_playlists:
            playlist_id = playlist["id"]
            season_number = extract_season_number(playlist)

            if season_number == 0:
                season_number = 1

            self.log.debug(f"Processing playlist '{playlist['attributes']['name']}' as Season {season_number}")

            # Walk JSON:API pagination until no "next" link remains.
            page = 1
            while True:
                r_eps = self.session.get(
                    f"https://api.vidio.com/content_profiles/{self.content_id}/playlists/{playlist_id}/videos",
                    params={
                        "page[number]": page,
                        "page[size]": 20,
                        "sort": "order",
                        "included": "upcoming_videos"
                    },
                    headers=headers,
                )
                r_eps.raise_for_status()
                page_data = r_eps.json()

                for raw_ep in page_data["data"]:
                    attrs = raw_ep["attributes"]
                    # Episode number = its position within this season so far.
                    ep_number = len([e for e in all_episodes if e.season == season_number]) + 1
                    all_episodes.append(
                        Episode(
                            id_=int(raw_ep["id"]),
                            service=self.__class__,
                            title=series_title,
                            season=season_number,
                            number=ep_number,
                            name=attrs["title"],
                            description=attrs.get("description", ""),
                            language=Language.get("id"),
                            data=raw_ep,
                        )
                    )

                if not page_data["links"].get("next"):
                    break
                page += 1

        if not all_episodes:
            raise ValueError("No episodes found in any season.")

        return Series(all_episodes)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """Select a playable stream for the title and return its tracks.

    Priority: DRM'd DASH (when Widevine data is present) > plain HLS >
    plain DASH. Side effects: stores self.custom_data / self.license_url
    for get_widevine_license() (cleared again for non-DRM streams).
    """
    headers = self._headers()
    # Device fingerprint headers expected by the Android stream endpoint.
    headers.update({
        "x-device-brand": "samsung",
        "x-device-model": "SM-A525F",
        "x-device-form-factor": "phone",
        "x-device-soc": "Qualcomm SM7125",
        "x-device-os": "Android 15 (API 35)",
        "x-device-android-mpc": "0",
        "x-device-cpu-arch": "arm64-v8a",
        "x-device-platform": "android",
        "x-app-version": "7.14.6-e4d1de87f2-3191683",
    })

    video_id = str(title.id)
    url = f"https://api.vidio.com/api/stream/v1/video_data/{video_id}?initialize=true"

    r = self.session.get(url, headers=headers)
    r.raise_for_status()
    stream = r.json()

    if not isinstance(stream, dict):
        raise ValueError("Vidio returned invalid stream data.")

    # Extract DRM info (PallyCon custom data + license server URL).
    custom_data = stream.get("custom_data") or {}
    license_servers = stream.get("license_servers") or {}
    widevine_data = custom_data.get("widevine") if isinstance(custom_data, dict) else None
    license_url = license_servers.get("drm_license_url") if isinstance(license_servers, dict) else None

    # Get stream URLs, check all possible HLS and DASH fields
    # HLS URLs (prefer in this order)
    hls_url = (
        stream.get("stream_hls_url") or
        stream.get("stream_token_hls_url") or
        stream.get("stream_token_url")  # This is also HLS (m3u8)
    )

    # DASH URLs
    dash_url = stream.get("stream_dash_url") or stream.get("stream_token_dash_url")

    # DRM only counts when everything needed for playback + licensing exists.
    has_drm = widevine_data and license_url and dash_url and isinstance(widevine_data, str)

    if has_drm:
        # DRM content: must use DASH
        self.log.info("Widevine DRM detected, using DASH")
        self.custom_data = widevine_data
        self.license_url = license_url
        tracks = DASH.from_url(dash_url, session=self.session).to_tracks(language=title.language)

    elif hls_url:
        # Non-DRM: prefer HLS (H.264, proper frame_rate metadata)
        self.log.info("No DRM detected, using HLS")
        self.custom_data = None
        self.license_url = None
        tracks = HLS.from_url(hls_url, session=self.session).to_tracks(language=title.language)

        # Clear HLS subtitles (they're segmented and incompatible)
        if tracks.subtitles:
            self.log.debug("Clearing HLS subtitles (incompatible format)")
            tracks.subtitles.clear()

        # Get subtitles from DASH manifest (plain VTT) if available
        if dash_url:
            self.log.debug("Extracting subtitles from DASH manifest")
            manual_subs = self._extract_subtitles_from_mpd(dash_url)
            if manual_subs:
                for sub in manual_subs:
                    tracks.add(sub)
                self.log.info(f"Added {len(manual_subs)} subtitle tracks from DASH")

    elif dash_url:
        # Fallback to DASH only if no HLS available
        self.log.warning("No HLS available, using DASH (VP9 codec - may have issues)")
        self.custom_data = None
        self.license_url = None
        tracks = DASH.from_url(dash_url, session=self.session).to_tracks(language=title.language)

        # Try manual subtitle extraction for non-DRM DASH
        if not tracks.subtitles:
            manual_subs = self._extract_subtitles_from_mpd(dash_url)
            if manual_subs:
                for sub in manual_subs:
                    tracks.add(sub)
    else:
        raise ValueError("No playable stream (DASH or HLS) available.")

    self.log.info(f"Found {len(tracks.videos)} video tracks, {len(tracks.audio)} audio tracks, {len(tracks.subtitles)} subtitle tracks")

    return tracks
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
    """Vidio exposes no chapter markers; always return an empty list."""
    return []
|
||||
|
||||
def search(self):
    """Searching is not supported by this service implementation."""
    raise NotImplementedError("Search not implemented for Vidio.")
|
||||
|
||||
def get_widevine_service_certificate(self, **_) -> Union[bytes, str, None]:
    """No service certificate is used for the PallyCon license exchange."""
    return None
|
||||
|
||||
def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
    """POST the Widevine challenge to the PallyCon license server.

    Requires get_tracks() to have stored self.license_url and
    self.custom_data for a DRM stream first.
    """
    if not self.license_url or not self.custom_data:
        raise ValueError("DRM license info missing.")

    license_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
        "Referer": "https://www.vidio.com/",
        "Origin": "https://www.vidio.com",
        # PallyCon expects its custom data blob in this header.
        "pallycon-customdata-v2": self.custom_data,
        "Content-Type": "application/octet-stream",
    }

    self.log.debug(f"Requesting Widevine license from: {self.license_url}")
    response = self.session.post(self.license_url, data=challenge, headers=license_headers)

    if not response.ok:
        error_summary = response.text[:200] if response.text else "No response body"
        raise Exception(f"License request failed ({response.status_code}): {error_summary}")

    return response.content
|
||||
|
||||
5
VIDO/config.yaml
Normal file
5
VIDO/config.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
# Vidio API endpoints; placeholders are filled via str.format at call time.
endpoints:
  content_profile: "https://api.vidio.com/content_profiles/{content_id}"
  playlists: "https://api.vidio.com/content_profiles/{content_id}/playlists"
  playlist_videos: "https://api.vidio.com/content_profiles/{content_id}/playlists/{playlist_id}/videos"
  stream: "https://api.vidio.com/api/stream/v1/video_data/{video_id}?initialize=true"
||||
328
VIKI/__init__.py
Normal file
328
VIKI/__init__.py
Normal file
@ -0,0 +1,328 @@
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional, Generator
|
||||
|
||||
import click
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Movie, Movies, Series, Episode, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle
|
||||
from unshackle.core.drm import Widevine
|
||||
from langcodes import Language
|
||||
|
||||
|
||||
class VIKI(Service):
    """
    Service code for Rakuten Viki (viki.com)
    Version: 1.4.0

    Authorization: Required cookies (_viki_session, device_id).
    Security: FHD @ L3 (Widevine)

    Supports:
    • Movies and TV Series
    """

    # Matches /movies/<id>c-... or /tv/<id>c-... paths, with or without domain.
    TITLE_RE = r"^(?:https?://(?:www\.)?viki\.com)?/(?:movies|tv)/(?P<id>\d+c)-.+$"
    GEOFENCE = ()
    NO_SUBTITLES = False

    @staticmethod
    @click.command(name="VIKI", short_help="https://viki.com")
    @click.argument("title", type=str)
    @click.pass_context
    def cli(ctx, **kwargs):
        return VIKI(ctx, **kwargs)

    def __init__(self, ctx, title: str):
        super().__init__(ctx)

        m = re.match(self.TITLE_RE, title)
        if not m:
            # Input isn't a recognised title URL; treat it as a search term.
            self.search_term = title
            self.title_url = None
            return

        self.container_id = m.group("id")
        self.title_url = title
        self.video_id: Optional[str] = None
        self.api_access_key: Optional[str] = None
        self.drm_license_url: Optional[str] = None

        self.cdm = ctx.obj.cdm
        if self.config is None:
            raise EnvironmentError("Missing service config for VIKI.")

    def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
        """Load the required Viki cookies and prime the session headers.

        Raises:
            PermissionError: if no cookies were supplied, or the required
                '_viki_session' / 'device_id' cookies are missing.
        """
        super().authenticate(cookies, credential)

        if not cookies:
            raise PermissionError("VIKI requires a cookie file for authentication.")

        session_cookie = next((c for c in cookies if c.name == "_viki_session"), None)
        device_cookie = next((c for c in cookies if c.name == "device_id"), None)

        if not session_cookie or not device_cookie:
            raise PermissionError("Your cookie file is missing '_viki_session' or 'device_id'.")

        self.session.headers.update({
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0",
            "X-Viki-App-Ver": "14.64.0",
            "X-Viki-Device-ID": device_cookie.value,
            "Origin": "https://www.viki.com",
            "Referer": "https://www.viki.com/",
        })
        self.log.info("VIKI authentication cookies loaded successfully.")

    def get_titles(self) -> Titles_T:
        """Resolve the container page to a Movies or Series object."""
        if not self.title_url:
            raise ValueError("No URL provided to process.")

        # The per-session API token is embedded in the page source.
        self.log.debug(f"Scraping page for API access key: {self.title_url}")
        r_page = self.session.get(self.title_url)
        r_page.raise_for_status()

        match = re.search(r'"token":"([^"]+)"', r_page.text)
        if not match:
            raise RuntimeError("Failed to extract API access key from page source.")

        self.api_access_key = match.group(1)
        self.log.debug(f"Extracted API access key: {self.api_access_key[:10]}...")

        url = self.config["endpoints"]["container"].format(container_id=self.container_id)
        params = {
            "app": self.config["params"]["app"],
            "token": self.api_access_key,
        }
        r = self.session.get(url, params=params)
        r.raise_for_status()
        data = r.json()

        content_type = data.get("type")
        if content_type == "film":
            return self._parse_movie(data)
        elif content_type == "series":
            return self._parse_series(data)
        else:
            self.log.error(f"Unknown content type '{content_type}' found.")
            return Movies([])

    def _parse_movie(self, data: dict) -> Movies:
        """Map a 'film' container payload to a single Movie."""
        name = data.get("titles", {}).get("en", "Unknown Title")
        # created_at is used as a proxy for the release year.
        year = int(data["created_at"][:4]) if "created_at" in data else None
        description = data.get("descriptions", {}).get("en", "")
        original_lang_code = data.get("origin", {}).get("language", "en")
        self.video_id = data.get("watch_now", {}).get("id")

        if not self.video_id:
            raise ValueError(f"Could not find a playable video ID for container {self.container_id}.")

        return Movies([
            Movie(
                id_=self.container_id,
                service=self.__class__,
                name=name,
                year=year,
                description=description,
                language=Language.get(original_lang_code),
                data=data,
            )
        ])

    def _parse_series(self, data: dict) -> Series:
        """Parse series metadata and fetch episodes."""
        series_name = data.get("titles", {}).get("en", "Unknown Title")
        year = int(data["created_at"][:4]) if "created_at" in data else None
        description = data.get("descriptions", {}).get("en", "")
        original_lang_code = data.get("origin", {}).get("language", "en")

        self.log.info(f"Parsing series: {series_name}")

        # Fetch episode list IDs
        episodes_url = self.config["endpoints"]["episodes"].format(container_id=self.container_id)
        params = {
            "app": self.config["params"]["app"],
            "token": self.api_access_key,
            "direction": "asc",
            "with_upcoming": "true",
            "sort": "number",
            "blocked": "true",
            "only_ids": "true"
        }

        r = self.session.get(episodes_url, params=params)
        r.raise_for_status()
        episodes_data = r.json()

        episode_ids = episodes_data.get("response", [])
        self.log.info(f"Found {len(episode_ids)} episodes")

        episodes = []
        for idx, ep_id in enumerate(episode_ids, 1):
            # Fetch individual episode metadata
            ep_url = self.config["endpoints"]["episode_meta"].format(video_id=ep_id)
            ep_params = {
                "app": self.config["params"]["app"],
                "token": self.api_access_key,
            }

            try:
                r_ep = self.session.get(ep_url, params=ep_params)
                r_ep.raise_for_status()
                ep_data = r_ep.json()

                ep_number = ep_data.get("number", idx)
                ep_title = ep_data.get("titles", {}).get("en", "")
                ep_description = ep_data.get("descriptions", {}).get("en", "")

                # If no episode title, use generic name
                if not ep_title:
                    ep_title = f"Episode {ep_number}"

                # Store the video_id in the data dict so get_tracks() can use it.
                ep_data["video_id"] = ep_id

                self.log.debug(f"Episode {ep_number}: {ep_title} ({ep_id})")

                episodes.append(
                    Episode(
                        id_=ep_id,
                        service=self.__class__,
                        title=series_name,  # Series title
                        season=1,  # VIKI typically doesn't separate seasons clearly
                        number=ep_number,
                        name=ep_title,  # Episode title
                        description=ep_description,
                        language=Language.get(original_lang_code),
                        data=ep_data
                    )
                )
            except Exception as e:
                self.log.warning(f"Failed to fetch episode {ep_id}: {e}")
                # Create a basic episode entry even if metadata fetch fails
                episodes.append(
                    Episode(
                        id_=ep_id,
                        service=self.__class__,
                        title=series_name,
                        season=1,
                        number=idx,
                        name=f"Episode {idx}",
                        description="",
                        language=Language.get(original_lang_code),
                        data={"video_id": ep_id}  # Store video_id in data
                    )
                )

        # Return Series with just the episodes list
        return Series(episodes)

    def get_tracks(self, title: Title_T) -> Tracks:
        """Fetch playback data, DASH manifest tracks, subtitles, and DRM info."""
        # For episodes, get the video_id from the data dict
        if isinstance(title, Episode):
            self.video_id = title.data.get("video_id")
            if not self.video_id:
                # Fallback to episode id if video_id not in data
                self.video_id = title.data.get("id")
        elif not self.video_id:
            raise RuntimeError("video_id not set. Call get_titles() first.")

        if not self.video_id:
            raise ValueError("Could not determine video_id for this title")

        self.log.info(f"Getting tracks for video ID: {self.video_id}")

        url = self.config["endpoints"]["playback"].format(video_id=self.video_id)
        r = self.session.get(url)
        r.raise_for_status()
        data = r.json()

        # Get the DRM-protected manifest from queue
        manifest_url = None
        for item in data.get("queue", []):
            if item.get("type") == "video" and item.get("format") == "mpd":
                manifest_url = item.get("url")
                break

        if not manifest_url:
            raise ValueError("No DRM-protected manifest URL found in queue")

        self.log.debug(f"Found DRM-protected manifest URL: {manifest_url}")

        # The manifest is fetched with the session's own headers (set in
        # authenticate()); a per-request header dict previously built here
        # was never passed anywhere and has been removed as dead code.
        tracks = DASH.from_url(manifest_url, session=self.session).to_tracks(language=title.language)

        # Subtitles come from the playback payload, not the manifest.
        title_language = title.language.language
        subtitles = []
        for sub in data.get("subtitles", []):
            sub_url = sub.get("src")
            lang_code = sub.get("srclang")
            if not sub_url or not lang_code:
                continue

            subtitles.append(
                Subtitle(
                    id_=lang_code,
                    url=sub_url,
                    language=Language.get(lang_code),
                    is_original_lang=lang_code == title_language,
                    codec=Subtitle.Codec.WebVTT,
                    # Label often looks like "English (CC)"; keep the base name.
                    name=sub.get("label", lang_code.upper()).split(" (")[0]
                )
            )
        tracks.subtitles = subtitles

        # Store DRM license URL (only dt3) at service level
        drm_b64 = data.get("drm")
        if drm_b64:
            drm_data = json.loads(base64.b64decode(drm_b64))
            self.drm_license_url = drm_data.get("dt3")  # Use dt3 as requested
        else:
            self.log.warning("No DRM info found, assuming unencrypted stream.")

        return tracks

    def get_widevine_license(self, *, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
        """POST the Widevine challenge to the license server found in get_tracks()."""
        if not hasattr(self, 'drm_license_url') or not self.drm_license_url:
            raise ValueError("DRM license URL not available.")

        r = self.session.post(
            self.drm_license_url,
            data=challenge,
            headers={"Content-type": "application/octet-stream"}
        )
        r.raise_for_status()
        return r.content

    def search(self) -> Generator[SearchResult, None, None]:
        """Search is not implemented; this is an intentionally empty generator."""
        self.log.warning("Search not yet implemented for VIKI.")
        return
        yield

    def get_chapters(self, title: Title_T) -> list[Chapter]:
        """VIKI exposes no chapter markers; always return an empty list."""
        return []
|
||||
8
VIKI/config.yaml
Normal file
8
VIKI/config.yaml
Normal file
@ -0,0 +1,8 @@
|
||||
params:
  app: "100000a"  # Viki web app identifier sent with every API call
# API endpoints; placeholders are filled via str.format at call time.
endpoints:
  container: "https://api.viki.io/v4/containers/{container_id}.json"
  episodes: "https://api.viki.io/v4/series/{container_id}/episodes.json" # New
  episode_meta: "https://api.viki.io/v4/videos/{video_id}.json" # New
  playback: "https://www.viki.com/api/videos/{video_id}"
  search: "https://api.viki.io/v4/search/all.json"
||||
264
VRT/__init__.py
Normal file
264
VRT/__init__.py
Normal file
@ -0,0 +1,264 @@
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import base64
|
||||
import warnings # Added
|
||||
from http.cookiejar import CookieJar
|
||||
from typing import Optional, List
|
||||
from langcodes import Language
|
||||
|
||||
import click
|
||||
import jwt
|
||||
from bs4 import XMLParsedAsHTMLWarning # Added
|
||||
from collections.abc import Generator
|
||||
from unshackle.core.search_result import SearchResult
|
||||
from unshackle.core.constants import AnyTrack
|
||||
from unshackle.core.credential import Credential
|
||||
from unshackle.core.manifests import DASH
|
||||
from unshackle.core.service import Service
|
||||
from unshackle.core.titles import Episode, Movie, Movies, Series, Title_T, Titles_T
|
||||
from unshackle.core.tracks import Chapter, Tracks, Subtitle
|
||||
|
||||
# Ignore the BeautifulSoup XML warning caused by STPP subtitles
warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning)

# GraphQL Fragments and Queries

# tileFragment extracts each tile's title and its target link.
FRAGMENTS = """
fragment tileFragment on Tile {
  ... on ITile {
    title
    action { ... on LinkAction { link } }
  }
}
"""

# Program page: collects every tile list id, including lists nested in
# the season navigation, so episodes can be paginated afterwards.
QUERY_PROGRAM = """
query VideoProgramPage($pageId: ID!) {
  page(id: $pageId) {
    ... on ProgramPage {
      title
      components {
        __typename
        ... on PaginatedTileList { listId title }
        ... on StaticTileList { listId title }
        ... on ContainerNavigation {
          items {
            title
            components {
              __typename
              ... on PaginatedTileList { listId }
              ... on StaticTileList { listId }
            }
          }
        }
      }
    }
  }
}
"""

# One page (50 tiles) of a paginated list, or all items of a static list.
QUERY_PAGINATED_LIST = FRAGMENTS + """
query PaginatedTileListPage($listId: ID!, $after: ID) {
  list(listId: $listId) {
    ... on PaginatedTileList {
      paginatedItems(first: 50, after: $after) {
        edges { node { ...tileFragment } }
        pageInfo { endCursor hasNextPage }
      }
    }
    ... on StaticTileList {
      items { ...tileFragment }
    }
  }
}
"""

# Playback page: resolves a page id to its stream id.
QUERY_PLAYBACK = """
query EpisodePage($pageId: ID!) {
  page(id: $pageId) {
    ... on PlaybackPage {
      title
      player { modes { streamId } }
    }
  }
}
"""
|
||||
|
||||
class VRT(Service):
|
||||
"""
|
||||
Service code for VRT MAX (vrt.be)
|
||||
Version: 2.1.1
|
||||
Auth: Gigya + OIDC flow
|
||||
Security: FHD @ L3 (Widevine)
|
||||
Supports:
|
||||
- Movies: https://www.vrt.be/vrtmax/a-z/rikkie-de-ooievaar-2/
|
||||
Series: https://www.vrt.be/vrtmax/a-z/schaar-steen-papier/
|
||||
"""
|
||||
|
||||
TITLE_RE = r"^(?:https?://(?:www\.)?vrt\.be/vrtmax/a-z/)?(?P<slug>[^/]+)(?:/(?P<season_num>\d+)/(?P<episode_slug>[^/]+))?/?$"
|
||||
|
||||
@staticmethod
|
||||
@click.command(name="VRT", short_help="https://www.vrt.be/vrtmax/")
|
||||
@click.argument("title", type=str)
|
||||
@click.pass_context
|
||||
def cli(ctx, **kwargs):
|
||||
return VRT(ctx, **kwargs)
|
||||
|
||||
def __init__(self, ctx, title: str):
|
||||
super().__init__(ctx)
|
||||
self.cdm = ctx.obj.cdm
|
||||
|
||||
m = re.match(self.TITLE_RE, title)
|
||||
if m:
|
||||
self.slug = m.group("slug")
|
||||
self.is_series_root = m.group("episode_slug") is None
|
||||
if "vrtmax/a-z" in title:
|
||||
self.page_id = "/" + title.split("vrt.be/")[1].split("?")[0]
|
||||
else:
|
||||
self.page_id = f"/vrtmax/a-z/{self.slug}/"
|
||||
else:
|
||||
self.search_term = title
|
||||
|
||||
self.access_token = None
|
||||
self.video_token = None
|
||||
|
||||
def authenticate(self, cookies: Optional[CookieJar] = None, credential: Optional[Credential] = None) -> None:
|
||||
cache = self.cache.get("auth_data")
|
||||
if cache and not cache.expired:
|
||||
self.log.info("Using cached VRT session.")
|
||||
self.access_token = cache.data["access_token"]
|
||||
self.video_token = cache.data["video_token"]
|
||||
return
|
||||
|
||||
if not credential or not credential.username or not credential.password: return
|
||||
|
||||
self.log.info(f"Logging in to VRT as {credential.username}...")
|
||||
login_params = {
|
||||
"apiKey": self.config["settings"]["api_key"],
|
||||
"loginID": credential.username,
|
||||
"password": credential.password,
|
||||
"format": "json",
|
||||
"sdk": "Android_6.1.0"
|
||||
}
|
||||
r = self.session.post(self.config["endpoints"]["gigya_login"], data=login_params)
|
||||
gigya_data = r.json()
|
||||
if gigya_data.get("errorCode") != 0: raise PermissionError("Gigya login failed")
|
||||
|
||||
sso_params = {"UID": gigya_data["UID"], "UIDSignature": gigya_data["UIDSignature"], "signatureTimestamp": gigya_data["signatureTimestamp"]}
|
||||
r = self.session.get(self.config["endpoints"]["vrt_sso"], params=sso_params)
|
||||
|
||||
match = re.search(r'var response = "(.*?)";', r.text)
|
||||
token_data = json.loads(match.group(1).replace('\\"', '"'))
|
||||
self.access_token = token_data["tokens"]["access_token"]
|
||||
self.video_token = token_data["tokens"]["video_token"]
|
||||
|
||||
decoded = jwt.decode(self.access_token, options={"verify_signature": False})
|
||||
cache.set(data={"access_token": self.access_token, "video_token": self.video_token}, expiration=int(decoded["exp"] - time.time()) - 300)
|
||||
|
||||
def _get_gql_headers(self):
|
||||
return {
|
||||
"x-vrt-client-name": self.config["settings"]["client_name"],
|
||||
"x-vrt-client-version": self.config["settings"]["client_version"],
|
||||
"x-vrt-zone": "default",
|
||||
"authorization": f"Bearer {self.access_token}" if self.access_token else None,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
|
||||
def get_titles(self) -> Titles_T:
|
||||
if not self.is_series_root:
|
||||
r = self.session.post(self.config["endpoints"]["graphql"], json={"query": QUERY_PLAYBACK, "variables": {"pageId": self.page_id}}, headers=self._get_gql_headers())
|
||||
data = r.json()["data"]["page"]
|
||||
return Movies([Movie(id_=data["player"]["modes"][0]["streamId"], service=self.__class__, name=data["title"], language=Language.get("nl"), data={"page_id": self.page_id})])
|
||||
|
||||
r = self.session.post(self.config["endpoints"]["graphql"], json={"query": QUERY_PROGRAM, "variables": {"pageId": self.page_id}}, headers=self._get_gql_headers())
|
||||
program_data = r.json().get("data", {}).get("page")
|
||||
if not program_data:
|
||||
raise ValueError(f"Series page not found: {self.page_id}")
|
||||
|
||||
series_name = program_data["title"]
|
||||
episodes = []
|
||||
list_ids = []
|
||||
|
||||
for comp in program_data.get("components", []):
|
||||
typename = comp.get("__typename")
|
||||
if typename in ("PaginatedTileList", "StaticTileList") and "listId" in comp:
|
||||
list_ids.append((comp.get("title") or "Episodes", comp["listId"]))
|
||||
elif typename == "ContainerNavigation":
|
||||
for item in comp.get("items", []):
|
||||
item_title = item.get("title", "Episodes")
|
||||
for sub in item.get("components", []):
|
||||
if "listId" in sub:
|
||||
list_ids.append((item_title, sub["listId"]))
|
||||
|
||||
seen_lists = set()
|
||||
unique_list_ids = []
|
||||
for title, lid in list_ids:
|
||||
if lid not in seen_lists:
|
||||
unique_list_ids.append((title, lid))
|
||||
seen_lists.add(lid)
|
||||
|
||||
for season_title, list_id in unique_list_ids:
|
||||
after = None
|
||||
while True:
|
||||
r_list = self.session.post(self.config["endpoints"]["graphql"], json={"query": QUERY_PAGINATED_LIST, "variables": {"listId": list_id, "after": after}}, headers=self._get_gql_headers())
|
||||
list_resp = r_list.json().get("data", {}).get("list")
|
||||
if not list_resp: break
|
||||
|
||||
items_container = list_resp.get("paginatedItems")
|
||||
nodes = [e["node"] for e in items_container["edges"]] if items_container else list_resp.get("items", [])
|
||||
|
||||
for node in nodes:
|
||||
if not node.get("action"): continue
|
||||
link = node["action"]["link"]
|
||||
s_match = re.search(r'/(\d+)/.+s(\d+)a(\d+)', link)
|
||||
episodes.append(Episode(
|
||||
id_=link,
|
||||
service=self.__class__,
|
||||
title=series_name,
|
||||
season=int(s_match.group(2)) if s_match else 1,
|
||||
number=int(s_match.group(3)) if s_match else 0,
|
||||
name=node["title"],
|
||||
language=Language.get("nl"),
|
||||
data={"page_id": link}
|
||||
))
|
||||
|
||||
if items_container and items_container["pageInfo"]["hasNextPage"]:
|
||||
after = items_container["pageInfo"]["endCursor"]
|
||||
else:
|
||||
break
|
||||
|
||||
if not episodes:
|
||||
raise ValueError("No episodes found for this series.")
|
||||
|
||||
return Series(episodes)
|
||||
|
||||
def get_tracks(self, title: Title_T) -> Tracks:
    """
    Resolve the playable tracks for an episode.

    Flow:
      1. GraphQL playback query -> streamId for the title's page.
      2. Exchange the identity token plus base64url player info for a
         vrtPlayerToken.
      3. Query the media aggregator for target URLs, subtitles and DRM data.
      4. Parse the MPEG-DASH manifest into Tracks and attach subtitles.

    Side effects: stores the vudrm token on ``self.drm_token`` for the
    subsequent ``get_widevine_license()`` call.

    Raises:
        StopIteration: if the aggregator offers no ``mpeg_dash`` target
            (acceptable — only DASH playback is supported here).
    """
    page_id = title.data["page_id"]
    r_meta = self.session.post(
        self.config["endpoints"]["graphql"],
        json={"query": QUERY_PLAYBACK, "variables": {"pageId": page_id}},
        headers=self._get_gql_headers(),
    )
    stream_id = r_meta.json()["data"]["page"]["player"]["modes"][0]["streamId"]

    # Build an unsigned JWT (static HS256 header, empty signature) carrying
    # the player-info payload; the token endpoint only inspects the payload.
    # Padding '=' is stripped as required for base64url in JWTs.
    p_info = base64.urlsafe_b64encode(json.dumps(self.config["player_info"]).encode()).decode().replace("=", "")
    r_tok = self.session.post(
        self.config["endpoints"]["player_token"],
        json={"identityToken": self.video_token, "playerInfo": f"eyJhbGciOiJIUzI1NiJ9.{p_info}."},
    )
    vrt_player_token = r_tok.json()["vrtPlayerToken"]

    r_agg = self.session.get(
        self.config["endpoints"]["aggregator"].format(stream_id=stream_id),
        params={"client": self.config["settings"]["client_id"], "vrtPlayerToken": vrt_player_token},
    )
    agg_data = r_agg.json()

    dash_url = next(u["url"] for u in agg_data["targetUrls"] if u["type"] == "mpeg_dash")
    tracks = DASH.from_url(dash_url, session=self.session).to_tracks(language=title.language)
    self.drm_token = agg_data["drm"]

    for sub in agg_data.get("subtitleUrls", []):
        tracks.add(Subtitle(
            id_=sub.get("label", "nl"),
            url=sub["url"],
            codec=Subtitle.Codec.WebVTT,
            language=Language.get(sub.get("language", "nl")),
        ))

    for tr in tracks.videos + tracks.audio:
        if tr.drm:
            # Bind tr (and title) as default arguments: a bare closure would
            # late-bind the loop variable, so every license callback would
            # reference the *last* track in the loop instead of its own.
            tr.drm.license = lambda challenge, title=title, tr=tr, **kw: self.get_widevine_license(challenge, title, tr)

    return tracks
|
||||
|
||||
def get_widevine_license(self, challenge: bytes, title: Title_T, track: AnyTrack) -> bytes:
    """
    Forward a Widevine challenge to the vudrm license proxy.

    Requires ``self.drm_token``, populated by ``get_tracks()`` from the
    aggregator response. ``title`` and ``track`` are part of the callback
    signature but are not needed by this proxy.

    Raises:
        requests.HTTPError: if the proxy responds with an error status.
            Previously any error body was returned verbatim and handed to
            the CDM as license data, producing an opaque parse failure
            downstream.
    """
    r = self.session.post(
        self.config["endpoints"]["license"],
        data=challenge,
        headers={
            "x-vudrm-token": self.drm_token,
            "Origin": "https://www.vrt.be",
            "Referer": "https://www.vrt.be/",
        },
    )
    r.raise_for_status()
    return r.content
|
||||
|
||||
def get_chapters(self, title: Title_T) -> list[Chapter]:
    """VRT MAX exposes no chapter markers, so there is nothing to return."""
    return list()
|
||||
18
VRT/config.yaml
Normal file
18
VRT/config.yaml
Normal file
@ -0,0 +1,18 @@
|
||||
endpoints:
|
||||
gigya_login: "https://accounts.eu1.gigya.com/accounts.login"
|
||||
vrt_sso: "https://www.vrt.be/vrtmax/sso/login"
|
||||
graphql: "https://www.vrt.be/vrtnu-api/graphql/v1"
|
||||
player_token: "https://media-services-public.vrt.be/vualto-video-aggregator-web/rest/external/v2/tokens"
|
||||
aggregator: "https://media-services-public.vrt.be/media-aggregator/v2/media-items/{stream_id}"
|
||||
license: "https://widevine-proxy.drm.technology/proxy"
|
||||
|
||||
settings:
|
||||
api_key: "3_qhEcPa5JGFROVwu5SWKqJ4mVOIkwlFNMSKwzPDAh8QZOtHqu6L4nD5Q7lk0eXOOG"
|
||||
client_name: "WEB"
|
||||
client_id: "vrtnu-web@PROD"
|
||||
client_version: "1.5.15"
|
||||
|
||||
player_info:
|
||||
drm: { widevine: "L3" }
|
||||
platform: "desktop"
|
||||
app: { type: "browser", name: "Firefox", version: "146.0" }
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user